diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml
index a68ddd21..ee899b61 100644
--- a/.github/workflows/pytest.yml
+++ b/.github/workflows/pytest.yml
@@ -35,9 +35,28 @@ jobs:
python -m pip install .[test]
- name: Run tests
+ id: tests
run: make test
+ continue-on-error: true
- - name: Analysing the code with pylint
- if: ${{ matrix.python-version }} == '3.10'
+ - name: Pylint analysis
+ id: pylint_analysis
run: |
- pylint --fail-under=9 $(git ls-files '*.py')
\ No newline at end of file
+ python -m pylint --fail-under=10 $(git ls-files '*.py')
+ continue-on-error: true
+
+ - name: mypy analysis
+ id: mypy_analysis
+ run: |
+ python -m mypy *.py finquant
+ continue-on-error: true
+
+ - name: Check for Failures
+ run: |
+ if [[ "${{ steps.tests.outcome }}" != "success" || "${{ steps.pylint_analysis.outcome }}" != "success" || "${{ steps.mypy_analysis.outcome }}" != "success" ]]; then
+ echo "Pipeline failed due to errors in the following steps:"
+ echo "Tests: ${{ steps.tests.outcome }}"
+ echo "Pylint: ${{ steps.pylint_analysis.outcome }}"
+ echo "mypy: ${{ steps.mypy_analysis.outcome }}"
+ exit 1
+ fi
\ No newline at end of file
diff --git a/.pylintrc b/.pylintrc
index 43014f10..38083b74 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -1,6 +1,7 @@
[MASTER]
# Ignore certain files or directories during analysis
ignore-paths=tests/,docs/,example/
+extension-pkg-whitelist=pydantic
[REPORTS]
# Set the output format for `pylint` messages (text, colorized, json, etc.)
@@ -14,11 +15,17 @@ max-line-length=120
disable=
C0114, # Missing module docstring
C0116, # Missing function docstring
+ R0902, # Too many instance attributes
R0903, # Too few public methods
R0913, # Too many arguments
R0914, # Too many local variables
+ R1705, # Unnecessary "else" after "return"
+ W1514, # Unspecified encoding,
# Include additional pylint messages or message categories
#enable=
# C0114, # Missing module, function, class docstring
# R0903, # Too few public methods
+
+[FORMAT]
+good-names = pf, df, ef, mc, mu
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6d8d3f84..27c61cfc 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -35,6 +35,24 @@ your branch name should be something like bugfix/print-statement-portfolio-prope
For the automated versioning to work, the branch name is required to start with `bugfix/` or one of the other
above mentioned patterns.
+### Custom data types
+[FinQuant defines a number of custom data types](https://finquant.readthedocs.io/en/latest/developers.html#data-types)
+in the module `finquant.data_types`.
+
+These data types are useful because many functions and methods in FinQuant accept arguments of more than one data type.
+For example:
+- `data` is often accepted as either a `pandas.Series` or `pandas.DataFrame`, or
+- `risk_free_rate` could be a Python `float` or a `numpy.float64` among others.
+
+To accommodate and simplify this, the custom data types are used as type hints throughout the code base.
+Please familiarize yourself with them and add more if your code requires them.
+
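+As a rough illustration, a hypothetical helper using these aliases as type hints could look like the following
+(the function itself is made up and not part of FinQuant; only the type aliases are real):
+
+```python
+import numpy as np
+import pandas as pd
+
+from finquant.data_types import FLOAT, INT
+
+
+def sharpe_like_ratio(daily_returns: pd.Series, risk_free_rate: FLOAT = 0.005, freq: INT = 252) -> FLOAT:
+    # risk_free_rate accepts a Python float or a numpy.float64,
+    # freq accepts a Python int or a numpy integer
+    excess_return = daily_returns.mean() * freq - risk_free_rate
+    return excess_return / (daily_returns.std() * np.sqrt(freq))
+```
+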
+### Data type validation
+[FinQuant provides a module/function for type validation](https://finquant.readthedocs.io/en/latest/developers.html#type-validation),
+which is used throughout the code base. This function simplifies checking an argument
+against its expected type and reduces the amount of copy-pasted `if` and `raise` statements.
+You can check out the source code in `finquant.type_utilities`.
+
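+As a rough sketch of the pattern (the function below is hypothetical; the keyword-based call mirrors how
+`type_validation` is invoked elsewhere in the code base), a single call replaces a block of hand-written
+`isinstance` checks and `raise` statements:
+
+```python
+import pandas as pd
+
+from finquant.type_utilities import type_validation
+
+
+def compute_something(data: pd.DataFrame, freq: int = 252) -> pd.DataFrame:
+    # validate the given arguments against their expected types
+    # (an error is raised on mismatch)
+    type_validation(data=data, freq=freq)
+    return data * freq
+```
+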
### Commit your changes
Make your changes to the code, and write sensible commit messages.
diff --git a/README.md b/README.md
index 8dc05d47..9ae1244d 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@
-
+
diff --git a/README.tex.md b/README.tex.md
index 972e92e8..cdf10d64 100644
--- a/README.tex.md
+++ b/README.tex.md
@@ -7,7 +7,7 @@
-
+
diff --git a/docs/assets.rst b/docs/assets.rst
new file mode 100644
index 00000000..e7542cad
--- /dev/null
+++ b/docs/assets.rst
@@ -0,0 +1,36 @@
+.. _assets:
+
+#################
+Individual Assets
+#################
+FinQuant provides classes for individual assets, such as stocks or funds. These are explained below.
+
+Asset
+=====
+.. automodule:: finquant.asset
+.. autoclass:: finquant.asset.Asset
+ :members:
+
+ .. automethod:: __init__
+
+
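+A minimal usage sketch of this shared interface, with made-up price data, could look like:
+
+.. code-block:: python
+
+    import pandas as pd
+
+    from finquant.asset import Asset
+
+    prices = pd.Series([10.0, 10.2, 10.1, 10.5], name="XYZ")  # hypothetical prices
+    asset = Asset(data=prices, name="XYZ")  # asset_type defaults to "Market index"
+    asset.properties()  # prints the asset's quantities, e.g. Expected Return and Volatility
+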
+Stock
+=====
+Inherits from ``Asset``.
+
+.. automodule:: finquant.stock
+.. autoclass:: finquant.stock.Stock
+ :members:
+
+ .. automethod:: __init__
+
+
+Market
+======
+Inherits from ``Asset``.
+
+.. automodule:: finquant.market
+.. autoclass:: finquant.market.Market
+ :members:
+
+ .. automethod:: __init__
diff --git a/docs/conf.py b/docs/conf.py
index 7b4d02f3..d3c4b44d 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -44,7 +44,14 @@
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = ["sphinx.ext.autodoc", "sphinx.ext.githubpages"]
+extensions = [
+ "sphinx.ext.autodoc",
+ "sphinx.ext.githubpages",
+ "sphinx_autodoc_typehints",
+]
+
+# Make sure the 'members' flag is included
+autodoc_default_flags = ["members"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
@@ -63,7 +70,7 @@
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
-language = None
+language = "en"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
diff --git a/docs/developers.rst b/docs/developers.rst
new file mode 100644
index 00000000..f472a3e6
--- /dev/null
+++ b/docs/developers.rst
@@ -0,0 +1,78 @@
+.. _developers:
+
+####################
+Notes for Developers
+####################
+
+.. note:: Contributions are welcome. If you want to add new functionality please
+
+    1. read through ``CONTRIBUTING.md`` in the root directory of the repository, and
+    2. familiarize yourself with the custom data types defined in FinQuant and with how type validation is achieved. You will find the relevant information below.
+
+**********
+Data Types
+**********
+
+Various custom data types are defined in ``finquant.data_types`` and used in FinQuant as type hints.
+
+Description
+###########
+
+.. automodule:: finquant.data_types
+
+
+
+Code Definitions
+################
+
+Array/List-Like Types
+---------------------
+
+.. autodata:: finquant.data_types.ARRAY_OR_LIST
+ :annotation:
+
+.. autodata:: finquant.data_types.ARRAY_OR_DATAFRAME
+ :annotation:
+
+.. autodata:: finquant.data_types.ARRAY_OR_SERIES
+ :annotation:
+
+.. autodata:: finquant.data_types.SERIES_OR_DATAFRAME
+ :annotation:
+
+List of Dict keys
+-----------------
+
+.. autodata:: finquant.data_types.LIST_DICT_KEYS
+ :annotation:
+
+Numeric Types
+-------------
+
+.. autodata:: finquant.data_types.FLOAT
+ :annotation:
+
+.. autodata:: finquant.data_types.INT
+ :annotation:
+
+.. autodata:: finquant.data_types.NUMERIC
+ :annotation:
+
+
+***************
+Type validation
+***************
+
+This module provides a function ``type_validation`` that allows one to effortlessly implement type validation.
+
+Description
+###########
+
+.. automodule:: finquant.type_utilities
+
+
+Code Definitions
+################
+
+.. autodata:: finquant.type_utilities.type_validation
+ :annotation:
diff --git a/docs/index.rst b/docs/index.rst
index 320a6198..86dfd2cc 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -14,9 +14,9 @@
.. _PyPI: https://pypi.org/project/FinQuant/
-#####################################
+###################################
Welcome to FinQuant's documentation
-#####################################
+###################################
*FinQuant* is a program for financial portfolio management, analysis and optimisation. It is designed to generate an object that holds your data, e.g. stock prices of different stocks, which automatically computes the most common quantities, such as *Expected annual Return*, *Volatility* and *Sharpe Ratio*. Moreover, it provides a library for computing different kinds of *Returns* and visualising *Moving Averages* and *Bollinger Bands*. Finally, given a set of stocks, it also allows for finding optimised portfolios.
@@ -74,13 +74,15 @@ Table of Contents
:maxdepth: 2
quickstart
+ examples
portfolio
+ assets
quants
returns
movingaverage
efficientfrontier
montecarlo
- examples
+ developers
license
about
diff --git a/docs/portfolio.rst b/docs/portfolio.rst
index 287ec4ff..2a942673 100644
--- a/docs/portfolio.rst
+++ b/docs/portfolio.rst
@@ -4,21 +4,15 @@
Portfolio Management
####################
-As mentioned above, *FinQuant* is a program for financial portfolio management, among others. The module ``finquant.portfolio`` does exactly that.
+As mentioned above, *FinQuant* is a program for financial portfolio management, among others.
+The module ``finquant.portfolio`` does exactly that.
-.. note:: The impatient reader who simply wants to jump in and start using *FinQuant* is advised to jump to `build_portfolio`_ and have a look at and play around with the :ref:`examples`.
+.. note:: The impatient reader who simply wants to jump in and start using *FinQuant* is advised
+ to jump to `build_portfolio`_ and have a look at and play around with the :ref:`examples`.
.. automodule:: finquant.portfolio
-Stock
-=====
-.. autoclass:: finquant.stock.Stock
- :members:
-
- .. automethod:: __init__
-
-
Portfolio
=========
.. autoclass:: finquant.portfolio.Portfolio
diff --git a/docs/quickstart.rst b/docs/quickstart.rst
index 48c58eba..32e11d29 100644
--- a/docs/quickstart.rst
+++ b/docs/quickstart.rst
@@ -15,7 +15,7 @@ Building a Portfolio
Getting an object of ``Portfolio`` that holds stock prices of four different stocks, as well as its properties and interfaces to optimisation methods is as simple as:
-.. code:: python
+.. code-block:: python
from finquant.portfolio import build_portfolio
names = ['GOOG', 'AMZN', 'MCD', 'DIS']
@@ -25,7 +25,7 @@ The above uses *Quandl* in the background to download the requested data. For mo
If preferred, *FinQuant* also allows to fetch stock price data from |yahoofinance|_. The code snippet below is the equivalent to the above, but using yfinance_ instead (default value for ``data_api`` is ``"quandl"``):
-.. code:: python
+.. code-block:: python
from finquant.portfolio import build_portfolio
names = ['GOOG', 'AMZN', 'MCD', 'DIS']
@@ -33,7 +33,7 @@ If preferred, *FinQuant* also allows to fetch stock price data from |yahoofinanc
Alternatively, if you already are in possession of stock prices you want to analyse/optimise, you can do the following.
-.. code:: python
+.. code-block:: python
import pathlib
from finquant.portfolio import build_portfolio
@@ -50,13 +50,13 @@ Properties of the Portfolio
The portfolio's properties are automatically computed as it is being built. One can have a look at them with
-.. code:: python
+.. code-block:: python
pf.properties()
which shows
-.. code::
+.. code-block:: python
----------------------------------------------------------------------
Stocks: GOOG, AMZN, MCD, DIS
@@ -89,7 +89,7 @@ Moving Averages
.. note:: When computing/visualising a *band* of Moving Averages, ``compute_ma`` automatically finds the buy/sell signals based on the minimum/maximum *Moving Average* that were computed and highlights those with arrow up/down markers.
-.. code:: python
+.. code-block:: python
from finquant.moving_average import compute_ma, ema
# get stock data for Disney
@@ -120,7 +120,7 @@ Portfolio Optimisation
======================
*FinQuant* allows the optimisation of financial portfolios along the *Efficient Frontier* by minimising a cost/objective function. *FinQuant* uses the Python package ``scipy`` for the minimisation. Alternatively, a *Monte Carlo* approach is implemented as well. The below demonstrates how *FinQuant* performs such an optimisation and visualisation of the results.
-.. code::
+.. code-block:: python
# Monte Carlo optimisation
opt_w, opt_res = pf.mc_optimisation(num_trials=5000)
diff --git a/finquant/asset.py b/finquant/asset.py
index ea51c50d..5fddcb5d 100644
--- a/finquant/asset.py
+++ b/finquant/asset.py
@@ -13,20 +13,21 @@ class provides common functionality and attributes that are shared among differe
"""
-
import numpy as np
import pandas as pd
+from finquant.data_types import FLOAT, INT
from finquant.returns import daily_returns, historical_mean_return
+from finquant.type_utilities import type_validation
class Asset:
"""
Parent class representing a generic financial asset.
- :param ``data``: Historical price data of the asset as a ``pandas.Series``.
- :param ``name``: Name of the asset.
- :param ``asset_type``: Type of the asset (e.g., "Stock" or "Market index").
+ :param data: Historical price data of the asset.
+ :param name: Name of the asset.
+ :param asset_type: Type of the asset (e.g., "Stock" or "Market index").
The ``Asset`` class provides common functionality and attributes for financial assets.
It represents a generic asset and serves as the parent class for specialized asset classes.
@@ -45,16 +46,24 @@ class Asset:
"""
+ # Attributes:
+ data: pd.Series
+ name: str
+ asset_type: str
+ expected_return: pd.Series
+ volatility: FLOAT
+ skew: FLOAT
+ kurtosis: FLOAT
+
def __init__(
self, data: pd.Series, name: str, asset_type: str = "Market index"
) -> None:
"""
- :Input:
- :data: ``pandas.Series``, of asset prices
- :name: ``str``, Name of the asset
- :asset_type: ``str`` (default: ``'Market index'``), Type of the asset (e.g., "Stock" or "Market index")
+ :param data: Historical price data of the asset.
+ :param name: Name of the asset
+ :param asset_type: Type of the asset (e.g., "Stock" or "Market index"), default: "Market index"
"""
- self.data = data
+ self.data = data.astype(np.float64)
self.name = name
# compute expected return and volatility of asset
self.expected_return = self.comp_expected_return()
@@ -71,40 +80,39 @@ def comp_daily_returns(self) -> pd.Series:
"""
return daily_returns(self.data)
- def comp_expected_return(self, freq=252) -> float:
+ def comp_expected_return(self, freq: INT = 252) -> pd.Series:
"""Computes the Expected Return of the asset.
See ``finquant.returns.historical_mean_return``.
- :Input:
- :freq: ``int`` (default: ``252``), number of trading days, default
- value corresponds to trading days in a year
-
- :Output:
- :expected_return: ``float``, Expected Return of asset.
+ :param freq: Number of trading days in a year, default: 252
+ :type freq: :py:data:`~.finquant.data_types.INT`
"""
return historical_mean_return(self.data, freq=freq)
- def comp_volatility(self, freq=252) -> float:
+ def comp_volatility(self, freq: INT = 252) -> FLOAT:
"""Computes the Volatility of the asset.
- :Input:
- :freq: ``int`` (default: ``252``), number of trading days, default
- value corresponds to trading days in a year
+ :param freq: Number of trading days in a year, default: 252
+ :type freq: :py:data:`~.finquant.data_types.INT`
- :Output:
- :volatility: ``float``, Volatility of asset.
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
"""
- return self.comp_daily_returns().std() * np.sqrt(freq)
+ # Type validations:
+ type_validation(freq=freq)
+ volatility: FLOAT = self.comp_daily_returns().std() * np.sqrt(freq)
+ return volatility
- def _comp_skew(self) -> float:
+ def _comp_skew(self) -> FLOAT:
"""Computes and returns the skewness of the asset."""
- return self.data.skew()
+ skew: FLOAT = self.data.skew()
+ return skew
- def _comp_kurtosis(self) -> float:
+ def _comp_kurtosis(self) -> FLOAT:
"""Computes and returns the kurtosis of the asset."""
- return self.data.kurt()
+ kurtosis: FLOAT = self.data.kurt()
+ return kurtosis
- def properties(self):
+ def properties(self) -> None:
"""Nicely prints out the properties of the asset,
with customized messages based on the asset type.
"""
@@ -118,7 +126,7 @@ def properties(self):
string += "\n" + "-" * 50
print(string)
- def __str__(self):
+ def __str__(self) -> str:
# print short description
string = f"Contains information about {self.asset_type}: {self.name}."
return string
diff --git a/finquant/data_types.py b/finquant/data_types.py
new file mode 100644
index 00000000..b4b1bbb4
--- /dev/null
+++ b/finquant/data_types.py
@@ -0,0 +1,74 @@
+"""
+``finquant.data_types`` Module
+
+This module defines type aliases and utility functions for working with arrays, data frames,
+and various numeric types in Python, utilizing the 'numpy', 'numpy.typing', and 'pandas' libraries.
+
+Generic List Element Type
+-------------------------
+- ``ELEMENT_TYPE``: A type alias representing a generic element type
+
+Array/List-Like Types
+---------------------
+- ``ARRAY_OR_LIST``: A type alias representing either a NumPy ``ndarray`` or a Python ``List``.
+- ``ARRAY_OR_DATAFRAME``: A type alias representing either a NumPy ``ndarray`` or a pandas ``DataFrame``.
+- ``ARRAY_OR_SERIES``: A type alias representing either a NumPy ``ndarray`` or a pandas ``Series``.
+- ``SERIES_OR_DATAFRAME``: A type alias representing either a pandas ``Series`` or a pandas ``DataFrame``.
+
+Numeric Types
+-------------
+- ``FLOAT``: A type alias representing either a NumPy floating-point number or a Python float.
+- ``INT``: A type alias representing either a NumPy integer or a Python int.
+- ``NUMERIC``: A type alias representing either an ``INT`` or a ``FLOAT``.
+
+String/Datetime Types
+---------------------
+- ``STRING_OR_DATETIME``: A type alias representing either a Python string or a ``datetime.datetime`` object.
+
+Dependencies
+------------
+This module requires the following external libraries:
+
+- ``numpy`` (imported as ``np``)
+- ``pandas`` (imported as ``pd``)
+
+Usage Example
+-------------
+
+.. code-block:: python
+
+    from finquant.data_types import ARRAY_OR_DATAFRAME, FLOAT
+ # Use the defined type aliases
+ def process_data(data: ARRAY_OR_DATAFRAME) -> FLOAT:
+ # Process the data and return a floating point number
+ return 5.0
+
+"""
+# pylint: disable=C0103
+
+
+from datetime import datetime
+from typing import Any, KeysView, List, TypeVar, Union
+
+import numpy as np
+import pandas as pd
+
+# Generic List Element Type
+ELEMENT_TYPE = TypeVar("ELEMENT_TYPE")
+
+# Type Aliases:
+ARRAY_OR_LIST = Union[np.ndarray[ELEMENT_TYPE, Any], List[ELEMENT_TYPE]]
+ARRAY_OR_DATAFRAME = Union[np.ndarray[ELEMENT_TYPE, Any], pd.DataFrame]
+ARRAY_OR_SERIES = Union[np.ndarray[ELEMENT_TYPE, Any], pd.Series]
+SERIES_OR_DATAFRAME = Union[pd.Series, pd.DataFrame]
+
+# To support Dict listkeys:
+LIST_DICT_KEYS = Union[ARRAY_OR_LIST[ELEMENT_TYPE], KeysView[ELEMENT_TYPE]]
+
+# Numeric types
+FLOAT = Union[np.floating, float]
+INT = Union[np.integer, int]
+NUMERIC = Union[INT, FLOAT]
+
+# String/Datetime types
+STRING_OR_DATETIME = Union[str, datetime]
diff --git a/finquant/efficient_frontier.py b/finquant/efficient_frontier.py
index ef1db601..ce84ecd0 100644
--- a/finquant/efficient_frontier.py
+++ b/finquant/efficient_frontier.py
@@ -3,13 +3,17 @@
"""
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+
import matplotlib.pylab as plt
import numpy as np
import pandas as pd
import scipy.optimize as sco
import finquant.minimise_fun as min_fun
+from finquant.data_types import ARRAY_OR_DATAFRAME, ARRAY_OR_LIST, FLOAT, INT, NUMERIC
from finquant.quants import annualised_portfolio_quantities
+from finquant.type_utilities import type_validation
class EfficientFrontier:
@@ -26,19 +30,42 @@ class EfficientFrontier:
(minimum Volatility and maximum Sharpe Ratio).
"""
+ # Attributes:
+ mean_returns: pd.Series
+ cov_matrix: pd.DataFrame
+ risk_free_rate: FLOAT
+ freq: INT
+ method: str
+ names: List[str]
+ num_stocks: int
+ last_optimisation: str
+ bounds: Tuple[Tuple[int, int], ...]
+ x_0: np.ndarray[np.float64, Any]
+ constraints: Dict[str, Union[str, Callable[[Any], FLOAT]]]
+ weights: np.ndarray[np.float64, Any]
+ df_weights: pd.DataFrame
+ efrontier: np.ndarray[np.float64, Any]
+
def __init__(
- self, mean_returns, cov_matrix, risk_free_rate=0.005, freq=252, method="SLSQP"
+ self,
+ mean_returns: pd.Series,
+ cov_matrix: pd.DataFrame,
+ risk_free_rate: FLOAT = 0.005,
+ freq: INT = 252,
+ method: str = "SLSQP",
):
"""
- :Input:
- :mean_returns: ``pandas.Series``, individual expected returns for all
- stocks in the portfolio
- :cov_matrix: ``pandas.DataFrame``, covariance matrix of returns
- :risk_free_rate: ``int``/``float`` (default= ``0.005``), risk free rate
- :freq: ``int`` (default= ``252``), number of trading days, default
- value corresponds to trading days in a year
- :method: ``string`` (default= ``"SLSQP"``), type of solver method to use,
- must be one of:
+ :param mean_returns: A Series of individual expected returns for all stocks
+
+ :param cov_matrix: Covariance matrix of returns
+
+ :param risk_free_rate: Risk free rate, default: 0.005
+ :type risk_free_rate: :py:data:`~.finquant.data_types.FLOAT`
+
+ :param freq: Number of trading days in a year, default: 252
+ :type freq: :py:data:`~.finquant.data_types.INT`
+
+ :param method: Type of solver method to use (default: SLSQP), must be one of:
- 'Nelder-Mead'
- 'Powell'
@@ -57,10 +84,14 @@ def __init__(
all of which are officially supported by scipy.optimize.minimize
"""
- if not isinstance(mean_returns, pd.Series):
- raise ValueError("mean_returns is expected to be a pandas.Series.")
- if not isinstance(cov_matrix, pd.DataFrame):
- raise ValueError("cov_matrix is expected to be a pandas.DataFrame")
+ # Type validations:
+ type_validation(
+ returns_series=mean_returns,
+ cov_matrix=cov_matrix,
+ risk_free_rate=risk_free_rate,
+ freq=freq,
+ method=method,
+ )
supported_methods = [
"Nelder-Mead",
"Powell",
@@ -77,10 +108,6 @@ def __init__(
"trust-exact",
"trust-krylov",
]
- if not isinstance(risk_free_rate, (int, float)):
- raise ValueError("risk_free_rate is expected to be an integer or float.")
- if not isinstance(method, str):
- raise ValueError("method is expected to be a string.")
if method not in supported_methods:
raise ValueError("method is not supported by scipy.optimize.minimize.")
@@ -96,36 +123,37 @@ def __init__(
# set numerical parameters
bound = (0, 1)
- self.bounds = tuple(bound for stock in range(self.num_stocks))
- self.x0 = np.array(self.num_stocks * [1.0 / self.num_stocks])
- self.constraints = {"type": "eq", "fun": lambda x: np.sum(x) - 1}
+ self.bounds = tuple(bound for _ in range(self.num_stocks))
+ self.x_0 = np.array(self.num_stocks * [1.0 / self.num_stocks], dtype=np.float64)
+ self.constraints = {"type": "eq", "fun": lambda x: np.sum(x) - 1.0}
# placeholder for optimised values/weights
- self.weights = None
- self.df_weights = None
- self.efrontier = None
+ self.weights = np.empty(0, dtype=np.float64)
+ self.df_weights = pd.DataFrame()
+ self.efrontier = np.empty((0, 2), dtype=np.float64)
- def minimum_volatility(self, save_weights=True):
+ def minimum_volatility(
+ self, save_weights: bool = True
+ ) -> ARRAY_OR_DATAFRAME[FLOAT]:
"""Finds the portfolio with the minimum volatility.
- :Input:
- :save_weights: ``boolean`` (default= ``True``), for internal use only.
+ :param save_weights: For internal use only, default: True
Whether to save the optimised weights in the instance variable
``weights`` (and ``df_weights``). Useful for the case of computing
the efficient frontier after doing an optimisation, else the optimal
- weights would be overwritten by the efficient frontier computations.
+ weights are overwritten by the efficient frontier computations.
Best to ignore this argument.
- :Output:
- :df_weights:
- - if "save_weights" is True: a ``pandas.DataFrame`` of weights/allocation
- of stocks within the optimised portfolio.
- :weights:
- - if "save_weights" is False: a ``numpy.ndarray`` of weights/allocation
- of stocks within the optimised portfolio.
+
+ :rtype: :py:data:`~.finquant.data_types.ARRAY_OR_DATAFRAME`
+ :return:
+ - if ``save_weights`` is True:
+ a DataFrame of weights/allocation of stocks within the optimised portfolio.
+ - if ``save_weights`` is False:
+ a ``numpy.ndarray`` of weights/allocation of stocks within the optimised portfolio.
"""
- if not isinstance(save_weights, bool):
- raise ValueError("save_weights is expected to be a boolean.")
+ # Type validations:
+ type_validation(save_weights=save_weights)
args = (self.mean_returns.values, self.cov_matrix.values)
@@ -133,7 +161,7 @@ def minimum_volatility(self, save_weights=True):
result = sco.minimize(
min_fun.portfolio_volatility,
args=args,
- x0=self.x0,
+ x0=self.x_0,
method=self.method,
bounds=self.bounds,
constraints=self.constraints,
@@ -152,34 +180,34 @@ def minimum_volatility(self, save_weights=True):
# of pandas.DataFrame
return result["x"]
- def maximum_sharpe_ratio(self, save_weights=True):
+ def maximum_sharpe_ratio(
+ self, save_weights: bool = True
+ ) -> ARRAY_OR_DATAFRAME[FLOAT]:
"""Finds the portfolio with the maximum Sharpe Ratio, also called the
tangency portfolio.
- :Input:
- :save_weights: ``boolean`` (default= ``True``), for internal use only.
+ :param save_weights: For internal use only, default: True
Whether to save the optimised weights in the instance variable
``weights`` (and ``df_weights``). Useful for the case of computing
the efficient frontier after doing an optimisation, else the optimal
- weights would be overwritten by the efficient frontier computations.
+ weights are overwritten by the efficient frontier computations.
Best to ignore this argument.
- :Output:
- :df_weights:
- - if "save_weights" is True: a ``pandas.DataFrame`` of weights/allocation
- of stocks within the optimised portfolio.
- :weights:
- - if "save_weights" is False: a ``numpy.ndarray`` of weights/allocation
- of stocks within the optimised portfolio.
+ :rtype: :py:data:`~.finquant.data_types.ARRAY_OR_DATAFRAME`
+ :return:
+ - if ``save_weights`` is True:
+ a DataFrame of weights/allocation of stocks within the optimised portfolio.
+ - if ``save_weights`` is False:
+ a ``numpy.ndarray`` of weights/allocation of stocks within the optimised portfolio.
"""
- if not isinstance(save_weights, bool):
- raise ValueError("save_weights is expected to be a boolean.")
+ # Type validations:
+ type_validation(save_weights=save_weights)
args = (self.mean_returns.values, self.cov_matrix.values, self.risk_free_rate)
# optimisation
result = sco.minimize(
min_fun.negative_sharpe_ratio,
args=args,
- x0=self.x0,
+ x0=self.x_0,
method=self.method,
bounds=self.bounds,
constraints=self.constraints,
@@ -196,31 +224,29 @@ def maximum_sharpe_ratio(self, save_weights=True):
# of pandas.DataFrame
return result["x"]
- def efficient_return(self, target, save_weights=True):
+ def efficient_return(
+ self, target: NUMERIC, save_weights: bool = True
+ ) -> ARRAY_OR_DATAFRAME[FLOAT]:
"""Finds the portfolio with the minimum volatility for a given target
return.
- :Input:
- :target: ``float``, the target return of the optimised portfolio.
- :save_weights: ``boolean`` (default= ``True``), for internal use only.
+ :param target: The target return of the optimised portfolio.
+ :param save_weights: For internal use only, default: True
Whether to save the optimised weights in the instance variable
``weights`` (and ``df_weights``). Useful for the case of computing
the efficient frontier after doing an optimisation, else the optimal
- weights would be overwritten by the efficient frontier computations.
+ weights are overwritten by the efficient frontier computations.
Best to ignore this argument.
- :Output:
- :df_weights:
- - if "save_weights" is True: a ``pandas.DataFrame`` of weights/allocation
- of stocks within the optimised portfolio.
- :weights:
- - if "save_weights" is False: a ``numpy.ndarray`` of weights/allocation
- of stocks within the optimised portfolio.
+ :rtype: :py:data:`~.finquant.data_types.ARRAY_OR_DATAFRAME`
+ :return:
+ - if ``save_weights`` is True:
+ a DataFrame of weights/allocation of stocks within the optimised portfolio.
+ - if ``save_weights`` is False:
+ a ``numpy.ndarray`` of weights/allocation of stocks within the optimised portfolio.
"""
- if not isinstance(target, (int, float)):
- raise ValueError("target is expected to be an integer or float.")
- if not isinstance(save_weights, bool):
- raise ValueError("save_weights is expected to be a boolean.")
+ # Type validations:
+ type_validation(target=target, save_weights=save_weights)
args = (self.mean_returns.values, self.cov_matrix.values)
# here we have an additional constraint:
constraints = (
@@ -237,7 +263,7 @@ def efficient_return(self, target, save_weights=True):
result = sco.minimize(
min_fun.portfolio_volatility,
args=args,
- x0=self.x0,
+ x0=self.x_0,
method=self.method,
bounds=self.bounds,
constraints=constraints,
@@ -254,19 +280,16 @@ def efficient_return(self, target, save_weights=True):
# of pandas.DataFrame
return result["x"]
- def efficient_volatility(self, target):
+ def efficient_volatility(self, target: NUMERIC) -> pd.DataFrame:
"""Finds the portfolio with the maximum Sharpe ratio for a given
target volatility.
- :Input:
- :target: ``float``, the target volatility of the optimised portfolio.
+        :param target: The target volatility of the optimised portfolio.
- :Output:
- :df_weights: a ``pandas.DataFrame`` of weights/allocation of stocks within
- the optimised portfolio.
+ :return: DataFrame of weights/allocation of stocks within the optimised portfolio.
"""
- if not isinstance(target, (int, float)):
- raise ValueError("target is expected to be an integer or float.")
+ # Type validations:
+ type_validation(target=target)
args = (self.mean_returns.values, self.cov_matrix.values, self.risk_free_rate)
# here we have an additional constraint:
constraints = (
@@ -283,7 +306,7 @@ def efficient_volatility(self, target):
result = sco.minimize(
min_fun.negative_sharpe_ratio,
args=args,
- x0=self.x0,
+ x0=self.x_0,
method=self.method,
bounds=self.bounds,
constraints=constraints,
@@ -295,22 +318,25 @@ def efficient_volatility(self, target):
self.df_weights = self._dataframe_weights(self.weights)
return self.df_weights
- def efficient_frontier(self, targets=None):
+ def efficient_frontier(
+ self, targets: Optional[ARRAY_OR_LIST[FLOAT]] = None
+ ) -> np.ndarray[np.float64, Any]:
"""Gets portfolios for a range of given target returns.
If no targets were provided, the algorithm will find the minimum
and maximum returns of the portfolio's individual stocks, and set
the target range according to those values.
Results in the Efficient Frontier.
- :Input:
- :targets: ``list``/``numpy.ndarray`` (default= ``None``) of ``floats``,
- range of target returns.
+        :param targets: A list/array of target returns, default: ``None``
- :Output:
- :efrontier: ``numpy.ndarray`` of (volatility, return) values
+ :return: Array of (volatility, return) values
"""
+ # Type validations:
if targets is not None and not isinstance(targets, (list, np.ndarray)):
raise ValueError("targets is expected to be a list or numpy.ndarray")
+ if targets is not None:
+ for target in targets:
+ type_validation(target=target)
if targets is None:
# set range of target returns from the individual expected
# returns of the stocks in the portfolio.
@@ -329,12 +355,16 @@ def efficient_frontier(self, targets=None):
target,
]
)
- self.efrontier = np.array(efrontier)
+ self.efrontier: np.ndarray[np.float64, Any] = np.array(
+ efrontier, dtype=np.float64
+ )
+ if self.efrontier.size == 0 or self.efrontier.ndim != 2:
+ raise ValueError("Error: Efficient frontier could not be computed.")
return self.efrontier
- def plot_efrontier(self):
+ def plot_efrontier(self) -> None:
"""Plots the Efficient Frontier."""
- if self.efrontier is None:
+ if self.efrontier.size == 0:
# compute efficient frontier first
self.efficient_frontier()
plt.plot(
@@ -350,7 +380,7 @@ def plot_efrontier(self):
plt.ylabel("Expected Return")
plt.legend()
- def plot_optimal_portfolios(self):
+ def plot_optimal_portfolios(self) -> None:
"""Plots markers of the optimised portfolios for
- minimum Volatility, and
@@ -390,31 +420,35 @@ def plot_optimal_portfolios(self):
)
plt.legend()
- def _dataframe_weights(self, weights):
- """Generates and returns a ``pandas.DataFrame`` from given
- array weights.
+ def _dataframe_weights(
+ self, weights: Optional[np.ndarray[np.float64, Any]]
+ ) -> pd.DataFrame:
+ """Generates and returns a DataFrame from given array weights.
- :Input:
- :weights: ``numpy.ndarray``, weights of the stock of the portfolio
+ :param weights: An array of weights of the stock of the portfolio.
- :Output:
- :weights: ``pandas.DataFrame`` with the weights/allocation of stocks
+ :return: A DataFrame with the weights/allocation of stocks
"""
- if not isinstance(weights, np.ndarray):
- raise ValueError("weights is expected to be a numpy.ndarray")
- return pd.DataFrame(weights, index=self.names, columns=["Allocation"])
+ # Type validations:
+ type_validation(weights_array=weights)
+ return pd.DataFrame(weights, index=self.names, columns=["Allocation"]).astype(
+ np.float64
+ )
- def properties(self, verbose=False):
+ def properties(self, verbose: bool = False) -> Tuple[NUMERIC, FLOAT, FLOAT]:
"""Calculates and prints out Expected annualised Return,
Volatility and Sharpe Ratio of optimised portfolio.
- :Input:
- :verbose: ``boolean`` (default= ``False``), whether to print out properties or not
+ :param verbose: Whether to print out properties or not, default: ``False``
"""
+ # Type validations:
+ type_validation(verbose=verbose)
if not isinstance(verbose, bool):
raise ValueError("verbose is expected to be a boolean.")
- if self.weights is None:
- raise ValueError("Perform an optimisation first.")
+ if self.weights.size == 0:
+ raise ValueError(
+ "Error: weights are empty. Please perform an optimisation first."
+ )
expected_return, volatility, sharpe = annualised_portfolio_quantities(
self.weights,
self.mean_returns,
diff --git a/finquant/exceptions.py b/finquant/exceptions.py
new file mode 100644
index 00000000..64b571d9
--- /dev/null
+++ b/finquant/exceptions.py
@@ -0,0 +1,98 @@
+"""
+Custom Exceptions Module
+
+This module defines custom exception classes that represent various error scenarios
+related to financial data retrieval from external APIs. The exceptions are designed
+to provide specific context and information about different types of errors that
+can occur during the data retrieval process.
+
+Exceptions:
+ - InvalidDateFormatError: Raised when an invalid date format is encountered during
+ date parsing for financial data retrieval.
+ - QuandlLimitError: Raised when the API limit for Quandl data requests is reached.
+ - QuandlError: Raised for general errors that occur during Quandl data retrieval.
+ - YFinanceError: Raised for general errors that occur during YFinance data retrieval.
+
+Usage:
+ These custom exceptions can be raised within the respective functions that handle
+ data retrieval from external APIs, such as Quandl and YFinance. When an exception
+ is raised, it provides specific information about the error, making it easier to
+ diagnose and handle exceptional cases during data retrieval operations.
+
+Example:
+ try:
+ # Code that may raise one of the custom exceptions.
+ except InvalidDateFormatError as exc:
+ # Handle the invalid date format error here.
+ except QuandlLimitError as exc:
+ # Handle the Quandl API limit error here.
+ except QuandlError as exc:
+ # Handle other Quandl-related errors here.
+ except YFinanceError as exc:
+ # Handle YFinance-related errors here.
+
+"""
+
+
+class InvalidDateFormatError(Exception):
+ """
+ Exception for Invalid Date Format
+
+ This exception is raised when an invalid date format is encountered during date
+ parsing for financial data retrieval. It is typically raised when attempting to
+ convert a string to a datetime object with an incorrect format.
+
+ Example:
+ try:
+ start_date = datetime.datetime.strptime("2023/08/01", "%Y-%m-%d")
+ except ValueError as exc:
+ raise InvalidDateFormatError("Invalid date format. Use 'YYYY-MM-DD'.") from exc
+ """
+
+
+class QuandlLimitError(Exception):
+ """
+ Exception for Quandl API Limit Reached
+
+ This exception is raised when the API limit for Quandl data requests is reached.
+ It indicates that the rate limit or request quota for the Quandl API has been
+ exceeded, and no more requests can be made until the limit is reset.
+
+ Example:
+ try:
+ resp = quandl.get("GOOG", start_date="2023-08-01", end_date="2023-08-05")
+ except quandl.errors.QuandlLimit as exc:
+ raise QuandlLimitError("Quandl API limit reached. Try again later.") from exc
+ """
+
+
+class QuandlError(Exception):
+ """
+ Exception for Quandl Data Retrieval Error
+
+ This exception is raised for general errors that occur during Quandl data retrieval.
+ It can be used to handle any unexpected issues that arise while interacting with
+ the Quandl API.
+
+ Example:
+ try:
+ resp = quandl.get("GOOG", start_date="2023-08-01", end_date="2023-08-05")
+ except Exception as exc:
+ raise QuandlError("An error occurred while retrieving data from Quandl.") from exc
+ """
+
+
+class YFinanceError(Exception):
+ """
+ Exception for YFinance Data Retrieval Error
+
+ This exception is raised for general errors that occur during YFinance data retrieval.
+ It can be used to handle any unexpected issues that arise while interacting with
+ the YFinance library.
+
+ Example:
+ try:
+ data = yfinance.download("GOOG", start="2023-08-01", end="2023-08-05")
+ except Exception as exc:
+ raise YFinanceError("An error occurred while retrieving data from YFinance.") from exc
+ """
diff --git a/finquant/market.py b/finquant/market.py
index 426bbe20..89cf3be5 100644
--- a/finquant/market.py
+++ b/finquant/market.py
@@ -12,18 +12,18 @@
"""
-import numpy as np
+
import pandas as pd
from finquant.asset import Asset
-from finquant.returns import daily_returns, historical_mean_return
+from finquant.returns import daily_returns
class Market(Asset):
"""
Class representing a market index.
- :param data: Historical price data of the market index as a ``pandas.Series``.
+ :param data: Historical price data of the market index.
The ``Market`` class extends the ``Asset`` class and represents a specific type of asset,
specifically a market index.
@@ -31,10 +31,12 @@ class Market(Asset):
"""
+ # Attributes:
+    daily_returns: pd.Series
+
def __init__(self, data: pd.Series) -> None:
"""
- :Input:
- :data: ``pandas.Series`` of market index prices
+ :param data: Historical price data of the market index.
"""
super().__init__(data, name=data.name, asset_type="Market index")
self.daily_returns = self.comp_daily_returns()
diff --git a/finquant/minimise_fun.py b/finquant/minimise_fun.py
index f7886de5..83871f80 100644
--- a/finquant/minimise_fun.py
+++ b/finquant/minimise_fun.py
@@ -3,37 +3,65 @@
"""
+from finquant.data_types import ARRAY_OR_DATAFRAME, ARRAY_OR_SERIES, FLOAT, NUMERIC
from finquant.quants import annualised_portfolio_quantities
+from finquant.type_utilities import type_validation
-def portfolio_volatility(weights, mean_returns, cov_matrix):
+def portfolio_volatility(
+ weights: ARRAY_OR_SERIES[FLOAT],
+ mean_returns: ARRAY_OR_SERIES[FLOAT],
+ cov_matrix: ARRAY_OR_DATAFRAME[FLOAT],
+) -> FLOAT:
"""Calculates the volatility of a portfolio
- :Input:
- :weights: numpy.ndarray, weights of the stocks in the portfolio
- :mean_returns: pandas.Series, individual expected returns for all stocks
- in the portfolio
- :cov_matrix: pandas.DataFrame, covariance matrix of returns
+ :param weights: An array of weights
+ :type weights: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
- Output:
- :volatility: annualised volatility
+ :param mean_returns: An array of individual expected returns for all stocks
+ :type mean_returns: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
+
+ :param cov_matrix: Covariance matrix of returns
+ :type cov_matrix: :py:data:`~.finquant.data_types.ARRAY_OR_DATAFRAME`
+
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: Annualised volatility
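+
+    Note: the volatility is obtained from ``annualised_portfolio_quantities`` and corresponds to the
+    annualised standard deviation of the portfolio, i.e. sqrt(weights^T * cov_matrix * weights) scaled
+    by the square root of the frequency (252 trading days by default).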
"""
+ # Type validations:
+ type_validation(weights=weights, means=mean_returns, cov_matrix=cov_matrix)
return annualised_portfolio_quantities(weights, mean_returns, cov_matrix)[1]
-def negative_sharpe_ratio(weights, mean_returns, cov_matrix, risk_free_rate):
+def negative_sharpe_ratio(
+ weights: ARRAY_OR_SERIES[FLOAT],
+ mean_returns: ARRAY_OR_SERIES[FLOAT],
+ cov_matrix: ARRAY_OR_DATAFRAME[FLOAT],
+ risk_free_rate: FLOAT,
+) -> FLOAT:
"""Calculates the negative Sharpe ratio of a portfolio
- :Input:
- :weights: numpy.ndarray, weights of the stocks in the portfolio
- :mean_returns: pandas.Series, individual expected returns for all stocks
- in the portfolio
- :cov_matrix: pandas.DataFrame, covariance matrix of returns
- :risk_free_rate: Float (default=0.005), risk free rate
+ :param weights: An array of weights
+ :type weights: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
+
+ :param mean_returns: An array of individual expected returns for all stocks
+ :type mean_returns: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
+
+ :param cov_matrix: Covariance matrix of returns
+ :type cov_matrix: :py:data:`~.finquant.data_types.ARRAY_OR_DATAFRAME`
- Output:
- :sharpe: sharpe ratio * (-1)
+ :param risk_free_rate: Risk free rate
+ :type risk_free_rate: :py:data:`~.finquant.data_types.FLOAT`
+
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: Negative sharpe ratio
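+
+    Note: the Sharpe ratio is computed as (annualised return - risk free rate) / annualised volatility,
+    and its negative is returned so that maximising the Sharpe ratio can be phrased as a minimisation
+    problem (see ``EfficientFrontier.maximum_sharpe_ratio``).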
"""
+ # Type validations:
+ type_validation(
+ weights=weights,
+ means=mean_returns,
+ cov_matrix=cov_matrix,
+ risk_free_rate=risk_free_rate,
+ )
sharpe = annualised_portfolio_quantities(
weights, mean_returns, cov_matrix, risk_free_rate=risk_free_rate
)[2]
@@ -42,16 +70,25 @@ def negative_sharpe_ratio(weights, mean_returns, cov_matrix, risk_free_rate):
return -sharpe
-def portfolio_return(weights, mean_returns, cov_matrix):
+def portfolio_return(
+ weights: ARRAY_OR_SERIES[FLOAT],
+ mean_returns: ARRAY_OR_SERIES[FLOAT],
+ cov_matrix: ARRAY_OR_DATAFRAME[FLOAT],
+) -> NUMERIC:
"""Calculates the expected annualised return of a portfolio
- :Input:
- :weights: numpy.ndarray, weights of the stocks in the portfolio
- :mean_returns: pandas.Series, individual expected returns for all stocks
- in the portfolio
- :cov_matrix: pandas.DataFrame, covariance matrix of returns
+ :param weights: An array of weights
+ :type weights: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
+
+ :param mean_returns: An array of individual expected returns for all stocks
+ :type mean_returns: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
+
+ :param cov_matrix: Covariance matrix of returns
+ :type cov_matrix: :py:data:`~.finquant.data_types.ARRAY_OR_DATAFRAME`
- Output:
- :return: expected annualised return
+ :rtype: :py:data:`~.finquant.data_types.NUMERIC`
+ :return: Expected annualised return
"""
+ # Type validations:
+ type_validation(weights=weights, means=mean_returns, cov_matrix=cov_matrix)
return annualised_portfolio_quantities(weights, mean_returns, cov_matrix)[0]
diff --git a/finquant/monte_carlo.py b/finquant/monte_carlo.py
index f87960d9..0ec3a858 100644
--- a/finquant/monte_carlo.py
+++ b/finquant/monte_carlo.py
@@ -1,41 +1,49 @@
"""The module provides a class ``MonteCarlo`` which is an implementation of the
Monte Carlo method and a class ``MonteCarloOpt`` which allows the user to perform a
-Monte Carlo run to find optimised financial portfolios, given an intial portfolio.
+Monte Carlo run to find optimised financial portfolios, given an initial portfolio.
"""
+from typing import Any, Callable, Dict, Optional, Tuple
+
import matplotlib.pylab as plt
import numpy as np
import pandas as pd
+from finquant.data_types import FLOAT, INT
from finquant.quants import annualised_portfolio_quantities
+from finquant.type_utilities import type_validation
class MonteCarlo:
"""An object to perform a Monte Carlo run/simulation."""
- def __init__(self, num_trials=1000):
+ # Attributes:
+ num_trials: int
+
+ def __init__(self, num_trials: int = 1000):
"""
- :Input:
- :num_trials: ``int`` (default: ``1000``), number of iterations of the
- Monte Carlo run/simulation.
+ :param num_trials: Number of iterations of the Monte Carlo run/simulation, default: 1000
"""
self.num_trials = num_trials
- def run(self, fun, **kwargs):
+ def run(
+ self, fun: Callable[..., Any], **kwargs: Dict[str, Any]
+ ) -> np.ndarray[np.float64, Any]:
"""
- :Input:
- :fun: Function to call at each iteration of the Monte Carlo run.
- :kwargs: (optional) Additional arguments that are passed to `fun`.
+ :param fun: Function to call at each iteration of the Monte Carlo run.
+
+ :param kwargs: (optional) Additional arguments that are passed to ``fun``.
- :Output:
- :result: List of quantities returned from `fun` at each iteration.
+        :return: Array of quantities returned from ``fun`` at each iteration.
"""
+ # Type validations:
+ type_validation(fun=fun)
result = []
for _ in range(self.num_trials):
res = fun(**kwargs)
result.append(res)
- return np.asarray(result, dtype=object)
+ return np.asarray(result, dtype=np.ndarray)
class MonteCarloOpt(MonteCarlo):
@@ -45,53 +53,44 @@ class MonteCarloOpt(MonteCarlo):
Inherits from `MonteCarlo`.
"""
+ # Attributes:
+ returns: pd.DataFrame
+ risk_free_rate: FLOAT
+ freq: INT
+ initial_weights: Optional[np.ndarray[np.float64, Any]]
+
def __init__(
self,
- returns,
- num_trials=1000,
- risk_free_rate=0.005,
- freq=252,
- initial_weights=None,
- ):
+ returns: pd.DataFrame,
+ num_trials: int = 1000,
+ risk_free_rate: FLOAT = 0.005,
+ freq: INT = 252,
+ initial_weights: Optional[np.ndarray[np.float64, Any]] = None,
+ ) -> None:
"""
- :Input:
- :returns: A ``pandas.DataFrame`` which contains the returns of stocks.
- Note: If applicable, the given returns should be computed with the
- same risk free rate and time window/frequency (arguments
- ``risk_free_rate`` and ``freq`` as passed down here.
- :num_trials: ``int`` (default: ``1000``), number of portfolios to be
- computed, each with a random distribution of weights/allocation
- in each stock
- :risk_free_rate: ``float`` (default: ``0.005``), the risk free rate as
- required for the Sharpe Ratio
- :freq: ``int`` (default: ``252``), number of trading days, default
- value corresponds to trading days in a year
- :initial_weights: ``list``/``numpy.ndarray`` (default: ``None``), weights of
- initial/given portfolio, only used to plot a marker for the
- initial portfolio in the optimisation plot.
-
- :Output:
- :opt: ``pandas.DataFrame`` with optimised investment strategies for maximum
- Sharpe Ratio and minimum volatility.
+ :param returns: DataFrame of returns of stocks
+ Note: If applicable, the given returns should be computed with the same risk free rate
+            and time window/frequency (arguments ``risk_free_rate`` and ``freq``) as passed in here.
+ :param num_trials: Number of portfolios to be computed,
+ each with a random distribution of weights/allocation in each stock, default: 1000
+ :param risk_free_rate: Risk free rate as required for the Sharpe Ratio, default: 0.005
+ :param freq: Number of trading days in a year, default: 252
+ :param initial_weights: Weights of initial/given portfolio, only used to plot a marker for the
+ initial portfolio in the optimisation plot, default: ``None``
"""
- if initial_weights is not None and not isinstance(initial_weights, np.ndarray):
- raise ValueError(
- "If given, optional argument 'initial_weights' "
- + "must be of type numpy.ndarray"
- )
- if not isinstance(returns, pd.DataFrame):
- raise ValueError("returns is expected to be a pandas.DataFrame")
- if not isinstance(num_trials, int):
- raise ValueError("num_trials is expected to be an integer")
- if not isinstance(risk_free_rate, (int, float)):
- raise ValueError("risk_free_rate is expected to be an integer or float.")
- if not isinstance(freq, int):
- raise ValueError("freq is expected to be an integer.")
+ # Type validations:
+ type_validation(
+ returns_df=returns,
+ num_trials=num_trials,
+ risk_free_rate=risk_free_rate,
+ freq=freq,
+ initial_weights=initial_weights,
+ )
self.returns = returns
self.num_trials = num_trials
self.risk_free_rate = risk_free_rate
self.freq = freq
- self.initial_weights = initial_weights
+        self.initial_weights: Optional[np.ndarray[np.float64, Any]] = initial_weights
# initiate super class
super().__init__(num_trials=self.num_trials)
# setting additional variables
@@ -104,53 +103,65 @@ def __init__(
self.opt_weights = None
self.opt_results = None
- def _random_weights(self):
+ def _random_weights(
+ self,
+ ) -> Tuple[np.ndarray[np.float64, Any], np.ndarray[np.float64, Any]]:
"""Computes random weights for the stocks of a portfolio and the
corresponding Expected Return, Volatility and Sharpe Ratio.
- :Output:
- :(weights, quantities): Tuple of weights (np.ndarray) and a
- list of [expected return, volatility, sharpe ratio].
+        :return: Tuple of weights (array) and an array of (expected return, volatility, Sharpe ratio)
"""
# select random weights for portfolio
- weights = np.array(np.random.random(self.num_stocks))
+ weights: np.ndarray[np.float64, Any] = np.array(
+ np.random.random(self.num_stocks), dtype=np.float64
+ )
# rebalance weights
weights = weights / np.sum(weights)
# compute portfolio return and volatility
- portfolio_values = annualised_portfolio_quantities(
- weights, self.return_means, self.cov_matrix, self.risk_free_rate, self.freq
+ portfolio_values: np.ndarray[np.float64, Any] = np.array(
+ annualised_portfolio_quantities(
+ weights,
+ self.return_means,
+ self.cov_matrix,
+ self.risk_free_rate,
+ self.freq,
+ ),
+ dtype=np.float64,
)
- return (weights, np.array(portfolio_values))
+ return (weights, portfolio_values)
- def _random_portfolios(self):
+ def _random_portfolios(self) -> Tuple[pd.DataFrame, pd.DataFrame]:
"""Performs a Monte Carlo run and gets a list of random portfolios
and their corresponding quantities (Expected Return, Volatility,
- Sharpe Ratio). Returns ``pandas.DataFrame`` of weights and results.
+ Sharpe Ratio).
- :Output:
- :df_weights: ``pandas.DataFrame``, holds the weights for each randomly
- generated portfolio
- :df_results: ``pandas.DataFrame``, holds Expected Annualised Return,
- Volatility and Sharpe Ratio of each randomly generated portfolio
+ :return:
+ :df_weights: DataFrame, holds the weights for each randomly generated portfolio
+ :df_results: DataFrame, holds Expected Annualised Return, Volatility and
+ Sharpe Ratio of each randomly generated portfolio
"""
# run Monte Carlo to get random weights and corresponding quantities
res = self.run(self._random_weights)
# convert to pandas.DataFrame:
weights_columns = list(self.returns.columns)
result_columns = ["Expected Return", "Volatility", "Sharpe Ratio"]
- df_weights = pd.DataFrame(data=res[:, 0].tolist(), columns=weights_columns)
- df_results = pd.DataFrame(data=res[:, 1].tolist(), columns=result_columns)
+ df_weights = pd.DataFrame(
+ data=res[:, 0].tolist(), columns=weights_columns
+ ).astype(np.float64)
+ df_results = pd.DataFrame(
+ data=res[:, 1].tolist(), columns=result_columns
+ ).astype(np.float64)
return (df_weights, df_results)
- def optimisation(self):
+ def optimisation(self) -> Tuple[pd.DataFrame, pd.DataFrame]:
"""Optimisation of the portfolio by performing a Monte Carlo
simulation.
- :Output:
- :opt_w: ``pandas.DataFrame`` with optimised investment strategies for maximum
- Sharpe Ratio and minimum volatility.
- :opt_res: ``pandas.DataFrame`` with Expected Return, Volatility and Sharpe Ratio
- for portfolios with minimum Volatility and maximum Sharpe Ratio.
+ :return:
+ :opt_w: DataFrame with optimised investment strategies for maximum
+ Sharpe Ratio and minimum volatility.
+ :opt_res: DataFrame with Expected Return, Volatility and Sharpe Ratio
+ for portfolios with minimum Volatility and maximum Sharpe Ratio.
"""
# perform Monte Carlo run and get weights and results
df_weights, df_results = self._random_portfolios()
@@ -159,22 +170,22 @@ def optimisation(self):
index_min_volatility = df_results["Volatility"].idxmin()
index_max_sharpe = df_results["Sharpe Ratio"].idxmax()
# storing optimal results to DataFrames
- opt_w = pd.DataFrame(
+ opt_w: pd.DataFrame = pd.DataFrame(
[df_weights.iloc[index_min_volatility], df_weights.iloc[index_max_sharpe]],
index=["Min Volatility", "Max Sharpe Ratio"],
)
- opt_res = pd.DataFrame(
+ opt_res: pd.DataFrame = pd.DataFrame(
[df_results.iloc[index_min_volatility], df_results.iloc[index_max_sharpe]],
index=["Min Volatility", "Max Sharpe Ratio"],
)
# setting instance variables:
- self.df_weights = df_weights
- self.df_results = df_results
- self.opt_weights = opt_w
- self.opt_results = opt_res
+ self.df_weights = df_weights.astype(np.float64)
+ self.df_results = df_results.astype(np.float64)
+ self.opt_weights = opt_w.astype(np.float64)
+ self.opt_results = opt_res.astype(np.float64)
return opt_w, opt_res
- def plot_results(self):
+ def plot_results(self) -> None:
"""Plots the results of the Monte Carlo run, with all of the
randomly generated weights/portfolios, as well as markers
for the portfolios with the minimum Volatility and maximum
@@ -246,27 +257,34 @@ def plot_results(self):
cbar.ax.set_ylabel("Sharpe Ratio [period=" + str(self.freq) + "]", rotation=90)
plt.legend()
- def properties(self):
+ def properties(self) -> None:
"""Prints out the properties of the Monte Carlo optimisation."""
- # print out results
- opt_vals = ["Min Volatility", "Max Sharpe Ratio"]
- string = ""
- for val in opt_vals:
- string += "-" * 70
- string += f"\nOptimised portfolio for {val.replace('Min', 'Minimum').replace('Max', 'Maximum')}"
- string += f"\n\nTime period: {self.freq} days"
- string += f"\nExpected return: {self.opt_results.loc[val]['Expected Return']:0.3f}"
- string += f"\nVolatility: {self.opt_results.loc[val]['Volatility']:0.3f}"
- string += (
- f"\nSharpe Ratio: {self.opt_results.loc[val]['Sharpe Ratio']:0.3f}"
+ if self.opt_weights is None or self.opt_results is None:
+ print(
+ "Error: Optimal weights and/or results are not computed. Please perform a Monte Carlo run first."
)
- string += "\n\nOptimal weights:"
- string += "\n" + str(
- self.opt_weights.loc[val]
- .to_frame()
- .transpose()
- .rename(index={val: "Allocation"})
- )
- string += "\n"
- string += "-" * 70
- print(string)
+ else:
+ # print out results
+ opt_vals = ["Min Volatility", "Max Sharpe Ratio"]
+ string = ""
+ for val in opt_vals:
+ string += "-" * 70
+ string += f"\nOptimised portfolio for {val.replace('Min', 'Minimum').replace('Max', 'Maximum')}"
+ string += f"\n\nTime period: {self.freq} days"
+ string += f"\nExpected return: {self.opt_results.loc[val]['Expected Return']:0.3f}"
+ string += (
+ f"\nVolatility: {self.opt_results.loc[val]['Volatility']:0.3f}"
+ )
+ string += (
+ f"\nSharpe Ratio: {self.opt_results.loc[val]['Sharpe Ratio']:0.3f}"
+ )
+ string += "\n\nOptimal weights:"
+ string += "\n" + str(
+ self.opt_weights.loc[val]
+ .to_frame()
+ .transpose()
+ .rename(index={val: "Allocation"})
+ )
+ string += "\n"
+ string += "-" * 70
+ print(string)
diff --git a/finquant/moving_average.py b/finquant/moving_average.py
index 515a6b46..b54c58c7 100644
--- a/finquant/moving_average.py
+++ b/finquant/moving_average.py
@@ -13,58 +13,61 @@
import numpy as np
import pandas as pd
+from finquant.data_types import SERIES_OR_DATAFRAME
+from finquant.type_utilities import type_validation
+
def compute_ma(
- data, fun: Callable, spans: List[int], plot: bool = True
+ data: SERIES_OR_DATAFRAME,
+ fun: Callable[[SERIES_OR_DATAFRAME, int], pd.Series],
+ spans: List[int],
+ plot: bool = True,
) -> pd.DataFrame:
"""Computes a band of moving averages (sma or ema, depends on the input argument
`fun`) for a number of different time windows. If `plot` is `True`, it also
computes and sets markers for buy/sell signals based on crossovers of the Moving
Averages with the shortest/longest spans.
- :Input:
- :data: pandas.DataFrame, or pandas.Series, with stock prices
- (if pandas.DataFrame: only one column is expected)
- :fun: function that computes a moving average, e.g. sma (simple) or
- ema (exponential).
- :spans: list of integers, time windows to compute the Moving Average on.
- :plot: boolean (default: True), whether to plot the moving averages
+ :param data: A series/dataframe of daily stock prices (if DataFrame,
+ only one column is expected)
+ :type data: :py:data:`~.finquant.data_types.SERIES_OR_DATAFRAME`
+    :param fun: Function that computes a moving average, e.g. ``sma`` (simple) or
+        ``ema`` (exponential).
+ :param spans: List of integers, time windows to compute the Moving Average on.
+ :param plot: boolean, whether to plot the moving averages
and buy/sell signals based on crossovers of shortest and longest
- moving average.
+        moving average, default: True
- :Output:
- :ma: pandas.DataFrame with moving averages of given data.
+ :return: Moving averages of given data.
"""
- if not isinstance(data, (pd.Series, pd.DataFrame)):
- raise ValueError(
- "data is expected to be of type pandas.Series or pandas.DataFrame"
- )
- ma = data.copy(deep=True)
+ # Type validations:
+ type_validation(data=data, fun=fun, spans=spans, plot=plot)
+ m_a = data.copy(deep=True)
# converting data to pd.DataFrame if it is a pd.Series (for subsequent function calls):
- if isinstance(ma, pd.Series):
- ma = ma.to_frame()
+ if isinstance(m_a, pd.Series):
+ m_a = m_a.to_frame()
# compute moving averages
for span in spans:
- ma[str(span) + "d"] = fun(data, span=span)
+ m_a[str(span) + "d"] = fun(data, span)
if plot:
fig = plt.figure()
- ax = fig.add_subplot(111)
+ axis = fig.add_subplot(111)
# plot moving averages
- ma.plot(ax=ax)
+ m_a.plot(ax=axis)
# Create buy/sell signals of shortest and longest span
minspan = min(spans)
minlabel = str(minspan) + "d"
maxspan = max(spans)
maxlabel = str(maxspan) + "d"
- signals = ma.copy(deep=True)
+ signals = m_a.copy(deep=True)
signals["diff"] = 0.0
signals["diff"][minspan:] = np.where(
- ma[minlabel][minspan:] > ma[maxlabel][minspan:], 1.0, 0.0
+ m_a[minlabel][minspan:] > m_a[maxlabel][minspan:], 1.0, 0.0
)
# Generate trading orders
signals["signal"] = signals["diff"].diff()
# marker for buy signal
- ax.plot(
+ axis.plot(
signals.loc[signals["signal"] == 1.0].index.values,
signals[minlabel][signals["signal"] == 1.0].values,
marker="^",
@@ -73,7 +76,7 @@ def compute_ma(
label="buy signal",
)
# marker for sell signal
- ax.plot(
+ axis.plot(
signals.loc[signals["signal"] == -1.0].index.values,
signals[minlabel][signals["signal"] == -1.0].values,
marker="v",
@@ -89,7 +92,7 @@ def compute_ma(
# axis labels
plt.xlabel(data.index.name)
plt.ylabel("Price")
- return ma
+ return m_a
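# Minimal usage sketch (assuming `prices` is a pandas.Series or single-column
# pandas.DataFrame of daily stock prices):
from finquant.moving_average import compute_ma, ema

ma_band = compute_ma(prices, ema, spans=[10, 50, 100, 200], plot=False)
print(ma_band.tail())  # original prices plus one extra column per span, e.g. "50d"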
def sma(data: pd.DataFrame, span: int = 100) -> pd.DataFrame:
@@ -97,14 +100,13 @@ def sma(data: pd.DataFrame, span: int = 100) -> pd.DataFrame:
Note: the moving average is computed on all columns.
- :Input:
- :data: pandas.DataFrame with stock prices in columns
- :span: int (default: 100), number of days/values over which
- the average is computed
+ :param data: A dataframe of daily stock prices
+ :param span: Number of days/values over which the average is computed, default: 100
- :Output:
- :sma: pandas.DataFrame of simple moving average
+ :return: Simple moving average
"""
+ # Type validations:
+ type_validation(data=data, span=span)
return data.rolling(window=span, center=False).mean()
@@ -113,14 +115,13 @@ def ema(data: pd.DataFrame, span: int = 100) -> pd.DataFrame:
Note: the moving average is computed on all columns.
- :Input:
- :data: pandas.DataFrame with stock prices in columns
- :span: int (default: 100), number of days/values over which
- the average is computed
+ :param data: A dataframe of daily stock prices
+ :param span: Number of days/values over which the average is computed, default: 100
- :Output:
- :ema: pandas.DataFrame of exponential moving average
+ :return: Exponential moving average
"""
+ # Type validations:
+ type_validation(data=data, span=span)
return data.ewm(span=span, adjust=False, min_periods=span).mean()
@@ -128,15 +129,13 @@ def sma_std(data: pd.DataFrame, span: int = 100) -> pd.DataFrame:
"""Computes and returns the standard deviation of the simple moving
average.
- :Input:
- :data: pandas.DataFrame with stock prices in columns
- :span: int (default: 100), number of days/values over which
- the average is computed
+ :param data: A dataframe of daily stock prices
+ :param span: Number of days/values over which the average is computed, default: 100
- :Output:
- :sma_std: pandas.DataFrame of standard deviation of
- simple moving average
+ :return: Standard deviation of simple moving average
"""
+ # Type validations:
+ type_validation(data=data, span=span)
return data.rolling(window=span, center=False).std()
@@ -144,60 +143,57 @@ def ema_std(data: pd.DataFrame, span: int = 100) -> pd.DataFrame:
"""Computes and returns the standard deviation of the exponential
moving average.
- :Input:
- :data: pandas.DataFrame with stock prices in columns
- :span: int (default: 100), number of days/values over which
- the average is computed
+ :param data: A dataframe of daily stock prices
+ :param span: Number of days/values over which the average is computed, default: 100
- :Output:
- :ema_std: pandas.DataFrame of standard deviation of
- exponential moving average
+ :return: Standard deviation of exponential moving average
"""
+ # Type validations:
+ type_validation(data=data, span=span)
return data.ewm(span=span, adjust=False, min_periods=span).std()
-def plot_bollinger_band(data, fun: Callable, span: int = 100) -> None:
+def plot_bollinger_band(
+ data: pd.DataFrame,
+ fun: Callable[[pd.DataFrame, int], pd.DataFrame],
+ span: int = 100,
+) -> None:
"""Computes and visualises a Bolling Band.
- :Input:
- :data: pandas.Series or pandas.DataFrame with stock prices in columns
- :fun: function that computes a moving average, e.g. sma (simple) or
- ema (exponential).
- :span: int (default: 100), number of days/values over which
- the average is computed
+ :param data: A dataframe of daily stock prices
+ :param fun: function that computes a moving average, e.g. ``sma`` (simple) or
+ ``ema`` (exponential).
+ :param span: Number of days/values over which the average is computed, default: 100
"""
- if not isinstance(data, (pd.Series, pd.DataFrame)):
- raise ValueError(
- "data is expected to be of type pandas.Series or pandas.DataFrame"
- )
+ # Type validations:
+ type_validation(data=data, fun=fun, span=span)
+ # special requirement for dataframe "data":
if isinstance(data, pd.DataFrame) and not len(data.columns.values) == 1:
raise ValueError("data is expected to have only one column.")
- if not isinstance(span, int):
- raise ValueError("span must be an integer.")
# converting data to pd.DataFrame if it is a pd.Series (for subsequent function calls):
if isinstance(data, pd.Series):
data = data.to_frame()
# compute moving average
- ma = compute_ma(data, fun, [span], plot=False)
+ m_a = compute_ma(data, fun, [span], plot=False)
# create dataframes for bollinger band object and standard
# deviation
- bol = ma.copy(deep=True)
- std = ma.copy(deep=True)
+ bol = m_a.copy(deep=True)
+ std = m_a.copy(deep=True)
# get column label
collabel = data.columns.values[0]
# get standard deviation
- if fun == sma:
+ if fun is sma:
std[str(span) + "d std"] = sma_std(data[collabel], span=span)
- elif fun == ema:
+ elif fun is ema:
std[str(span) + "d std"] = ema_std(data[collabel], span=span)
# compute upper and lower band
bol["Lower Band"] = bol[str(span) + "d"] - (std[str(span) + "d std"] * 2)
bol["Upper Band"] = bol[str(span) + "d"] + (std[str(span) + "d std"] * 2)
# plot
fig = plt.figure()
- ax = fig.add_subplot(111)
+ axis = fig.add_subplot(111)
# bollinger band
- ax.fill_between(
+ axis.fill_between(
data.index.values,
bol["Upper Band"],
bol["Lower Band"],
@@ -205,8 +201,8 @@ def plot_bollinger_band(data, fun: Callable, span: int = 100) -> None:
label="Bollinger Band",
)
# plot data and moving average
- bol[collabel].plot(ax=ax)
- bol[str(span) + "d"].plot(ax=ax)
+ bol[collabel].plot(ax=axis)
+ bol[str(span) + "d"].plot(ax=axis)
# title
title = (
"Bollinger Band of +/- 2$\\sigma$, Moving Average of "
diff --git a/finquant/portfolio.py b/finquant/portfolio.py
index e3d1f3be..ad840b72 100644
--- a/finquant/portfolio.py
+++ b/finquant/portfolio.py
@@ -1,7 +1,9 @@
"""This module is the **core** of `FinQuant`. It provides
- a public class ``Portfolio`` that holds and calculates quantities of a financial
- portfolio, which is a collection of ``Stock`` instances (the ``Stock`` class is provided in ``finquant.stock``).
+ portfolio, which is a collection of ``Stock`` instances (the ``Stock`` class is
+ provided in ``finquant.stock`` and is a child class of ``Asset`` defined in
+ ``finquant.asset``).
- a public function ``build_portfolio()`` that automatically constructs and returns
an instance of ``Portfolio`` and instances of ``Stock``.
The relevant stock data is either retrieved through `quandl`/`yfinance` or provided by the user as a
@@ -38,7 +40,7 @@
The former should be the preferred method for reasons of computational effort
and accuracy. The latter is only included for the sake of completeness.
-Finally, functions are implemented to generated the following plots:
+Finally, functions are implemented to generate the following plots:
- Monte Carlo run to find optimal portfolio(s)
- Efficient Frontier
@@ -50,15 +52,32 @@
based on a numerical optimisation
- Individual stocks of the portfolio (Expected Return over Volatility)
"""
+# suppress some pylint complaints for this module only
+# pylint: disable=C0302,R0904,R0912,W0212
import datetime
-from typing import List
+from typing import Any, Dict, List, Optional, Tuple, Union, cast
import matplotlib.pylab as plt
import numpy as np
import pandas as pd
+from finquant.data_types import (
+ ARRAY_OR_LIST,
+ ELEMENT_TYPE,
+ FLOAT,
+ INT,
+ LIST_DICT_KEYS,
+ NUMERIC,
+ STRING_OR_DATETIME,
+)
from finquant.efficient_frontier import EfficientFrontier
+from finquant.exceptions import (
+ InvalidDateFormatError,
+ QuandlError,
+ QuandlLimitError,
+ YFinanceError,
+)
from finquant.market import Market
from finquant.monte_carlo import MonteCarloOpt
from finquant.quants import (
@@ -76,6 +95,7 @@
historical_mean_return,
)
from finquant.stock import Stock
+from finquant.type_utilities import type_validation
class Portfolio:
@@ -86,40 +106,52 @@ class Portfolio:
an object of ``Stock``.
"""
- def __init__(self):
+ # Attributes:
+ portfolio: pd.DataFrame
+ stocks: Dict[str, Stock]
+ data: pd.DataFrame
+ expected_return: FLOAT
+ volatility: FLOAT
+ downside_risk: FLOAT
+ var: FLOAT
+ sharpe: FLOAT
+ sortino: FLOAT
+ skew: pd.Series
+ kurtosis: pd.Series
+ __totalinvestment: NUMERIC
+ __var_confidence_level: FLOAT
+ __risk_free_rate: FLOAT
+ __freq: INT
+ ef: Optional[EfficientFrontier]
+ mc: Optional[MonteCarloOpt]
+ __market_index: Optional[Market]
+ beta_stocks: pd.DataFrame
+ beta: Optional[FLOAT]
+
+ def __init__(self) -> None:
"""Initiates ``Portfolio``."""
# initialising instance variables
self.portfolio = pd.DataFrame()
self.stocks = {}
self.data = pd.DataFrame()
- self.expected_return = None
- self.volatility = None
- self.downside_risk = None
- self.var = None
- self.sharpe = None
- self.sortino = None
- self.skew = None
- self.kurtosis = None
- self.totalinvestment = None
- self.var_confidence_level = 0.95
- self.risk_free_rate = 0.005
- self.freq = 252
- # instance variables for Efficient Frontier and
- # Monte Carlo optimisations
+ self.__var_confidence_level = 0.95
+ self.__risk_free_rate = 0.005
+ self.__freq = 252
+ # instance variables for Efficient Frontier and Monte Carlo optimisations
self.ef = None
self.mc = None
# instance variable for Market class
- self.market_index = None
+ self.__market_index = None
# dataframe containing beta values of stocks
self.beta_stocks = pd.DataFrame(index=["beta"])
self.beta = None
@property
- def totalinvestment(self):
+ def totalinvestment(self) -> NUMERIC:
return self.__totalinvestment
@totalinvestment.setter
- def totalinvestment(self, val):
+ def totalinvestment(self, val: NUMERIC) -> None:
if val is not None:
# treat "None" as initialisation
if not isinstance(val, (float, int, np.floating, np.integer)):
@@ -131,12 +163,12 @@ def totalinvestment(self, val):
self.__totalinvestment = val
@property
- def freq(self):
+ def freq(self) -> INT:
return self.__freq
@freq.setter
- def freq(self, val):
- if not isinstance(val, int):
+ def freq(self, val: INT) -> None:
+ if not isinstance(val, (int, np.integer)):
raise ValueError("Time window/frequency must be an integer.")
if val <= 0:
raise ValueError("freq must be > 0.")
@@ -145,19 +177,19 @@ def freq(self, val):
self._update()
@property
- def risk_free_rate(self):
+ def risk_free_rate(self) -> FLOAT:
return self.__risk_free_rate
@risk_free_rate.setter
- def risk_free_rate(self, val):
- if not isinstance(val, (float, int)):
- raise ValueError("Risk free rate must be a float or an integer.")
+ def risk_free_rate(self, val: FLOAT) -> None:
+ if not isinstance(val, (float, np.floating)):
+ raise ValueError("Risk free rate must be a float.")
self.__risk_free_rate = val
# now that this changed, update other quantities
self._update()
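# Minimal usage sketch: the typed setters validate their input and re-run
# _update(), so derived quantities stay consistent (assuming a portfolio `pf`):
pf.freq = 252             # must be a positive integer, else ValueError
pf.risk_free_rate = 0.02  # must be a float; plain ints are now rejected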
@property
- def market_index(self) -> Market:
+ def market_index(self) -> Optional[Market]:
return self.__market_index
@market_index.setter
@@ -169,12 +201,12 @@ def market_index(self, index: Market) -> None:
self.__market_index = index
@property
- def var_confidence_level(self):
+ def var_confidence_level(self) -> FLOAT:
return self.__var_confidence_level
@var_confidence_level.setter
- def var_confidence_level(self, val):
- if not isinstance(val, float):
+ def var_confidence_level(self, val: FLOAT) -> None:
+ if not isinstance(val, (float, np.floating)):
raise ValueError("confidence level is expected to be a float.")
if val >= 1 or val <= 0:
raise ValueError("confidence level is expected to be between 0 and 1.")
@@ -182,7 +214,7 @@ def var_confidence_level(self, val):
# now that this changed, update VaR
self._update()
- def add_stock(self, stock: Stock, defer_update=False) -> None:
+ def add_stock(self, stock: Stock, defer_update: bool = False) -> None:
"""Adds a stock of type ``Stock`` to the portfolio. Each time ``add_stock``
is called, the following instance variables are updated:
@@ -202,9 +234,8 @@ def add_stock(self, stock: Stock, defer_update=False) -> None:
- ``skew``: Skewness of the portfolio's stocks
- ``kurtosis``: Kurtosis of the portfolio's stocks
- :Input:
- :stock: an object of ``Stock``
- :defer_update: bool, if True _update() is not called after the stock is added.
+ :param stock: An instance of the class ``Stock``.
+ :param defer_update: bool, if True instance variables are not (re-)computed at the end of this method.
"""
# adding stock to dictionary containing all stocks provided
self.stocks.update({stock.name: stock})
@@ -237,7 +268,7 @@ def _add_stock_data(self, stock: Stock) -> None:
# add beta of stock to portfolio's betas dataframe
self.beta_stocks[stock.name] = [beta_stock]
- def _update(self):
+ def _update(self) -> None:
# sanity check (only update values if none of the below is empty):
if not (self.portfolio.empty or not self.stocks or self.data.empty):
self.totalinvestment = self.portfolio.Allocation.sum()
@@ -252,170 +283,155 @@ def _update(self):
if self.market_index is not None:
self.beta = self.comp_beta()
- def get_stock(self, name):
+ def get_stock(self, name: str) -> Stock:
"""Returns the instance of ``Stock`` with name ``name``.
- :Input:
- :name: ``string`` of the name of the stock that is returned. Must match
- one of the labels in the dictionary ``self.stocks``.
+ :param name: String of the name of the stock that is returned. Must match
+ one of the labels in the dictionary ``pf.stocks``.
- :Output:
- :stock: instance of ``Stock``.
+ :return: Instance of ``Stock`` taken from the portfolio.
"""
return self.stocks[name]
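# Minimal usage sketch (assuming a portfolio `pf` holding a stock named "GOOG"):
goog = pf.get_stock("GOOG")  # returns the Stock instance; KeyError if absent
print(goog.name)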
- def comp_cumulative_returns(self):
- """Computes the cumulative returns of all stocks in the
- portfolio.
+ def comp_cumulative_returns(self) -> pd.DataFrame:
+ """Computes the cumulative returns of all stocks in the portfolio.
See ``finquant.returns.cumulative_returns``.
- :Output:
- :ret: a ``pandas.DataFrame`` of cumulative returns of given stock prices.
+ :return: Cumulative returns of given stock prices.
"""
return cumulative_returns(self.data)
- def comp_daily_returns(self):
+ def comp_daily_returns(self) -> pd.DataFrame:
"""Computes the daily returns (percentage change) of all
stocks in the portfolio. See ``finquant.returns.daily_returns``.
- :Output:
- :ret: a ``pandas.DataFrame`` of daily percentage change of Returns
- of given stock prices.
+ :return: Daily percentage change of Returns of given stock prices.
"""
return daily_returns(self.data)
- def comp_daily_log_returns(self):
+ def comp_daily_log_returns(self) -> pd.DataFrame:
"""Computes the daily log returns of all stocks in the portfolio.
See ``finquant.returns.daily_log_returns``.
- :Output:
- :ret: a ``pandas.DataFrame`` of log Returns
+ :return: Daily log Returns of given stock prices.
"""
return daily_log_returns(self.data)
- def comp_mean_returns(self, freq=252):
+ def comp_mean_returns(self, freq: INT = 252) -> pd.Series:
"""Computes the mean returns based on historical stock price data.
See ``finquant.returns.historical_mean_return``.
- :Input:
- :freq: ``int`` (default: ``252``), number of trading days, default
- value corresponds to trading days in a year.
+ :param freq: Number of trading days in a year, default: 252
+ :type freq: :py:data:`~.finquant.data_types.INT`
- :Output:
- :ret: a ``pandas.DataFrame`` of historical mean Returns.
+ :return: Historical mean Returns.
"""
+ # Type validations:
+ type_validation(freq=freq)
return historical_mean_return(self.data, freq=freq)
- def comp_stock_volatility(self, freq=252):
+ def comp_stock_volatility(self, freq: INT = 252) -> pd.Series:
"""Computes the Volatilities of all the stocks individually
- :Input:
- :freq: ``int`` (default: ``252``), number of trading days, default
- value corresponds to trading days in a year.
+ :param freq: Number of trading days in a year, default: 252
+ :type freq: :py:data:`~.finquant.data_types.INT`
- :Output:
- :volatilies: ``pandas.DataFrame`` with the individual Volatilities of all stocks
- of the portfolio.
+ :return: Individual volatilities of all stocks in the portfolio.
"""
- if not isinstance(freq, int):
- raise ValueError("freq is expected to be an integer.")
+ # Type validations:
+ type_validation(freq=freq)
return self.comp_daily_returns().std() * np.sqrt(freq)
- def comp_weights(self):
+ def comp_weights(self) -> pd.Series:
"""Computes and returns a ``pandas.Series`` of the weights/allocation
of the stocks of the portfolio.
- :Output:
- :weights: a ``pandas.Series`` with weights/allocation of all stocks
- within the portfolio.
+ :return: A Series with weights/allocation of all stocks within the portfolio.
"""
# computes the weights of the stocks in the given portfolio
# in respect of the total investment
- return self.portfolio["Allocation"] / self.totalinvestment
+ return (self.portfolio["Allocation"] / self.totalinvestment).astype(np.float64)
- def comp_expected_return(self, freq=252):
+ def comp_expected_return(self, freq: INT = 252) -> FLOAT:
"""Computes the Expected Return of the portfolio.
- :Input:
- :freq: ``int`` (default: ``252``), number of trading days, default
- value corresponds to trading days in a year.
+ :param freq: Number of trading days in a year, default: 252
+ :type freq: :py:data:`~.finquant.data_types.INT`
- :Output:
- :expected_return: ``float`` the Expected Return of the portfolio.
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: Expected Return of the portfolio.
"""
- if not isinstance(freq, int):
- raise ValueError("freq is expected to be an integer.")
- pf_return_means = historical_mean_return(self.data, freq=freq)
- weights = self.comp_weights()
- expected_return = weighted_mean(pf_return_means.values, weights)
+ # Type validations:
+ type_validation(freq=freq)
+ pf_return_means: pd.Series = historical_mean_return(self.data, freq=freq)
+ weights: pd.Series = self.comp_weights()
+ expected_return: FLOAT = weighted_mean(pf_return_means.values, weights)
self.expected_return = expected_return
return expected_return
- def comp_volatility(self, freq=252):
+ def comp_volatility(self, freq: INT = 252) -> FLOAT:
"""Computes the Volatility of the given portfolio.
- :Input:
- :freq: ``int`` (default: ``252``), number of trading days, default
- value corresponds to trading days in a year.
+ :param freq: Number of trading days in a year, default: 252
+ :type freq: :py:data:`~.finquant.data_types.INT`
- :Output:
- :volatility: ``float`` the Volatility of the portfolio.
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: The volatility of the portfolio.
"""
- if not isinstance(freq, int):
- raise ValueError("freq is expected to be an integer.")
+ # Type validations:
+ type_validation(freq=freq)
# computing the volatility of a portfolio
- volatility = weighted_std(self.comp_cov(), self.comp_weights()) * np.sqrt(freq)
+ volatility: FLOAT = weighted_std(
+ self.comp_cov(), self.comp_weights()
+ ) * np.sqrt(freq)
self.volatility = volatility
return volatility
- def comp_downside_risk(self, freq=252):
+ def comp_downside_risk(self, freq: INT = 252) -> FLOAT:
"""Computes the downside risk of the portfolio.
- :Input:
- :freq: ``int`` (default: ``252``), number of trading days, default
- value corresponds to trading days in a year
+ :param freq: Number of trading days in a year, default: 252
+ :type freq: :py:data:`~.finquant.data_types.INT`
- :Output:
- :downside risk: ``float`` downside risk of the portfolio.
+ :return: Downside risk of the portfolio.
"""
- downs_risk = downside_risk(
+ downs_risk: FLOAT = downside_risk(
self.data, self.comp_weights(), self.risk_free_rate
) * np.sqrt(freq)
self.downside_risk = downs_risk
return downs_risk
- def comp_cov(self):
- """Compute and return a ``pandas.DataFrame`` of the covariance matrix
+ def comp_cov(self) -> pd.DataFrame:
+ """Compute and return a DataFrame of the covariance matrix
of the portfolio.
- :Output:
- :cov: a ``pandas.DataFrame`` of the covariance matrix of the portfolio.
+ :return: Covariance matrix of the portfolio.
"""
# get the covariance matrix of the mean returns of the portfolio
returns = daily_returns(self.data)
return returns.cov()
- def comp_sharpe(self):
+ def comp_sharpe(self) -> FLOAT:
"""Compute and return the Sharpe Ratio of the portfolio.
- :Output:
- :sharpe: ``float``, the Sharpe Ratio of the portfolio
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: The Sharpe Ratio of the portfolio.
"""
# compute the Sharpe Ratio of the portfolio
- sharpe = sharpe_ratio(
+ sharpe: FLOAT = sharpe_ratio(
self.expected_return, self.volatility, self.risk_free_rate
)
self.sharpe = sharpe
return sharpe
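# Minimal usage sketch (assuming a portfolio `pf`): annualised risk/return figures
exp_ret = pf.comp_expected_return(freq=252)
vol = pf.comp_volatility(freq=252)
sharpe = pf.comp_sharpe()
print(f"Return: {exp_ret:0.3f}, Volatility: {vol:0.3f}, Sharpe: {sharpe:0.3f}")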
- def comp_var(self):
+ def comp_var(self) -> FLOAT:
"""Compute and return the Value at Risk of the portfolio.
- :Output:
- :VaR: ``float``, the Value at Risk of the portfolio
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: The Value at Risk (VaR) of the portfolio.
"""
# compute the Value at Risk of the portfolio
- var = value_at_risk(
+ var: FLOAT = value_at_risk(
investment=self.totalinvestment,
mu=self.expected_return,
sigma=self.volatility,
@@ -424,41 +440,45 @@ def comp_var(self):
self.var = var
return var
- def comp_beta(self) -> float:
+ def comp_beta(self) -> Optional[FLOAT]:
"""Compute and return the Beta parameter of the portfolio.
- :Output:
- :sharpe: ``float``, the Beta parameter of the portfolio
+ :return: Beta parameter of the portfolio
"""
# compute the Beta parameter of the portfolio
- weights = self.comp_weights()
- beta = weighted_mean(self.beta_stocks.transpose()["beta"].values, weights)
+ weights: pd.Series = self.comp_weights()
+ if weights.size == self.beta_stocks.size:
+ beta: FLOAT = weighted_mean(
+ self.beta_stocks.transpose()["beta"].values, weights
+ )
- self.beta = beta
- return beta
+ self.beta = beta
+ return beta
+ else:
+ return None
- def comp_sortino(self):
+ def comp_sortino(self) -> FLOAT:
"""Compute and return the Sortino Ratio of the portfolio
- :Output:
- :sortino: ``float``, the Sortino Ratio of the portfolio
- May be NaN if the portoflio outperformed the risk free rate at every point
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: The Sortino Ratio of the portfolio.
+ May be ``NaN`` if the portfolio outperformed the risk free rate at every point
"""
return sortino_ratio(
self.expected_return, self.downside_risk, self.risk_free_rate
)
- def _comp_skew(self):
+ def _comp_skew(self) -> pd.Series:
"""Computes and returns the skewness of the stocks in the portfolio."""
return self.data.skew()
- def _comp_kurtosis(self):
+ def _comp_kurtosis(self) -> pd.Series:
"""Computes and returns the Kurtosis of the stocks in the portfolio."""
return self.data.kurt()
# optimising the investments with the efficient frontier class
- def _get_ef(self):
+ def _get_ef(self) -> EfficientFrontier:
"""If self.ef does not exist, create and return an instance of
finquant.efficient_frontier.EfficientFrontier, else, return the
existing instance.
@@ -473,103 +493,100 @@ def _get_ef(self):
)
return self.ef
- def ef_minimum_volatility(self, verbose=False):
+ def ef_minimum_volatility(self, verbose: bool = False) -> pd.DataFrame:
"""Interface to
``finquant.efficient_frontier.EfficientFrontier.minimum_volatility``.
Finds the portfolio with the minimum Volatility.
- :Input:
- :verbose: ``boolean`` (default= ``False``), whether to print out properties
- or not.
+ :param verbose: Whether to print out properties or not, default: False
- :Output:
- :df_weights: a ``pandas.DataFrame`` of weights/allocation of stocks within
- the optimised portfolio.
+ :return: A DataFrame of weights/allocation of stocks within the optimised portfolio.
"""
# let EfficientFrontier.efficient_frontier handle input arguments
# get/create instance of EfficientFrontier
- ef = self._get_ef()
+ ef: EfficientFrontier = self._get_ef()
# perform optimisation
- opt_weights = ef.minimum_volatility()
+ opt_weights: pd.DataFrame = ef.minimum_volatility()
# if verbose==True, print out results
ef.properties(verbose=verbose)
return opt_weights
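# Minimal usage sketch (assuming a portfolio `pf`): each interface returns the
# optimised weights as a DataFrame
w_minvol = pf.ef_minimum_volatility(verbose=True)
w_sharpe = pf.ef_maximum_sharpe_ratio()
w_target = pf.ef_efficient_return(target=0.25)  # defined further below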
- def ef_maximum_sharpe_ratio(self, verbose=False):
+ def ef_maximum_sharpe_ratio(self, verbose: bool = False) -> pd.DataFrame:
"""Interface to
``finquant.efficient_frontier.EfficientFrontier.maximum_sharpe_ratio``.
Finds the portfolio with the maximum Sharpe Ratio, also called the
tangency portfolio.
- :Input:
- :verbose: ``boolean`` (default= ``False``), whether to print out properties
- or not.
+ :param verbose: Whether to print out properties or not, default: False
- :Output:
- :df_weights: a ``pandas.DataFrame`` of weights/allocation of stocks within
+ :return: A DataFrame of weights/allocation of stocks within
the optimised portfolio.
"""
# let EfficientFrontier.efficient_frontier handle input arguments
# get/create instance of EfficientFrontier
- ef = self._get_ef()
+ ef: EfficientFrontier = self._get_ef()
# perform optimisation
- opt_weights = ef.maximum_sharpe_ratio()
+ opt_weights: pd.DataFrame = ef.maximum_sharpe_ratio()
# if verbose==True, print out results
ef.properties(verbose=verbose)
return opt_weights
- def ef_efficient_return(self, target, verbose=False):
+ def ef_efficient_return(
+ self, target: NUMERIC, verbose: bool = False
+ ) -> pd.DataFrame:
"""Interface to
``finquant.efficient_frontier.EfficientFrontier.efficient_return``.
Finds the portfolio with the minimum Volatility for a given target return.
- :Input:
- :target: ``float``, the target return of the optimised portfolio.
- :verbose: ``boolean`` (default= ``False``), whether to print out properties
- or not.
+ :param target: The target return of the optimised portfolio.
+ :type target: :py:data:`~.finquant.data_types.NUMERIC`
+
+ :param verbose: Whether to print out properties or not, default: False
- :Output:
- :df_weights: a ``pandas.DataFrame`` of weights/allocation of stocks within
+ :return: A DataFrame of weights/allocation of stocks within
the optimised portfolio.
"""
# let EfficientFrontier.efficient_frontier handle input arguments
# get/create instance of EfficientFrontier
- ef = self._get_ef()
+ ef: EfficientFrontier = self._get_ef()
# perform optimisation
- opt_weights = ef.efficient_return(target)
+ opt_weights: pd.DataFrame = ef.efficient_return(target)
# if verbose==True, print out results
ef.properties(verbose=verbose)
return opt_weights
- def ef_efficient_volatility(self, target, verbose=False):
+ def ef_efficient_volatility(
+ self, target: NUMERIC, verbose: bool = False
+ ) -> pd.DataFrame:
"""Interface to
``finquant.efficient_frontier.EfficientFrontier.efficient_volatility``.
Finds the portfolio with the maximum Sharpe Ratio for a given
target Volatility.
- :Input:
- :target: ``float``, the target Volatility of the optimised portfolio.
- :verbose: ``boolean`` (default= ``False``), whether to print out properties
- or not.
+ :param target: The target volatility of the optimised portfolio.
+ :type target: :py:data:`~.finquant.data_types.NUMERIC`
+
+ :param verbose: Whether to print out properties or not, default: False
- :Output:
- :df_weights: a ``pandas.DataFrame`` of weights/allocation of stocks within
+ :return: A DataFrame of weights/allocation of stocks within
the optimised portfolio.
"""
# let EfficientFrontier.efficient_frontier handle input arguments
# get/create instance of EfficientFrontier
- ef = self._get_ef()
+ ef: EfficientFrontier = self._get_ef()
# perform optimisation
- opt_weights = ef.efficient_volatility(target)
+ opt_weights: pd.DataFrame = ef.efficient_volatility(target)
# if verbose==True, print out results
ef.properties(verbose=verbose)
return opt_weights
- def ef_efficient_frontier(self, targets=None):
+ def ef_efficient_frontier(
+ self, targets: Optional[ARRAY_OR_LIST[FLOAT]] = None
+ ) -> np.ndarray[np.float64, Any]:
"""Interface to
``finquant.efficient_frontier.EfficientFrontier.efficient_frontier``.
@@ -579,32 +596,29 @@ def ef_efficient_frontier(self, targets=None):
the target range according to those values.
Results in the Efficient Frontier.
- :Input:
- :targets: ``list``/``numpy.ndarray`` (default: ``None``) of ``floats``,
- range of target Returns.
+ :param targets: A list/array of target returns, default: ``None``
- :Output:
- :efrontier: ``numpy.ndarray`` of (Volatility, Return) values.
+ :return: Efficient Frontier as an array of (Volatility, Return) values
"""
# let EfficientFrontier.efficient_frontier handle input arguments
# get/create instance of EfficientFrontier
- ef = self._get_ef()
+ ef: EfficientFrontier = self._get_ef()
# perform optimisation
- efrontier = ef.efficient_frontier(targets)
+ efrontier: np.ndarray[np.float64, Any] = ef.efficient_frontier(targets)
return efrontier
- def ef_plot_efrontier(self):
+ def ef_plot_efrontier(self) -> None:
"""Interface to
``finquant.efficient_frontier.EfficientFrontier.plot_efrontier``.
Plots the Efficient Frontier."""
# let EfficientFrontier.efficient_frontier handle input arguments
# get/create instance of EfficientFrontier
- ef = self._get_ef()
+ ef: EfficientFrontier = self._get_ef()
# plot efficient frontier
ef.plot_efrontier()
- def ef_plot_optimal_portfolios(self):
+ def ef_plot_optimal_portfolios(self) -> None:
"""Interface to
``finquant.efficient_frontier.EfficientFrontier.plot_optimal_portfolios``.
@@ -615,12 +629,12 @@ def ef_plot_optimal_portfolios(self):
"""
# let EfficientFrontier.efficient_frontier handle input arguments
# get/create instance of EfficientFrontier
- ef = self._get_ef()
+ ef: EfficientFrontier = self._get_ef()
# plot efficient frontier
ef.plot_optimal_portfolios()
# optimising the investments with the efficient frontier class
- def _get_mc(self, num_trials=1000):
+ def _get_mc(self, num_trials: int = 1000) -> MonteCarloOpt:
"""If self.mc does not exist, create and return an instance of
finquant.monte_carlo.MonteCarloOpt, else, return the existing instance.
"""
@@ -637,92 +651,96 @@ def _get_mc(self, num_trials=1000):
# optimising the investments by performing a Monte Carlo run
# based on volatility and sharpe ratio
- def mc_optimisation(self, num_trials=1000):
+ def mc_optimisation(
+ self, num_trials: int = 1000
+ ) -> Tuple[pd.DataFrame, pd.DataFrame]:
"""Interface to
``finquant.monte_carlo.MonteCarloOpt.optimisation``.
Optimisation of the portfolio by performing a Monte Carlo
simulation.
- :Input:
- :num_trials: ``int`` (default: ``1000``), number of portfolios to be
- computed, each with a random distribution of weights/allocation
- in each stock.
+ :param num_trials: Number of portfolios to be computed, each with a random distribution
+ of weights/allocation in each stock, default: 1000
- :Output:
- :opt_w: ``pandas.DataFrame`` with optimised investment strategies for maximum
- Sharpe Ratio and minimum Volatility.
- :opt_res: ``pandas.DataFrame`` with Expected Return, Volatility and Sharpe Ratio
- for portfolios with minimum Volatility and maximum Sharpe Ratio.
+ :return:
+ :opt_w: DataFrame with optimised investment strategies for maximum
+ Sharpe Ratio and minimum volatility.
+ :opt_res: DataFrame with Expected Return, Volatility and Sharpe Ratio
+ for portfolios with minimum Volatility and maximum Sharpe Ratio.
"""
# dismiss previous instance of mc, as we are performing a new MC optimisation:
self.mc = None
# get instance of MonteCarloOpt
- mc = self._get_mc(num_trials)
+ mc: MonteCarloOpt = self._get_mc(num_trials)
+ opt_weights: pd.DataFrame
+ opt_results: pd.DataFrame
opt_weights, opt_results = mc.optimisation()
return opt_weights, opt_results
- def mc_plot_results(self):
+ def mc_plot_results(self) -> None:
"""Plots the results of the Monte Carlo run, with all of the randomly
generated weights/portfolios, as well as markers for the portfolios with the
-
- - minimum Volatility, and
- - maximum Sharpe Ratio.
+ minimum Volatility, and maximum Sharpe Ratio.
"""
# get instance of MonteCarloOpt
- mc = self._get_mc()
+ mc: MonteCarloOpt = self._get_mc()
mc.plot_results()
- def mc_properties(self):
+ def mc_properties(self) -> None:
"""Calculates and prints out Expected annualised Return,
Volatility and Sharpe Ratio of optimised portfolio.
"""
# get instance of MonteCarloOpt
- mc = self._get_mc()
+ mc: MonteCarloOpt = self._get_mc()
mc.properties()
- def plot_stocks(self, freq=252):
+ def plot_stocks(self, freq: INT = 252) -> None:
"""Plots the Expected annual Returns over annual Volatility of
the stocks of the portfolio.
- :Input:
- :freq: ``int`` (default: ``252``), number of trading days, default
- value corresponds to trading days in a year.
+ :param freq: Number of trading days in a year, default: 252
+ :type freq: :py:data:`~.finquant.data_types.INT`
"""
+ # Type validations:
+ type_validation(freq=freq)
# annual mean returns of all stocks
- stock_returns = self.comp_mean_returns(freq=freq)
- stock_volatility = self.comp_stock_volatility(freq=freq)
+ stock_returns: pd.Series = self.comp_mean_returns(freq=freq)
+ stock_volatility: pd.Series = self.comp_stock_volatility(freq=freq)
# adding stocks of the portfolio to the plot
# plot stocks individually:
plt.scatter(stock_volatility, stock_returns, marker="o", s=100, label="Stocks")
# adding text to stocks in plot:
- for i, txt in enumerate(stock_returns.index):
+ for idx, txt in enumerate(stock_returns.index):
plt.annotate(
txt,
- (stock_volatility[i], stock_returns[i]),
+ (stock_volatility[idx], stock_returns[idx]),
xytext=(10, 0),
textcoords="offset points",
- label=i,
+ label=idx,
)
- def properties(self):
- """Nicely prints out the properties of the portfolio:
-
- - Expected Return,
- - Volatility,
- - Downside Risk,
- - Value at Risk (VaR),
- - Confidence level of VaR,
- - Sharpe Ratio,
- - Sortino Ratio,
- - Beta (optional),
- - skewness,
- - Kurtosis
+ def properties(self) -> None:
+ """
+ Nicely prints out the properties of the portfolio:
+
+ - Expected Return,
+ - Volatility,
+ - Downside Risk,
+ - Value at Risk (VaR),
+ - Confidence level of VaR,
+ - Sharpe Ratio,
+ - Sortino Ratio,
+ - Beta (optional),
+ - skewness,
+ - Kurtosis
as well as the allocation of the stocks across the portfolio.
+
+ :rtype: None
"""
# nicely printing out information and quantities of the portfolio
- string = "-" * 70
+ string: str = "-" * 70
stocknames = self.portfolio.Name.values.tolist()
string += f"\nStocks: {', '.join(stocknames)}"
if self.market_index is not None:
@@ -733,7 +751,7 @@ def properties(self):
string += f"\nPortfolio Volatility: {self.volatility:0.3f}"
string += f"\nPortfolio Downside Risk: {self.downside_risk:0.3f}"
string += f"\nPortfolio Value at Risk: {self.var:0.3f}"
- string += f"\nConfidence level of Value at Risk: "
+ string += "\nConfidence level of Value at Risk: "
string += f"{self.var_confidence_level * 100:0.2f} %"
string += f"\nPortfolio Sharpe Ratio: {self.sharpe:0.3f}"
string += f"\nPortfolio Sortino Ratio: {self.sortino:0.3f}"
@@ -749,75 +767,101 @@ def properties(self):
string += "-" * 70
print(string)
- def __str__(self):
+ def __str__(self) -> str:
# print short description
- string = "Contains information about a portfolio."
- return string
+ return "Contains information about a portfolio."
-def _correct_quandl_request_stock_name(names):
+def _correct_quandl_request_stock_name(
+ names: Union[str, ARRAY_OR_LIST[str]]
+) -> List[str]:
"""If given input argument is of type string,
this function converts it to a list, assuming the input argument
is only one stock name.
"""
+ # Type validations:
+ type_validation(names=names)
# make sure names is a list of names:
+ names_list: List[str]
if isinstance(names, str):
- names = [names]
- return names
+ names_list = [names]
+ elif isinstance(names, np.ndarray):
+ names_list = names.tolist()
+ else:
+ names_list = names
+ return names_list
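# Expected behaviour (sketch): a single ticker string is wrapped into a
# one-element list, while lists/arrays are passed through as lists, e.g.
#   _correct_quandl_request_stock_name("WIKI/GOOG")  ->  ["WIKI/GOOG"]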
-def _quandl_request(names, start_date=None, end_date=None):
+def _quandl_request(
+ names: List[str],
+ start_date: Optional[STRING_OR_DATETIME] = None,
+ end_date: Optional[STRING_OR_DATETIME] = None,
+) -> pd.DataFrame:
"""This function performs a simple request from `quandl` and returns
- a ``pandas.DataFrame`` containing stock data.
-
- :Input:
- :names: List of strings of stock names to be requested
- :start_date (optional): String/datetime of the start date of
- relevant stock data.
- :end_date (optional): String/datetime of the end date of
- relevant stock data.
+ a DataFrame containing stock data.
+
+ :param names: List of strings of stock names to be requested
+ :param start_date: String/datetime of the start date of relevant stock data.
+ :param end_date: String/datetime of the end date of relevant stock data.
"""
try:
- import quandl
+ import quandl # pylint: disable=C0415
except ImportError:
print(
"The following package is required:\n - `quandl`\n"
+ "Please make sure that it is installed."
)
+ # Type validations:
+ type_validation(names=names, start_date=start_date, end_date=end_date)
+
# get correct stock names that quandl.get can request,
# e.g. "WIKI/GOOG" for Google
- reqnames = _correct_quandl_request_stock_name(names)
+ reqnames: List[str] = _correct_quandl_request_stock_name(names)
try:
- resp = quandl.get(reqnames, start_date=start_date, end_date=end_date)
+ resp: pd.DataFrame = quandl.get(
+ reqnames, start_date=start_date, end_date=end_date
+ )
+ except quandl.LimitExceededError as exc:
+ errormsg = (
+ "You exceeded Quandl's limit. Are you using your API key?\nQuandl Error: "
+ + str(exc)
+ )
+ raise QuandlLimitError(errormsg) from exc
except Exception as exc:
errormsg = (
- "Error during download of stock data from Quandl.\n"
+ "An error occurred while retrieving data from Quandl.\n"
+ "Make sure all the requested stock names/tickers are "
- + "supported by Quandl."
+ + "supported by Quandl.\n"
+ + "Quandl error: "
+ + str(exc)
)
- raise Exception(errormsg) from exc
+ raise QuandlError(errormsg) from exc
+
return resp
-def _yfinance_request(names, start_date=None, end_date=None):
+def _yfinance_request(
+ names: List[str],
+ start_date: Optional[STRING_OR_DATETIME] = None,
+ end_date: Optional[STRING_OR_DATETIME] = None,
+) -> pd.DataFrame:
"""This function performs a simple request from Yahoo Finance
- (using `yfinance`) and returns a ``pandas.DataFrame``
- containing stock data.
-
- :Input:
- :names: List of strings of stock names to be requested
- :start_date (optional): String/datetime of the start date of
- relevant stock data.
- :end_date (optional): String/datetime of the end date of
- relevant stock data.
+ (using `yfinance`) and returns a DataFrame containing stock data.
+
+ :param names: List of strings of stock names to be requested
+ :param start_date: (optional) String/datetime of the start date of relevant stock data.
+ :param end_date: (optional) String/datetime of the end date of relevant stock data.
"""
try:
- import yfinance as yf
+ import yfinance # pylint: disable=C0415
except ImportError:
print(
"The following package is required:\n - `yfinance`\n"
+ "Please make sure that it is installed."
)
+ # Type validations:
+ type_validation(names=names, start_date=start_date, end_date=end_date)
+
# yfinance does not exit safely if start/end date were not given correctly:
# this step is not required for quandl as it handles this exception properly
try:
@@ -825,32 +869,31 @@ def _yfinance_request(names, start_date=None, end_date=None):
start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d")
if isinstance(end_date, str):
end_date = datetime.datetime.strptime(end_date, "%Y-%m-%d")
- except ImportError:
- print(
- "The following package is required:\n - `datetime`\n"
- + "Please make sure that it is installed."
- )
- except Exception as exc:
- raise Exception(
- "Please provide valid values for and "
+ except ValueError as exc:
+ raise InvalidDateFormatError(
+ "Please provide valid values for and "
+ "(either as datetime object or as String in the format '%Y-%m-%d')."
) from exc
# unlike quandl, yfinance does not have a prefix in front of the ticker
# thus we do not need to correct them
try:
- resp = yf.download(names, start=start_date, end=end_date)
+ resp: pd.DataFrame = yfinance.download(names, start=start_date, end=end_date)
if not isinstance(resp.columns, pd.MultiIndex) and len(names) > 0:
# for single stock must make the dataframe multiindex
stock_tuples = [(col, names[0]) for col in list(resp.columns)]
resp.columns = pd.MultiIndex.from_tuples(stock_tuples)
except Exception as exc:
- raise Exception(
- "Error during download of stock data from Yahoo Finance with `yfinance`."
- ) from exc
+ errormsg: str = (
+ "An error occurred while retrieving data from Yahoo Finance with `yfinance`.\n"
+ + "yfinance error: "
+ + str(exc)
+ )
+ raise YFinanceError(errormsg) from exc
return resp
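# Minimal usage sketch: end users would normally not call this private helper
# directly but go through `build_portfolio` (hypothetical tickers):
pf = build_portfolio(
    names=["GOOG", "AMZN"],
    start_date="2018-01-01",
    end_date="2023-01-01",
    data_api="yfinance",
)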
-def _get_quandl_data_column_label(stock_name, data_label):
+def _get_quandl_data_column_label(stock_name: str, data_label: str) -> str:
"""Given stock name and label of a data column, this function returns
the string " - " as it can be found in a
``pandas.DataFrame`` returned by `quandl`.
@@ -858,60 +901,60 @@ def _get_quandl_data_column_label(stock_name, data_label):
return stock_name + " - " + data_label
-def _get_stocks_data_columns(data, names, cols):
+def _get_stocks_data_columns(
+ data: pd.DataFrame, names: ARRAY_OR_LIST[str], cols: List[str]
+) -> pd.DataFrame:
"""This function returns a subset of the given ``pandas.DataFrame`` data, which
contains only the data columns as specified in the input cols.
- :Input:
- :data: A ``pandas.DataFrame`` which contains quantities of the stocks
- listed in pf_allocation.
- :names: A string or list of strings, containing the names of the
- stocks, e.g. 'WIKI/GOOG' for Google.
- :cols: A list of strings of column labels of data to be extracted.
- Currently only one column per stock is supported.
-
- :Output:
- :data: A ``pandas.DataFrame`` which contains only the data columns of
- data as specified in cols.
+ :param data: A DataFrame which contains quantities of the stocks listed in pf_allocation.
+ :param names: A list of strings, containing the names of the stocks, e.g. 'Google'.
+ :param cols: A list of strings of column labels of data to be extracted.
+ Currently only one column per stock is supported.
+
+ :return: A DataFrame which contains only the data columns of data as specified in cols.
"""
+ # Type validations:
+ type_validation(data=data, names=names, cols=cols)
# get correct stock names that quandl get request
- reqnames = _correct_quandl_request_stock_name(names)
+ reqnames: List[str] = _correct_quandl_request_stock_name(names)
# get current column labels and replacement labels
- reqcolnames = []
+ reqcolnames: List[str] = []
+ colname: str
# if dataframe is of type multiindex, also get first level colname
- firstlevel_colnames = []
- for i in range(len(names)):
+ firstlevel_colnames: List[str] = []
+ for idx, name in enumerate(names):
for col in cols:
# differ between dataframe directly from quandl and
# possibly previously processed dataframe, e.g.
# read in from disk with slightly modified column labels
# 1. if <stock_name> in column labels
- if names[i] in data.columns:
- colname = names[i]
+ if name in data.columns:
+ colname = name
# 2. if "WIKI/ - " in column labels
- elif _get_quandl_data_column_label(reqnames[i], col) in data.columns:
- colname = _get_quandl_data_column_label(reqnames[i], col)
+ elif _get_quandl_data_column_label(reqnames[idx], col) in data.columns:
+ colname = _get_quandl_data_column_label(reqnames[idx], col)
# 3. if " - " in column labels
- elif _get_quandl_data_column_label(names[i], col) in data.columns:
- colname = _get_quandl_data_column_label(names[i], col)
- # if column labels is of type multiindex, and the "Adj Close" is in
- # first level labels, we assume the dataframe comes from yfinance:
+ elif _get_quandl_data_column_label(name, col) in data.columns:
+ colname = _get_quandl_data_column_label(name, col)
+ # if column labels are of type multiindex, and the "Adj Close" is in
+ # first-level labels, we assume the dataframe comes from yfinance:
elif isinstance(data.columns, pd.MultiIndex):
# alter col for yfinance, as it returns column labels without '.'
col = col.replace(".", "")
if col in data.columns:
if not col in firstlevel_colnames:
firstlevel_colnames.append(col)
- if names[i] in data[col].columns:
- colname = names[i]
- else: # error, it must find names[i] on second level of column header
+ if name in data[col].columns:
+ colname = name
+ else: # error, it must find name on the second level of the column header
raise ValueError(
- "Could not find column labels in second level of MultiIndex pd.DataFrame"
+ "Could not find column labels in the second level of MultiIndex pd.DataFrame"
)
# else, error
else:
- raise ValueError("Could not find column labels in given dataframe.")
- # append correct name to list of correct names
+ raise ValueError("Could not find column labels in the given dataframe.")
+ # append the correct name to the list of correct names
reqcolnames.append(colname)
# if data comes from yfinance, it is a multiindex dataframe:
@@ -927,69 +970,80 @@ def _get_stocks_data_columns(data, names, cols):
# if only one data column per stock exists, rename column labels
# to the name of the corresponding stock
- newcolnames = {}
+ newcolnames: Dict[str, str] = {}
if len(cols) == 1:
- for i, name in enumerate(names):
+ for idx, name in enumerate(names):
newcolnames.update({_get_quandl_data_column_label(name, cols[0]): name})
data.rename(columns=newcolnames, inplace=True)
return data
def _build_portfolio_from_api(
- names,
- pf_allocation=None,
- start_date=None,
- end_date=None,
- data_api="quandl",
- market_index: str = None,
-):
+ names: ARRAY_OR_LIST[str],
+ pf_allocation: Optional[pd.DataFrame] = None,
+ start_date: Optional[STRING_OR_DATETIME] = None,
+ end_date: Optional[STRING_OR_DATETIME] = None,
+ data_api: str = "quandl",
+ market_index: Optional[str] = None,
+) -> Portfolio:
"""Returns a portfolio based on input in form of a list of strings/names
of stocks.
- :Input:
- :names: A string or list of strings, containing the names of the
- stocks, e.g. 'GOOG' for Google.
- :pf_allocation (optional): ``pandas.DataFrame`` with the required data column
- labels ``Name`` and ``Allocation`` of the stocks.
- :start_date (optional): String/datetime start date of stock data to
- be requested through `quandl`/`yfinance` (default: None)
- :end_date (optional): String/datetime end date of stock data to be
- requested through `quandl`/`yfinance` (default: None)
- :data_api: (optional) A ``string`` (default: ``quandl``) which determines how to
- obtain stock prices, if data is not provided by the user. Valid values:
+ :param names: A list of strings, containing the names of the stocks, e.g. 'GOOG' for Google.
+ :param pf_allocation: (optional) A DataFrame with the required data column labels ``Name`` and
+ ``Allocation`` of the stocks.
+
+ :param start_date: (optional) String/datetime of the start date of relevant stock data.
+ :param end_date: (optional) String/datetime of the end date of relevant stock data.
+ :param data_api: (optional, default: 'quandl') A string which determines what API to use to obtain stock prices,
+ if data is not provided by the user. Valid values:
- ``quandl`` (Python package/API to `Quandl`)
- ``yfinance`` (Python package formerly known as ``fix-yahoo-finance``)
- :market_index: (optional) ``string`` (default: ``None``) which determines the
- market index to be used for the computation of the beta parameter of the stocks.
+ :param market_index: (optional) A string which determines the market index to be used for the
+ computation of the beta parameter of the stocks, default: ``None``
- :Output:
- :pf: Instance of Portfolio which contains all the information
- requested by the user.
+ :return: Instance of Portfolio which contains all the information requested by the user.
"""
- # create an empty portfolio
- pf = Portfolio()
+ # Type validations:
+ type_validation(
+ names=names,
+ pf_allocation=pf_allocation,
+ start_date=start_date,
+ end_date=end_date,
+ data_api=data_api,
+ market_index=market_index,
+ )
+
+ # setting up variables:
+ stock_data: pd.DataFrame
# create empty dataframe for market data
- market_data = pd.DataFrame()
+ market_data: pd.DataFrame = pd.DataFrame()
# request data from service:
if data_api == "yfinance":
- stock_data = _yfinance_request(names, start_date, end_date)
+ stock_data = _yfinance_request(list(names), start_date, end_date)
if market_index is not None:
market_data = _yfinance_request([market_index], start_date, end_date)
elif data_api == "quandl":
- stock_data = _quandl_request(names, start_date, end_date)
+ stock_data = _quandl_request(list(names), start_date, end_date)
if market_index is not None:
# only generated if user explicitly requests market index with quandl
raise Warning("Market index is not supported for quandl data.")
-
+ else:
+ raise ValueError(
+ f"Error: value of data_api '{data_api}' is not supported. "
+ + "Choose between 'yfinance' and 'quandl'."
+ )
# check pf_allocation:
if pf_allocation is None:
- pf_allocation = _generate_pf_allocation(names=names)
+ pf_allocation = _generate_pf_allocation(names=list(names))
# build portfolio:
- pf = _build_portfolio_from_df(stock_data, pf_allocation, market_data=market_data)
+ pf: Portfolio = _build_portfolio_from_df(
+ stock_data, pf_allocation, market_data=market_data
+ )
return pf
-def _stocknames_in_data_columns(names, df):
+def _stocknames_in_data_columns(names: ARRAY_OR_LIST[str], df: pd.DataFrame) -> bool:
"""Returns True if at least one element of names was found as a column
label in the dataframe df.
"""
@@ -1000,42 +1054,41 @@ def _get_index_adj_clos_pr(data: pd.DataFrame) -> pd.Series:
"""This function returns a subset of the given ``pandas.DataFrame`` data, which
contains only the data columns corresponding to Adjusted Closing Price.
- :Input:
- :data: A ``pandas.DataFrame`` which contains financial data.
+ :param data: A DataFrame which contains financial data.
- :Output:
- :data: A ``pandas.Series`` which contains only the data column of
- data corresponding to Adjusted Closing Price.
+ :return: A Series which contains only the data column of data corresponding to Adjusted Closing Price.
"""
- return data["Adj Close"].squeeze()
+ return data["Adj Close"].squeeze().astype(np.float64)
-def _generate_pf_allocation(names=None, data=None):
+def _generate_pf_allocation(
+ names: Optional[List[str]] = None, data: Optional[pd.DataFrame] = None
+) -> pd.DataFrame:
"""Takes column names of provided ``pandas.DataFrame`` ``data``, and generates a
``pandas.DataFrame`` with columns ``Name`` and ``Allocation`` which contain the
names found in input ``data`` and 1.0/len(data.columns) respectively.
- :Input:
- :data: A ``pandas.DataFrame`` which contains prices of the stocks
+ :param data: A DataFrame which contains prices of the stocks.
- :Output:
- :pf_allocation: ``pandas.DataFrame`` with columns ``Name`` and ``Allocation``, which
- contain the names and weights of the stocks
+ :return: A DataFrame with columns ``Name`` and ``Allocation``, which contain the names
+ and weights of the stocks.
"""
# checking input arguments
if names is not None and data is not None or names is None and data is None:
raise ValueError("Pass one of the two: 'names' or 'data'.")
- if names is not None and not isinstance(names, list):
- raise ValueError("names is expected to be of type 'list'.")
- if data is not None and not isinstance(data, pd.DataFrame):
- raise ValueError("data is expected to be of type 'pandas.DataFrame'.")
+ # Type validations:
+ type_validation(names=names, data=data)
+
+ # defining new variable stock_names to circumvent the issue of "names" being Optional
+ stock_names: List[str]
+
# if data is given:
if data is not None:
# this case is more complex, as we need to check for column labels in
# data
- names = data.columns
+ stock_names = data.columns.tolist()
# potential error message
- errormsg = (
+ errormsg: str = (
"'data' pandas.DataFrame contains conflicting column labels."
+ "\nMultiple columns with a substring of\n {}\n"
+ "were found. You have two options:"
@@ -1048,129 +1101,142 @@ def _generate_pf_allocation(names=None, data=None):
+ "that does not have conflicting column labels, e.g. 'GOOG' and "
+ "'GOOG - Adj. Close' are considered conflicting column headers."
)
- # sanity check: split names at '-' and take the leading part of the
- # split string, and check if this occurs in any of the other names.
+ # sanity check: split stock_names at '-' and take the leading part of the
+ # split string, and check if this occurs in any of the other stock_names.
# if so, we treat this as a duplication, and ask the user to provide
# a DataFrame with one data column per stock.
- splitnames = [name.split("-")[0].strip() for name in names]
- for i, splitname in enumerate(splitnames):
- reducedlist = [elt for num, elt in enumerate(splitnames) if num != i]
+ splitnames: List[str] = [name.split("-")[0].strip() for name in stock_names]
+ for idx, splitname in enumerate(splitnames):
+ reducedlist: List[str] = [
+ elt for num, elt in enumerate(splitnames) if num != idx
+ ]
if splitname in reducedlist:
errormsg = errormsg.format(str(splitname))
raise ValueError(errormsg)
- # if names is given, we go directly to the below:
+ elif names is not None:
+ # if names is given, we use names as stock_names:
+ stock_names = names
+ # no else needed, this is already covered at the beginning of the function
# compute equal weights
- weights = [1.0 / len(names) for i in range(len(names))]
- return pd.DataFrame({"Allocation": weights, "Name": names})
+ weights = [1.0 / float(len(stock_names)) for _ in range(len(stock_names))]
+ return pd.DataFrame({"Allocation": weights, "Name": stock_names})
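# Expected behaviour (sketch): three names produce an equal-weight allocation, e.g.
#   _generate_pf_allocation(names=["GOOG", "AMZN", "MSFT"])
#        Allocation  Name
#   0      0.333333  GOOG
#   1      0.333333  AMZN
#   2      0.333333  MSFT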
def _build_portfolio_from_df(
data: pd.DataFrame,
pf_allocation: pd.DataFrame = None,
- datacolumns: List[str] = None,
+ data_columns: Optional[List[str]] = None,
market_data: pd.DataFrame = None,
) -> Portfolio:
"""Returns a portfolio based on input in form of ``pandas.DataFrame``.
- :Input:
- :data: A ``pandas.DataFrame`` which contains prices of the stocks listed in
- pf_allocation
- :pf_allocation: (optional) ``pandas.DataFrame`` with the required data column
+ :param data: A DataFrame which contains prices of the stocks listed in pf_allocation.
+ :param pf_allocation: (optional) A DataFrame with the required data column
labels ``Name`` and ``Allocation`` of the stocks. If not given, it is
automatically generated with equal weights for all stocks
- in the resulting portfolio.
- :datacolumns: (optional) A list of strings of data column labels
- to be extracted and returned (default: ["Adj. Close"]).
- :market_data: (optional) A ``pandas.DataFrame`` which contains data of the
+ in the resulting portfolio, default: ``None``.
+ :param data_columns: (optional) A list of strings of data column labels
+ to be extracted and returned (default: ``["Adj. Close"]``).
+ :param market_data: (optional) A DataFrame which contains data of the
market index (default: ``None``).
- :Output:
- :pf: Instance of Portfolio which contains all the information
- requested by the user.
+ :return: Instance of Portfolio which contains all the information requested by the user.
"""
# if pf_allocation is None, automatically generate it
if pf_allocation is None:
pf_allocation = _generate_pf_allocation(data=data)
- if datacolumns is None:
- datacolumns = ["Adj. Close"]
+ if data_columns is None:
+ data_columns = ["Adj. Close"]
+ # Enforcing types for pf_allocation:
+ pf_allocation = pf_allocation.astype({"Allocation": np.float64, "Name": str})
# make sure stock names are in data dataframe
if not _stocknames_in_data_columns(pf_allocation.Name.values, data):
raise ValueError(
"Error: None of the provided stock names were"
+ "found in the provided dataframe."
)
- # extract only "Adjusted Close" price ("Adj. Close" in quandl, "Adj Close" in yfinance)
- # column from DataFrame:
- data = _get_stocks_data_columns(data, pf_allocation.Name.values, datacolumns)
+ # Enforce np.float64 for data columns:
+ data = data.astype(np.float64)
+ # extract only "Adjusted Close" price column from DataFrame:
+ # in quandl: "Adj. Close"; in yfinance: "Adj Close"
+ data = _get_stocks_data_columns(data, pf_allocation.Name.values, data_columns)
# building portfolio:
- pf = Portfolio()
+ pf: Portfolio = Portfolio()
if market_data is not None and not market_data.empty:
# extract only "Adjusted Close" price column from market data
market_data = _get_index_adj_clos_pr(market_data)
# set market index of portfolio
pf.market_index = Market(data=market_data)
- for i in range(len(pf_allocation)):
+ for idx in range(len(pf_allocation)):
# get name of stock
- name = pf_allocation.iloc[i].Name
+ name: str = pf_allocation.iloc[idx].Name
# extract data column of said stock
- stock_data = data.loc[:, [name]].copy(deep=True).squeeze()
+ stock_data: pd.Series = data.loc[:, [name]].copy(deep=True).squeeze()
# create Stock instance and add it to portfolio,
# and defer updating portfolio attributes until all stocks are added
- pf.add_stock(Stock(pf_allocation.iloc[i], data=stock_data), defer_update=True)
+ pf.add_stock(
+ Stock(investmentinfo=pf_allocation.iloc[idx], data=stock_data),
+ defer_update=True,
+ )
# update the portfolio
pf._update()
return pf
-def _all_list_ele_in_other(l1, l2):
+def _all_list_ele_in_other(
+ l_1: LIST_DICT_KEYS[ELEMENT_TYPE], l_2: LIST_DICT_KEYS[ELEMENT_TYPE]
+) -> bool:
"""Returns True if all elements of list l1 are found in list l2."""
- return all(ele in l2 for ele in l1)
+ return all(ele in l_2 for ele in l_1)
-def _any_list_ele_in_other(l1, l2):
+def _any_list_ele_in_other(
+ l_1: LIST_DICT_KEYS[ELEMENT_TYPE], l_2: LIST_DICT_KEYS[ELEMENT_TYPE]
+) -> bool:
"""Returns True if any element of list l1 is found in list l2."""
- return any(ele in l2 for ele in l1)
+ return any(ele in l_2 for ele in l_1)
-def _list_complement(A, B):
+def _list_complement(
+ set_a: LIST_DICT_KEYS[ELEMENT_TYPE], set_b: LIST_DICT_KEYS[ELEMENT_TYPE]
+) -> List[ELEMENT_TYPE]:
"""Returns the relative complement of A in B (also denoted as A\\B)"""
- return list(set(B) - set(A))
+ return list(set(set_b) - set(set_a))
-def build_portfolio(**kwargs):
+def build_portfolio(**kwargs: Dict[str, Any]) -> Portfolio:
"""This function builds and returns an instance of ``Portfolio``
given a set of input arguments.
- :Input:
- :pf_allocation: (optional) ``pandas.DataFrame`` with the required data column
+ :param pf_allocation: (optional) A DataFrame with the required data column
labels ``Name`` and ``Allocation`` of the stocks. If not given, it is
automatically generated with equal weights for all stocks
in the resulting portfolio.
- :names: (optional) A ``string`` or ``list`` of ``strings``, containing the names
+ :param names: (optional) A List of strings, containing the names
of the stocks, e.g. "GOOG" for Google.
- :start_date: (optional) ``string``/``datetime`` start date of stock data to be
+ :param start_date: (optional) string/datetime start date of stock data to be
requested through `quandl`/`yfinance` (default: ``None``).
- :end_date: (optional) ``string``/``datetime`` end date of stock data to be
+ :param end_date: (optional) string/datetime end date of stock data to be
requested through `quandl`/`yfinance` (default: ``None``).
- :data: (optional) A ``pandas.DataFrame`` which contains quantities of
+ :param data: (optional) A DataFrame which contains quantities of
the stocks listed in ``pf_allocation``.
- :data_api: (optional) A ``string`` (default: ``quandl``) which determines how to
- obtain stock prices, if data is not provided by the user. Valid values:
+ :param data_api: (optional) A string (default: ``quandl``) which determines how to obtain
+ stock prices, if data is not provided by the user. Valid values:
- ``quandl`` (Python package/API to `Quandl`)
- ``yfinance`` (Python package formerly known as ``fix-yahoo-finance``)
- :market_index: (optional) ``string`` (default: ``None``) which determines the
- market index to be used for the computation of the beta parameter of the stocks.
+ :param market_index: (optional) string which determines the
+ market index to be used for the computation of the beta parameter of the stocks,
+ default: ``None``.
- :Output:
- :pf: Instance of ``Portfolio`` which contains all the information
- requested by the user.
+ :return: Instance of ``Portfolio`` which contains all the information requested by the user.
.. note:: Only the following combinations of inputs are allowed:
- - ``names``, ``pf_allocation`` (optional), ``start_date`` (optional), ``end_date`` (optional), data_api (optional), ``market_index`` (optional)
+ - ``names``, ``pf_allocation`` (optional), ``start_date`` (optional), ``end_date`` (optional),\
+ ``data_api`` (optional), ``market_index`` (optional)
- ``data``, ``pf_allocation`` (optional)
The two different ways this function can be used are useful for:
@@ -1182,25 +1248,25 @@ def build_portfolio(**kwargs):
If used in an unsupported way, the function (or subsequently called function)
raises appropriate Exceptions with useful information what went wrong.
"""
- docstring_msg = (
+ docstring_msg: str = (
"Please read through the docstring, "
"'build_portfolio.__doc__' and/or have a look at the "
"examples in `examples/`."
)
- input_error = (
+ input_error: str = (
"You passed an unsupported argument to "
"build_portfolio. The following arguments are not "
"supported:"
"\n {}\nOnly the following arguments are allowed:\n "
"{}\n" + docstring_msg
)
- input_comb_error = (
+ input_comb_error: str = (
"Error: None of the input arguments {} are allowed "
"in combination with {}.\n" + docstring_msg
)
# list of all valid optional input arguments
- all_input_args = [
+ all_input_args: List[str] = [
"pf_allocation",
"names",
"start_date",
@@ -1217,17 +1283,17 @@ def build_portfolio(**kwargs):
)
# check for valid input arguments
if not _all_list_ele_in_other(kwargs.keys(), all_input_args):
- unsupported_input = _list_complement(all_input_args, kwargs.keys())
+ unsupported_input: List[str] = _list_complement(all_input_args, kwargs.keys())
raise ValueError(
"Error:\n" + input_error.format(unsupported_input, all_input_args)
)
# create an empty portfolio
- pf = Portfolio()
+ pf: Portfolio = Portfolio()
# 1. pf_allocation, names, start_date, end_date, data_api, market_index
- allowed_mandatory_args = ["names"]
- allowed_input_args = [
+ allowed_mandatory_args: List[str] = ["names"]
+ allowed_input_args: List[str] = [
"names",
"pf_allocation",
"start_date",
@@ -1235,15 +1301,34 @@ def build_portfolio(**kwargs):
"data_api",
"market_index",
]
- complement_input_args = _list_complement(allowed_input_args, all_input_args)
+ complement_input_args: List[str] = _list_complement(
+ allowed_input_args, all_input_args
+ )
+
if _all_list_ele_in_other(allowed_mandatory_args, kwargs.keys()):
# check that no input argument conflict arises:
if _any_list_ele_in_other(complement_input_args, kwargs.keys()):
raise ValueError(
input_comb_error.format(complement_input_args, allowed_mandatory_args)
)
+
+ # Extract given/potential arguments from kwargs:
+ names = cast(List[str], list(kwargs.get("names", [])))
+ pf_allocation = kwargs.get("pf_allocation", None)
+ start_date = cast(Optional[STRING_OR_DATETIME], kwargs.get("start_date", None))
+ end_date = cast(Optional[STRING_OR_DATETIME], kwargs.get("end_date", None))
+ data_api = cast(str, kwargs.get("data_api", "quandl"))
+ market_index = cast(Optional[str], kwargs.get("market_index", None))
+
# get portfolio:
- pf = _build_portfolio_from_api(**kwargs)
+ pf = _build_portfolio_from_api(
+ names=names,
+ pf_allocation=pf_allocation,
+ start_date=start_date,
+ end_date=end_date,
+ data_api=data_api,
+ market_index=market_index,
+ )
# 2. pf_allocation, data
allowed_mandatory_args = ["data"]
@@ -1255,8 +1340,13 @@ def build_portfolio(**kwargs):
raise ValueError(
input_comb_error.format(complement_input_args, allowed_mandatory_args)
)
+
+ # Extract given/potential arguments from kwargs:
+ data = kwargs.get("data", pd.DataFrame())
+ pf_allocation = kwargs.get("pf_allocation", None)
+
# get portfolio:
- pf = _build_portfolio_from_df(**kwargs)
+ pf = _build_portfolio_from_df(data=data, pf_allocation=pf_allocation)
# final check
# pylint: disable=R0916
@@ -1264,17 +1354,18 @@ def build_portfolio(**kwargs):
pf.portfolio.empty
or pf.data.empty
or not pf.stocks
- or pf.expected_return is None
- or pf.volatility is None
- or pf.downside_risk is None
- or pf.sharpe is None
- or pf.sortino is None
- or pf.skew is None
- or pf.kurtosis is None
+ or not hasattr(pf, "expected_return")
+ or not hasattr(pf, "volatility")
+ or not hasattr(pf, "downside_risk")
+ or not hasattr(pf, "var")
+ or not hasattr(pf, "sharpe")
+ or not hasattr(pf, "sortino")
+ or pf.skew.empty
+ or pf.kurtosis.empty
):
raise ValueError(
"Should not get here. Something went wrong while "
- + "creating an instance of Portfolio."
+ + "creating an instance of Portfolio. "
+ docstring_msg
)
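As a quick illustration of the DataFrame-based input combination handled by the rewritten `build_portfolio`, here is a minimal sketch using made-up stock names and synthetic float64 prices (not real market data); with no `pf_allocation` given, equal weights are generated automatically:

.. code-block:: python

    import numpy as np
    import pandas as pd

    from finquant.portfolio import build_portfolio

    # Synthetic prices; the column labels double as stock names.
    data = pd.DataFrame(
        {
            "STOCKA": np.linspace(100.0, 110.0, 50),
            "STOCKB": np.linspace(50.0, 45.0, 50),
        }
    )

    # Combination 2 from the docstring: `data` (plus an optional `pf_allocation`).
    pf = build_portfolio(data=data)
    pf.properties()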
diff --git a/finquant/quants.py b/finquant/quants.py
index c072448d..99bddaaf 100644
--- a/finquant/quants.py
+++ b/finquant/quants.py
@@ -4,181 +4,206 @@
"""
+from typing import Tuple
+
import numpy as np
import pandas as pd
from scipy.stats import norm
+from finquant.data_types import ARRAY_OR_DATAFRAME, ARRAY_OR_SERIES, FLOAT, INT, NUMERIC
from finquant.returns import weighted_mean_daily_returns
+from finquant.type_utilities import type_validation
-def weighted_mean(means, weights):
+def weighted_mean(
+ means: ARRAY_OR_SERIES[FLOAT], weights: ARRAY_OR_SERIES[FLOAT]
+) -> FLOAT:
"""Computes the weighted mean/average, or in the case of a
financial portfolio, it can be used for the Expected Return
of said portfolio.
- :Input:
- :means: ``numpy.ndarray``/``pd.Series`` of mean/average values
- :weights: ``numpy.ndarray``/``pd.Series`` of weights
+ :param means: An array representing mean/average values
+ :type means: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
+
+ :param weights: An array representing weights
+ :type weights: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
- :Output:
- :weighted mu: ``numpy.ndarray``: ``(np.sum(means*weights))``
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: The weighted mean as a floating point number: ``np.sum(means*weights)``
"""
- if not isinstance(weights, (pd.Series, np.ndarray)):
- raise ValueError("weights is expected to be a pandas.Series/np.ndarray")
- if not isinstance(means, (pd.Series, np.ndarray)):
- raise ValueError("means is expected to be a pandas.Series/np.ndarray")
- return np.sum(means * weights)
+ # Type validations:
+ type_validation(means=means, weights=weights)
+ weighted_mu: FLOAT = float(np.sum(means * weights))
+ return weighted_mu
-def weighted_std(cov_matrix, weights):
+def weighted_std(
+ cov_matrix: ARRAY_OR_DATAFRAME[FLOAT], weights: ARRAY_OR_SERIES[FLOAT]
+) -> FLOAT:
"""Computes the weighted standard deviation, or Volatility of
a portfolio, which contains several stocks.
- :Input:
- :cov_matrix: ``numpy.ndarray``/``pandas.DataFrame``, covariance matrix
- :weights: ``numpy.ndarray``/``pd.Series`` of weights
+ :param cov_matrix: Covariance matrix
+ :type cov_matrix: :py:data:`~.finquant.data_types.ARRAY_OR_DATAFRAME`
+
+ :param weights: An array representing weights
+ :type weights: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
- :Output:
- :weighted sigma: ``numpy.ndarray``:
- ``np.sqrt(np.dot(weights.T, np.dot(cov_matrix, weights)))``
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: Weighted sigma (standard deviation) as a floating point number:
+ ``np.sqrt(np.dot(weights.T, np.dot(cov_matrix, weights)))``
"""
- if not isinstance(weights, (pd.Series, np.ndarray)):
- raise ValueError("weights is expected to be a pandas.Series, np.array")
- if not isinstance(cov_matrix, (np.ndarray, (np.ndarray, pd.DataFrame))):
- raise ValueError(
- "cov_matrix is expected to be a numpy.ndarray/pandas.DataFrame"
- )
- return np.sqrt(np.dot(weights.T, np.dot(cov_matrix, weights)))
+ # Type validations:
+ type_validation(cov_matrix=cov_matrix, weights=weights)
+ weighted_sigma: FLOAT = np.sqrt(np.dot(weights.T, np.dot(cov_matrix, weights)))
+ return weighted_sigma
-def sharpe_ratio(exp_return, volatility, risk_free_rate=0.005):
+def sharpe_ratio(
+ exp_return: FLOAT, volatility: FLOAT, risk_free_rate: FLOAT = 0.005
+) -> FLOAT:
"""Computes the Sharpe Ratio
- :Input:
- :exp_return: ``int``/``float``, Expected Return of a portfolio
- :volatility: ``int``/``float``, Volatility of a portfolio
- :risk_free_rate: ``int``/``float`` (default= ``0.005``), risk free rate
+ :param exp_return: Expected Return of a portfolio
+ :type exp_return: :py:data:`~.finquant.data_types.FLOAT`
- :Output:
- :sharpe ratio: ``float`` ``(exp_return - risk_free_rate)/float(volatility)``
+ :param volatility: Volatility of a portfolio
+ :type volatility: :py:data:`~.finquant.data_types.FLOAT`
+
+ :param risk_free_rate: Risk free rate
+ :type risk_free_rate: :py:data:`~.finquant.data_types.FLOAT`, default: 0.005
+
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: Sharpe Ratio as a floating point number:
+ ``(exp_return-risk_free_rate)/float(volatility)``
"""
- if not isinstance(
- exp_return, (int, float, np.int32, np.int64, np.float32, np.float64)
- ):
- raise ValueError("exp_return is expected to be an integer or float.")
- if not isinstance(
- volatility, (int, float, np.int32, np.int64, np.float32, np.float64)
- ):
- raise ValueError("volatility is expected to be an integer or float.")
- if not isinstance(
- risk_free_rate, (int, float, np.int32, np.int64, np.float32, np.float64)
- ):
- raise ValueError("risk_free_rate is expected to be an integer or float.")
- return (exp_return - risk_free_rate) / float(volatility)
-
-
-def sortino_ratio(exp_return, downside_risk, risk_free_rate=0.005):
- """Computes the Sortino Ratio
-
- :Input:
- :exp_return: ``int``/``float``, Expected Return of a portfolio
- :downside_risk: ``int``/``float``, Downside Risk of a portfolio
- :risk_free_rate: ``int``/``float`` (default= ``0.005``), risk free rate
-
- :Output:
- :sortino ratio: ``float``/``NaN`` ``(exp_return - risk_free_rate)/float(downside_risk)``.
- Can be ``NaN`` if ``downside_risk`` is zero
+ # Type validations:
+ type_validation(
+ expected_return=exp_return, volatility=volatility, risk_free_rate=risk_free_rate
+ )
+ res_sharpe_ratio: FLOAT = (exp_return - risk_free_rate) / float(volatility)
+ return res_sharpe_ratio
+
+
+def sortino_ratio(
+ exp_return: FLOAT, downs_risk: FLOAT, risk_free_rate: FLOAT = 0.005
+) -> FLOAT:
+ """Computes the Sortino Ratio.
+
+ :param exp_return: Expected Return of a portfolio
+ :type exp_return: :py:data:`~.finquant.data_types.FLOAT`
+
+ :param downs_risk: Downside Risk of a portfolio
+ :type downs_risk: :py:data:`~.finquant.data_types.FLOAT`
+
+ :param risk_free_rate: Risk free rate
+ :type risk_free_rate: :py:data:`~.finquant.data_types.FLOAT`, default: 0.005
+
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: Sortino Ratio as a floating point number:
+ ``(exp_return - risk_free_rate) / float(downs_risk)``
"""
- if not isinstance(
- exp_return, (int, float, np.int32, np.int64, np.float32, np.float64)
- ):
- raise ValueError("exp_return is expected to be an integer or float.")
- if not isinstance(
- downside_risk, (int, float, np.int32, np.int64, np.float32, np.float64)
- ):
- raise ValueError("volatility is expected to be an integer or float.")
- if not isinstance(
- risk_free_rate, (int, float, np.int32, np.int64, np.float32, np.float64)
- ):
- raise ValueError("risk_free_rate is expected to be an integer or float.")
- if float(downside_risk) == 0:
+ # Type validations:
+ type_validation(
+ expected_return=exp_return,
+ downside_risk=downs_risk,
+ risk_free_rate=risk_free_rate,
+ )
+ if float(downs_risk) == 0:
return np.nan
else:
- return (exp_return - risk_free_rate) / float(downside_risk)
+ return (exp_return - risk_free_rate) / float(downs_risk)
-def downside_risk(data: pd.DataFrame, weights, risk_free_rate=0.005) -> float:
+def downside_risk(
+ data: pd.DataFrame, weights: ARRAY_OR_SERIES[FLOAT], risk_free_rate: FLOAT = 0.005
+) -> FLOAT:
"""Computes the downside risk (target downside deviation of returns).
- :Input:
- :data: ``pandas.DataFrame`` with daily stock prices
- :weights: ``numpy.ndarray``/``pd.Series`` of weights
- :risk_free_rate: ``int``/``float`` (default=``0.005``), risk free rate
+ :param data: A dataframe of daily stock prices
- :Output:
- :downside_risk: ``float``, target downside deviation
- """
- if not isinstance(data, pd.DataFrame):
- raise ValueError("data is expected to be a Pandas.DataFrame.")
- if not isinstance(weights, (pd.Series, np.ndarray)):
- raise ValueError("weights is expected to be a pandas.Series/np.ndarray.")
- if not isinstance(
- risk_free_rate, (int, float, np.int32, np.int64, np.float32, np.float64)
- ):
- raise ValueError("risk_free_rate is expected to be an integer or float.")
+ :param weights: An array representing weights
+ :type weights: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
+
+ :param risk_free_rate: Risk free rate
+ :type risk_free_rate: :py:data:`~.finquant.data_types.FLOAT`, default: 0.005
+
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: Target downside deviation
+ ``np.sqrt(np.mean(np.minimum(0, wtd_daily_mean - risk_free_rate) ** 2))``
+ """
+ # Type validations:
+ type_validation(data=data, weights=weights, risk_free_rate=risk_free_rate)
wtd_daily_mean = weighted_mean_daily_returns(data, weights)
- return np.sqrt(np.mean(np.minimum(0, wtd_daily_mean - risk_free_rate) ** 2))
+ return float(np.sqrt(np.mean(np.minimum(0, wtd_daily_mean - risk_free_rate) ** 2)))
-def value_at_risk(investment, mu, sigma, conf_level=0.95) -> float:
+def value_at_risk(
+ investment: NUMERIC, mu: FLOAT, sigma: FLOAT, conf_level: FLOAT = 0.95
+) -> FLOAT:
"""Computes and returns the expected value at risk of an investment/assets.
- :Input:
- :investment: ``float``/``int``, total value of the investment
- :mu: ``float``/``int`` average/mean return of the investment
- :sigma: ``float``/``int`` standard deviation of the investment
- :conf_level: ``float`` (default= ``0.95``), confidence level of the VaR
+ :param investment: Total value of the investment
+ :type investment: :py:data:`~.finquant.data_types.NUMERIC`
+
+ :param mu: Average/mean return of the investment
+ :type mu: :py:data:`~.finquant.data_types.FLOAT`
- :Output:
- :Value at Risk: ``float``, VaR of the investment
+ :param sigma: Standard deviation of the investment
+ :type sigma: :py:data:`~.finquant.data_types.FLOAT`
+
+ :param conf_level: Confidence level of the VaR
+ :type conf_level: :py:data:`~.finquant.data_types.FLOAT`, default: 0.95
+
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: Value at Risk (VaR) of the investment: ``investment*(mu-sigma*norm.ppf(1-conf_level))``
"""
- if not isinstance(
- investment, (int, float, np.int32, np.int64, np.float32, np.float64)
- ):
- raise ValueError("investment is expected to be an integer or float.")
- if not isinstance(mu, (int, float, np.int32, np.int64, np.float32, np.float64)):
- raise ValueError("mu is expected to be an integer or float")
- if not isinstance(sigma, (int, float, np.int32, np.int64, np.float32, np.float64)):
- raise ValueError("sigma is expected to be an integer or float")
- if not isinstance(conf_level, float):
- raise ValueError("confidence level is expected to be a float.")
+ # Type validations:
+ type_validation(investment=investment, mu=mu, sigma=sigma, conf_level=conf_level)
if conf_level >= 1 or conf_level <= 0:
raise ValueError("confidence level is expected to be between 0 and 1.")
-
- return investment * (mu - sigma * norm.ppf(1 - conf_level))
+ res_value_at_risk: FLOAT = investment * (mu - sigma * norm.ppf(1 - conf_level))
+ return res_value_at_risk
def annualised_portfolio_quantities(
- weights, means, cov_matrix, risk_free_rate=0.005, freq=252
-):
+ weights: ARRAY_OR_SERIES[FLOAT],
+ means: ARRAY_OR_SERIES[FLOAT],
+ cov_matrix: ARRAY_OR_DATAFRAME[FLOAT],
+ risk_free_rate: FLOAT = 0.005,
+ freq: INT = 252,
+) -> Tuple[FLOAT, FLOAT, FLOAT]:
"""Computes and returns the expected annualised return, volatility
and Sharpe Ratio of a portfolio.
- :Input:
- :weights: ``numpy.ndarray``/``pd.Series`` of weights
- :means: ``numpy.ndarray``/``pd.Series`` of mean/average values
- :cov_matrix: ``numpy.ndarray``/``pandas.DataFrame``, covariance matrix
- :risk_free_rate: ``float`` (default= ``0.005``), risk free rate
- :freq: ``int`` (default= ``252``), number of trading days, default
- value corresponds to trading days in a year
-
- :Output:
- :(Expected Return, Volatility, Sharpe Ratio): tuple of those
- three quantities
+ :param weights: An array of weights
+ :type weights: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
+
+ :param means: An array of mean/average values
+ :type means: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
+
+ :param cov_matrix: Covariance matrix
+ :type cov_matrix: :py:data:`~.finquant.data_types.ARRAY_OR_DATAFRAME`
+
+ :param risk_free_rate: Risk free rate
+ :type risk_free_rate: :py:data:`~.finquant.data_types.FLOAT`, default: 0.005
+
+ :param freq: Number of trading days in a year
+ :type freq: :py:data:`~.finquant.data_types.INT`, default: 252
+
+ :rtype: Tuple[:py:data:`~.finquant.data_types.FLOAT`,
+ :py:data:`~.finquant.data_types.FLOAT`,
+ :py:data:`~.finquant.data_types.FLOAT`]
+ :return: Tuple of Expected Return, Volatility, Sharpe Ratio
"""
- if not isinstance(freq, int):
- raise ValueError("freq is expected to be an integer.")
+ # Type validations:
+ type_validation(
+ weights=weights,
+ means=means,
+ cov_matrix=cov_matrix,
+ risk_free_rate=risk_free_rate,
+ freq=freq,
+ )
expected_return = weighted_mean(means, weights) * freq
volatility = weighted_std(cov_matrix, weights) * np.sqrt(freq)
sharpe = sharpe_ratio(expected_return, volatility, risk_free_rate)
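With `type_validation` replacing the hand-written `isinstance` checks, the quants functions now expect floating-point inputs and surface a wrong type as `TypeError` rather than `ValueError` (as reflected in the updated tests). A small sketch with synthetic numbers:

.. code-block:: python

    import numpy as np

    from finquant.quants import annualised_portfolio_quantities, value_at_risk

    weights = np.array([0.4, 0.6])
    means = np.array([0.001, 0.002])
    cov_matrix = np.array([[1.0e-4, 2.0e-5], [2.0e-5, 3.0e-4]])

    exp_ret, vol, sharpe = annualised_portfolio_quantities(
        weights, means, cov_matrix, risk_free_rate=0.005, freq=252
    )
    print(exp_ret, vol, sharpe)

    # A non-numeric investment now raises TypeError (see tests/test_quants.py below):
    try:
        value_at_risk("10000", 0.05, 0.02, 0.95)
    except TypeError as err:
        print(err)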
diff --git a/finquant/returns.py b/finquant/returns.py
index 2a2e26c8..c8a601d4 100644
--- a/finquant/returns.py
+++ b/finquant/returns.py
@@ -1,83 +1,104 @@
"""The module provides functions to compute different kinds of returns of stocks."""
+from typing import Any
+
import numpy as np
import pandas as pd
+from finquant.data_types import (
+ ARRAY_OR_SERIES,
+ FLOAT,
+ INT,
+ NUMERIC,
+ SERIES_OR_DATAFRAME,
+)
+from finquant.type_utilities import type_validation
+
-def cumulative_returns(data, dividend=0):
+def cumulative_returns(data: pd.DataFrame, dividend: NUMERIC = 0) -> pd.DataFrame:
"""Returns DataFrame with cumulative returns
:math:`\\displaystyle R = \\dfrac{\\text{price}_{t_i} - \\text{price}_{t_0} + \\text{dividend}}
{\\text{price}_{t_0}}`
- :Input:
- :data: ``pandas.DataFrame`` with daily stock prices
- :dividend: ``float`` (default= ``0``), paid dividend
+ :param data: A dataframe of daily stock prices
- :Output:
- :ret: a ``pandas.DataFrame`` of cumulative Returns of given stock prices.
+ :param dividend: Paid dividend
+ :type dividend: :py:data:`~.finquant.data_types.NUMERIC`, default: 0
+
+ :return: A dataframe of cumulative returns of given stock prices.
"""
+ # Type validations:
+ type_validation(data=data, dividend=dividend)
data = data.dropna(axis=0, how="any")
return ((data - data.iloc[0] + dividend) / data.iloc[0]).astype(np.float64)
-def daily_returns(data):
+def daily_returns(data: pd.DataFrame) -> pd.DataFrame:
"""Returns DataFrame with daily returns (percentage change)
:math:`\\displaystyle R = \\dfrac{\\text{price}_{t_i} - \\text{price}_{t_{i-1}}}{\\text{price}_{t_{i-1}}}`
- :Input:
- :data: ``pandas.DataFrame`` with daily stock prices
+ :param data: A dataframe of daily stock prices
- :Output:
- :ret: a ``pandas.DataFrame`` of daily percentage change of Returns
- of given stock prices.
+ :return: A dataframe of daily percentage change of returns of given stock prices.
"""
- return data.pct_change().dropna(how="all").replace([np.inf, -np.inf], np.nan)
-
-
-def weighted_mean_daily_returns(data, weights):
+ # Type validations:
+ type_validation(data=data)
+ return (
+ data.pct_change()
+ .dropna(how="all")
+ .replace([np.inf, -np.inf], np.nan)
+ .astype(np.float64)
+ )
+
+
+def weighted_mean_daily_returns(
+ data: pd.DataFrame, weights: ARRAY_OR_SERIES[FLOAT]
+) -> np.ndarray[FLOAT, Any]:
"""Returns DataFrame with the daily weighted mean returns
- :Input:
- :data: ``pandas.DataFrame`` with daily stock prices
- :weights: ``numpy.ndarray``/``pd.Series`` of weights
+ :param data: A dataframe of daily stock prices
+
+ :param weights: An array representing weights
+ :type weights: :py:data:`~.finquant.data_types.ARRAY_OR_SERIES`
- :Output:
- :ret: ``numpy.array`` of weighted mean daily percentage change of Returns
+ :return: An array of weighted mean daily percentage change of Returns
"""
- return np.dot(daily_returns(data), weights)
+ # Type validations:
+ type_validation(data=data, weights=weights)
+ res: np.ndarray[FLOAT, Any] = np.dot(daily_returns(data), weights)
+ return res
-def daily_log_returns(data):
+def daily_log_returns(data: pd.DataFrame) -> pd.DataFrame:
"""
Returns DataFrame with daily log returns
:math:`R_{\\log} = \\log\\left(1 + \\dfrac{\\text{price}_{t_i} - \\text{price}_{t_{i-1}}}
{\\text{price}_{t_{i-1}}}\\right)`
- :Input:
- :data: ``pandas.DataFrame`` with daily stock prices
+ :param data: A dataframe of daily stock prices
- :Output:
- :ret: a ``pandas.DataFrame`` of
- log(1 + daily percentage change of Returns)
+ :return: A dataframe of daily log returns
"""
- return np.log(1 + daily_returns(data)).dropna(how="all")
+ # Type validations:
+ type_validation(data=data)
+ return np.log(1 + daily_returns(data)).dropna(how="all").astype(np.float64)
+
+def historical_mean_return(data: SERIES_OR_DATAFRAME, freq: INT = 252) -> pd.Series:
+ """Returns the *mean return* based on historical stock price data.
-def historical_mean_return(data, freq=252):
- """Returns the mean return based on historical stock price data.
+ :param data: A dataframe or series of daily stock prices
+ :type data: :py:data:`~.finquant.data_types.SERIES_OR_DATAFRAME`
- :Input:
- :data: ``pandas.DataFrame`` or ``pandas.Series`` with daily stock prices
- :freq: ``int`` (default= ``252``), number of trading days, default
- value corresponds to trading days in a year
+ :param freq: Number of trading days in a year
+ :type freq: :py:data:`~.finquant.data_types.INT`, default: 252
- :Output:
- :ret: a ``pandas.Series`` or ``numpy.float`` of historical mean Returns.
+ :return: A series of historical mean returns
"""
- if not isinstance(data, (pd.DataFrame, pd.Series)):
- raise ValueError("data must be a pandas.DataFrame or pandas.Series")
+ # Type validations:
+ type_validation(data=data, freq=freq)
return daily_returns(data).mean() * freq
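Since the returns helpers now validate their inputs and enforce `np.float64`, price data should be floating point (the updated tests cast their fixtures accordingly). A brief, self-contained sketch with synthetic prices:

.. code-block:: python

    import numpy as np
    import pandas as pd

    from finquant.returns import (
        cumulative_returns,
        daily_returns,
        weighted_mean_daily_returns,
    )

    prices = pd.DataFrame({"A": [1.0, 2.0, 4.0, 8.0], "B": [4.0, 5.0, 6.0, 7.0]})

    print(daily_returns(prices))        # day-over-day percentage change
    print(cumulative_returns(prices))   # change relative to the first row
    print(weighted_mean_daily_returns(prices, np.array([0.5, 0.5])))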
diff --git a/finquant/stock.py b/finquant/stock.py
index c47b32b7..bd3113c7 100644
--- a/finquant/stock.py
+++ b/finquant/stock.py
@@ -10,9 +10,9 @@
respectively. However, the DataFrame can contain more information beyond these columns,
such as year, strategy, currency (CCY), etc.
- - ``data``: Historical price data for the stock or fund provided as a ``pandas.DataFrame``.
- The data must contain ` - Adj. Close`, which represents the closing price used to
- compute the return on investment. The DataFrame can contain additional columns as well.
+ - ``data``: Historical price data for the stock or fund provided as a ``pandas.Series``.
+ The data must contain `` - Adj. Close``, which represents the closing price used to
+ compute the return on investment.
The ``Stock`` class computes various quantities related to the stock or fund, such as expected return,
volatility, skewness, and kurtosis. It also provides functionality to calculate the beta parameter
@@ -23,18 +23,21 @@
"""
+from typing import Optional
+
import numpy as np
import pandas as pd
from finquant.asset import Asset
-from finquant.returns import daily_returns, historical_mean_return
+from finquant.data_types import FLOAT
+from finquant.type_utilities import type_validation
class Stock(Asset):
"""Class that contains information about a stock/fund.
- :param investmentinfo: Investment information for the stock as a ``pandas.DataFrame``.
- :param data: Historical price data for the stock as a ``pandas.DataFrame``.
+ :param investmentinfo: Investment information of a stock.
+ :param data: Historical price data of a stock.
The ``Stock`` class extends the ``Asset`` class and represents a specific type of asset,
namely a stock within a portfolio.
@@ -46,11 +49,14 @@ class Stock(Asset):
"""
+ # Attributes:
+ investmentinfo: pd.DataFrame
+ beta: Optional[FLOAT]
+
def __init__(self, investmentinfo: pd.DataFrame, data: pd.Series) -> None:
"""
- :Input:
- :investmentinfo: ``pandas.DataFrame`` of investment information
- :data: ``pandas.Series`` of stock price
+ :param investmentinfo: Investment information of a stock.
+ :param data: Historical price data of a stock.
"""
self.name = investmentinfo.Name
self.investmentinfo = investmentinfo
@@ -58,25 +64,26 @@ def __init__(self, investmentinfo: pd.DataFrame, data: pd.Series) -> None:
# beta parameter of stock (CAPM)
self.beta = None
- def comp_beta(self, market_daily_returns: pd.Series) -> float:
- """Compute and return the Beta parameter of the stock.
+ def comp_beta(self, market_daily_returns: pd.Series) -> FLOAT:
+ """Computes and returns the Beta parameter of the stock.
- :Input:
- :market_daily_returns: ``pd.Series``, daily returns of the market
+ :param market_daily_returns: Daily returns of the market index.
- :Output:
- :beta: ``float``, the Beta parameter of the stock
+ :rtype: :py:data:`~.finquant.data_types.FLOAT`
+ :return: Beta parameter of the stock
"""
+ # Type validations:
+ type_validation(market_daily_returns=market_daily_returns)
cov_mat = np.cov(
self.comp_daily_returns(),
market_daily_returns.to_frame()[market_daily_returns.name],
)
- beta = cov_mat[0, 1] / cov_mat[1, 1]
+ beta = float(cov_mat[0, 1] / cov_mat[1, 1])
self.beta = beta
return beta
- def properties(self):
+ def properties(self) -> None:
"""Nicely prints out the properties of the stock: Expected Return,
Volatility, Beta (optional), Skewness, Kurtosis as well as the ``Allocation`` (and other
information provided in investmentinfo.)
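The `Stock` class now takes `data` as a `pandas.Series` and keeps `beta` as an optional float. A rough sketch of constructing a stock by hand, using a made-up price series and an allocation row shaped like the rows `_build_portfolio_from_df` passes in (`Name`/`Allocation`); the market data is equally synthetic:

.. code-block:: python

    import numpy as np
    import pandas as pd

    from finquant.returns import daily_returns
    from finquant.stock import Stock

    prices = pd.Series(np.linspace(10.0, 12.0, 30), name="STOCKA")
    info = pd.Series({"Name": "STOCKA", "Allocation": 1.0})

    stock = Stock(investmentinfo=info, data=prices)

    # Beta against a synthetic market index; comp_beta expects the market's daily returns.
    market = pd.Series(np.linspace(100.0, 104.0, 30), name="MARKET")
    beta = stock.comp_beta(daily_returns(market.to_frame())["MARKET"])
    print(beta)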
diff --git a/finquant/type_utilities.py b/finquant/type_utilities.py
new file mode 100644
index 00000000..313b4c32
--- /dev/null
+++ b/finquant/type_utilities.py
@@ -0,0 +1,224 @@
+"""
+
+``finquant.type_utilities`` Module
+
+This module defines a type validation utility for working with various data types in Python, utilizing the 'numpy'
+and 'pandas' libraries.
+
+Dependencies:
+-------------
+This module requires the following external libraries:
+
+- 'numpy' (imported as 'np')
+- 'pandas' (imported as 'pd')
+
+Example usage:
+--------------
+Example:
+
+.. code-block:: python
+
+ type_validation(
+ data=pd.DataFrame([1., 2.]),
+ names=["name1", "name2"],
+ start_date="2023-08-01",
+ freq=10,
+ )
+
+"""
+
+import datetime
+from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
+
+import numpy as np
+import pandas as pd
+
+# suppress some pylint complaints for this module only
+# pylint: disable=C0302,R0904,R0912,W0212
+
+
+def _check_type(
+ arg_name: str,
+ arg_values: Any,
+ expected_type: Union[Type[Any], Tuple[Type[Any], ...]],
+ element_type: Optional[Union[Type[Any], Tuple[Type[Any], ...]]] = None,
+) -> None:
+ if isinstance(expected_type, tuple):
+ class_names = [cls.__name__ for cls in expected_type]
+ expected_type_string = ", ".join(class_names)
+ else:
+ expected_type_string = expected_type.__name__
+
+ element_type_string = None
+ if element_type is not None:
+ if isinstance(element_type, tuple):
+ class_names = [cls.__name__ for cls in element_type]
+ element_type_string = ", ".join(class_names)
+ else:
+ element_type_string = element_type.__name__
+
+ validation_failed = False
+
+ if not isinstance(arg_values, expected_type):
+ validation_failed = True
+
+ if element_type is not None:
+ if isinstance(arg_values, pd.DataFrame) and not all(
+ arg_values.dtypes == element_type
+ ):
+ validation_failed = True
+
+ if isinstance(arg_values, np.ndarray):
+ if arg_values.ndim == 2 and not arg_values.dtype == element_type:
+ validation_failed = True
+ elif arg_values.ndim == 1 and not all(
+ isinstance(val, element_type) for val in arg_values
+ ):
+ validation_failed = True
+
+ elif isinstance(arg_values, List) and not all(
+ isinstance(val, element_type) for val in arg_values
+ ):
+ validation_failed = True
+
+ if validation_failed:
+ error_msg = f"Error: {arg_name} is expected to be {expected_type_string}"
+ if element_type_string:
+ error_msg += f" with dtype '{element_type_string}'"
+ raise TypeError(error_msg)
+
+
+def _check_callable_type(
+ arg_name: str,
+ arg_values: Any,
+) -> None:
+ if not callable(arg_values):
+ error_msg = f"Error: {arg_name} is expected to be Callable"
+ raise TypeError(error_msg)
+
+
+def _check_empty_data(arg_name: str, arg_values: Any) -> None:
+ if isinstance(arg_values, (List, np.ndarray, pd.Series, pd.DataFrame)):
+ if len(arg_values) == 0:
+ raise ValueError(
+ f"Error: {arg_name} is an empty list, numpy array, pandas series, or dataframe"
+ )
+
+
+# Define a dictionary mapping each argument name to its expected type and, if applicable, element type
+type_dict: Dict[
+ str,
+ Tuple[
+ Union[Type[Any], Tuple[Type[Any], ...]],
+ Optional[Union[Type[Any], Tuple[Type[Any], ...], None]],
+ ],
+] = {
+ # DataFrames, Series, Array:
+ "data": ((pd.Series, pd.DataFrame), np.floating),
+ "pf_allocation": (pd.DataFrame, None),
+ "returns_df": (pd.DataFrame, np.floating),
+ "returns_series": (pd.Series, np.floating),
+ "market_daily_returns": (pd.Series, np.floating),
+ "means": ((np.ndarray, pd.Series), np.floating),
+ "weights": ((np.ndarray, pd.Series), np.floating),
+ "initial_weights": (np.ndarray, np.floating),
+ "weights_array": (np.ndarray, np.floating),
+ "cov_matrix": ((np.ndarray, pd.DataFrame), np.floating),
+ # Lists:
+ "names": ((List, np.ndarray), str),
+ "cols": ((List, np.ndarray), str),
+ "spans": ((List, np.ndarray), (int, np.integer)),
+ "targets": ((List, np.ndarray), (int, np.integer)),
+ # Datetime objects:
+ "start_date": ((str, datetime.datetime), None),
+ "end_date": ((str, datetime.datetime), None),
+ # Strings:
+ "data_api": (str, None),
+ "market_index": (str, None),
+ "method": (str, None),
+ "name": (str, None),
+ # FLOATs
+ "expected_return": ((float, np.floating), None),
+ "volatility": ((float, np.floating), None),
+ "risk_free_rate": ((float, np.floating), None),
+ "downside_risk": ((float, np.floating), None),
+ "mu": ((float, np.floating), None),
+ "sigma": ((float, np.floating), None),
+ "conf_level": ((float, np.floating), None),
+ # INTs:
+ "freq": ((int, np.integer), None),
+ "span": ((int, np.integer), None),
+ "num_trials": ((int, np.integer), None),
+ # NUMERICs:
+ "investment": ((int, np.integer, float, np.floating), None),
+ "dividend": ((int, np.integer, float, np.floating), None),
+ "target": ((int, np.integer, float, np.floating), None),
+ # Booleans:
+ "plot": (bool, None),
+ "save_weights": (bool, None),
+ "verbose": (bool, None),
+ "defer_update": (bool, None),
+}
+
+type_callable_dict: Dict[
+ str,
+ Tuple[
+ Callable[..., Any],
+ Optional[Type[Any]],
+ ],
+] = {
+ # Callables:
+ "fun": (callable, None),
+}
+
+
+def type_validation(**kwargs: Any) -> None:
+ """
+ Perform generic type validations on input variables.
+
+ This function performs various type validations on a set of input variables. It helps to ensure that the input
+ values conform to the expected types and conditions, raising a TypeError with a descriptive error message
+ if any type validation fails and a ValueError if a numpy.array or pd.Series/DataFrame is empty.
+
+ :param kwargs: Arbitrary keyword arguments representing the input variables to be checked.
+
+ Raises:
+ ``TypeError``:
+ If any of the type validations fail, a TypeError is raised with a descriptive error message
+ indicating the expected type and conditions for each variable.
+ ``ValueError``:
+ If any of the value validations fail, a ValueError is raised with a descriptive error message
+ indicating the expected conditions for each variable.
+
+ Example usage:
+
+ .. code-block:: python
+
+ type_validation(
+ data=pd.DataFrame([1., 2.]),
+ names=["name1", "name2"],
+ start_date="2023-08-01",
+ freq=10,
+ )
+ """
+
+ for arg_name, arg_values in kwargs.items():
+ if arg_name not in type_dict and arg_name not in type_callable_dict:
+ raise ValueError(
+ f"Error: '{arg_name}' is not a valid argument. "
+ f"Please only use argument names defined in `type_dict` or `type_callable_dict`."
+ )
+
+ # Some arguments are allowed to be None, so skip them
+ if arg_values is None:
+ continue
+
+ # Perform the type validation
+ if arg_name == "fun":
+ _check_callable_type(arg_name, arg_values)
+ else:
+ expected_type, element_type = type_dict[arg_name]
+ # Validation of type
+ _check_type(arg_name, arg_values, expected_type, element_type)
+ # Check for empty list/array/series/dataframe
+ _check_empty_data(arg_name, arg_values)
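Beyond the passing call shown in the module docstring, the failure modes described above (wrong type/dtype, empty containers, unknown argument names) can be sketched as follows, using deliberately invalid synthetic inputs:

.. code-block:: python

    import numpy as np
    import pandas as pd

    from finquant.type_utilities import type_validation

    # Wrong element type: an integer DataFrame where np.floating is expected -> TypeError
    try:
        type_validation(data=pd.DataFrame([1, 2]))
    except TypeError as err:
        print(err)

    # Empty array -> ValueError
    try:
        type_validation(weights=np.array([], dtype=np.float64))
    except ValueError as err:
        print(err)

    # Unknown keyword -> ValueError listing where valid argument names are defined
    try:
        type_validation(not_an_argument=42)
    except ValueError as err:
        print(err)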
diff --git a/pyproject.toml b/pyproject.toml
index 2314693f..076a6c88 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,13 +4,16 @@ line-length = 88
[tool.isort]
profile = "black"
-[mypy]
+[tool.mypy]
python_version = "3.10"
-strict = false
-strict_optional = false
-ignore_missing_imports = false
-disallow_untyped_defs = false
-warn_unused_configs = false
-show_error_context = false
-warn_return_any = false
-warn_unused_ignores = false
+exclude = ["tests", "example"]
+strict = true
+strict_optional = true
+warn_return_any = true
+warn_no_return = true
+disallow_untyped_defs = true
+show_error_context = true
+ignore_missing_imports = true
+warn_unused_configs = true
+warn_unused_ignores = true
+plugins=["pydantic.mypy","numpy.typing.mypy_plugin"]
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 52d3f93e..58c18e68 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,4 @@
-numpy>=1.15
+numpy>=1.22.0
scipy>=1.2.0
pandas>=2.0
matplotlib>=3.0
diff --git a/requirements_docs.txt b/requirements_docs.txt
index 856b3f4c..8c722944 100644
--- a/requirements_docs.txt
+++ b/requirements_docs.txt
@@ -1,2 +1,3 @@
-sphinx
-sphinx_rtd_theme
\ No newline at end of file
+sphinx==6.2.1
+sphinx_rtd_theme==1.2.0
+sphinx-autodoc-typehints
\ No newline at end of file
diff --git a/requirements_test.txt b/requirements_test.txt
index 5260e7d3..3b9cfb3c 100644
--- a/requirements_test.txt
+++ b/requirements_test.txt
@@ -2,4 +2,5 @@ pytest>=7.3.2
black
mypy
isort
-pylint
\ No newline at end of file
+pylint
+pydantic
\ No newline at end of file
diff --git a/scripts/run_code_analysis.sh b/scripts/run_code_analysis.sh
new file mode 100644
index 00000000..f074f35e
--- /dev/null
+++ b/scripts/run_code_analysis.sh
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+echo "Running Pylint - finquant (ignoring TODOs)"
+python -m pylint --fail-under=10 --disable=fixme --output-format=parseable *.py finquant | tee pylint.log
+#echo "Running Pylint - tests (ignoring TODOs and access to protected attributes)"
+#python -m pylint --disable=fixme,protected-access --output-format=parseable tests | tee -a pylint.log
+
+echo ""
+echo "Running Mypy"
+python -m mypy *.py finquant | tee mypy.log
+
+#echo ""
+#echo "Running Black (check mode only)"
+#python -m black --check *.py finquant tests
+
+#echo ""
+#echo "Running isort (check mode only)"
+#python -m isort --check *.py finquant tests
\ No newline at end of file
diff --git a/scripts/update_version.py b/scripts/update_version.py
index 899afcb3..c7c9192a 100644
--- a/scripts/update_version.py
+++ b/scripts/update_version.py
@@ -1,7 +1,33 @@
+"""
+Version Management Module
+
+This module provides functions for managing version numbers based on branch names.
+
+Functions:
+----------
+- `increment_version(version: str, branch_name: str) -> str`: Increment the version based on the branch name pattern.
+- `increment_version_by(version: str, increment: str) -> str`: Increment the version by a given increment.
+- `read_version_from_file(filename: str) -> Optional[str]`: Read the version from a file.
+- `checkout_branch(branch_name: str) -> None`: Checkout a specific branch.
+- `get_version_from_branch(filename: str, branch_name: str) -> Optional[str]`: Get version number from a specific
+ branch.
+- `compare_versions(version1: str, version2: str) -> int`: Compare two strings of version numbers.
+- `write_version_to_file(filename: str, version: str) -> None`: Write the updated version back to the file.
+- `parse_args() -> argparse.Namespace`: Parse command-line arguments.
+- `main() -> None`: Main function that handles version updates based on branch names.
+
+Exceptions:
+-----------
+- `VersionFileReadError`: Exception raised when there is an error reading a version file.
+- `VersionUpdateError`: Exception raised when an error occurs during the update of a version.
+
+"""
+
import argparse
import re
import subprocess
import sys
+from typing import Optional, Tuple
# Define the version increments based on the change type (patch, minor, major)
version_increments = {
@@ -19,15 +45,33 @@
class VersionFileReadError(Exception):
- pass
+ """
+ Exception raised when there is an error reading a version file.
+ """
class VersionUpdateError(Exception):
- pass
+ """
+ Exception raised when an error occurs during the update of a version.
+ """
# Function to increment the version based on the branch name pattern
-def increment_version(version, branch_name):
+def increment_version(version: str, branch_name: str) -> str:
+ """
+ Increment the version number based on the branch name pattern.
+
+ Parameters:
+ -----------
+ version (str): The current version number in "x.y.z" format.
+ branch_name (str): The name of the branch being checked out.
+
+ Returns:
+ --------
+ str: The updated version number after applying the increment.
+
+ """
+
for change_type, prefixes in branch_prefixes.items():
prefixes = prefixes or [] # If None, set to an empty list
for prefix in prefixes:
@@ -36,21 +80,32 @@ def increment_version(version, branch_name):
return (
increment_version_by(version, increment) if increment else version
)
-
return version
# Function to increment the version by a given increment (e.g., "0.0.1" or "0.1.0" or "1.0.0")
-def increment_version_by(version, increment):
+def increment_version_by(version: str, increment: str) -> str:
+ """
+ Increment the version by a given increment (e.g., "0.0.1" or "0.1.0" or "1.0.0").
+
+ Parameters:
+ -----------
+ version (str): The current version number in "x.y.z" format.
+ increment (str): The version increment to apply in "x.y.z" format.
+
+ Returns:
+ --------
+ str: The updated version number after applying the increment.
+
+ """
+
version_parts = version.split(".")
increment_parts = increment.split(".")
new_version_parts = []
- for i in range(len(version_parts)):
- if i < len(increment_parts):
- new_version_parts.append(
- str(int(version_parts[i]) + int(increment_parts[i]))
- )
+ for idx, part in enumerate(version_parts):
+ if idx < len(increment_parts):
+ new_version_parts.append(str(int(part) + int(increment_parts[idx])))
else:
new_version_parts.append("0")
@@ -67,21 +122,45 @@ def increment_version_by(version, increment):
# Read the version from the file
-def read_version_from_file(filename):
+def read_version_from_file(filename: str) -> Optional[str]:
+ """
+ Read the version from a file.
+
+ Parameters:
+ -----------
+ filename (str): The path to the file containing the version.
+
+ Returns:
+ --------
+ Optional[str]: The version number read from the file, or None if not found.
+
+ """
+
with open(filename, "r") as file:
version_content = file.read()
version_match = re.search(r"version=(\d+\.\d+\.\d+)", version_content)
-
if version_match:
version = version_match.group(1)
else:
version = None
-
return version
# Function to checkout a specific branch
-def checkout_branch(branch_name):
+def checkout_branch(branch_name: str) -> None:
+ """
+ Checkout a specific branch to access its content.
+
+ Parameters:
+ -----------
+ branch_name (str): The name of the branch to be checked out.
+
+ Returns:
+ --------
+ None
+
+ """
+
# Fetch the latest changes from the remote repository
subprocess.run(["git", "fetch", "origin", branch_name], check=True)
@@ -90,7 +169,21 @@ def checkout_branch(branch_name):
# Function to get version number from a specific branch
-def get_version_from_branch(filename, branch_name):
+def get_version_from_branch(filename: str, branch_name: str) -> Optional[str]:
+ """
+ Get the version number from a specific branch.
+
+ Parameters:
+ -----------
+ filename (str): The path to the file containing the version.
+ branch_name (str): The name of the branch from which to read the version.
+
+ Returns:
+ --------
+ Optional[str]: The version number read from the file, or None if not found.
+
+ """
+
# Checkout branch
checkout_branch(branch_name)
@@ -102,8 +195,22 @@ def get_version_from_branch(filename, branch_name):
# Function to compare 2 strings of version numbers
-def compare_versions(version1, version2):
- def parse_version(version_str):
+def compare_versions(version1: str, version2: str) -> int:
+ """
+ Compare two strings of version numbers.
+
+ Parameters:
+ -----------
+ version1 (str): The first version number to compare in "x.y.z" format.
+ version2 (str): The second version number to compare in "x.y.z" format.
+
+ Returns:
+ --------
+ int: -1 if version1 < version2, 1 if version1 > version2, 0 if they are equal.
+
+ """
+
+ def parse_version(version_str: str) -> Tuple[int, ...]:
return tuple(map(int, version_str.split(".")))
parsed_version1 = parse_version(version1)
@@ -118,7 +225,21 @@ def parse_version(version_str):
# Write the updated version back to the file
-def write_version_to_file(filename, version):
+def write_version_to_file(filename: str, version: str) -> None:
+ """
+ Write the updated version back to the file.
+
+ Parameters:
+ -----------
+ filename (str): The path to the file to be updated.
+ version (str): The updated version number in "x.y.z" format.
+
+ Returns:
+ --------
+ None
+
+ """
+
with open(filename, "r+") as file:
file_content = file.read()
updated_content = re.sub(
@@ -134,7 +255,16 @@ def write_version_to_file(filename, version):
# Function to parse command-line arguments
-def parse_args():
+def parse_args() -> argparse.Namespace:
+ """
+ Parse command-line arguments.
+
+ Returns:
+ --------
+ argparse.Namespace: An object containing the parsed arguments.
+
+ """
+
parser = argparse.ArgumentParser(description="Update version based on branch name.")
parser.add_argument("base_branch", help="Base branch name")
parser.add_argument("source_branch", help="Source branch name")
@@ -142,7 +272,16 @@ def parse_args():
# Main function
-def main():
+def main() -> None:
+ """
+ Main function that handles version updates based on branch names.
+
+ Returns:
+ --------
+ None
+
+ """
+
args = parse_args()
base_branch_name = args.base_branch
source_branch_name = args.source_branch
@@ -181,11 +320,11 @@ def main():
raise VersionUpdateError(
"Error: Updated version is lower than version in base branch."
)
- elif version_comparison == 0:
+ if version_comparison == 0:
print("Version does not increase.")
# Exit with error code 1
sys.exit(1)
- elif version_comparison > 0:
+ if version_comparison > 0:
if updated_version == current_version_source:
print("Version is already updated.")
# Exit with error code 1
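The comparison helper documented above returns -1, 0 or 1. A minimal sketch of calling it outside CI, assuming the script is run from the repository root with `scripts/` added to the import path (an assumption for illustration, not something this PR sets up):

.. code-block:: python

    import sys

    sys.path.insert(0, "scripts")

    from update_version import compare_versions

    # Per the docstring: -1 if the first version is lower, 1 if higher, 0 if equal.
    print(compare_versions("0.6.1", "0.6.2"))  # -> -1
    print(compare_versions("0.6.2", "0.6.2"))  # ->  0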
diff --git a/setup.py b/setup.py
index bb76ab19..57a6846a 100644
--- a/setup.py
+++ b/setup.py
@@ -1,9 +1,11 @@
+from typing import List
+
import setuptools
-def read_file(file_path):
- with open(file_path, "r") as f:
- return f.read()
+def read_file(file_path: str) -> str:
+ with open(file_path, "r") as file:
+ return file.read()
# get version/release from file
@@ -14,7 +16,7 @@ def read_file(file_path):
# get dependencies
-def read_requirements(file_path):
+def read_requirements(file_path: str) -> List[str]:
return [line.strip() for line in read_file(file_path).splitlines() if line.strip()]
diff --git a/tests/test_moving_average.py b/tests/test_moving_average.py
index bb0133cf..d7657c88 100644
--- a/tests/test_moving_average.py
+++ b/tests/test_moving_average.py
@@ -24,7 +24,7 @@ def test_sma():
dforig = pd.DataFrame({"0": orig[0], "1": orig[1]}).dropna()
l1 = range(10)
l2 = [i**2 for i in range(10)]
- df = pd.DataFrame({"0": l1, "1": l2})
+ df = pd.DataFrame({"0": l1, "1": l2}).astype(np.float64)
res = sma(df, span=2).dropna()
assert all((dforig == res).all())
@@ -61,7 +61,7 @@ def test_ema():
dforig = pd.DataFrame({"0": orig[0], "1": orig[1]}).dropna()
l1 = range(10)
l2 = [i**2 for i in range(10)]
- df = pd.DataFrame({"0": l1, "1": l2})
+ df = pd.DataFrame({"0": l1, "1": l2}).astype(np.float64)
res = ema(df, span=2).dropna()
assert all((abs(dforig - res) <= 1e-15).all())
@@ -98,7 +98,7 @@ def test_sma_std():
dforig = pd.DataFrame({"0": orig[0], "1": orig[1]}).dropna()
l1 = range(10)
l2 = [i**2 for i in range(10)]
- df = pd.DataFrame({"0": l1, "1": l2})
+ df = pd.DataFrame({"0": l1, "1": l2}).astype(np.float64)
res = sma_std(df, span=2).dropna()
assert all((abs(dforig - res) <= 1e-15).all())
@@ -135,7 +135,7 @@ def test_ema_std():
dforig = pd.DataFrame({"0": orig[0], "1": orig[1]}).dropna()
l1 = range(10)
l2 = [i**2 for i in range(10)]
- df = pd.DataFrame({"0": l1, "1": l2})
+ df = pd.DataFrame({"0": l1, "1": l2}).astype(np.float64)
res = ema_std(df, span=2).dropna()
assert all((abs(dforig - res) <= 1e-15).all())
@@ -176,7 +176,7 @@ def test_compute_ma():
{"Stock": stock_orig, "10d": ma10d_orig, "30d": ma30d_orig}, index=index
)
x = np.sin(np.linspace(1, 10, 100))
- df = pd.DataFrame({"Stock": x})
+ df = pd.DataFrame({"Stock": x}).astype(np.float64)
ma = compute_ma(df, ema, spans=[10, 30], plot=False)
assert all(abs((dforig - ma.describe()) <= 1e-15).all())
@@ -189,7 +189,7 @@ def test_plot_bollinger_band():
"Bollinger Band of +/- 2$\\sigma$, Moving Average " "of sma over 15 days"
)
x = np.sin(np.linspace(1, 10, 100))
- df = pd.DataFrame({"Stock": x}, index=np.linspace(1, 10, 100))
+ df = pd.DataFrame({"Stock": x}, index=np.linspace(1, 10, 100)).astype(np.float64)
df.index.name = "Days"
plt.figure()
plot_bollinger_band(df, sma, span=15)
diff --git a/tests/test_quants.py b/tests/test_quants.py
index 833c8f4d..1d39763c 100644
--- a/tests/test_quants.py
+++ b/tests/test_quants.py
@@ -16,21 +16,21 @@
def test_weighted_mean():
- means = np.array([1])
- weights = np.array([1])
+ means = np.array([1.0])
+ weights = np.array([1.0])
assert weighted_mean(means, weights) == 1
- means = np.array(range(5))
- weights = np.array(range(5, 10))
+ means = np.array(range(5)).astype(np.float64)
+ weights = np.array(range(5, 10)).astype(np.float64)
assert weighted_mean(means, weights) == 80
def test_weighted_std():
- x = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9])
- y = np.array([9, 8, 7, 6, 5, 4, 3, 2, 1])
+ x = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0])
+ y = np.array([9.0, 8.0, 7.0, 6.0, 5.0, 4.0, 3.0, 2.0, 1.0])
Sigma = np.cov(x, y)
- weights = np.array([1, 1])
+ weights = np.array([1.0, 1.0])
assert weighted_std(Sigma, weights) == 0.0
- weights = np.array([-3, 5])
+ weights = np.array([-3.0, 5.0])
assert weighted_std(Sigma, weights) ** 2 == 480.0
@@ -51,22 +51,22 @@ def test_value_at_risk():
assert abs(value_at_risk(1e4, 0.1, -0.25, 0.9) - (-2203.88)) <= 1e-1
assert abs(value_at_risk(1e4, -0.1, -0.25, 0.9) - (-4203.88)) <= 1e-1
assert value_at_risk(0, 0.1, 0.5, 0.9) == 0
- assert abs(value_at_risk(1e4, 0, 0.5, 0.9) - 6407.76) <= 1e-1
- assert abs(value_at_risk(1e4, 0.1, 0, 0.9) - 1000) <= 1e-1
- assert value_at_risk(1e4, 0, 0, 0.9) == 0
+ assert abs(value_at_risk(1e4, 0.0, 0.5, 0.9) - 6407.76) <= 1e-1
+ assert abs(value_at_risk(1e4, 0.1, 0.0, 0.9) - 1000) <= 1e-1
+ assert value_at_risk(1e4, 0.0, 0.0, 0.9) == 0
def test_value_at_risk_invalid_types():
- with pytest.raises(ValueError):
+ with pytest.raises(TypeError):
value_at_risk("10000", 0.05, 0.02, 0.95)
- with pytest.raises(ValueError):
+ with pytest.raises(TypeError):
value_at_risk(10000, 0.05, "0.02", 0.95)
- with pytest.raises(ValueError):
+ with pytest.raises(TypeError):
value_at_risk(10000, [0.05], 0.02, 0.95)
- with pytest.raises(ValueError):
+ with pytest.raises(TypeError):
value_at_risk(10000, 0.05, 0.02, "0.95")
with pytest.raises(ValueError):
@@ -77,27 +77,26 @@ def test_value_at_risk_invalid_types():
def test_annualised_portfolio_quantities():
- x = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9])
- y = np.array([9, 8, 7, 6, 5, 4, 3, 2, 1])
+ x = np.array([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0])
+ y = np.array([9.0, 8.0, 7.0, 6.0, 5.0, 4.0, 3.0, 2.0, 1.0])
Sigma = np.cov(x, y)
- weights = np.array([1, 1])
- mean = np.array([1, 2])
- weights = np.array([-3, 5])
- res = annualised_portfolio_quantities(weights, mean, Sigma, 0, 252)
- orig = (1764, 347.79304190854657, 5.071981861166303)
+ mean = np.array([1.0, 2.0])
+ weights = np.array([-3.0, 5.0])
+ res = annualised_portfolio_quantities(weights, mean, Sigma, 0.0, 252)
+ orig = (1764.0, 347.79304190854657, 5.071981861166303)
for i in range(len(res)):
assert abs(res[i] - orig[i]) <= 1e-15
def test_downside_risk():
- data1 = pd.DataFrame({"1": [1, 2, 4, 8], "2": [1, 2, 3, 4]})
+ data1 = pd.DataFrame({"1": [1.0, 2.0, 4.0, 8.0], "2": [1.0, 2.0, 3.0, 4.0]})
weights = np.array([0.25, 0.75])
rf_rate = 0.005
dr1 = downside_risk(data1, weights, rf_rate)
assert dr1 == 0
- data2 = pd.DataFrame({"1": [7, 6, 5, 4, 3]})
- weights = np.array([1])
+ data2 = pd.DataFrame({"1": [7.0, 6.0, 5.0, 4.0, 3.0]})
+ weights = np.array([1.0])
rf_rate = 0.0
dr2 = downside_risk(data2, weights, rf_rate)
assert abs(dr2 - 0.19409143531019335) <= 1e-15
diff --git a/tests/test_returns.py b/tests/test_returns.py
index 3ac33999..62fed8e3 100644
--- a/tests/test_returns.py
+++ b/tests/test_returns.py
@@ -13,10 +13,10 @@
def test_cumulative_returns():
orig = [
list(range(10)),
- [0, -0.025, -0.05, -0.075, -0.1, -0.125, -0.15, -0.175, -0.2, -0.225],
+ [0.0, -0.025, -0.05, -0.075, -0.1, -0.125, -0.15, -0.175, -0.2, -0.225],
]
- l1 = range(1, 11)
- l2 = [40 - i for i in range(10)]
+ l1 = np.array(range(1, 11)).astype(np.float64)
+ l2 = [float(40 - i) for i in range(10)]
d = {"1": l1, "2": l2}
df = pd.DataFrame(d)
ret = cumulative_returns(df)
@@ -36,7 +36,7 @@ def test_cumulative_returns():
def test_daily_returns():
orig = [[1.0, 1.0 / 2, 1.0 / 3, 1.0 / 4], [1.0 / 9, 1.0 / 10, 1.0 / 11, 1.0 / 12]]
- l1 = range(1, 6)
+ l1 = np.array(range(1, 6)).astype(np.float64)
l2 = [10 * 0.2 + i * 0.25 for i in range(1, 6)]
d = {"1": l1, "2": l2}
df = pd.DataFrame(d)
@@ -58,7 +58,7 @@ def test_weighted_daily_mean_returns():
d = {"1": l1}
expected = [0.5 for i in range(len(l1) - 1)]
df = pd.DataFrame(d)
- ret = weighted_mean_daily_returns(df, np.array([1]))
+ ret = weighted_mean_daily_returns(df, np.array([1.0]))
assert all(abs(ret - expected) <= 1e-15)
@@ -77,7 +77,7 @@ def test_daily_log_returns():
0.08004270767353636,
],
]
- l1 = range(1, 6)
+ l1 = np.array(range(1, 6)).astype(np.float64)
l2 = [10 * 0.2 + i * 0.25 for i in range(1, 6)]
d = {"1": l1, "2": l2}
df = pd.DataFrame(d)
@@ -90,7 +90,7 @@ def test_daily_log_returns():
def test_historical_mean_return():
orig = [13.178779135809942, 3.8135072274034982]
- l1 = range(1, 101)
+ l1 = np.array(range(1, 101)).astype(np.float64)
l2 = [10 * 0.2 + i * 0.25 for i in range(21, 121)]
d = {"1": l1, "2": l2}
df = pd.DataFrame(d)
diff --git a/version b/version
index 93407d69..d4576a1d 100644
--- a/version
+++ b/version
@@ -1,2 +1,2 @@
-version=0.6.1
-release=0.6.1
+version=0.6.2
+release=0.6.2