Merge pull request #196 from amor71/optimizer
bug fixes
amor71 authored Jun 24, 2021
2 parents 1eaa93a + 9f76294 commit 2cded9c
Showing 5 changed files with 34 additions and 30 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -1,6 +1,6 @@
language: python
python:
- '3.8'
- '3.9'
os: linux
services:
- postgresql
4 changes: 2 additions & 2 deletions README.md
@@ -1,5 +1,5 @@
# LiuAlgoTrader
[![Build Status](https://travis-ci.org/amor71/LiuAlgoTrader.svg?branch=master)](https://travis-ci.org/amor71/LiuAlgoTrader)
[![Build Status](https://travis-ci.com/amor71/LiuAlgoTrader.svg?branch=master)](https://travis-ci.org/amor71/LiuAlgoTrader)
![PyPI - Python Version](https://img.shields.io/pypi/pyversions/liualgotrader)
[![Python 3](https://pyup.io/repos/github/amor71/LiuAlgoTrader/python-3-shield.svg)](https://pyup.io/repos/github/amor71/LiuAlgoTrader/)
[![Updates](https://pyup.io/repos/github/amor71/LiuAlgoTrader/shield.svg)](https://pyup.io/repos/github/amor71/LiuAlgoTrader/)
@@ -11,7 +11,7 @@

**LiuAlgoTrader** is a scalable, multi-process ML-ready framework
for effective algorithmic trading. The framework simplifies development, testing,
deployment, analysis, and training of algo trading strategies. The framework **automatically analyzes** trading sessions, and the analysis may be used to train predictive models.
deployment, analysis, and training of algo trading strategies. The framework **automatically analyzes** trading sessions and hyper-parameter optimization runs, and the analysis may be used to train predictive models.

LiuAlgoTrader can run on a laptop and
*hedge-on-the-go*, or run on a multi-core hosted Linux server
33 changes: 15 additions & 18 deletions analysis/notebooks/hyperparameters_analysis.ipynb
@@ -4,7 +4,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"## Take Action: Select portfolio for analysis"
"## Take Action: Select Optimizer run for analysis"
]
},
{
@@ -13,7 +13,7 @@
"metadata": {},
"outputs": [],
"source": [
"optimizer_run_id = \"c76c97cb-d8cd-4498-8112-d75b2b1f1443\""
"optimizer_run_id = \"28fb93cc-489d-48b3-950b-ee207d7317ed\""
]
},
{
@@ -63,36 +63,33 @@
"metadata": {},
"outputs": [],
"source": [
"analysis = pd.DataFrame(columns=['profile_id'])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"analysis = pd.DataFrame(columns=['profile_id'])\n",
"analysis['profile_id'] = hyperparameters.index.levels[0]\n",
"analysis['configurations'] = analysis['profile_id'].apply(\n",
" lambda x: hyperparameters[\n",
" hyperparameters.index.get_level_values(\"portfolio_id\") == x\n",
" ].configurations[-1]\n",
")\n",
"analysis['sharpe'] = analysis['profile_id'].apply(\n",
" lambda x: qs.stats.sharpe(\n",
" returns=hyperparameters[\n",
" hyperparameters.index.get_level_values(\"portfolio_id\") == x\n",
" ]\n",
" ).totals\n",
" ].totals\n",
" )\n",
")\n",
"analysis['volatility'] = analysis['profile_id'].apply(\n",
" lambda x: qs.stats.volatility(\n",
" returns=hyperparameters[\n",
" hyperparameters.index.get_level_values(\"portfolio_id\") == x\n",
" ]\n",
" ).totals\n",
" ].totals\n",
" )\n",
")\n",
"analysis['total'] = analysis['profile_id'].apply(\n",
" lambda x: hyperparameters[\n",
" hyperparameters.index.get_level_values(\"portfolio_id\") == x\n",
" ]\n",
" .totals[-1]\n",
")\n"
" ].totals[-1]\n",
")\n",
"\n"
]
},
{
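
The notebook change above collapses the analysis into a single cell that builds one summary row per optimizer portfolio (last configuration, Sharpe, volatility, final total). The sketch below is not part of the commit; it shows an equivalent aggregation using a `groupby` over the `portfolio_id` index level, assuming `hyperparameters` is the `(portfolio_id, date)`-indexed frame with `totals` and `configurations` columns presumably produced by `calc_hyperparameters_analysis` (see the analysis.py diff below). `summarize_optimizer_run` is a hypothetical helper name.

```python
import pandas as pd
import quantstats as qs


def summarize_optimizer_run(hyperparameters: pd.DataFrame) -> pd.DataFrame:
    """Build one summary row per portfolio from a (portfolio_id, date)-indexed frame."""

    def summarize(group: pd.DataFrame) -> pd.Series:
        returns = group.totals  # equity curve for one optimizer portfolio
        return pd.Series(
            {
                "configurations": group.configurations.iloc[-1],
                "sharpe": qs.stats.sharpe(returns=returns),
                "volatility": qs.stats.volatility(returns=returns),
                "total": returns.iloc[-1],
            }
        )

    return (
        hyperparameters.groupby(level="portfolio_id").apply(summarize).reset_index()
    )
```

Note that the notebook names the resulting column `profile_id`; the sketch keeps `portfolio_id`, matching the index level set in `calc_hyperparameters_analysis`.
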
7 changes: 4 additions & 3 deletions analysis/notebooks/portfolio_analysis.ipynb
@@ -13,7 +13,7 @@
"metadata": {},
"outputs": [],
"source": [
"portfolio_id = \"765c2f7d-658f-4e29-a276-11fc904cdff9\"\n",
"portfolio_id = \"13c0303f-814b-4402-a68c-6de9369b87af\"\n",
"report_title = \"Trade JNUG/JDST (based on GDXJ SMA)volatility, Jun-20 to Jun-21 \""
]
},
@@ -73,8 +73,9 @@
"metadata": {},
"outputs": [],
"source": [
"qs.extend_pandas()\n",
"qs.reports.html(portfolio_returns.totals.pct_change(1), \"SPY\", title=report_title, output=\"/tmp/tearsheet.html\")"
"if len(portfolio_returns):\n",
" qs.extend_pandas()\n",
" qs.reports.html(portfolio_returns.totals.pct_change(1), \"SPY\", title=report_title, output=\"/tmp/tearsheet.html\")"
]
},
{
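
The cell above now guards the QuantStats tearsheet behind a `len()` check. Below is a minimal standalone sketch of the same flow (not part of the commit), assuming `calc_portfolio_returns` is importable from `liualgotrader.analytics.analysis` (the module changed later in this commit); with this change it returns an empty DataFrame when the portfolio id cannot be loaded, so the guard skips report generation instead of raising.

```python
import quantstats as qs

from liualgotrader.analytics.analysis import calc_portfolio_returns

portfolio_id = "..."  # placeholder: paste a real portfolio id here
report_title = "My portfolio"

portfolio_returns = calc_portfolio_returns(portfolio_id)

if len(portfolio_returns):
    qs.extend_pandas()
    qs.reports.html(
        portfolio_returns.totals.pct_change(1),  # daily returns from the equity curve
        "SPY",                                   # benchmark ticker
        title=report_title,
        output="/tmp/tearsheet.html",
    )
else:
    print("No returns loaded; verify the portfolio id and re-run.")
```
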
18 changes: 12 additions & 6 deletions liualgotrader/analytics/analysis.py
@@ -483,9 +483,14 @@ def calc_portfolio_returns(portfolio_id: str) -> pd.DataFrame:
loop = asyncio.get_event_loop()
_ = loop.run_until_complete(Portfolio.load_by_portfolio_id(portfolio_id))

account_id, initial_account_size = loop.run_until_complete(
Portfolio.load_details(portfolio_id)
)
try:
account_id, initial_account_size = loop.run_until_complete(
Portfolio.load_details(portfolio_id)
)
except Exception:
print("ERROR loading portfolio-id, please verify id and re-run.")
return pd.DataFrame()

data_loader = DataLoader()
trades = load_trades_by_portfolio(portfolio_id)
start_date = trades.client_time.min().date()
@@ -515,15 +520,16 @@ def calc_hyperparameters_analysis(optimizer_run_id: str) -> pd.DataFrame:
def calc_hyperparameters_analysis(optimizer_run_id: str) -> pd.DataFrame:
loop = asyncio.get_event_loop()

portfolio_ids = loop.run_until_complete(
OptimizerRun.get_portfolio_ids(optimizer_run_id)
portfolio_ids_parameters = loop.run_until_complete(
OptimizerRun.get_portfolio_ids_parameters(optimizer_run_id)
)

portfolio_ids, hypers = list(map(list, zip(*portfolio_ids_parameters)))
df = None
if len(portfolio_ids):
for i in tqdm(range(len(portfolio_ids)), desc="Loading Portfolios"):
_df = calc_portfolio_returns(portfolio_ids[i])
_df["portfolio_id"] = portfolio_ids[i]
_df["configurations"] = hypers[i]
_df.reset_index(inplace=True)
_df = _df.set_index(["portfolio_id", "date"])
df = pd.concat([df, _df], axis=0) if df is not None else _df
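
For clarity, here is a tiny illustration (with made-up values, not from the repository) of the unpacking pattern introduced above: `get_portfolio_ids_parameters` now returns `(portfolio_id, configuration)` pairs, and `zip(*...)` splits them into the parallel `portfolio_ids` and `hypers` lists consumed by the loop.

```python
# Hypothetical sample data standing in for OptimizerRun.get_portfolio_ids_parameters()
portfolio_ids_parameters = [
    ("portfolio-a", {"sma_window": 20}),
    ("portfolio-b", {"sma_window": 50}),
]

portfolio_ids, hypers = list(map(list, zip(*portfolio_ids_parameters)))
# portfolio_ids -> ['portfolio-a', 'portfolio-b']
# hypers        -> [{'sma_window': 20}, {'sma_window': 50}]
# Note: an empty result would make the unpacking raise ValueError, so the
# optimizer run id should reference at least one generated portfolio.
```
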
