diff --git a/README.md b/README.md index 13aed7d..1447428 100644 --- a/README.md +++ b/README.md @@ -63,17 +63,17 @@ python ./cli.py --help or simply run the project with default options: ```bash -python ./cli.py optimize-train-test +python ./cli.py optimize ``` If you have a standard set of configs you want to run the trader against, you can specify a config file to load configuration from. Rename config/config.ini.dist to config/config.ini and run ```bash -python ./cli.py --from-config config/config.ini optimize-train-test +python ./cli.py --from-config config/config.ini optimize ``` ```bash -python ./cli.py optimize-train-test +python ./cli.py optimize ``` ### Testing with vagrant @@ -92,7 +92,7 @@ Note: With vagrant you cannot take full advantage of your GPU, so is mainly for If you want to run everything within a docker container, then just use: ```bash -./run-with-docker (cpu|gpu) (yes|no) optimize-train-test +./run-with-docker (cpu|gpu) (yes|no) optimize ``` - cpu - start the container using CPU requirements @@ -101,7 +101,7 @@ If you want to run everything within a docker container, then just use: Note: in case using yes as second argument, use ```bash -python ./ cli.py --params-db-path "postgres://rl_trader:rl_trader@localhost" optimize-train-test +python ./cli.py --params-db-path "postgres://rl_trader:rl_trader@localhost" optimize ``` The database and it's data are pesisted under `data/postgres` locally. 
diff --git a/cli.py b/cli.py index aa327d5..4d11ab2 100644 --- a/cli.py +++ b/cli.py @@ -1,6 +1,5 @@ import numpy as np - -from deco import concurrent +import multiprocessing from lib.RLTrader import RLTrader from lib.cli.RLTraderCLI import RLTraderCLI @@ -12,9 +11,23 @@ args = trader_cli.get_args() -@concurrent(processes=args.parallel_jobs) -def run_concurrent_optimize(trader: RLTrader, args): - trader.optimize(args.trials, args.trials, args.parallel_jobs) +def run_concurrent_optimize(): + trader = RLTrader(**vars(args)) + trader.optimize(args.trials) + + +def concurrent_optimize(): + processes = [] + for i in range(args.parallel_jobs): + processes.append(multiprocessing.Process(target=run_concurrent_optimize, args=())) + + print(processes) + + for p in processes: + p.start() + + for p in processes: + p.join() if __name__ == '__main__': @@ -22,17 +35,10 @@ def run_concurrent_optimize(trader: RLTrader, args): trader = RLTrader(**vars(args), logger=logger) if args.command == 'optimize': - run_concurrent_optimize(trader, args) + concurrent_optimize() elif args.command == 'train': trader.train(n_epochs=args.epochs) elif args.command == 'test': trader.test(model_epoch=args.model_epoch, should_render=args.no_render) - elif args.command == 'optimize-train-test': - run_concurrent_optimize(trader, args) - trader.train( - n_epochs=args.train_epochs, - test_trained_model=args.no_test, - render_trained_model=args.no_render - ) elif args.command == 'update-static-data': download_data_async() diff --git a/lib/RLTrader.py b/lib/RLTrader.py index 36d3974..625a3f8 100644 --- a/lib/RLTrader.py +++ b/lib/RLTrader.py @@ -5,9 +5,8 @@ from os import path from typing import Dict -from deco import concurrent from stable_baselines.common.base_class import BaseRLModel -from stable_baselines.common.policies import BasePolicy, MlpPolicy +from stable_baselines.common.policies import BasePolicy, MlpLnLstmPolicy from stable_baselines.common.vec_env import DummyVecEnv, SubprocVecEnv from 
stable_baselines.common import set_global_seeds from stable_baselines import PPO2 @@ -31,7 +30,7 @@ class RLTrader: data_provider = None study_name = None - def __init__(self, modelClass: BaseRLModel = PPO2, policyClass: BasePolicy = MlpPolicy, exchange_args: Dict = {}, **kwargs): + def __init__(self, modelClass: BaseRLModel = PPO2, policyClass: BasePolicy = MlpLnLstmPolicy, exchange_args: Dict = {}, **kwargs): self.logger = kwargs.get('logger', init_logger(__name__, show_debug=kwargs.get('show_debug', True))) self.Model = modelClass @@ -162,7 +161,6 @@ def optimize_params(self, trial, n_prune_evals_per_trial: int = 2, n_tests_per_e return -1 * last_reward - @concurrent def optimize(self, n_trials: int = 100, n_parallel_jobs: int = 1, *optimize_params): try: self.optuna_study.optimize( diff --git a/lib/cli/RLTraderCLI.py b/lib/cli/RLTraderCLI.py index c2a4e4b..1a55add 100644 --- a/lib/cli/RLTraderCLI.py +++ b/lib/cli/RLTraderCLI.py @@ -44,12 +44,6 @@ def __init__(self): subparsers = self.parser.add_subparsers(help='Command', dest="command") - opt_train_test_parser = subparsers.add_parser('optimize-train-test', description='Optimize train and test') - opt_train_test_parser.add_argument('--trials', type=int, default=20, help='Number of trials') - opt_train_test_parser.add_argument('--train-epochs', type=int, default=10, help='Train for how many epochs') - opt_train_test_parser.add_argument('--no-render', action='store_false', help='Should render the model') - opt_train_test_parser.add_argument('--no-test', action='store_false', help='Should test the model') - optimize_parser = subparsers.add_parser('optimize', description='Optimize model parameters') optimize_parser.add_argument('--trials', type=int, default=1, help='Number of trials') diff --git a/requirements.base.txt b/requirements.base.txt index 0b25d07..f352090 100644 --- a/requirements.base.txt +++ b/requirements.base.txt @@ -10,5 +10,4 @@ statsmodels==0.10.0rc2 empyrical ccxt psycopg2 -deco configparser \ No 
newline at end of file diff --git a/update_data.py b/update_data.py deleted file mode 100644 index 90c2ed8..0000000 --- a/update_data.py +++ /dev/null @@ -1,42 +0,0 @@ -import asyncio -import ssl -import pandas as pd -import os - -final_date_format = '%Y-%m-%d %H:%M' -ssl._create_default_https_context = ssl._create_unverified_context - -hourly_url = "https://www.cryptodatadownload.com/cdd/Coinbase_BTCUSD_1h.csv" -daily_url = "https://www.cryptodatadownload.com/cdd/Coinbase_BTCUSD_d.csv" - - -async def save_url_to_csv(url: str, date_format: str, file_name: str): - csv = pd.read_csv(url, header=1) - csv = csv.dropna(thresh=2) - csv.columns = ['Date', 'Symbol', 'Open', 'High', 'Low', 'Close', 'VolumeFrom', 'VolumeTo'] - csv['Date'] = pd.to_datetime(csv['Date'], format=date_format) - csv['Date'] = csv['Date'].dt.strftime(final_date_format) - - final_path = os.path.join('data', 'input', file_name) - csv.to_csv(final_path, index=False) - - return csv - - -async def save_as_csv(hourly_url: str, daily_url: str): - tasks = [save_url_to_csv(hourly_url, '%Y-%m-%d %I-%p', 'coinbase-1h-btc-usd.csv'), - save_url_to_csv(daily_url, '%Y-%m-%d', 'coinbase-1d-btc-usd.csv')] - # also FIRST_EXCEPTION and ALL_COMPLETED (default) - done, pending = await asyncio.wait(tasks, return_when=asyncio.ALL_COMPLETED) - print('>> done: ', done) - print('>> pending: ', pending) # will be empty if using default return_when setting - - -def download_async(): - loop = asyncio.get_event_loop() - loop.run_until_complete(save_as_csv(hourly_url, daily_url)) - loop.close() - - -if __name__ == '__main__': - download_async() \ No newline at end of file