Skip to content

Commit

Permalink
Merge branch 'dev'
Browse files Browse the repository at this point in the history
  • Loading branch information
DimensionSTP committed May 26, 2024
2 parents f5f9625 + 6d79521 commit 2a0f723
Show file tree
Hide file tree
Showing 10 changed files with 67 additions and 21 deletions.
10 changes: 10 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,14 +1,24 @@
*.ckpt
*.env
*.out
*.model
*.csv
*.mp4
*.npy
*.txt
*.xml
*.json
*.pkl
*.pickle
*.ipynb
*.tar
*.tar.gz
*__pycache__
logs/runs/
logs/wandb/
multirun/
data/
baseline/
tmp*
!requirements.txt
!packages.txt
20 changes: 20 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,14 @@ conda activate myenv
pip install -r requirements.txt
```

### .env file setting
```shell
PROJECT_DIR={PROJECT_DIR}
CONNECTED_DIR={CONNECTED_DIR}
DEVICES={DEVICES}
HF_HOME={HF_HOME}
```

### Model Hyper-Parameters Tuning

* end-to-end
Expand Down Expand Up @@ -49,6 +57,18 @@ python main.py mode=test is_tuned={tuned or untuned} num_trials={num_trials} epo
python main.py mode=predict is_tuned={tuned or untuned} num_trials={num_trials} epoch={ckpt epoch}
```

### Examples of shell scripts

* train
```shell
bash scripts/train.sh
```

* test
```shell
bash scripts/test.sh
```


__If you want to change main config, use --config-name={config_name}.__

Expand Down
2 changes: 1 addition & 1 deletion configs/architecture/rhythm_architecture.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ model:

strategy: ${strategy}
lr: ${lr}
t_max: ${t_max}
period: ${period}
eta_min: ${eta_min}
interval: step
connected_dir: ${connected_dir}
14 changes: 8 additions & 6 deletions configs/rhythm.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ defaults:
- trainer: trainer

package_name: rppg-project
project_dir: /home/ddang/${package_name}
connected_dir: /data/${package_name}
project_dir: ${oc.env:PROJECT_DIR}/${package_name}
connected_dir: ${oc.env:CONNECTED_DIR}/${package_name}

seed: 2024

Expand All @@ -31,20 +31,22 @@ rnn_num_layers: 1
direction: bi

lr: 0.0001
t_max: 50
eta_min: 0.000025
period: 2
eta_min: 0.00001

monitor: val_rmse_loss
tracking_direction: min
patience: 3
patience: 2
min_delta: 0

devices: 4
devices: ${oc.decode:${oc.env:DEVICES}}
accelerator: gpu
strategy: ddp
log_every_n_steps: 10
precision: 32
accumulate_grad_batches: 1
gradient_clip_val: 1
gradient_clip_algorithm: norm
epoch: 100

model_name: CustomizedRhythmNet
Expand Down
2 changes: 2 additions & 0 deletions configs/trainer/trainer.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,6 @@ strategy: ${strategy}
log_every_n_steps: ${log_every_n_steps}
precision: ${precision}
accumulate_grad_batches: ${accumulate_grad_batches}
gradient_clip_val: ${gradient_clip_val}
gradient_clip_algorithm: ${gradient_clip_algorithm}
max_epochs: ${epoch}
8 changes: 5 additions & 3 deletions configs/tuner/rhythm_tuner.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,9 @@ hparams:
low: 0.00005
high: 0.0005
log: False
t_max:
low: 25
high: 100
period:
low: 1
high: 10
log: False
eta_min:
low: 0.00005
Expand All @@ -42,6 +42,8 @@ module_params:
log_every_n_steps: ${log_every_n_steps}
precision: ${precision}
accumulate_grad_batches: ${accumulate_grad_batches}
gradient_clip_val: ${gradient_clip_val}
gradient_clip_algorithm: ${gradient_clip_algorithm}
max_epochs: ${epoch}
monitor: ${monitor}
mode: ${tracking_direction}
Expand Down
8 changes: 7 additions & 1 deletion main.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,14 @@
import dotenv

dotenv.load_dotenv(
override=True,
)

import os
import warnings

os.environ["HYDRA_FULL_ERROR"] = "1"
os.environ["HF_HOME"] = "/data/huggingface"
os.environ["HF_HOME"] = os.environ.get("HF_HOME")
os.environ["TOKENIZERS_PARALLELISM"] = "false"
warnings.filterwarnings("ignore")

Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ opencv-python==4.8.1.78
optuna==3.5.0
pandas==1.5.0
pillow==10.2.0
python-dotenv==1.0.1
pytorch-lightning==2.2.0
scikit-learn==1.3.2
scipy==1.10.1
Expand Down
7 changes: 4 additions & 3 deletions src/architectures/rhythm_architecture.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def __init__(
model: nn.Module,
strategy: str,
lr: float,
t_max: int,
period: int,
eta_min: float,
interval: str,
connected_dir: str,
Expand All @@ -25,7 +25,7 @@ def __init__(
self.model = model
self.strategy = strategy
self.lr = lr
self.t_max = t_max
self.period = period
self.eta_min = eta_min
self.interval = interval
self.connected_dir = connected_dir
Expand Down Expand Up @@ -81,9 +81,10 @@ def configure_optimizers(self) -> Dict[str, Any]:
self.parameters(),
lr=self.lr,
)
t_max = self.period * self.trainer.num_training_batches
scheduler = optim.lr_scheduler.CosineAnnealingLR(
optimizer=optimizer,
T_max=self.t_max,
T_max=t_max,
eta_min=self.eta_min,
)
return {
Expand Down
16 changes: 9 additions & 7 deletions src/tuners/rhythm_tuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,12 +107,12 @@ def optuna_objective(
high=self.hparams.lr.high,
log=self.hparams.lr.log,
)
if self.hparams.t_max:
params["t_max"] = trial.suggest_int(
name="t_max",
low=self.hparams.t_max.low,
high=self.hparams.t_max.high,
log=self.hparams.t_max.log,
if self.hparams.period:
params["period"] = trial.suggest_int(
name="period",
low=self.hparams.period.low,
high=self.hparams.period.high,
log=self.hparams.period.log,
)
if self.hparams.eta_min:
params["eta_min"] = trial.suggest_float(
Expand All @@ -133,7 +133,7 @@ def optuna_objective(
model=model,
strategy=self.module_params.strategy,
lr=params["lr"],
t_max=params["t_max"],
period=params["period"],
eta_min=params["eta_min"],
interval=self.module_params.interval,
connected_dir=self.module_params.connected_dir,
Expand All @@ -154,6 +154,8 @@ def optuna_objective(
log_every_n_steps=self.module_params.log_every_n_steps,
precision=self.module_params.precision,
accumulate_grad_batches=self.module_params.accumulate_grad_batches,
gradient_clip_val=self.module_params.gradient_clip_val,
gradient_clip_algorithm=self.module_params.gradient_clip_algorithm,
max_epochs=self.module_params.max_epochs,
enable_checkpointing=False,
callbacks=callbacks,
Expand Down

0 comments on commit 2a0f723

Please sign in to comment.