refactor TRA #531

Merged Jul 30, 2021 (8 commits)

Changes from 1 commit
format by black
Dong Zhou authored and Dong Zhou committed Jul 21, 2021

commit 740b83ed3f2ae3de123836c57b87ae80cabddd0b
qlib/contrib/data/dataset.py (2 changes: 1 addition & 1 deletion)

@@ -122,7 +122,7 @@ def __init__(
         shuffle=True,
         drop_last=False,
         input_size=None,
-        **kwargs
+        **kwargs,
     ):

         assert num_states == 0 or horizon > 0, "please specify `horizon` to avoid data leakage"
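The only change in this file is the trailing comma after `**kwargs`. When a signature already spans multiple lines, black adds a trailing comma after the last parameter and keeps the parameters exploded on later runs (its "magic trailing comma" behavior). Below is a minimal standalone sketch of the resulting style, using placeholder parameters rather than the real qlib class:

class Example:
    def __init__(
        self,
        shuffle=True,
        drop_last=False,
        input_size=None,
        **kwargs,  # trailing comma added by black on the last parameter
    ):
        # placeholder body: just store the options
        self.shuffle = shuffle
        self.drop_last = drop_last
        self.input_size = input_size
        self.kwargs = kwargs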
qlib/contrib/model/pytorch_tra.py (13 changes: 9 additions & 4 deletions)

@@ -13,6 +13,7 @@
 import torch.nn as nn
 import torch.optim as optim
 import torch.nn.functional as F
+
 try:
     from torch.utils.tensorboard import SummaryWriter
 except:
@@ -84,8 +85,10 @@ def __init__(

         assert memory_mode in ["sample", "daily"], "invalid memory mode"
         assert transport_method in ["none", "router", "oracle"], f"invalid transport method {transport_method}"
-        assert transport_method == "none" or tra_config['num_states'] > 1, "optimal transport requires `num_states` > 1"
-        assert memory_mode != "daily" or tra_config['src_info'] == 'TPE', "daily transport can only support TPE as `src_info`"
+        assert transport_method == "none" or tra_config["num_states"] > 1, "optimal transport requires `num_states` > 1"
+        assert (
+            memory_mode != "daily" or tra_config["src_info"] == "TPE"
+        ), "daily transport can only support TPE as `src_info`"

         if transport_method == "router" and not eval_train:
             self.logger.warning("`eval_train` will be ignored when using TRA.router")
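The first touched assert only has its quotes normalized to double quotes; the second exceeded the project's line length, so black wrapped the condition in parentheses and left the error message after the closing parenthesis. A minimal standalone sketch of that wrapping pattern, with placeholder values instead of the model's configuration:

memory_mode = "sample"  # placeholder value, not read from tra_config
src_info = "TPE"        # placeholder value, not read from tra_config
assert (
    memory_mode != "daily" or src_info == "TPE"
), "daily transport can only support TPE as `src_info`"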
@@ -246,7 +249,9 @@ def test_epoch(self, epoch, data_set, return_pred=False, prefix="test", is_pretr
            all_preds, choice, prob = self.tra(hidden, state)

            if not is_pretrain and self.transport_method != "none":
-                loss, pred, L, P = self.transport_fn(all_preds, label, choice, prob, count, self.transport_method, training=False)
+                loss, pred, L, P = self.transport_fn(
+                    all_preds, label, choice, prob, count, self.transport_method, training=False
+                )
                data_set.assign_data(index, L)  # save loss to memory
            else:
                pred = all_preds.mean(dim=1)
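The same line-length rule applies to calls: the `self.transport_fn` call no longer fits on one line, so black moves its arguments onto an indented continuation line and puts the closing parenthesis on its own line. The hypothetical stand-in below (its signature and return values are invented for illustration and are not qlib's actual `transport_fn`) shows the shape of the wrapped call:

def transport_fn(all_preds, label, choice, prob, count, method, training=False):
    # placeholder body that returns four values so the unpacking works
    return 0.0, all_preds, label, prob

loss, pred, L, P = transport_fn(
    [0.1, 0.2], [0.0, 1.0], 0, 0.5, 2, "router", training=False
)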
@@ -614,7 +619,7 @@ class TRA(nn.Module):
     def __init__(self, input_size, num_states=1, hidden_size=8, tau=1.0, src_info="LR_TPE"):
         super().__init__()

-        assert src_info in ['LR', 'TPE', 'LR_TPE'], 'invalid `src_info`'
+        assert src_info in ["LR", "TPE", "LR_TPE"], "invalid `src_info`"

         self.num_states = num_states
         self.tau = tau