Update configs with best params

Filip Stefaniuk 2024-09-15 12:10:09 +02:00
parent a0e06cfc06
commit 8082e99159
7 changed files with 16 additions and 16 deletions

View File

@@ -6,7 +6,7 @@ command:
   - ${program}
   - "./configs/experiments/informer-btcusdt-15m-quantile.yaml"
   - "--patience"
-  - "10"
+  - "20"
   - "--store-predictions"
 method: grid
 metric:

View File

@@ -6,7 +6,7 @@ command:
   - ${program}
   - "./configs/experiments/informer-btcusdt-30m-quantile.yaml"
   - "--patience"
-  - "10"
+  - "20"
   - "--store-predictions"
 method: grid
 metric:

View File

@@ -6,7 +6,7 @@ command:
   - ${program}
   - "./configs/experiments/informer-btcusdt-5m-quantile.yaml"
   - "--patience"
-  - "10"
+  - "20"
   - "--store-predictions"
 method: grid
 metric:
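
The three hunks above make the same change in the 15m, 30m and 5m sweep files: the early-stopping patience passed to the training script goes from 10 to 20. As a rough sketch of how such a command block reads after the change, assuming the usual W&B sweep layout where ${program} is replaced by the sweep's program: entry (not visible in these hunks; train.py below is only a placeholder):

program: train.py                 # placeholder; the real entry point is outside these hunks
command:
  - ${program}                    # expanded by the sweep agent to the program: value above
  - "./configs/experiments/informer-btcusdt-15m-quantile.yaml"
  - "--patience"
  - "20"                          # early-stopping patience raised from 10 to 20
  - "--store-predictions"
method: grid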

View File

@@ -1,7 +1,7 @@
 future_window:
   value: 5
 past_window:
-  value: 48
+  value: 22
 batch_size:
   value: 64
 max_epochs:
@@ -62,8 +62,8 @@ model:
   d_model: 256
   d_fully_connected: 512
   n_attention_heads: 2
-  dropout: 0.1
-  n_encoder_layers: 2
+  dropout: 0.05
+  n_encoder_layers: 1
   n_decoder_layers: 1
-  learning_rate: 0.001
+  learning_rate: 0.0001
   optimizer: "Adam"

View File

@@ -1,7 +1,7 @@
 future_window:
   value: 5
 past_window:
-  value: 48
+  value: 22
 batch_size:
   value: 64
 max_epochs:
@@ -62,8 +62,8 @@ model:
   d_model: 256
   d_fully_connected: 512
   n_attention_heads: 2
-  dropout: 0.1
-  n_encoder_layers: 2
+  dropout: 0.05
+  n_encoder_layers: 1
   n_decoder_layers: 1
-  learning_rate: 0.001
+  learning_rate: 0.0001
   optimizer: "Adam"

View File

@@ -1,7 +1,7 @@
 future_window:
   value: 5
 past_window:
-  value: 48
+  value: 22
 batch_size:
   value: 64
 max_epochs:
@@ -62,8 +62,8 @@ model:
   d_model: 256
   d_fully_connected: 512
   n_attention_heads: 2
-  dropout: 0.1
-  n_encoder_layers: 2
+  dropout: 0.05
+  n_encoder_layers: 1
   n_decoder_layers: 1
-  learning_rate: 0.001
+  learning_rate: 0.0001
   optimizer: "Adam"
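
The three hunks above apply the same tuned values to the 5m, 15m and 30m configurations. Collected in one place, the updated values (with the unchanged neighbours kept for context) read roughly as follows, as a sketch that lists only the keys visible in the hunks:

past_window:
  value: 22               # shortened from 48
model:
  d_model: 256
  d_fully_connected: 512
  n_attention_heads: 2
  dropout: 0.05           # lowered from 0.1
  n_encoder_layers: 1     # reduced from 2
  n_decoder_layers: 1
  learning_rate: 0.0001   # lowered from 0.001
  optimizer: "Adam"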

View File

@@ -22,7 +22,7 @@ parameters:
 parameters:
   name:
     value: "Informer"
-  dmodel:
+  d_model:
     values: [256, 512, 1024]
   d_fully_connected:
     values: [256, 512, 1024]
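
The last hunk only renames the sweep parameter key from dmodel to d_model, presumably so the grid values line up with the d_model field used in the model configs above instead of sitting under a mismatched name. Assuming this sweep also uses method: grid, as the other sweep files in this commit do, the two keys shown here already span 3 × 3 = 9 combinations of d_model and d_fully_connected.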