
Commit

Update lra_config.py
mlpen authored Feb 27, 2021
1 parent 9d9d5ec commit 2bcc280
Showing 1 changed file with 0 additions and 10 deletions.
10 changes: 0 additions & 10 deletions LRA/code/lra_config.py
@@ -40,7 +40,6 @@
 "nystrom-256":32,
 "linformer-256":32,
 "reformer-2":32,
-"longformer-128":32,
 "performer-256":32,
 "linear":32,
 },
@@ -52,7 +51,6 @@
 "nystrom-256":{"attention_grad_checkpointing":False, "num_landmarks":256, "conv_kernel_size":35},
 "linformer-256":{"attention_grad_checkpointing":False, "linformer_k":256},
 "reformer-2":{"attention_grad_checkpointing":False, "num_hash":2},
-"longformer-128":{"attention_grad_checkpointing":False, "window_size":128, "first_token_view":True},
 "performer-256":{"attention_grad_checkpointing":False, "rp_dim":256, "kernel_type":"relu"},
 "linear":{"attention_grad_checkpointing":False},
 }
@@ -97,7 +95,6 @@
 "nystrom-256":128,
 "linformer-256":128,
 "reformer-2":128,
-"longformer-128":128,
 "performer-256":128,
 "linear":128,
 },
@@ -109,7 +106,6 @@
 "nystrom-256":{"attention_grad_checkpointing":False, "num_landmarks":256, "conv_kernel_size":35},
 "linformer-256":{"attention_grad_checkpointing":False, "linformer_k":256},
 "reformer-2":{"attention_grad_checkpointing":False, "num_hash":2},
-"longformer-128":{"attention_grad_checkpointing":False, "window_size":128, "first_token_view":True},
 "performer-256":{"attention_grad_checkpointing":False, "rp_dim":256, "kernel_type":"relu"},
 "linear":{"attention_grad_checkpointing":False},
 }
@@ -149,7 +145,6 @@
 "nystrom-256":128,
 "linformer-256":128,
 "reformer-2":128,
-"longformer-128":128,
 "performer-256":128,
 "linear":128,
 },
@@ -161,7 +156,6 @@
 "nystrom-256":{"attention_grad_checkpointing":False, "num_landmarks":256, "conv_kernel_size":35},
 "linformer-256":{"attention_grad_checkpointing":False, "linformer_k":256},
 "reformer-2":{"attention_grad_checkpointing":False, "num_hash":2},
-"longformer-128":{"attention_grad_checkpointing":False, "window_size":128, "first_token_view":True},
 "performer-256":{"attention_grad_checkpointing":False, "rp_dim":256, "kernel_type":"relu"},
 "linear":{"attention_grad_checkpointing":False},
 }
@@ -206,7 +200,6 @@
 "nystrom-256":32,
 "linformer-256":32,
 "reformer-2":32,
-"longformer-128":32,
 "performer-256":32,
 "linear":32,
 },
@@ -218,7 +211,6 @@
 "nystrom-256":{"attention_grad_checkpointing":False, "num_landmarks":256, "conv_kernel_size":35},
 "linformer-256":{"attention_grad_checkpointing":False, "linformer_k":256},
 "reformer-2":{"attention_grad_checkpointing":False, "num_hash":2},
-"longformer-128":{"attention_grad_checkpointing":False, "window_size":128, "first_token_view":True},
 "performer-256":{"attention_grad_checkpointing":False, "rp_dim":256, "kernel_type":"relu"},
 "linear":{"attention_grad_checkpointing":False},
 }
@@ -263,7 +255,6 @@
 "nystrom-256":32,
 "linformer-256":32,
 "reformer-2":32,
-"longformer-128":32,
 "performer-256":32,
 "linear":32,
 },
@@ -275,7 +266,6 @@
 "nystrom-256":{"attention_grad_checkpointing":False, "num_landmarks":256, "conv_kernel_size":35},
 "linformer-256":{"attention_grad_checkpointing":False, "linformer_k":256},
 "reformer-2":{"attention_grad_checkpointing":False, "num_hash":2},
-"longformer-128":{"attention_grad_checkpointing":False, "window_size":128, "first_token_view":True},
 "performer-256":{"attention_grad_checkpointing":False, "rp_dim":256, "kernel_type":"relu"},
 "linear":{"attention_grad_checkpointing":False},
 }
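For context, each task section of lra_config.py pairs every attention variant with an integer setting (32 or 128 above) and with a dictionary of attention hyperparameters; this commit removes the "longformer-128" entry (window_size 128, with a global view for the first token) from both dictionaries in every task. The sketch below shows how such a nested config might be consumed. The dictionary names ("batch_size", "extra_attn_config") and the lookup helper are illustrative assumptions, not the repository's actual structure.

# Minimal sketch of one task config shaped like the hunks above.
# Key names and the helper are assumptions for illustration only.
task_config = {
    "batch_size": {            # per-variant integer setting (32 in this task)
        "nystrom-256": 32,
        "linformer-256": 32,
        "reformer-2": 32,
        "performer-256": 32,   # "longformer-128": 32 was deleted by this commit
        "linear": 32,
    },
    "extra_attn_config": {     # per-variant attention hyperparameters
        "nystrom-256": {"attention_grad_checkpointing": False, "num_landmarks": 256, "conv_kernel_size": 35},
        "linformer-256": {"attention_grad_checkpointing": False, "linformer_k": 256},
        "reformer-2": {"attention_grad_checkpointing": False, "num_hash": 2},
        "performer-256": {"attention_grad_checkpointing": False, "rp_dim": 256, "kernel_type": "relu"},
        "linear": {"attention_grad_checkpointing": False},
    },
}

def settings_for(variant, config):
    # Return the integer setting and the attention kwargs for one variant.
    return config["batch_size"][variant], config["extra_attn_config"][variant]

bs, attn_kwargs = settings_for("nystrom-256", task_config)   # works
# settings_for("longformer-128", task_config) would now raise KeyError,
# since the entry was removed from both dictionaries.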
