Should now be using absolute position embeddings
Files changed:
- config.json (+1, -1)
- pytorch_model.bin (+1, -1)
config.json CHANGED
@@ -30,7 +30,7 @@
   "num_hidden_layers": 4,
   "pad_token_id": 0,
   "positive_label_weight": 10.0,
-  "reset_position_index_per_cell": true,
+  "reset_position_index_per_cell": false,
   "select_one_column": true,
   "softmax_temperature": 1.0,
   "type_vocab_size": [
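For context, `reset_position_index_per_cell` controls whether TAPAS restarts position indices at every table cell (relative position embeddings) or numbers tokens continuously (absolute position embeddings, as in BERT); setting it to `false` is what makes this checkpoint "use absolute position embeddings". A minimal sketch of the equivalent setting in `transformers` (the model class choice here is illustrative, not taken from this repo):

```python
from transformers import TapasConfig, TapasForQuestionAnswering

# reset_position_index_per_cell=False disables the per-cell (relative)
# position indices, so token positions increase continuously across the
# table and the absolute position embeddings are used -- the behavior
# this commit targets.
config = TapasConfig(reset_position_index_per_cell=False)
model = TapasForQuestionAnswering(config)

print(config.reset_position_index_per_cell)  # False
```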
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8401411bb83648c2979404e29c91343bbbb9ae8d09894b24f554d79bd7aa483d
 size 117215095
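The weights are stored via Git LFS, so the commit only updates the pointer file (the `oid` digest and `size`); the new digest identifies the retrained weights. A hedged sketch of checking a downloaded copy against this pointer, assuming `pytorch_model.bin` sits in the current directory:

```python
import hashlib
import os

# Values taken from the LFS pointer in this commit.
EXPECTED_OID = "8401411bb83648c2979404e29c91343bbbb9ae8d09894b24f554d79bd7aa483d"
EXPECTED_SIZE = 117215095

path = "pytorch_model.bin"  # assumed local download location

# Cheap check first: the pointer records the exact byte size.
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"

# Then hash the file in chunks to avoid loading ~117 MB at once.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
```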