Soumitra-1808 committed
Commit 2cd05b7
1 Parent(s): 169e0b1

Upload modified spatial-ner model for landmark

landmark_tagging/checkpoint-114/config.json CHANGED
@@ -4,20 +4,21 @@
   ],
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
+  "dtype": "float32",
   "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "id2label": {
     "0": "O",
-    "1": "B-T",
-    "2": "I-T"
+    "1": "B-Lm",
+    "2": "I-Lm"
   },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
-    "B-T": 1,
-    "I-T": 2,
+    "B-Lm": 1,
+    "I-Lm": 2,
     "O": 0
   },
   "layer_norm_eps": 1e-12,
@@ -27,8 +28,7 @@
   "num_hidden_layers": 12,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
-  "torch_dtype": "float32",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.0",
   "type_vocab_size": 2,
   "use_cache": true,
   "vocab_size": 28996
landmark_tagging/checkpoint-114/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4bc2d2f3d3d80575bcf1bbe0963d9b19ac48af3b2cdadb345bd4394ef20062ae
+oid sha256:97fa57cc39df068f6ae4e7aa3fbbb4f1575a22076bc59d8f34c7eb2f9ac6be70
 size 5777
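Only the Git LFS pointer hash changes here; training_args.bin itself is the serialized TrainingArguments object that the transformers Trainer writes next to each checkpoint. A minimal sketch for inspecting it locally; the fields printed are just examples, since the actual values are not visible in this diff:

```python
# Minimal inspection sketch. training_args.bin is a pickled TrainingArguments
# object, so transformers must be installed for unpickling, and torch.load
# needs weights_only=False because the file is not a plain tensor archive.
import torch

args = torch.load(
    "landmark_tagging/checkpoint-114/training_args.bin",
    weights_only=False,
)
print(type(args).__name__)    # TrainingArguments
print(args.num_train_epochs)  # example fields; actual values not shown in this diff
print(args.learning_rate)
```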
landmark_tagging/config.json CHANGED
@@ -4,20 +4,21 @@
   ],
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
+  "dtype": "float32",
   "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "id2label": {
     "0": "O",
-    "1": "B-T",
-    "2": "I-T"
+    "1": "B-Lm",
+    "2": "I-Lm"
   },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
-    "B-T": 1,
-    "I-T": 2,
+    "B-Lm": 1,
+    "I-Lm": 2,
     "O": 0
   },
   "layer_norm_eps": 1e-12,
@@ -27,8 +28,7 @@
   "num_hidden_layers": 12,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
-  "torch_dtype": "float32",
-  "transformers_version": "4.55.2",
+  "transformers_version": "4.56.0",
   "type_vocab_size": 2,
   "use_cache": true,
   "vocab_size": 28996
landmark_tagging/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4bc2d2f3d3d80575bcf1bbe0963d9b19ac48af3b2cdadb345bd4394ef20062ae
+oid sha256:97fa57cc39df068f6ae4e7aa3fbbb4f1575a22076bc59d8f34c7eb2f9ac6be70
 size 5777