guenthermi committed
Commit 66994db · 2 parents: 64cb362 ec5f5ed

Merge branch 'main' of https://huggingface.co/jinaai/jina-embeddings-v2-base-de

Files changed (2)
  1. config.json +5 -7
  2. model.onnx +3 -0
config.json CHANGED
@@ -1,17 +1,15 @@
 {
-  "_name_or_path": "jinaai/jina-bert-base-de-en-bpe-61k",
+  "_name_or_path": "jinaai/jina-bert-implementation",
+  "model_max_length": 8192,
   "architectures": [
     "JinaBertForMaskedLM"
   ],
-  "attention_probs_dropout_prob": 0.1,
-  "attn_implementation": "torch",
+  "attention_probs_dropout_prob": 0.0,
   "auto_map": {
     "AutoConfig": "jinaai/jina-bert-implementation--configuration_bert.JinaBertConfig",
-    "AutoModel": "jinaai/jina-bert-implementation--modeling_bert.JinaBertModel",
     "AutoModelForMaskedLM": "jinaai/jina-bert-implementation--modeling_bert.JinaBertForMaskedLM",
-    "AutoModelForQuestionAnswering": "jinaai/jina-bert-implementation--modeling_bert.JinaBertForQuestionAnswering",
-    "AutoModelForSequenceClassification": "jinaai/jina-bert-implementation--modeling_bert.JinaBertForSequenceClassification",
-    "AutoModelForTokenClassification": "jinaai/jina-bert-implementation--modeling_bert.JinaBertForTokenClassification"
+    "AutoModel": "jinaai/jina-bert-implementation--modeling_bert.JinaBertModel",
+    "AutoModelForSequenceClassification": "jinaai/jina-bert-implementation--modeling_bert.JinaBertForSequenceClassification"
   },
   "classifier_dropout": null,
   "emb_pooler": "mean",
model.onnx ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:51654b7441fbfcfef6598b01cbd1ea925ca0f0cad81202fcd36fee325783f1b0
+size 641212851
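The three lines above are only a Git LFS pointer; the actual ~641 MB ONNX graph has to be fetched (e.g. with git lfs pull or a regular repository download) before it can be loaded. A minimal sketch of running the exported model with onnxruntime, assuming the usual BERT-style inputs; the graph's real input names can be checked via session.get_inputs():

```python
# Minimal sketch; assumes model.onnx has been resolved from its LFS pointer.
import numpy as np
import onnxruntime as ort
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("jinaai/jina-embeddings-v2-base-de")
session = ort.InferenceSession("model.onnx", providers=["CPUExecutionProvider"])

batch = tokenizer(["Berlin ist die Hauptstadt von Deutschland."], return_tensors="np")
# Feed only the inputs the exported graph actually declares.
graph_inputs = {i.name for i in session.get_inputs()}
feed = {k: v.astype(np.int64) for k, v in batch.items() if k in graph_inputs}

outputs = session.run(None, feed)
print([o.shape for o in outputs])  # e.g. (1, seq_len, hidden) token embeddings
```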