Skip to content

Commit

Permalink
Add model metas (#22)
Browse files Browse the repository at this point in the history
Add model metas

Co-authored-by: Explosion Bot <[email protected]>
  • Loading branch information
ines and explosion-bot committed Oct 8, 2019
2 parents 7379fc4 + acc64c6 commit 79eeb72
Show file tree
Hide file tree
Showing 5 changed files with 185 additions and 0 deletions.
37 changes: 37 additions & 0 deletions meta/de_trf_bertbasecased_lg-2.2.0.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
{
"lang": "de",
"lang_factory": "trf",
"name": "trf_bertbasecased_lg",
"description": "Provides weights and configuration for the pretrained transformer model `bert-base-german-cased`, published by deepset. The package uses HuggingFace's `transformers` implementation of the model. Pretrained transformer models assign detailed contextual word representations, using knowledge drawn from a large corpus of unlabelled text. You can use the contextual word representations as features in a variety of pipeline components that can be trained on your own data.",
"notes": "Requires the `spacy-transformers` package to be installed. A CUDA-compatible GPU is advised for reasonable performance.",
"author": "deepset (repackaged by Explosion)",
"email": "[email protected]",
"url": "https://github.com/explosion/spacy-transformers",
"license": "MIT",
"sources": [
{
"name": "bert-base-german-cased",
"author": "deepset",
"url": "https://deepset.ai/german-bert"
}
],
"version": "2.2.0",
"requirements": [
"spacy-transformers>=0.5.0"
],
"spacy_version": ">=2.2.1",
"vectors": {
"width": 0,
"vectors": 0,
"keys": 0,
"name": null
},
"pipeline": [
"sentencizer",
"trf_wordpiecer",
"trf_tok2vec"
],
"labels": {},
"size": "386 MB",
"checksum": "f9f27bfd138f5b55b3177bb2d933d1825a107275f599c11797f9c2f5dea048b4"
}
37 changes: 37 additions & 0 deletions meta/en_trf_bertbaseuncased_lg-2.2.0.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
{
"lang": "en",
"lang_factory": "trf",
"name": "trf_bertbaseuncased_lg",
"description": "Provides weights and configuration for the pretrained transformer model `bert-base-uncased`, published by Google Research. The package uses HuggingFace's `transformers` implementation of the model. Pretrained transformer models assign detailed contextual word representations, using knowledge drawn from a large corpus of unlabelled text. You can use the contextual word representations as features in a variety of pipeline components that can be trained on your own data.",
"notes": "Requires the `spacy-transformers` package to be installed. A CUDA-compatible GPU is advised for reasonable performance.",
"author": "Google Research (repackaged by Explosion)",
"email": "[email protected]",
"url": "https://github.com/explosion/spacy-transformers",
"license": "MIT",
"sources": [
{
"name": "bert-base-uncased",
"author": "Google Research",
"url": "https://github.com/google-research/bert"
}
],
"version": "2.2.0",
"requirements": [
"spacy-transformers>=0.5.0"
],
"spacy_version": ">=2.2.1",
"vectors": {
"width": 0,
"vectors": 0,
"keys": 0,
"name": null
},
"pipeline": [
"sentencizer",
"trf_wordpiecer",
"trf_tok2vec"
],
"labels": {},
"size": "387 MB",
"checksum": "c2fa0f48ff5bf176a69ee0439a8d9b9f068a8805be8de60d38b1cac071d149c4"
}
37 changes: 37 additions & 0 deletions meta/en_trf_distilbertbaseuncased_lg-2.2.0.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
{
"lang": "en",
"lang_factory": "trf",
"name": "trf_distilbertbaseuncased_lg",
"description": "Provides weights and configuration for the pretrained transformer model `distilbert-base-uncased`, published by Hugging Face. The package uses HuggingFace's `transformers` implementation of the model. Pretrained transformer models assign detailed contextual word representations, using knowledge drawn from a large corpus of unlabelled text. You can use the contextual word representations as features in a variety of pipeline components that can be trained on your own data.",
"notes": "Requires the `spacy-transformers` package to be installed. A CUDA-compatible GPU is advised for reasonable performance.",
"author": "Hugging Face (repackaged by Explosion)",
"email": "[email protected]",
"url": "https://github.com/explosion/spacy-transformers",
"license": "MIT",
"sources": [
{
"name": "distilbert-base-uncased",
"author": "Hugging Face",
"url": "https://medium.com/huggingface/distilbert-8cf3380435b5"
}
],
"version": "2.2.0",
"requirements": [
"spacy-transformers>=0.5.0"
],
"spacy_version": ">=2.2.1",
"vectors": {
"width": 0,
"vectors": 0,
"keys": 0,
"name": null
},
"pipeline": [
"sentencizer",
"trf_wordpiecer",
"trf_tok2vec"
],
"labels": {},
"size": "233 MB",
"checksum": "be221dadce03599e57da5c7341abe323ad2fb1a5990a342288745f25d20dc854"
}
37 changes: 37 additions & 0 deletions meta/en_trf_robertabase_lg-2.2.0.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
{
"lang": "en",
"lang_factory": "trf",
"name": "trf_robertabase_lg",
"description": "Provides weights and configuration for the pretrained transformer model `roberta-base`, published by Facebook. The package uses HuggingFace's `transformers` implementation of the model. Pretrained transformer models assign detailed contextual word representations, using knowledge drawn from a large corpus of unlabelled text. You can use the contextual word representations as features in a variety of pipeline components that can be trained on your own data.",
"notes": "Requires the `spacy-transformers` package to be installed. A CUDA-compatible GPU is advised for reasonable performance.",
"author": "Facebook (repackaged by Explosion)",
"email": "[email protected]",
"url": "https://github.com/explosion/spacy-transformers",
"license": "MIT",
"sources": [
{
"name": "roberta-base",
"author": "Facebook",
"url": "https://github.com/pytorch/fairseq/tree/master/examples/roberta"
}
],
"version": "2.2.0",
"requirements": [
"spacy-transformers>=0.5.0"
],
"spacy_version": ">=2.2.1",
"vectors": {
"width": 0,
"vectors": 0,
"keys": 0,
"name": null
},
"pipeline": [
"sentencizer",
"trf_wordpiecer",
"trf_tok2vec"
],
"labels": {},
"size": "278 MB",
"checksum": "cf32b4f5dbbd3ac4e2584f1cec77a91ceabc3b452005380b652bfc890de80680"
}
37 changes: 37 additions & 0 deletions meta/en_trf_xlnetbasecased_lg-2.2.0.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
{
"lang": "en",
"lang_factory": "trf",
"name": "trf_xlnetbasecased_lg",
"description": "Provides weights and configuration for the pretrained transformer model `xlnet-base-cased`, published by CMU and Google Brain. The package uses HuggingFace's `transformers` implementation of the model. Pretrained transformer models assign detailed contextual word representations, using knowledge drawn from a large corpus of unlabelled text. You can use the contextual word representations as features in a variety of pipeline components that can be trained on your own data.",
"notes": "Requires the `spacy-transformers` package to be installed. A CUDA-compatible GPU is advised for reasonable performance.",
"author": "CMU & Google Brain (repackaged by Explosion)",
"email": "[email protected]",
"url": "https://github.com/explosion/spacy-transformers",
"license": "MIT",
"sources": [
{
"name": "xlnet-base-cased",
"author": "CMU & Google Brain",
"url": "https://github.com/zihangdai/xlnet/"
}
],
"version": "2.2.0",
"requirements": [
"spacy-transformers>=0.5.0"
],
"spacy_version": ">=2.2.1",
"vectors": {
"width": 0,
"vectors": 0,
"keys": 0,
"name": null
},
"pipeline": [
"sentencizer",
"trf_wordpiecer",
"trf_tok2vec"
],
"labels": {},
"size": "413 MB",
"checksum": "e769e3180783457f26136638a5b664a068b7a711b0c8dd6c01187cc7d0ee9ccf"
}

0 comments on commit 79eeb72

Please sign in to comment.