@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix ns0: <http://data.loterre.fr/OntoTM#> .
@prefix ns1: <http://www.ebi.ac.uk/swo/> .
@prefix ns2: <http://w3id.org/meta-share/omtd-share/> .
@prefix dc: <http://purl.org/dc/terms/> .

# Concept "word embedding" / "plongement lexical"; ns0:executes links it to
# the TinyBERT record below.
# Fix: a Turtle local name may not BEGIN with "-" (Turtle 1.1 PN_LOCAL
# grammar), so the leading hyphen is escaped as "\-".
ltk:\-SCTX2SHF-V
  skos:prefLabel "plongement lexical"@fr, "word embedding"@en ;
  a skos:Concept ;
  ns0:executes ltk:\-LKG67B9D-D .

# Concept "English" / "anglais"; declared as an input language of the
# TinyBERT record (inverse of ns0:hasInputLanguage stated there).
# Fix: leading hyphen in the local name escaped as "\-" (invalid otherwise
# per the Turtle 1.1 PN_LOCAL grammar).
ltk:\-C3259FJL-2
  skos:prefLabel "anglais"@fr, "English"@en ;
  a skos:Concept ;
  ns0:isInputLanguageOf ltk:\-LKG67B9D-D .

# Concept "generic" / "générique"; application field of the TinyBERT record
# (inverse of ns0:hasApplicationField stated there).
# Fix: leading hyphen in the local name escaped as "\-" (invalid otherwise
# per the Turtle 1.1 PN_LOCAL grammar).
ltk:\-TG9HQGK7-Z
  skos:prefLabel "generic"@en, "générique"@fr ;
  a skos:Concept ;
  ns0:isApplicationFieldOf ltk:\-LKG67B9D-D .

# Concept "transformer" / "transformeur"; linked to TinyBERT via SWO
# property SWO_0000085 — semantics per the Software Ontology; TODO confirm
# the intended direction of this SWO relation.
# Fix: leading hyphen in the local name escaped as "\-" (invalid otherwise
# per the Turtle 1.1 PN_LOCAL grammar).
ltk:\-PFDPNVQ7-3
  skos:prefLabel "transformeur"@fr, "transformer"@en ;
  a skos:Concept ;
  ns1:SWO_0000085 ltk:\-LKG67B9D-D .

# Concept "Python"; referenced from the TinyBERT record via ns1:SWO_0000741.
# Fix: leading hyphen in the local name escaped as "\-" (invalid otherwise
# per the Turtle 1.1 PN_LOCAL grammar).
ltk:\-WVMJL37J-5
  skos:prefLabel "Python"@fr, "Python"@en ;
  a skos:Concept .

# Concept "large language model" / "grand modèle de langue"; TinyBERT is its
# skos:narrower concept (the TinyBERT record states the matching
# skos:broader back-link).
# Fix: leading hyphen in the local name escaped as "\-" (invalid otherwise
# per the Turtle 1.1 PN_LOCAL grammar).
ltk:\-SWNPX252-5
  skos:prefLabel "large language model"@en, "grand modèle de langue"@fr ;
  a skos:Concept ;
  skos:narrower ltk:\-LKG67B9D-D .

# Main record: the TinyBERT model concept, with its language, application
# field, architecture, repository, definition and citation.
# The ns1:SWO_0000741 / SWO_0040005 / SWO_0000740 properties are Software
# Ontology relations — here they point at the "Python", "word embedding" and
# "transformer" concepts respectively; exact SWO semantics TODO confirm
# against the ontology.
# Fix: every "ltk:-…" prefixed name has its leading hyphen escaped as "\-"
# (a Turtle local name may not start with "-" per the Turtle 1.1 grammar).
ltk:\-LKG67B9D-D
  ns1:SWO_0000741 ltk:\-WVMJL37J-5 ;
  a skos:Concept ;
  skos:inScheme ltk: ;
  ns0:hasInputLanguage ltk:\-C3259FJL-2 ;
  ns1:SWO_0040005 ltk:\-SCTX2SHF-V ;
  skos:definition "\"TinyBERT is 7.5x smaller and 9.4x faster on inference than BERT-base and achieves competitive performances in the tasks of natural language understanding. It performs a novel transformer distillation at both the pre-training and task-specific learning stages.\" (source: https://github.com/huawei-noah/Pretrained-Language-Model/tree/master/TinyBERT)."@en, "« TinyBERT est 7,5 fois plus petit et 9,4 fois plus rapide que la base BERT pour l'inférence. » (source : https://github.com/huawei-noah/Pretrained-Language-Model/tree/master/TinyBERT)."@fr ;
  ns0:hasRepository <https://github.com/huawei-noah/Pretrained-Language-Model/tree/master/TinyBERT> ;
  skos:broader ltk:\-SWNPX252-5 ;
  ns1:SWO_0000740 ltk:\-PFDPNVQ7-3 ;
  ns2:documentationUrl <https://github.com/huawei-noah/Pretrained-Language-Model/tree/master/TinyBERT> ;
  skos:prefLabel "TinyBERT"@en, "TinyBERT"@fr ;
  ns0:hasDesignCountry "Chine"@fr, "China"@en ;
  ns0:hasApplicationField ltk:\-TG9HQGK7-Z ;
  dc:bibliographicCitation "• Jiao, X., Yin, Y., Shang, L., Jiang, X., Chen, X., Li, L., Wang, F., & Liu, Q. (2020). TinyBERT : Distilling BERT for natural language understanding. arXiv:1909.10351 [cs].  <a href=\"http://arxiv.org/abs/1909.10351\">http://arxiv.org/abs/1909.10351</a>" ;
  ns0:basedOn ltk:\-SSWGBD85-7 .

# Concept "BERT"; base model of TinyBERT (inverse of ns0:basedOn stated on
# the TinyBERT record).
# Fix: leading hyphen in the local name escaped as "\-" (invalid otherwise
# per the Turtle 1.1 PN_LOCAL grammar).
ltk:\-SSWGBD85-7
  skos:prefLabel "BERT"@fr, "BERT"@en ;
  a skos:Concept ;
  ns0:baseOf ltk:\-LKG67B9D-D .

# The bare "ltk:" namespace IRI (empty local name — valid Turtle) is itself
# the SKOS concept scheme that the concepts in this file belong to.
ltk: a skos:ConceptScheme .
