@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix ns0: <http://data.loterre.fr/OntoTM#> .
@prefix ns1: <http://www.ebi.ac.uk/swo/> .
@prefix dc: <http://purl.org/dc/terms/> .
@prefix ns2: <http://w3id.org/meta-share/omtd-share/> .
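# Prefix note: ns0 is the OntoTM ontology, ns1 the EBI Software Ontology (SWO),
# ns2 the OMTD-SHARE ontology, and dc here binds to Dublin Core Terms (dcterms).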

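# word embedding: linked to the MiniLM entry below via ns0:executes.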
ltk:-SCTX2SHF-V
  skos:prefLabel "plongement lexical"@fr, "word embedding"@en ;
  a skos:Concept ;
  ns0:executes ltk:-NV2360BM-D .

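# Main entry: MiniLM, a small, fast pre-trained transformer model from Microsoft, distilled from BERT.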
ltk:-NV2360BM-D
  a skos:Concept ;
  skos:definition "\"Small and fast pre-trained models for language understanding and generation\" (source: https://github.com/microsoft/unilm/tree/master/minilm)"@en, "« modèles simples et rapides pré-entraînés pour la compréhension et la génération du langage » (source : https://github.com/microsoft/unilm/tree/master/minilm)"@fr ;
  ns1:has_license ltk:-TSJJ2WD8-T ;
  skos:inScheme ltk: ;
  ns0:hasRepository <https://github.com/microsoft/unilm/tree/master/minilm> ;
  dc:bibliographicCitation "Wang, W., Bao, H., Huang, S., Dong, L., & Wei, F. (2021). MiniLMv2: Multi-Head Self-Attention Relation Distillation for Compressing Pretrained Transformers. arXiv:2012.15828. https://doi.org/10.48550/arXiv.2012.15828", "Wang, W., Wei, F., Dong, L., Bao, H., Yang, N., & Zhou, M. (2020). MiniLM: Deep self-attention distillation for task-agnostic compression of pre-trained transformers. arXiv:2002.10957. http://arxiv.org/abs/2002.10957" ;
  skos:prefLabel "MiniLM"@en, "MiniLM"@fr ;
  skos:broader ltk:-SWNPX252-5 ;
  ns0:basedOn ltk:-SSWGBD85-7 ;
  ns2:documentationUrl <https://github.com/microsoft/unilm/tree/master/minilm> ;
  ns0:hasInputLanguage ltk:-C3259FJL-2 ;
  ns1:SWO_0000740 ltk:-PFDPNVQ7-3 ;
  ns1:SWO_0000741 ltk:-WVMJL37J-5 ;
  ns0:baseOf ltk:-J34XPZNX-9, ltk:-KQ7NRS7V-L ;
  ns1:SWO_0040005 ltk:-SCTX2SHF-V ;
  ns0:hasApplicationField ltk:-TG9HQGK7-Z .

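# English: declared as the input language of MiniLM.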
ltk:-C3259FJL-2
  skos:prefLabel "anglais"@fr, "English"@en ;
  a skos:Concept ;
  ns0:isInputLanguageOf ltk:-NV2360BM-D .

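# generic: application field of MiniLM.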
ltk:-TG9HQGK7-Z
  skos:prefLabel "generic"@en, "générique"@fr ;
  a skos:Concept ;
  ns0:isApplicationFieldOf ltk:-NV2360BM-D .

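# transformer: linked from MiniLM via ns1:SWO_0000740 above.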
ltk:-PFDPNVQ7-3
  skos:prefLabel "transformeur"@fr, "transformer"@en ;
  a skos:Concept ;
  ns1:SWO_0000085 ltk:-NV2360BM-D .

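# Python: linked from MiniLM via ns1:SWO_0000741 above.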
ltk:-WVMJL37J-5
  skos:prefLabel "Python"@fr, "Python"@en ;
  a skos:Concept .

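# large language model: broader concept of MiniLM.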
ltk:-SWNPX252-5
  skos:prefLabel "large language model"@en, "grand modèle de langue"@fr ;
  a skos:Concept ;
  skos:narrower ltk:-NV2360BM-D .

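# all-MiniLM-L6: model based on MiniLM.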
ltk:-J34XPZNX-9
  skos:prefLabel "all-Mini-LM-L6fr"@fr, "all-Mini-LM-L6"@en ;
  a skos:Concept ;
  ns0:basedOn ltk:-NV2360BM-D .

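# FARM: based on MiniLM.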
ltk:-KQ7NRS7V-L
  skos:prefLabel "FARM"@en, "FARM"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:-NV2360BM-D .

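# BERT: the model that MiniLM is based on.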
ltk:-SSWGBD85-7
  skos:prefLabel "BERT"@fr, "BERT"@en ;
  a skos:Concept ;
  ns0:baseOf ltk:-NV2360BM-D .

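# MIT License: license attached to MiniLM via ns1:has_license.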
ltk:-TSJJ2WD8-T
  skos:prefLabel "MIT License"@en, "licence MIT"@fr ;
  a skos:Concept ;
  ns1:is_license_for ltk:-NV2360BM-D .

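# The concept scheme that the MiniLM entry above declares via skos:inScheme.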
ltk: a skos:ConceptScheme .
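
# A minimal SPARQL sketch (kept in comments so this file stays valid Turtle), assuming the
# extract has been loaded into a SPARQL-capable store: it lists the models recorded here as
# being based on MiniLM, mirroring the ns0:baseOf triples above.
#
#   PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
#   PREFIX ns0:  <http://data.loterre.fr/OntoTM#>
#
#   SELECT ?derivative ?label WHERE {
#     ?minilm skos:prefLabel "MiniLM"@en ;
#             ns0:baseOf ?derivative .
#     ?derivative skos:prefLabel ?label .
#   }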
