@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix ns0: <http://data.loterre.fr/OntoTM#> .
@prefix ns1: <http://w3id.org/meta-share/omtd-share/> .
@prefix ns2: <http://www.ebi.ac.uk/swo/> .
@prefix dc: <http://purl.org/dc/terms/> .

ltk:\-SCTX2SHF-V
  skos:prefLabel "plongement lexical"@fr, "word embedding"@en ;
  a skos:Concept ;
  ns0:executes ltk:\-DCJM3LC1-6 .

ltk:\-C3259FJL-2
  skos:prefLabel "anglais"@fr, "English"@en ;
  a skos:Concept ;
  ns0:isInputLanguageOf ltk:\-DCJM3LC1-6 .

ltk:\-TG9HQGK7-Z
  skos:prefLabel "generic"@en, "générique"@fr ;
  a skos:Concept ;
  ns0:isApplicationFieldOf ltk:\-DCJM3LC1-6 .

ltk:\-DCJM3LC1-6
  ns1:documentationUrl <https://github.com/NVIDIA/Megatron-LM> ;
  skos:prefLabel "Megatron-LM"@en, "Megatron-LM"@fr ;
  skos:broader ltk:\-SWNPX252-5 ;
  ns0:hasRepository <https://github.com/NVIDIA/Megatron-LM> ;
  skos:definition "Language model pre-training based on GPU parallelism."@en, "Modèle de langage pré-entraîné grâce à des traitements en parallèle de GPU."@fr ;
  ns0:baseOf ltk:\-BS2TNVVP-N ;
  ns2:SWO_0040005 ltk:\-SCTX2SHF-V ;
  ns0:hasInputLanguage ltk:\-C3259FJL-2 ;
  dc:bibliographicCitation "• Narayanan, D., Shoeybi, M., Casper, J., LeGresley, P., Patwary, M., Korthikanti, V. A., Vainbrand, D., Kashinkunti, P., Bernauer, J., Catanzaro, B., Phanishayee, A., & Zaharia, M. (2021). Efficient large-scale language model training on GPU clusters using megatron-lm. ArXiv:2104.04473 [Cs].  <a href=\"http://arxiv.org/abs/2104.04473\">http://arxiv.org/abs/2104.04473</a>", "• Shoeybi, M., Patwary, M., Puri, R., LeGresley, P., Casper, J., & Catanzaro, B. (2019). Megatron-LM: Training multi-billion parameter language models using GPU model parallelism.  <a href=\"https://arxiv.org/abs/1909.08053v1\">https://arxiv.org/abs/1909.08053v1</a>" ;
  skos:inScheme ltk: ;
  ns2:SWO_0000741 ltk:\-WVMJL37J-5 ;
  ns0:hasApplicationField ltk:\-TG9HQGK7-Z ;
  ns2:has_license ltk:\-GVNT1P3N-X ;
  a skos:Concept .

ltk:\-WVMJL37J-5
  skos:prefLabel "Python"@fr, "Python"@en ;
  a skos:Concept .

ltk:\-BS2TNVVP-N
  skos:prefLabel "BioMegatron"@en, "BioMegatron"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:\-DCJM3LC1-6 .

ltk:\-GVNT1P3N-X
  skos:prefLabel "Apache License Version 2.0"@fr, "Apache License Version 2.0"@en ;
  a skos:Concept ;
  ns2:is_license_for ltk:\-DCJM3LC1-6 .

ltk:\-SWNPX252-5
  skos:prefLabel "large language model"@en, "grand modèle de langue"@fr ;
  a skos:Concept ;
  skos:narrower ltk:\-DCJM3LC1-6 .

ltk: a skos:ConceptScheme .
