@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix ns0: <http://data.loterre.fr/OntoTM#> .
@prefix ns1: <http://www.ebi.ac.uk/swo/> .
@prefix dc: <http://purl.org/dc/terms/> .
@prefix ns2: <http://w3id.org/meta-share/omtd-share/> .
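# ns0 is Loterre's OntoTM text-mining ontology; ns1 is the EBI Software Ontology
# (SWO), whose numeric terms (SWO_...) are opaque property IRIs; ns2 is the
# OMTD-SHARE (OpenMinTeD) metadata ontology.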

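# The stub concepts in this file all relate to the main DeBERTa record
# (ltk:\-G64XHKJT-9); most carry the inverse of a property asserted on it.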
ltk:\-SCTX2SHF-V
  skos:prefLabel "plongement lexical"@fr, "word embedding"@en ;
  a skos:Concept ;
  ns0:executes ltk:\-G64XHKJT-9 .

ltk:\-C3259FJL-2
  skos:prefLabel "anglais"@fr, "English"@en ;
  a skos:Concept ;
  ns0:isInputLanguageOf ltk:\-G64XHKJT-9 .

ltk:\-TG9HQGK7-Z
  skos:prefLabel "generic"@en, "générique"@fr ;
  a skos:Concept ;
  ns0:isApplicationFieldOf ltk:\-G64XHKJT-9 .

ltk:\-T7ZQ5P6S-Z
  skos:prefLabel "Spark NLP Scala"@en, "Spark NLP Scala"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:\-G64XHKJT-9 .

ltk:\-PFDPNVQ7-3
  skos:prefLabel "transformeur"@fr, "transformer"@en ;
  a skos:Concept ;
  ns1:SWO_0000085 ltk:\-G64XHKJT-9 .

ltk:\-WVMJL37J-5
  skos:prefLabel "Python"@fr, "Python"@en ;
  a skos:Concept .

ltk:\-SWNPX252-5
  skos:prefLabel "large language model"@en, "grand modèle de langue"@fr ;
  a skos:Concept ;
  skos:narrower ltk:\-G64XHKJT-9 .

ltk:\-SK4M4TN5-J
  skos:prefLabel "polyBERT"@en, "polyBERT"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:\-G64XHKJT-9 .

ltk:\-XT2FHL42-3
  skos:prefLabel "Spark NLP"@en, "Spark NLP"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:\-G64XHKJT-9 .

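# Main record: the DeBERTa language model (He et al., 2020).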
ltk:\-G64XHKJT-9
  skos:prefLabel "DeBERTa"@fr, "DeBERTa"@en ;
  a skos:Concept ;
  skos:inScheme ltk: ;
  skos:altLabel "Decoding-enhanced BERT with disentangled attention"@en ;
  skos:definition "Modèle de langage neuronal basé sur un transformeur, DeBERTa (Decoding-enhanced BERT with disentangled attention) repose sur un nouveau mécanisme d'attention démêlée et un décodeur de masques amélioré (He et al., 2020)."@fr, "\"Transformer-based neural language model DeBERTa (Decoding-enhanced BERT with disentangled attention), which improves previous state-of-the-art PLMs [Pre-trained Language Models] using two novel techniques: a disentangled attention mechanism, and an enhanced mask decoder.\" (He et al., 2020)."@en ;
  skos:broader ltk:\-SWNPX252-5 ;
  ns0:basedOn ltk:\-SSWGBD85-7 ;
  ns0:baseOf ltk:\-SK4M4TN5-J, ltk:\-S7W428SC-Z, ltk:\-T7ZQ5P6S-Z, ltk:\-XT2FHL42-3 ;
  ns0:hasApplicationField ltk:\-TG9HQGK7-Z ;
  ns0:hasInputLanguage ltk:\-C3259FJL-2 ;
  ns0:hasRepository <https://github.com/microsoft/DeBERTa> ;
  ns1:has_license ltk:\-TSJJ2WD8-T ;
  ns1:SWO_0000740 ltk:\-PFDPNVQ7-3 ;
  ns1:SWO_0000741 ltk:\-WVMJL37J-5 ;
  ns1:SWO_0040005 ltk:\-SCTX2SHF-V ;
  ns2:documentationUrl <https://github.com/microsoft/DeBERTa> ;
  dc:bibliographicCitation "He, P., Liu, X., Gao, J., & Chen, W. (2020). DeBERTa: Decoding-enhanced BERT with Disentangled Attention. https://arxiv.org/abs/2006.03654v1" .

ltk:\-S7W428SC-Z
  skos:prefLabel "Spark NLP Python"@en, "Spark NLP Python"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:\-G64XHKJT-9 .

ltk:\-SSWGBD85-7
  skos:prefLabel "BERT"@fr, "BERT"@en ;
  a skos:Concept ;
  ns0:baseOf ltk:\-G64XHKJT-9 .

ltk:\-TSJJ2WD8-T
  skos:prefLabel "MIT License"@en, "licence MIT"@fr ;
  a skos:Concept ;
  ns1:is_license_for ltk:\-G64XHKJT-9 .

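# The concept scheme referenced by skos:inScheme above.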
ltk: a skos:ConceptScheme .
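
# A minimal usage sketch: listing the tools recorded as based on DeBERTa with a
# SPARQL query over this graph (the IRI is the expansion of ltk:\-G64XHKJT-9) —
#   PREFIX ns0: <http://data.loterre.fr/OntoTM#>
#   SELECT ?tool WHERE {
#     <http://data.loterre.fr/ark:/67375/LTK-G64XHKJT-9> ns0:baseOf ?tool .
#   }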
