@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix ns0: <http://data.loterre.fr/OntoTM#> .
@prefix ns1: <http://www.ebi.ac.uk/swo/> .
@prefix ns2: <http://w3id.org/meta-share/omtd-share/> .
@prefix dc: <http://purl.org/dc/terms/> .

ltk:\-SCTX2SHF-V
  skos:prefLabel "plongement lexical"@fr, "word embedding"@en ;
  a skos:Concept ;
  ns0:executes ltk:\-CH1KS4KF-Z .

ltk:\-C3259FJL-2
  skos:prefLabel "anglais"@fr, "English"@en ;
  a skos:Concept ;
  ns0:isInputLanguageOf ltk:\-CH1KS4KF-Z .

ltk:\-TG9HQGK7-Z
  skos:prefLabel "generic"@en, "générique"@fr ;
  a skos:Concept ;
  ns0:isApplicationFieldOf ltk:\-CH1KS4KF-Z .

ltk:\-T7ZQ5P6S-Z
  skos:prefLabel "Spark NLP Scala"@en, "Spark NLP Scala"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:\-CH1KS4KF-Z .

ltk:\-PFDPNVQ7-3
  skos:prefLabel "transformeur"@fr, "transformer"@en ;
  a skos:Concept ;
  ns1:SWO_0000085 ltk:\-CH1KS4KF-Z .

ltk:\-WVMJL37J-5
  skos:prefLabel "Python"@fr, "Python"@en ;
  a skos:Concept .

ltk:\-GVNT1P3N-X
  skos:prefLabel "Apache License Version 2.0"@fr, "Apache License Version 2.0"@en ;
  a skos:Concept ;
  ns1:is_license_for ltk:\-CH1KS4KF-Z .

ltk:\-SWNPX252-5
  skos:prefLabel "large language model"@en, "grand modèle de langue"@fr ;
  a skos:Concept ;
  skos:narrower ltk:\-CH1KS4KF-Z .

ltk:\-LQ8PQSC4-R
  skos:prefLabel "ClinicalXLNet"@en, "ClinicalXLNet"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:\-CH1KS4KF-Z .

ltk:\-XT2FHL42-3
  skos:prefLabel "Spark NLP"@en, "Spark NLP"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:\-CH1KS4KF-Z .

ltk:\-KQ7NRS7V-L
  skos:prefLabel "FARM"@en, "FARM"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:\-CH1KS4KF-Z .

ltk:\-S7W428SC-Z
  skos:prefLabel "Spark NLP Python"@en, "Spark NLP Python"@fr ;
  a skos:Concept ;
  ns0:basedOn ltk:\-CH1KS4KF-Z .

ltk: a skos:ConceptScheme .
ltk:\-CH1KS4KF-Z
  ns0:baseOf ltk:\-KQ7NRS7V-L, ltk:\-XT2FHL42-3, ltk:\-LQ8PQSC4-R, ltk:\-S7W428SC-Z, ltk:\-T7ZQ5P6S-Z ;
  ns0:hasApplicationField ltk:\-TG9HQGK7-Z ;
  ns0:hasRepository <https://github.com/zihangdai/xlnet> ;
  ns1:SWO_0000740 ltk:\-PFDPNVQ7-3 ;
  ns1:SWO_0000741 ltk:\-WVMJL37J-5 ;
  ns0:hasDesignCountry "États-Unis"@fr, "United States"@en ;
  ns0:hasInputLanguage ltk:\-C3259FJL-2 ;
  ns1:SWO_0040005 ltk:\-SCTX2SHF-V ;
  ns1:has_license ltk:\-GVNT1P3N-X ;
  skos:inScheme ltk: ;
  ns2:documentationUrl <https://github.com/zihangdai/xlnet> ;
  dc:bibliographicCitation "• Yang, Z., Dai, Z., Yang, Y., Carbonell, J., Salakhutdinov, R., & Le, Q. V. (2020). XLNet : Generalized autoregressive pretraining for language understanding. arXiv:1906.08237 [cs].  <a href=\"http://arxiv.org/abs/1906.08237\">http://arxiv.org/abs/1906.08237</a>" ;
  skos:prefLabel "XLNet"@en, "XLNet"@fr ;
  skos:definition "« une méthode autorégressive généralisée qui exploite le meilleur de la modélisation linguistique autorégressive et par autoencodage tout en évitant leurs limites. » (Yang et al., 2020)."@fr, "\"a generalized autoregressive method that leverages the best of both AR [autoregressive] language modeling and AE [autoencoding] while avoiding their limitations.\" (Yang et al., 2020)."@en ;
  a skos:Concept ;
  skos:broader ltk:\-SWNPX252-5 .

