@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix ns0: <http://data.loterre.fr/OntoTM#> .
@prefix ns1: <http://www.ebi.ac.uk/swo/> .
@prefix dc: <http://purl.org/dc/terms/> .

# "word embedding" / "plongement lexical" — technique concept linked to entityBERT.
# NOTE: a leading "-" is not valid as the first character of a Turtle PN_LOCAL,
# so it is escaped as "\-" (PN_LOCAL_ESC); the resolved IRI is unchanged.
ltk:\-SCTX2SHF-V
  a skos:Concept ;
  skos:prefLabel "plongement lexical"@fr, "word embedding"@en ;
  ns0:executes ltk:\-Z5L3TJKC-R .

# "English" / "anglais" — input language of entityBERT.
# NOTE: a leading "-" is not valid as the first character of a Turtle PN_LOCAL,
# so it is escaped as "\-" (PN_LOCAL_ESC); the resolved IRI is unchanged.
ltk:\-C3259FJL-2
  a skos:Concept ;
  skos:prefLabel "anglais"@fr, "English"@en ;
  ns0:isInputLanguageOf ltk:\-Z5L3TJKC-R .

# "entityBERT" — the central concept of this fragment: a BERT language model
# for clinical entities (see skos:definition below).
# NOTE: a leading "-" is not valid as the first character of a Turtle PN_LOCAL,
# so it is escaped as "\-" (PN_LOCAL_ESC); every resolved IRI is unchanged.
ltk:\-Z5L3TJKC-R
  a skos:Concept ;
  skos:inScheme ltk: ;
  skos:prefLabel "entityBERT"@en, "entityBERT"@fr ;
  skos:definition "A BERT language model for clinical entities."@en, "Modèle de langage BERT pour les entités cliniques."@fr ;
  skos:broader ltk:\-SWNPX252-5 ;
  # SWO (Software Ontology) relations — numeric property IRIs; semantics not
  # restated here, see the SWO documentation for SWO_0000740 / SWO_0040005.
  ns1:SWO_0000740 ltk:\-PFDPNVQ7-3 ;
  ns1:SWO_0040005 ltk:\-SCTX2SHF-V ;
  # OntoTM relations: input language, lineage, application field, design country.
  ns0:hasInputLanguage ltk:\-C3259FJL-2 ;
  ns0:basedOn ltk:\-SSWGBD85-7 ;
  ns0:hasApplicationField ltk:\-FLNMSH9L-2 ;
  ns0:hasDesignCountry "United States"@en, "États-Unis"@fr ;
  dc:bibliographicCitation "• Lin, C., Miller, T., Dligach, D., Bethard, S., & Savova, G. (2021). Entitybert: Entity-centric masking strategy for model pretraining for the clinical domain. Proceedings of the 20th Workshop on Biomedical Language Processing, 191–201.  <a href=\"https://doi.org/10.18653/v1/2021.bionlp-1.21\">https://doi.org/10.18653/v1/2021.bionlp-1.21</a>" .

# "transformer" / "transformeur" — architecture concept linked to entityBERT.
# NOTE: a leading "-" is not valid as the first character of a Turtle PN_LOCAL,
# so it is escaped as "\-" (PN_LOCAL_ESC); the resolved IRI is unchanged.
ltk:\-PFDPNVQ7-3
  a skos:Concept ;
  skos:prefLabel "transformeur"@fr, "transformer"@en ;
  ns1:SWO_0000085 ltk:\-Z5L3TJKC-R .

# "large language model" / "grand modèle de langue" — broader concept of entityBERT
# (inverse of the skos:broader stated on the entityBERT concept).
# NOTE: a leading "-" is not valid as the first character of a Turtle PN_LOCAL,
# so it is escaped as "\-" (PN_LOCAL_ESC); the resolved IRI is unchanged.
ltk:\-SWNPX252-5
  a skos:Concept ;
  skos:prefLabel "large language model"@en, "grand modèle de langue"@fr ;
  skos:narrower ltk:\-Z5L3TJKC-R .

# "BERT" — base model of entityBERT (inverse of ns0:basedOn stated on entityBERT).
# NOTE: a leading "-" is not valid as the first character of a Turtle PN_LOCAL,
# so it is escaped as "\-" (PN_LOCAL_ESC); the resolved IRI is unchanged.
ltk:\-SSWGBD85-7
  a skos:Concept ;
  skos:prefLabel "BERT"@fr, "BERT"@en ;
  ns0:baseOf ltk:\-Z5L3TJKC-R .

# The concept scheme itself — the bare "ltk:" prefixed name (empty local name)
# is valid Turtle and resolves to the prefix IRI; it needs no escaping.
ltk: a skos:ConceptScheme .

# "medicine" / "médecine" — application field of entityBERT.
# NOTE: a leading "-" is not valid as the first character of a Turtle PN_LOCAL,
# so it is escaped as "\-" (PN_LOCAL_ESC); the resolved IRI is unchanged.
ltk:\-FLNMSH9L-2
  a skos:Concept ;
  skos:prefLabel "medicine"@en, "médecine"@fr ;
  ns0:isApplicationFieldOf ltk:\-Z5L3TJKC-R .

