@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix ns0: <http://data.loterre.fr/OntoTM#> .
@prefix ns1: <http://www.ebi.ac.uk/swo/> .
@prefix dc: <http://purl.org/dc/terms/> .

# "word embedding" concept; ns0:executes links it to the OuBioBERT model.
# FIX: a Turtle PN_LOCAL may not begin with an unescaped "-" (Turtle 1.1
# grammar), so the leading hyphen is escaped as "\-" (same IRI).
ltk:\-SCTX2SHF-V
  a skos:Concept ;
  skos:prefLabel "plongement lexical"@fr, "word embedding"@en ;
  ns0:executes ltk:\-S5B5K4XC-W .

# "English" language concept; declared as an input language of OuBioBERT.
# FIX: leading "-" in a local name is invalid Turtle; escaped as "\-".
ltk:\-C3259FJL-2
  a skos:Concept ;
  skos:prefLabel "anglais"@fr, "English"@en ;
  ns0:isInputLanguageOf ltk:\-S5B5K4XC-W .

# "Japanese" language concept; declared as an input language of OuBioBERT.
# FIX: leading "-" in a local name is invalid Turtle; escaped as "\-".
ltk:\-QQV8SG3T-1
  a skos:Concept ;
  skos:prefLabel "japonais"@fr, "Japanese"@en ;
  ns0:isInputLanguageOf ltk:\-S5B5K4XC-W .

# OuBioBERT: a BERT-based language model pre-trained on biomedical texts
# (see skos:definition below). Central concept of this fragment; all other
# concepts in the file link back to it.
# FIX: a Turtle PN_LOCAL may not begin with an unescaped "-" (Turtle 1.1
# grammar), so every leading hyphen below is escaped as "\-" (same IRIs).
# Predicates are regrouped (type/SKOS first, then domain properties) with
# no change to the triples asserted.
ltk:\-S5B5K4XC-W
  a skos:Concept ;
  skos:inScheme ltk: ;
  skos:prefLabel "OuBioBERT"@en, "OuBioBERT"@fr ;
  skos:definition "A language model based on BERT pre-trained on biomedical texts."@en, "Modèle de langage basé sur BERT et pré-entrainé sur des textes biomédicaux."@fr ;
  skos:broader ltk:\-SWNPX252-5 ;
  ns0:basedOn ltk:\-SSWGBD85-7 ;
  ns0:hasInputLanguage ltk:\-QQV8SG3T-1, ltk:\-C3259FJL-2 ;
  ns0:hasDesignCountry "Japan"@en, "Japon"@fr ;
  ns0:hasApplicationField ltk:\-FLNMSH9L-2, ltk:\-DBTH07N9-1 ;
  ns0:hasRepository <https://huggingface.co/seiya/oubiobert-base-uncased> ;
  ns1:has_license ltk:\-GVNT1P3N-X ;
  # NOTE(review): SWO_0040005 / SWO_0000740 are opaque numeric SWO property
  # IDs — confirm their intended semantics against the Software Ontology.
  ns1:SWO_0040005 ltk:\-SCTX2SHF-V ;
  ns1:SWO_0000740 ltk:\-PFDPNVQ7-3 ;
  dc:bibliographicCitation "• Wada, S., Takeda, T., Manabe, S., Konishi, S., Kamohara, J., & Matsumura, Y. (2021). Pre-training technique to localize medical BERT and enhance biomedical BERT. ArXiv:2005.07202 [Cs].  <a href=\"http://arxiv.org/abs/2005.07202\">http://arxiv.org/abs/2005.07202</a>" .

# "transformer" (architecture) concept, linked to OuBioBERT.
# NOTE(review): SWO_0000085 is an opaque SWO property ID — confirm its
# semantics against the Software Ontology.
# FIX: leading "-" in a local name is invalid Turtle; escaped as "\-".
ltk:\-PFDPNVQ7-3
  a skos:Concept ;
  skos:prefLabel "transformeur"@fr, "transformer"@en ;
  ns1:SWO_0000085 ltk:\-S5B5K4XC-W .

# "biology" application-field concept (inverse of ns0:hasApplicationField).
# FIX: leading "-" in a local name is invalid Turtle; escaped as "\-".
ltk:\-DBTH07N9-1
  a skos:Concept ;
  skos:prefLabel "biologie"@fr, "biology"@en ;
  ns0:isApplicationFieldOf ltk:\-S5B5K4XC-W .

# Apache License 2.0 concept; license of the OuBioBERT model.
# FIX: leading "-" in a local name is invalid Turtle; escaped as "\-".
ltk:\-GVNT1P3N-X
  a skos:Concept ;
  skos:prefLabel "Apache License Version 2.0"@fr, "Apache License Version 2.0"@en ;
  ns1:is_license_for ltk:\-S5B5K4XC-W .

# "large language model" concept; SKOS parent (broader) of OuBioBERT.
# FIX: leading "-" in a local name is invalid Turtle; escaped as "\-".
ltk:\-SWNPX252-5
  a skos:Concept ;
  skos:prefLabel "large language model"@en, "grand modèle de langue"@fr ;
  skos:narrower ltk:\-S5B5K4XC-W .

# "BERT" concept; base model of OuBioBERT (inverse of ns0:basedOn).
# FIX: leading "-" in a local name is invalid Turtle; escaped as "\-".
ltk:\-SSWGBD85-7
  a skos:Concept ;
  skos:prefLabel "BERT"@fr, "BERT"@en ;
  ns0:baseOf ltk:\-S5B5K4XC-W .

# The concept scheme itself (prefix IRI with empty local name is valid).
ltk: a skos:ConceptScheme .

# "medicine" application-field concept.
# FIX: leading "-" in a local name is invalid Turtle; escaped as "\-".
ltk:\-FLNMSH9L-2
  a skos:Concept ;
  skos:prefLabel "medicine"@en, "médecine"@fr ;
  ns0:isApplicationFieldOf ltk:\-S5B5K4XC-W .

