@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix dc: <http://purl.org/dc/terms/> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .
@prefix owl: <http://www.w3.org/2002/07/owl#> .

<http://data.loterre.fr/ark:/67375/8LP-V1WS80W6-2>
  skos:prefLabel "modèle de langue"@fr, "language model"@en ;
  a skos:Concept ;
  skos:narrower <http://data.loterre.fr/ark:/67375/8LP-LHVS1Q6T-1> .

<http://data.loterre.fr/ark:/67375/8LP-LHVS1Q6T-1>
  skos:definition "A BERT language model pre-trained on PubMed abstracts and clinical notes (MIMIC-III). (Loterre)"@en, "Modèle de langue BERT pré-entrainé sur les résumés de PubMed et les notes cliniques (MIMIC-III). (Loterre)"@fr ;
  dc:modified "2024-04-26T13:22:51"^^xsd:dateTime ;
  skos:prefLabel "BlueBERT"@fr, "BlueBERT"@en ;
  skos:inScheme <http://data.loterre.fr/ark:/67375/8LP> ;
  skos:broader <http://data.loterre.fr/ark:/67375/8LP-V1WS80W6-2> ;
  skos:example "We evaluate BlueBERT on the N2C2STS and train it with 5-fold cross-validation. (Xiong, Yang, Liu, Wong, Chen, Xiang & Tang, 2023)"@en, "For comparison BlueBERT achieved 84-85 for all 3 entities and ClinicalBERT achieved 87. (Abadeer, 2020)"@en, "This is also required for a direct comparison since ClinicalBERT used a cased corpus while BlueBERT used an uncased one. (Abadeer, 2020)"@en ;
  skos:hiddenLabel "BlueBert"@en, "BlueBert"@fr ;
  skos:exactMatch <http://data.loterre.fr/ark:/67375/LTK-SQSJ19RK-N> ;
  a skos:Concept .

<http://data.loterre.fr/ark:/67375/8LP> a owl:Ontology, skos:ConceptScheme .
