@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix owl: <http://www.w3.org/2002/07/owl#> .
@prefix dc: <http://purl.org/dc/terms/> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .

# Concept "language model" / "modèle de langue"; broader term of the ERNIE concept.
<http://data.loterre.fr/ark:/67375/8LP-V1WS80W6-2>
  a skos:Concept ;
  skos:prefLabel "language model"@en, "modèle de langue"@fr ;
  skos:narrower <http://data.loterre.fr/ark:/67375/8LP-CF4R9CVM-J> .

# Vocabulary header: the resource is both an OWL ontology and a SKOS concept scheme.
<http://data.loterre.fr/ark:/67375/8LP>
  a owl:Ontology, skos:ConceptScheme .
# Concept "ERNIE" (a pre-trained language model), narrower than "language model".
# Fix: the skos:exactMatch object was written as the prefixed name ltk:-DXHSN8C1-Q,
# which is invalid Turtle — a local name (PN_LOCAL) may not begin with '-' — so the
# file did not parse. The intended IRI (prefix ltk: + local part) is written out in
# full below, consistent with every other IRI reference in this file.
<http://data.loterre.fr/ark:/67375/8LP-CF4R9CVM-J>
  a skos:Concept ;
  skos:prefLabel "ERNIE"@fr, "ERNIE"@en ;
  skos:hiddenLabel "Ernie"@en, "Ernie"@fr ;
  skos:definition "A pre-trained language model on large-scale textual corpora and knowledge graphs. (Loterre)"@en, "Modèle de langue pré-entraîné sur des corpus textuels volumineux et sur des graphes de connaissances. (Loterre)"@fr ;
  skos:example "Since ERNIE is not significantly improved compared to BERT in order to reduce model parameters we choose to use BERT as the pre-trained model. (Tan, Hu & Qiu, 2023)"@en, "It shows that ERNIE has a prominent advantage over BERT in both detection and correction. (Zhang, Pang, Zhang, Wang, He, Sun, Wu & Wang, 2021)"@en ;
  skos:exactMatch <http://data.loterre.fr/ark:/67375/LTK-DXHSN8C1-Q> ;
  skos:broader <http://data.loterre.fr/ark:/67375/8LP-V1WS80W6-2> ;
  skos:inScheme <http://data.loterre.fr/ark:/67375/8LP> ;
  dc:modified "2024-04-26T13:24:15"^^xsd:dateTime .

