@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix dc: <http://purl.org/dc/terms/> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .
@prefix owl: <http://www.w3.org/2002/07/owl#> .

<http://data.loterre.fr/ark:/67375/8LP-V1WS80W6-2>
  skos:prefLabel "modèle de langue"@fr, "language model"@en ;
  a skos:Concept ;
  skos:narrower <http://data.loterre.fr/ark:/67375/8LP-ZVRTVWP6-K> .

<http://data.loterre.fr/ark:/67375/8LP-ZVRTVWP6-K>
  dc:modified "2024-04-26T13:28:51"^^xsd:dateTime ;
  a skos:Concept ;
  skos:example "It shows that UniLM is stronger than BERT in modeling news texts and thereby we used UniLM for learning and distilling our models. (Wu, Wu, Yu, Qi, Huang & Liu, 2021)"@en, "The UniLM model is also fine-tuned with an additional extractive summarization objective to predict relevant sentences in the document; this objective could be beneficial to generate the CNN/DailyMail extracts. (Rothe, Narayan & Severyn, 2020)"@en ;
  skos:hiddenLabel "Unilm"@en, "Unilm"@fr, "uniLM"@fr, "uniLM"@en ;
  skos:definition "Language model pre-trained using three types of language modeling tasks: unidirectional, bidirectional, and sequence-to-sequence prediction. (Dong et al., 2019)"@en, "modèle de langue « pré-entraîné à l'aide de trois types de tâches de modélisation du langage : unidirectionnel, bidirectionnel et prédiction de séquence à séquence. » (Dong et al., 2019)"@fr ;
  skos:broader <http://data.loterre.fr/ark:/67375/8LP-V1WS80W6-2> ;
  skos:altLabel "UNIfied pre-trained Language Model"@en ;
  skos:exactMatch <http://data.loterre.fr/ark:/67375/LTK-DQX644SX-0> ;
  skos:prefLabel "UniLM"@fr, "UniLM"@en ;
  skos:inScheme <http://data.loterre.fr/ark:/67375/8LP> .

<http://data.loterre.fr/ark:/67375/8LP> a owl:Ontology, skos:ConceptScheme .
