@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix dcterms: <http://purl.org/dc/terms/> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK-> .
@prefix owl: <http://www.w3.org/2002/07/owl#> .

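# Broader concept in the hierarchy: language model.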
<http://data.loterre.fr/ark:/67375/8LP-V1WS80W6-2>
  a skos:Concept ;
  skos:prefLabel "language model"@en, "modèle de langue"@fr ;
  skos:narrower <http://data.loterre.fr/ark:/67375/8LP-PDSXQ6Q0-M> .

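# Narrower concept: mBART, a multilingual sequence-to-sequence denoising
# auto-encoder pre-trained with the BART objective.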
<http://data.loterre.fr/ark:/67375/8LP-PDSXQ6Q0-M>
  a skos:Concept ;
  skos:prefLabel "mBART"@en, "mBART"@fr ;
  skos:hiddenLabel "m-BART"@en, "m-BART"@fr, "MBART"@en, "MBART"@fr ;
  skos:definition
    "A sequence-to-sequence denoising auto-encoder pre-trained on large-scale monolingual corpora in many languages using the BART objective. (Liu et al., 2020)"@en,
    "Modèle de langue multilingue basé sur BART. (Loterre)"@fr ;
  skos:example
    "Specifically we augment the original vocabulary of mBART with the names of AMR relations and frames occurring at least 5 times in the gold training corpus. (Cai, Li, Ho, Bing & Lam, 2021)"@en,
    "Tran et al. (2020) show that fine-tuning mBART using pseudo-parallel data leads to very promising results so we use mBART for our own experiments as well. (Belouadi & Eger, 2023)"@en,
    "We base our approach on the mBART model which is pre-trained for multilingual denoising. (Kasner & Dušek, 2020)"@en,
    "The noise function of mBART replaces text spans of arbitrary length with a mask token (35% of the words in each instance) and permutes the order of sentences. (Kasner & Dušek, 2020)"@en ;
  skos:broader <http://data.loterre.fr/ark:/67375/8LP-V1WS80W6-2> ;
  skos:exactMatch ltk:CX7MNQZF-F ;
  skos:inScheme <http://data.loterre.fr/ark:/67375/8LP> ;
  dcterms:modified "2024-04-26T13:26:55"^^xsd:dateTime .

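# The concept scheme referenced by skos:inScheme above.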
<http://data.loterre.fr/ark:/67375/8LP> a owl:Ontology, skos:ConceptScheme .
