@prefix owl: <http://www.w3.org/2002/07/owl#> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix dc: <http://purl.org/dc/terms/> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .

<http://data.loterre.fr/ark:/67375/8LP> a owl:Ontology, skos:ConceptScheme .
<http://data.loterre.fr/ark:/67375/8LP-C19J8C5F-M>
  a skos:Concept ;
  skos:inScheme <http://data.loterre.fr/ark:/67375/8LP> ;
  skos:prefLabel "représentation des mots dépendant du contexte"@fr, "context-dependent word representation"@en ;
  skos:hiddenLabel "Représentation des mots dépendant du contexte"@fr, "Context-dependent word representation"@en ;
  skos:broader <http://data.loterre.fr/ark:/67375/8LP-ZKBRNB69-9> ;
  skos:example
    "Specifically AMTN first utilizes BioBERT as an embedding layer to generate context-dependent word representations. (Zhou, Li, Yao, Lang & Ning, 2019)"@en,
    "This problem has been further attenuated by methods based on language model pre-training that produced context-dependent word representations. (Boros, Hamdi, Linhares Pontes, Cabrera-Diego, Moreno, Sidere & Doucet, 2020)"@en,
    "For an OOV word the ELMo layer of ELMoLex computes the context-dependent word representation based on the other vocabulary words present in the focal sentence. (Jawahar, Muller, Fethi, Martin, Villemonte de la Clergerie, Sagot & Seddah, 2018)"@en,
    "One way to get context-dependent word representations is using a pretrained neural language model as a feature extractor (Peters et al. 2018). (Pömsl & Lyapin, 2020)"@en,
    "Context-dependent word representations assign a semantic vector to each word-use within the context of its sentence rather than to each unique word. (Pömsl & Lyapin, 2020)"@en ;
  dc:modified "2024-06-05T09:04:01"^^xsd:dateTime .
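# Illustrative sketch (Turtle comment, not vocabulary data): the skos:example
# quotations above describe producing context-dependent word representations by
# using a pretrained language model as a feature extractor. The Python fragment
# below is a minimal sketch of that technique; the Hugging Face transformers
# library and the bert-base-uncased checkpoint are assumptions for illustration
# only, not models cited by this entry.
#
#   from transformers import AutoTokenizer, AutoModel
#   import torch
#
#   tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
#   model = AutoModel.from_pretrained("bert-base-uncased")
#
#   sentence = "The bank raised interest rates."
#   inputs = tokenizer(sentence, return_tensors="pt")
#   with torch.no_grad():
#       outputs = model(**inputs)
#
#   # One vector per token occurrence, conditioned on the whole sentence:
#   # the same word in a different sentence receives a different vector.
#   token_vectors = outputs.last_hidden_state[0]   # shape: (num_tokens, 768)
#   tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
#   for tok, vec in zip(tokens, token_vectors):
#       print(tok, vec[:3])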

<http://data.loterre.fr/ark:/67375/8LP-ZKBRNB69-9>
  a skos:Concept ;
  skos:prefLabel "context representation"@en, "représentation contextuelle"@fr ;
  skos:narrower <http://data.loterre.fr/ark:/67375/8LP-C19J8C5F-M> .
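# Non-normative usage sketch (Turtle comment): how a client might load this
# fragment and read a concept's labels and broader relation with rdflib. The
# library choice and the local filename "8LP.ttl" are assumptions for the
# example, not something this file specifies.
#
#   from rdflib import Graph, URIRef
#   from rdflib.namespace import SKOS
#
#   g = Graph()
#   g.parse("8LP.ttl", format="turtle")   # hypothetical local copy of this file
#
#   concept = URIRef("http://data.loterre.fr/ark:/67375/8LP-C19J8C5F-M")
#   for label in g.objects(concept, SKOS.prefLabel):
#       print(label.language, label)       # fr / en preferred labels
#   for broader in g.objects(concept, SKOS.broader):
#       print("broader:", broader)         # the "context representation" concept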

