@prefix owl: <http://www.w3.org/2002/07/owl#> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix dc: <http://purl.org/dc/terms/> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .

# The vocabulary itself (ark:/67375/8LP): typed both as an owl:Ontology and a
# skos:ConceptScheme, so it can be consumed by OWL tooling and SKOS tooling alike.
<http://data.loterre.fr/ark:/67375/8LP> a owl:Ontology, skos:ConceptScheme .
# Concept "layer" (fr: "couche"). Parent concept in the hierarchy: its
# skos:narrower link to 8LP-DMDVS16W-4 is mirrored by that concept's
# skos:broader link back to this one (see below in this file).
<http://data.loterre.fr/ark:/67375/8LP-D73KVZ17-N>
  skos:prefLabel "couche"@fr, "layer"@en ;
  a skos:Concept ;
  skos:narrower <http://data.loterre.fr/ark:/67375/8LP-DMDVS16W-4> .

# Concept "self-attention layer" (fr: "couche d'auto-attention").
# Narrower child of "layer" (8LP-D73KVZ17-N); carries five cited usage
# examples from the literature, an English definition, capitalization
# variants as hidden labels, and a last-modified timestamp.
# NOTE(review): dc:modified is present only on this concept, not on its
# parent above — presumably intentional, but worth confirming upstream.
<http://data.loterre.fr/ark:/67375/8LP-DMDVS16W-4>
  skos:example "Specifically we distill the knowledge from the hidden state of each transformer block and the attention score of each self-attention layer. (Li, Gao, Lei & Xu, 2023)"@en, "Each encoder has its own self-attention layer and feed-forward layer to process each input separately. (Shin & Lee, 2018)"@en, "We then apply a self-attention layer to model the guiding effect of ontology knowledge on the extraction of entities and relations from the sentence. (Xiong, Chen, Yunfei & Shengyang, 2023)"@en, "We are motivated to improve the self-attention layer appended to the top of the transformer encoder to enrich the contextualized word representation with information from its neighbors and the relations from the dependency parse trees. (Galitsky, Ilvovsky & Goncharova, 2021)"@en, "First encoder self-attention layers benefit most from additive window attention while decoder self-attention layers prefer multiplicative attention. (Nguyen, Nguyen, Joty & Li, 2020)"@en ;
  skos:broader <http://data.loterre.fr/ark:/67375/8LP-D73KVZ17-N> ;
  dc:modified "2024-05-13T14:12:15"^^xsd:dateTime ;
  skos:prefLabel "couche d'auto-attention"@fr, "self-attention layer"@en ;
  skos:inScheme <http://data.loterre.fr/ark:/67375/8LP> ;
  skos:definition "A layer in the architecture of transformer-based models that allows the model to focus on different parts of the input sequence when processing each token to capture contextual relationships and dependencies within the input sequence."@en ;
  skos:hiddenLabel "Self-attention layer"@en, "Couche d'auto-attention"@fr ;
  a skos:Concept .

