@prefix ltk: <http://data.loterre.fr/ark:/67375/LTK> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix ns0: <http://data.loterre.fr/OntoTM#> .
@prefix ns1: <http://www.ebi.ac.uk/swo/> .
@prefix dc: <http://purl.org/dc/terms/> .

ltk:-SCTX2SHF-V
  skos:prefLabel "plongement lexical"@fr, "word embedding"@en ;
  a skos:Concept ;
  ns0:executes ltk:-Q9B8D51W-T .

ltk:-FBRN2P64-2
  skos:prefLabel "psychologie"@fr, "psychology"@en ;
  a skos:Concept ;
  ns0:isApplicationFieldOf ltk:-Q9B8D51W-T .

ltk:-C3259FJL-2
  skos:prefLabel "anglais"@fr, "English"@en ;
  a skos:Concept ;
  ns0:isInputLanguageOf ltk:-Q9B8D51W-T .

ltk:-BV6K0MM8-9
  skos:prefLabel "RoBERTa"@en, "RoBERTa"@fr ;
  a skos:Concept ;
  ns0:baseOf ltk:-Q9B8D51W-T .

ltk:-PFDPNVQ7-3
  skos:prefLabel "transformeur"@fr, "transformer"@en ;
  a skos:Concept ;
  ns1:SWO_0000085 ltk:-Q9B8D51W-T .

ltk:-SWNPX252-5
  skos:prefLabel "large language model"@en, "grand modèle de langue"@fr ;
  a skos:Concept ;
  skos:narrower ltk:-Q9B8D51W-T .

ltk:-N53MKR1Z-7
  skos:prefLabel "CC BY-NC 4.0"@fr, "CC BY-NC 4.0"@en ;
  a skos:Concept ;
  ns1:is_license_for ltk:-Q9B8D51W-T .

ltk:-Q9B8D51W-T
  dc:bibliographicCitation "• Ji, S., Zhang, T., Ansari, L., Fu, J., Tiwari, P., & Cambria, E. (2021). MentalBERT: Publicly available pretrained language models for mental healthcare (No. arXiv:2110.15621). arXiv.  <a href=\"https://doi.org/10.48550/arXiv.2110.15621\">https://doi.org/10.48550/arXiv.2110.15621</a>", "• Wagay, F. A., Jahiruddin, & Altaf, Y. (2025). MentalRoBERTa-Caps: A capsule-enhanced transformer model for mental health classification. MethodsX, 15, 103483.  <a href=\"https://doi.org/10.1016/j.mex.2025.103483\">https://doi.org/10.1016/j.mex.2025.103483</a>" ;
  ns0:hasDesignCountry "Singapour"@fr, "Singapore"@en, "Royaume-Uni"@fr, "Canada"@fr, "Canada"@en, "United Kingdom"@en, "Finlande"@fr, "Finland"@en ;
  ns1:SWO_0000740 ltk:-PFDPNVQ7-3 ;
  a skos:Concept ;
  skos:inScheme ltk: ;
  ns0:hasRepository <https://huggingface.co/mental/mental-roberta-base> ;
  skos:prefLabel "MentalRoBERTa"@en, "MentalRoBERTa"@fr ;
  skos:definition "Grand modèle de langage, initialisé avec RoBERTa-Base et entraîné avec des publications liées à la santé mentale recueillies sur Reddit."@fr, "\"MentalRoBERTa is a model initialized with RoBERTa-Base (cased_L-12_H-768_A-12) and trained with mental health-related posts collected from Reddit.\" (source: https://huggingface.co/papers/2110.15621)"@en ;
  ns1:SWO_0040005 ltk:-T8C4CNZQ-G, ltk:-SCTX2SHF-V ;
  ns0:basedOn ltk:-BV6K0MM8-9 ;
  ns0:hasApplicationField ltk:-FBRN2P64-2, ltk:-FLNMSH9L-2 ;
  ns1:has_license ltk:-N53MKR1Z-7 ;
  skos:broader ltk:-SWNPX252-5 ;
  ns0:hasInputLanguage ltk:-C3259FJL-2 .

ltk:-T8C4CNZQ-G
  skos:prefLabel "génération de textes"@fr, "text generation"@en ;
  a skos:Concept ;
  ns0:executes ltk:-Q9B8D51W-T .

ltk: a skos:ConceptScheme .
ltk:-FLNMSH9L-2
  skos:prefLabel "medicine"@en, "médecine"@fr ;
  a skos:Concept ;
  ns0:isApplicationFieldOf ltk:-Q9B8D51W-T .

