@inproceedings{53b05a85a60842b5a02986a7199f8c0b,
  author    = {Qian, Jing and Chen, Qi and Yue, Yong and Atkinson, Katie and Li, Gangmin},
  title     = {Injecting Commonsense Knowledge into Prompt Learning for Zero-Shot Text Classification},
  booktitle = {{ICMLC} '23: Proceedings of the 2023 15th International Conference on Machine Learning and Computing},
  series    = {ACM International Conference Proceeding Series},
  pages     = {427--432},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  year      = {2023},
  month     = sep,
  day       = {7},
  doi       = {10.1145/3587716.3587787},
  isbn      = {9781450398411},
  language  = {English},
  keywords  = {prompt learning, zero-shot text classification, knowledge graph},
  note      = {Conference date: 17-02-2023 through 20-02-2023},
  abstract  = {The combination of pre-training and fine-tuning has become a default solution to Natural Language Processing (NLP) tasks. The emergence of prompt learning breaks such routine, especially in the scenarios of low data resources. Insufficient labelled data or even unseen classes are frequent problems in text classification, equipping Pre-trained Language Models (PLMs) with task-specific prompts helps get rid of the dilemma. However, general PLMs are barely provided with commonsense knowledge. In this work, we propose a KG-driven verbalizer that leverages commonsense Knowledge Graph (KG) to map label words with predefined classes. Specifically, we transform the mapping relationships into semantic relevance in the commonsense-injected embedding space. For zero-shot text classification task, experimental results exhibit the effectiveness of our KG-driven verbalizer on a {Twitter} dataset for natural disasters (i.e. {HumAID}) compared with other baselines.},
}