diff --git a/tutorial-notebooks/ae/standard/data/attribute.csv b/tutorial-notebooks/ae/standard/data/attribute.csv
deleted file mode 100644
index b364e74..0000000
--- a/tutorial-notebooks/ae/standard/data/attribute.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-attribute,index
-None,0
-民族,1
-字,2
diff --git a/tutorial-notebooks/ae/standard/data/test.csv b/tutorial-notebooks/ae/standard/data/test.csv
deleted file mode 100644
index e6b732e..0000000
--- a/tutorial-notebooks/ae/standard/data/test.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-sentence,attribute,entity,entity_offset,attribute_value,attribute_value_offset,len,tokens,dependency
-柳为易,女,1989年5月出生,中共党员 ,汉族,重庆市人,民族,柳为易,0,汉族,22,17,"['柳为', '易', ',', '女', ',', '1989', '年', '5', '月', '出生', ',', '中共党员', ',', '汉族', ',', '重庆市', '人']","[1, 1, 12, 1, 12, 1, 10, 1, 1, 1, 12, 16, 12, 15, 12, 1, 13]"
-庄肇奎 (1728-1798) 榜姓杜,字星堂,号胥园,江苏武进籍,浙江秀水(今嘉兴)人,字,庄肇奎,0,星堂,23,23,"['庄肇奎', '(', '1728', '-', '1798', ')', '榜姓', '杜', ',', '字星堂', ',', '号', '胥园', ',', '江苏', '武进', '籍', ',', '浙江', '秀水', '(', '今', '嘉兴', ')', '人']","[1, 9, 1, 1, 1, 11, 1, 1, 12, 1, 12, 1, 1, 12, 1, 1, 1, 12, 1, 1, 9, 1, 1, 11, 13]"
\ No newline at end of file
diff --git a/tutorial-notebooks/ae/standard/data/train.csv b/tutorial-notebooks/ae/standard/data/train.csv
deleted file mode 100644
index c89b1c1..0000000
--- a/tutorial-notebooks/ae/standard/data/train.csv
+++ /dev/null
@@ -1,5 +0,0 @@
-sentence,attribute,entity,entity_offset,attribute_value,attribute_value_offset,len,tokens,dependency
-苏轼(1037~1101年),字子瞻,又字和仲,号“东坡居士”,眉州眉山(即今四川眉州)人,是宋代(北宋)著名的文学家、书画家,字,苏轼,0,和仲,21,42,"['苏轼', '(', '1037', '~', '1101', '年', ')', ',', '字子', '瞻', ',', '又', '字', '和', '仲', ',', '号', '“', '东坡', '居士', '”', ',', '眉州', '眉山', '(', '即', '今', '四川', '眉州', ')', '人', ',', '是', '宋代', '(', '北宋', ')', '著名', '的', '文学家', '、', '书画家']","[1, 9, 1, 1, 1, 10, 11, 12, 1, 1, 12, 1, 1, 1, 1, 12, 1, 1, 1, 1, 1, 12, 1, 1, 9, 1, 1, 1, 1, 11, 13, 12, 1, 1, 9, 1, 11, 1, 1, 1, 1, 1]"
-屈中乾,男,汉族,中共党员,特级教师,民族,屈中乾,0,汉族,6,10,"['屈中', '乾', ',', '男', ',', '汉族', ',', '中共党员', ',', '特级教师']","[1, 1, 12, 14, 12, 15, 12, 16, 12, 1]"
-黄向静,女,汉族,1965年5月生,大学学历,1986年17月参加工作,中共党员,身体健康,民族,黄向静,0,汉族,6,24,"['黄向静', ',', '女', ',', '汉族', ',', '1965', '年', '5', '月生', ',', '大学', '学历', ',', '1986', '年', '17', '月', '参加', '工作', ',', '中共党员', ',', '身体健康']","[1, 12, 1, 12, 15, 12, 1, 10, 1, 1, 12, 1, 1, 12, 1, 10, 1, 1, 1, 1, 12, 16, 12, 1]"
-司马懿,字仲达,河南温县人,字,司马懿,0,仲达,5,7,"['司马懿', ',', '字仲达', ',', '河南', '温县', '人']","[1, 12, 1, 12, 1, 1, 13]"
\ No newline at end of file
diff --git a/tutorial-notebooks/ae/standard/data/valid.csv b/tutorial-notebooks/ae/standard/data/valid.csv
deleted file mode 100644
index 691ca4f..0000000
--- a/tutorial-notebooks/ae/standard/data/valid.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-sentence,attribute,entity,entity_offset,attribute_value,attribute_value_offset,len,tokens,dependency
-田承冉 男,1952年生,汉族,山东桓台人,共党员,民族,田承冉,0,汉族,13,14,"['田承冉', '男', ',', '1952', '年生', ',', '汉族', ',', '山东', '桓台', '人', ',', '共', '党员']","[1, 14, 12, 1, 1, 12, 15, 12, 1, 1, 13, 12, 1, 1]"
-冷家骥,字展麒,山东招远人,字,冷家骥,0,展麒,5,8,"['冷家骥', ',', '字展', '麒', ',', '山东', '招远', '人']","[1, 12, 1, 1, 12, 1, 1, 13]"
\ No newline at end of file
diff --git a/tutorial-notebooks/ae/standard/standard_ae_tutorial.ipynb b/tutorial-notebooks/ae/standard/standard_ae_tutorial.ipynb
index 1fc01f7..4d7f734 100644
--- a/tutorial-notebooks/ae/standard/standard_ae_tutorial.ipynb
+++ b/tutorial-notebooks/ae/standard/standard_ae_tutorial.ipynb
@@ -4,13 +4,13 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Attribution Extraction Experiment\n",
Experiment\n", + "## Attribution Extraction Tutorial\n", "> Tutorial author: 陶联宽(22051063@zju.edu.cn)\n", "\n", - "On this demo, we use `pretrain_language model` to extract attributions.\n", - "We hope this demo can help you understand the process of construction knowledge graph and the principles and common methods of triplet extraction.\n", + "In this tutorial, we use `pretrain_language model` to extract attributions.\n", + "We hope this tutorial can help you understand the process of construction knowledge graph and the principles and common methods of triplet extraction.\n", "\n", - "This demo uses `Python3`.\n", + "This tutorial uses `Python3`.\n", "\n", "### Dataset\n", "In this example,we get some Chinese text to extract the triples\n", @@ -55,9 +55,9 @@ "outputs": [], "source": [ "# Run the neural network with pytorch and confirm whether it is installed before running\n", - "!pip install torch\n", - "!pip install matplotlib\n", - "!pip install transformers" + "!pip install deepke\n", + "!wget 120.27.214.45/Data/ae/standard/data.tar.gz\n", + "!tar -xzvf data.tar.gz" ] }, { @@ -532,13 +532,6 @@ "test_f1, _ = validate(0, model, test_dataloader, criterion,verbose=False)\n", "print(f'after {cfg.epoch} epochs, final test data macro f1: {test_f1:.4f}')" ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "This demo does not include parameter adjustment. Interested students can go to [deepke] by themselves( http://openkg.cn/tool/deepke )Warehouse, download and use more models:)" - ] } ], "metadata": { diff --git a/tutorial-notebooks/ner/few-shot/fewshot_ner_tutorial.ipynb b/tutorial-notebooks/ner/few-shot/fewshot_ner_tutorial.ipynb index ef45bc6..b0f90c3 100644 --- a/tutorial-notebooks/ner/few-shot/fewshot_ner_tutorial.ipynb +++ b/tutorial-notebooks/ner/few-shot/fewshot_ner_tutorial.ipynb @@ -91,9 +91,9 @@ "metadata": {}, "outputs": [], "source": [ - "!pip install transformers==3.4.0\n", - "!pip install torch==1.7.0\n", - "!pip install tqdm==4.61.1" + "!pip install deepke\n", + "!wget 120.27.214.45/Data/ner/few_shot/data.tar.gz\n", + "!tar -xzvf data.tar.gz" ] }, { diff --git a/tutorial-notebooks/re/document/document_re_tutorial.ipynb b/tutorial-notebooks/re/document/document_re_tutorial.ipynb index 72bfd7d..2e1a521 100644 --- a/tutorial-notebooks/re/document/document_re_tutorial.ipynb +++ b/tutorial-notebooks/re/document/document_re_tutorial.ipynb @@ -105,15 +105,9 @@ "metadata": {}, "outputs": [], "source": [ - "!pip install python==3.7\n", - "!pip install cuda==10.2\n", - "!pip install torch==1.5.0\n", - "!pip install transformers==3.0.4\n", - "!pip install opt-einsum==3.3.0\n", - "!pip install ujson\n", - "!pip install tqdm\n", - "!pip install allennlp\n", - "!pip install matplotlib" + "!pip install deepke\n", + "!wget 120.27.214.45/Data/re/document/data.tar.gz\n", + "!tar -xzvf data.tar.gz" ] }, { diff --git a/tutorial-notebooks/re/few-shot/fewshot_re_tutorial.ipynb b/tutorial-notebooks/re/few-shot/fewshot_re_tutorial.ipynb index 378117d..3c6d772 100644 --- a/tutorial-notebooks/re/few-shot/fewshot_re_tutorial.ipynb +++ b/tutorial-notebooks/re/few-shot/fewshot_re_tutorial.ipynb @@ -93,17 +93,9 @@ "metadata": {}, "outputs": [], "source": [ - "!pip install numpy==1.20.3\n", - "!pip install tokenizers==0.10.3\n", - "!pip install torch==1.8.0\n", - "!pip install regex==2021.4.4\n", - "!pip install transformers==4.7.0\n", - "!pip install tqdm==4.49.0\n", - "!pip install activations==0.1.0\n", - "!pip install dataclasses==0.6\n", - "!pip install file_utils==0.0.1\n", - 
"!pip install flax==0.3.4\n", - "!pip install utils==1.0.1" + "!pip install deepke\n", + "!wget 120.27.214.45/Data/re/few_shot/data.tar.gz\n", + "!tar -xzvf data.tar.gz" ] }, { diff --git a/tutorial-notebooks/re/standard/data/relation.csv b/tutorial-notebooks/re/standard/data/relation.csv deleted file mode 100644 index bc5f646..0000000 --- a/tutorial-notebooks/re/standard/data/relation.csv +++ /dev/null @@ -1,5 +0,0 @@ -head_type,tail_type,relation,index -None,None,None,0 -影视作品,人物,导演,1 -景点,城市,所在城市,2 -歌曲,音乐专辑,所属专辑,3 diff --git a/tutorial-notebooks/re/standard/data/test.csv b/tutorial-notebooks/re/standard/data/test.csv deleted file mode 100644 index 09f777d..0000000 --- a/tutorial-notebooks/re/standard/data/test.csv +++ /dev/null @@ -1,4 +0,0 @@ -sentence,relation,head,tail,tokens,lens,head_type,head_offset,tail_type,tail_offset,dependency -建国后南京于1951年将莫愁湖列为第一区人民公园。,所在城市,莫愁湖,南京,"['建国', '后', '南京', '于', '1951年', '将', '莫愁湖', '列为', '第一', '区', '人民', '公园', '。']",13,景点,6,城市,2,"[2, 8, 8, 8, 4, 8, 6, 0, 10, 12, 12, 8, 8]" -2001年李三光被张黎选中让其饰演《走向共和》中的光绪皇帝而出道。,导演,走向共和,李三光,"['2001年', '李三光', '被', '张黎', '选中', '让', '其', '饰演', '《', '走向', '共和', '》', '中的', '光绪皇帝', '而', '出道', '。']",17,影视作品,9,人物,1,"[2, 5, 5, 5, 0, 5, 6, 6, 10, 13, 10, 10, 8, 13, 16, 13, 5]" -《我爱秋莲》是收录于高胜美专辑《雷射》的一首金曲。,所属专辑,我爱秋莲,雷射,"['《', '我', '爱秋莲', '》', '是', '收录于', '高胜', '美专', '辑', '《', '雷射', '》', '的', '一首', '金曲', '。']",16,歌曲,1,音乐专辑,10,"[3, 3, 5, 3, 0, 15, 8, 9, 15, 11, 15, 11, 11, 15, 5, 5]" diff --git a/tutorial-notebooks/re/standard/data/train.csv b/tutorial-notebooks/re/standard/data/train.csv deleted file mode 100644 index cd2e60f..0000000 --- a/tutorial-notebooks/re/standard/data/train.csv +++ /dev/null @@ -1,7 +0,0 @@ -sentence,relation,head,tail,tokens,lens,head_type,head_offset,tail_type,tail_offset,dependency -孔正锡在2005年以一部温馨的爱情电影《长腿叔叔》敲开电影界大门。,导演,长腿叔叔,孔正锡,"['孔正锡', '在', '2005年', '以', '一部', '温馨', '的', '爱情', '电影', '《', '长腿', '叔叔', '》', '敲开', '电影界', '大门', '。']",17,影视作品,10,人物,0,"[14, 14, 2, 14, 9, 9, 6, 9, 12, 12, 12, 4, 12, 0, 16, 14, 14]" -2014年8月,韩兆导演的电影《好命先生》正式上映。,导演,好命先生,韩兆,"['2014年8月', ',', '韩兆', '导演', '的', '电影', '《', '好命先生', '》', '正式', '上映', '。']",12,影视作品,7,人物,2,"[11, 1, 4, 6, 4, 8, 8, 11, 8, 11, 0, 11]" -2000年8月,「天坛大佛」荣获「香港十大杰出工程项目」第四名。,所在城市,天坛大佛,香港,"['2000年8月', ',', '「', '天坛', '大佛', '」', '荣获', '「', '香港', '十', '大', '杰出', '工程项目', '」', '第四', '名', '。']",17,景点,3,城市,8,"[7, 1, 5, 5, 7, 5, 0, 13, 13, 11, 13, 13, 16, 13, 16, 7, 7]" -地安门是北京主皇城四门之一。,所在城市,地安门,北京,"['地安门', '是', '北京', '主', '皇城', '四', '门', '之一', '。']",9,景点,0,城市,2,"[2, 0, 5, 5, 7, 7, 8, 2, 2]" -《伤心的树》是吴宗宪的音乐作品,收录在《你比从前快乐》专辑中。,所属专辑,伤心的树,你比从前快乐,"['《', '伤心', '的', '树', '》', '是', '吴宗宪', '的', '音乐作品', ',', '收录', '在', '《', '你', '比', '从前', '快乐', '》', '专辑', '中', '。']",21,歌曲,1,音乐专辑,13,"[4, 4, 2, 6, 4, 0, 9, 7, 6, 6, 6, 11, 17, 17, 17, 15, 19, 17, 20, 12, 6]" -请不要认错我是关淑怡专辑《冬恋》里的一首歌曲。,所属专辑,请不要认错我,冬恋,"['请', '不要', '认错', '我', '是', '关淑怡', '专辑', '《', '冬恋', '》', '里', '的', '一', '首', '歌曲', '。']",16,歌曲,0,音乐专辑,8,"[0, 3, 1, 5, 3, 7, 9, 9, 11, 9, 15, 11, 14, 15, 5, 1]" diff --git a/tutorial-notebooks/re/standard/data/valid.csv b/tutorial-notebooks/re/standard/data/valid.csv deleted file mode 100644 index adc9b14..0000000 --- a/tutorial-notebooks/re/standard/data/valid.csv +++ /dev/null @@ -1,4 +0,0 @@ -sentence,relation,head,tail,tokens,lens,head_type,head_offset,tail_type,tail_offset,dependency -《岳父也是爹》是王军执导的电视剧,马恩然、范明主演。,导演,岳父也是爹,王军,"['《', '岳父', '也是', '爹', '》', '是', '王军', '执导', '的', '电视剧', ',', '马恩然', '、', '范明', '主演', '。']",16,影视作品,1,人物,6,"[4, 4, 4, 6, 4, 0, 8, 10, 8, 6, 6, 15, 14, 12, 6, 6]" 
-渔人码头的落成使得澳门旅游业展现全新的旅游面貌。,所在城市,渔人码头,澳门,"['渔人码头', '的', '落成', '使得', '澳门', '旅游业', '展现', '全新', '的', '旅游', '面貌', '。']",12,景点,0,城市,4,"[3, 1, 4, 0, 6, 4, 4, 11, 8, 11, 7, 4]"
-《寄梦》是黄露仪的音乐作品,收录在《宁愿相信》专辑中。,所属专辑,寄梦,宁愿相信,"['《', '寄', '梦', '》', '是', '黄露仪', '的', '音乐作品', ',', '收录', '在', '《', '宁愿', '相信', '》', '专辑', '中', '。']",18,歌曲,1,音乐专辑,12,"[2, 5, 2, 2, 0, 8, 6, 5, 5, 5, 10, 14, 14, 17, 14, 17, 11, 5]"
diff --git a/tutorial-notebooks/re/standard/LM.ipynb b/tutorial-notebooks/re/standard/standard_re_BERT_tutorial.ipynb
similarity index 96%
rename from tutorial-notebooks/re/standard/LM.ipynb
rename to tutorial-notebooks/re/standard/standard_re_BERT_tutorial.ipynb
index 22b25d7..0cc0d7e 100644
--- a/tutorial-notebooks/re/standard/LM.ipynb
+++ b/tutorial-notebooks/re/standard/standard_re_BERT_tutorial.ipynb
@@ -4,13 +4,13 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## relation extraction experiment\n",
+    "## BERT-based Standard Relation Extraction Tutorial\n",
     "> Tutorial author:余海阳(yuhaiyang@zju.edu.cn)\n",
     "\n",
-    "On this demo,we use `pretrain_language model` to extract relations.\n",
-    "We hope this demo can help you understand the process of conctruction knowledge graph and the the principles and common methods of triplet extraction.\n",
+    "In this tutorial, we use `BERT` to extract relations.\n",
+    "We hope this tutorial can help you understand the process of constructing a knowledge graph and the principles and common methods of triplet extraction.\n",
     "\n",
-    "This demo uses `Python3`.\n",
+    "This tutorial uses `Python3`.\n",
     "\n",
     "### Dataset\n",
     "In this example,we get some Chinese text to extract the triples.\n",
@@ -56,9 +56,9 @@
    "outputs": [],
    "source": [
     "# Run the neural network with pytorch and confirm whether it is installed before running\n",
-    "!pip install torch\n",
-    "!pip install matplotlib\n",
-    "!pip install transformers"
+    "!pip install deepke\n",
+    "!wget 120.27.214.45/Data/re/standard/data.tar.gz\n",
+    "!tar -xzvf data.tar.gz"
    ]
   },
   {
@@ -546,13 +546,6 @@
     "test_f1, _ = validate(0, model, test_dataloader, criterion,verbose=False)\n",
     "print(f'after {cfg.epoch} epochs, final test data macro f1: {test_f1:.4f}')"
    ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "This demo does not include parameter adjustment. Interested students can go to [deepke] by themselves( http://openkg.cn/tool/deepke )Warehouse, download and use more models:)"
-   ]
   }
  ],
  "metadata": {
@@ -560,7 +553,8 @@
    "hash": "07ee17aed077b353900b50ce6f0ef17f1492499c86f09df07de696a5c0b76ad4"
   },
   "kernelspec": {
-   "display_name": "Python 3.8.11 64-bit ('deepke': conda)",
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
    "name": "python3"
   },
   "language_info": {
diff --git a/tutorial-notebooks/re/standard/standard_re_tutorial.ipynb b/tutorial-notebooks/re/standard/standard_re_pcnn_tutorial.ipynb
similarity index 99%
rename from tutorial-notebooks/re/standard/standard_re_tutorial.ipynb
rename to tutorial-notebooks/re/standard/standard_re_pcnn_tutorial.ipynb
index f320d68..033e68c 100644
--- a/tutorial-notebooks/re/standard/standard_re_tutorial.ipynb
+++ b/tutorial-notebooks/re/standard/standard_re_pcnn_tutorial.ipynb
@@ -4,7 +4,7 @@
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "## Standard Relation Rxtraction Tutorial\n",
+    "## PCNN-based Standard Relation Extraction Tutorial\n",
     "> Tutorial author:余海阳(yuhaiyang@zju.edu.cn)\n",
     "\n",
     "In this tutorial,we use `pcnn` model to extract relations.\n",
@@ -46,9 +46,9 @@
    "outputs": [],
    "source": [
     "# Run the neural network with pytorch and confirm whether it is installed before running\n",
-    "!pip install torch\n",
-    "!pip install matplotlib\n",
-    "!pip install transformers"
+    "!pip install deepke\n",
+    "!wget 120.27.214.45/Data/re/standard/data.tar.gz\n",
+    "!tar -xzvf data.tar.gz"
    ]
   },
   {