commit 115f6ca852 (parent 1f9041d9b3)
Author: tlk-dsg
Date:   2021-09-28 16:59:43 +08:00

13 changed files with 65 additions and 29 deletions

View File

@@ -1,7 +1,7 @@
 from setuptools import setup, find_packages
 setup(
     name='deepke',  # name of the built package
-    version='0.2.27',  # version number
+    version='0.2.40',  # version number
     keywords=["pip", "RE","NER","AE"],  # keywords
     description='DeepKE 是基于 Pytorch 的深度学习中文关系抽取处理套件。',  # description
     long_description="client",  # long description
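
Note: the only change in setup.py is the version bump from 0.2.27 to 0.2.40. A minimal sketch of confirming which release is actually installed, assuming Python 3.8+ (where importlib.metadata is in the standard library) and that the package has been installed from PyPI or from source:

# Check the installed deepke release (assumes Python 3.8+).
from importlib.metadata import version, PackageNotFoundError

try:
    # Expected to print "0.2.40" once this commit is packaged and installed.
    print(version("deepke"))
except PackageNotFoundError:
    print("deepke is not installed in this environment")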

View File

@@ -1,6 +1,9 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 import torch.nn as nn
 from . import BasicModule
-from ..module import Embedding, RNN
+from module import Embedding, RNN
 class BiLSTM(BasicModule):
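
The same edit repeats across the model files below: package-relative imports (from ..module import ...) are replaced by top-level imports (from module import ...), and the parent directory is appended to sys.path so that those top-level names resolve even when a file is executed directly instead of being imported as part of the package. A rough sketch of why the sys.path.append line makes that work; the directory layout shown here is an assumption inferred from the diff, not confirmed by it:

# Sketch: how appending the parent directory to sys.path lets
# "from module import ..." resolve. Assumed layout:
#
#   <parent>/
#       module/        <- provides Embedding, RNN, CNN, GCN, ...
#       utils/         <- provides seq_len_to_mask, to_one_hot, ...
#       model/
#           BiLSTM.py  <- the file being edited
#
import os
import sys

# __file__ is .../model/BiLSTM.py; "../" climbs to <parent>, which holds the
# top-level "module" and "utils" packages referenced by the new imports.
parent_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../"))
if parent_dir not in sys.path:  # avoid stacking duplicates on repeated import
    sys.path.append(parent_dir)

# With <parent> on sys.path, these absolute imports work whether the file is
# run as a script or imported by training code (commented out here because
# the packages only exist inside the repository):
# from module import Embedding, RNN
# from utils import seq_len_to_mask

The trade-off is that the modules no longer depend on being imported as a package, at the cost of mutating sys.path at import time.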

View File

@@ -1,9 +1,12 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 import torch
 from . import BasicModule
-from ..module import Embedding, CNN
-from ..module import Capsule as CapsuleLayer
+from module import Embedding, CNN
+from module import Capsule as CapsuleLayer
-from ..utils import seq_len_to_mask, to_one_hot
+from utils import seq_len_to_mask, to_one_hot
 class Capsule(BasicModule):

View File

@@ -1,10 +1,13 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 import torch
 import torch.nn as nn
 from . import BasicModule
-from ..module import Embedding
-from ..module import GCN as GCNBlock
+from module import Embedding
+from module import GCN as GCNBlock
-from ..utils import seq_len_to_mask
+from utils import seq_len_to_mask
 class GCN(BasicModule):

View File

@@ -1,9 +1,12 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 from torch import nn
 from . import BasicModule
-from ..module import RNN
+from module import RNN
 from transformers import BertModel
-from ..utils import seq_len_to_mask
+from utils import seq_len_to_mask
 class LM(BasicModule):

View File

@@ -1,10 +1,13 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
 from . import BasicModule
-from ..module import Embedding, CNN
+from module import Embedding, CNN
-from ..utils import seq_len_to_mask
+from utils import seq_len_to_mask
 class PCNN(BasicModule):

View File

@@ -1,9 +1,12 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 import torch.nn as nn
 from . import BasicModule
-from ..module import Embedding
-from ..module import Transformer as TransformerBlock
+from module import Embedding
+from module import Transformer as TransformerBlock
-from ..utils import seq_len_to_mask
+from utils import seq_len_to_mask
 class Transformer(BasicModule):

View File

@@ -1,6 +1,9 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 import torch.nn as nn
 from . import BasicModule
-from ..module import Embedding, RNN
+from module import Embedding, RNN
 class BiLSTM(BasicModule):

View File

@@ -1,9 +1,12 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 import torch
 from . import BasicModule
-from ..module import Embedding, CNN
-from ..module import Capsule as CapsuleLayer
+from module import Embedding, CNN
+from module import Capsule as CapsuleLayer
-from ..utils import seq_len_to_mask, to_one_hot
+from utils import seq_len_to_mask, to_one_hot
 class Capsule(BasicModule):

View File

@@ -1,9 +1,12 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 import torch
 import torch.nn as nn
 from . import BasicModule
-from ..module import Embedding
-from ..module import GCN as GCNBlock
-from ..utils import seq_len_to_mask
+from module import Embedding
+from module import GCN as GCNBlock
+from utils import seq_len_to_mask
 class GCN(BasicModule):

View File

@@ -1,8 +1,11 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 from torch import nn
 from . import BasicModule
-from ..module import RNN
+from module import RNN
 from transformers import BertModel
-from ..utils import seq_len_to_mask
+from utils import seq_len_to_mask
 class LM(BasicModule):

View File

@@ -1,9 +1,12 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
 from . import BasicModule
-from ..module import Embedding, CNN
-from ..utils import seq_len_to_mask
+from module import Embedding, CNN
+from utils import seq_len_to_mask
 class PCNN(BasicModule):

View File

@@ -1,8 +1,11 @@
+import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
 import torch.nn as nn
 from . import BasicModule
-from ..module import Embedding
-from ..module import Transformer as TransformerBlock
-from ..utils import seq_len_to_mask
+from module import Embedding
+from module import Transformer as TransformerBlock
+from utils import seq_len_to_mask
 class Transformer(BasicModule):