Graph Neural Networks: DGL Framework Hands-on Test Notes

import torch
import dgl

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
u, v = torch.tensor([0, 0, 0, 1]).to(device), torch.tensor([1, 2, 3, 3]).to(device)
g = dgl.graph((u, v))
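# --- Added note (not part of the original log): the graph built from GPU ID
# --- tensors lives on that GPU; the heterograph hg further below is instead
# --- built on CPU and moved with .to(device).
print(g.device)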

# https://blog.csdn.net/Iam_Human/article/details/108398115


#bg=dgl.to_bidirected(g)
print(g)
#print(bg)

print(g.num_nodes())

print(g.num_edges())

g.ndata["x"] =torch.ones(g.num_nodes(),3).to(device)
g.ndata["y"] =torch.rand(g.num_nodes(),3).to(device)

print(g.ndata["x"][torch.tensor([0,3])])


g.edata["x"]=torch.randn(g.num_edges(),5).to(device)

print(g.edata["x"])

print(g.edata["x"][[1,2]])


print(g.edges())  # g.edges() returns the (src, dst) tensors; without the parentheses only the bound method is printed
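
# --- Added sketch (not part of the original log): a minimal round of message
# --- passing on g, assuming the node feature "x" assigned above. dgl.function
# --- provides the built-in message/reduce functions used here; "m" and "h" are
# --- just illustrative field names.
import dgl.function as fn

g.update_all(fn.copy_u("x", "m"), fn.sum("m", "h"))  # send "x" along each edge, sum messages into "h"
print(g.ndata["h"])  # each node now holds the sum of its in-neighbors' "x"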

#---------------------------
edges = torch.tensor([0, 0, 0, 1]), torch.tensor([1, 2, 3, 3])


#gx=dgl.graph(edges)
#print(gx)
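
# --- Added sketch (assumption, not in the original log): dgl.graph() infers the
# --- node count from the largest ID in `edges`; trailing isolated nodes can be
# --- kept by passing num_nodes explicitly. `gx` is just an illustrative name.
gx = dgl.graph(edges, num_nodes=6)   # nodes 4 and 5 exist but carry no edges
print(gx.num_nodes(), gx.num_edges())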


# Heterogeneous graph test below
graph_data = {
    ('drug', 'interacts', 'drug'): (torch.tensor([0, 1, 2]), torch.tensor([1, 2, 3])),  # (u, v) node ID tensors
    ('drug', 'interacts', 'gene'): (torch.tensor([5, 1]), torch.tensor([2, 3])),
    ('drug', 'treats', 'disease'): (torch.tensor([1, 2]), torch.tensor([2, 4]))
}

hg = dgl.heterograph(graph_data).to(device)

print(hg.device)
print(hg.num_edges())

print(hg.num_edges(('drug','interacts','drug')))

print(hg.num_nodes("drug"))

print(hg.etypes)
print(hg.canonical_etypes)
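
# --- Added note (not in the original log): node types can be inspected the same
# --- way as edge types; the per-type node counts are inferred from the edge lists.
print(hg.ntypes)
for nt in hg.ntypes:
    print(nt, hg.num_nodes(nt))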

print("the number of treats edge:")
print(hg.edges(etype='treats',form="all"))   #异构图中打印边数, 以 U,V的格式打印出来  ,form= 'uv' / 'eid', 'all'
print(hg.edges(etype=('drug','interacts','gene'),form="all"))   #异构图中打印边数, 以 U,V的格式打印出来  ,form= 'uv' / 'eid', 'all'
print(hg.is_multigraph)  # whether the graph contains parallel edges
print(hg)
print(hg.num_src_nodes())
print(hg.num_dst_nodes())
print(hg.in_degrees(3, ('drug', 'interacts', 'drug')))  # in-degree of node 3 under the drug-interacts-drug relation

print("find edges")
print(hg.find_edges((0, 1), etype="treats"))  # returns the (src, dst) node pairs for edge IDs 0 and 1

print(hg.is_homogeneous)  # whether the graph is homogeneous (a single node type and a single edge type)
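
# --- Added sketch (assumption, not in the original log): a heterograph can be
# --- flattened into a homogeneous graph; the per-type information is kept in
# --- the reserved fields dgl.NTYPE / dgl.ETYPE (type IDs). `homo_hg` is just an
# --- illustrative name.
homo_hg = dgl.to_homogeneous(hg)
print(homo_hg)
print(homo_hg.ndata[dgl.NTYPE])   # node-type ID of every node in the flattened graph
print(homo_hg.edata[dgl.ETYPE])   # edge-type ID of every edge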

hg.nodes["drug"].data["x"]=torch.randn(hg.num_nodes("drug"),5).to(device)  # 给节点赋值  ,见下面的输出
print(hg.nodes["drug"].data["x"])

print(hg.nodes["drug"].data["x"][0,1]) #打印  类型为drug的第一个节点的, 第二个元素[0,1] 表示节点序号为0, 元素序号为1的数值。 这里是一个向量的第二个元素,见下图
'''

tensor([[-0.1732,  0.7254,  0.7928, -1.2137, -1.0722],
        [ 0.3793, -0.5004,  0.9902,  0.2487,  1.3942],
        [-1.4447,  0.7000, -0.9142, -1.1791, -0.1519],
        [-0.5194,  1.0848, -0.4168, -0.8474, -0.8101],
        [ 0.1519, -0.4287, -1.5856, -0.8268,  1.6037],
        [ 0.8155, -0.1937,  0.7735,  2.3836, -0.3404]], device='cuda:0')
tensor(0.7254, device='cuda:0')
'''
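
# --- Added sketch (assumption, not in the original log): edge features are set
# --- per relation the same way node features are set per node type; "w" is just
# --- an illustrative feature name.
hg.edges["treats"].data["w"] = torch.randn(hg.num_edges("treats"), 1).to(device)
print(hg.edges["treats"].data["w"])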

 
