Source: https://blog.csdn.net/2301_76574743/article/details/144358330

Functions in PyTorch

import torch
torch.seed()
598102448800
torch.manual_seed(12)
<torch._C.Generator at 0x2ca8003b530>
torch.initial_seed()
12
torch.get_rng_state()
tensor([12,  0,  0,  ...,  0,  0,  0], dtype=torch.uint8)
rng_state1 = torch.get_rng_state()
print(rng_state1)
torch.set_rng_state(rng_state1 * 2)
rng_state2 = torch.get_rng_state()
print(rng_state2)
tensor([12,  0,  0,  ...,  0,  0,  0], dtype=torch.uint8)
tensor([24,  0,  0,  ...,  0,  0,  0], dtype=torch.uint8)
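
torch.seed() sets and returns a fresh non-deterministic seed, torch.manual_seed fixes the seed, torch.initial_seed reads it back, and get_rng_state/set_rng_state snapshot and restore the generator. Because the generator state is fully determined by the seed, re-seeding with the same value reproduces the same draws; a minimal sketch:

# Re-seeding with the same value makes random draws reproducible
torch.manual_seed(12)
t1 = torch.rand(3)
torch.manual_seed(12)
t2 = torch.rand(3)
print(torch.equal(t1, t2))  # True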

# Differentiation basics
import torch
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
a = torch.add(x, w)
b = torch.add(w, 1)
y = torch.mul(a, b)
y.backward()
print(w.grad)
tensor([5.])
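
Here y = (x + w)(w + 1), so by the product and chain rules ∂y/∂w = (w + 1) + (x + w) = b + a = 2 + 3 = 5, exactly the value that lands in w.grad. A quick check of the same hand-computed number through torch.autograd.grad:

# Verify dy/dw = a + b = 5 without mutating w.grad
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
y = (x + w) * (w + 1)
print(torch.autograd.grad(y, w))  # (tensor([5.]),)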
import torch
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
a = torch.add(x, w)
b = torch.add(w, 1)
y0 = torch.mul(a, b)
y1 = torch.add(a, b)
loss = torch.cat([y0, y1], dim=0)
grad_t = torch.tensor([1., 2.])
loss.backward(gradient=grad_t)
print(w.grad)
tensor([9.])
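
Because loss is a vector, backward needs a gradient argument and computes the vector–Jacobian product: w.grad = 1·∂y0/∂w + 2·∂y1/∂w = 1·5 + 2·2 = 9. The same number falls out of two separate scalar backward calls, since gradients accumulate; a sketch:

w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
a = x + w
b = w + 1
(a * b).backward(retain_graph=True)  # contributes 1 * dy0/dw = 5
(2 * (a + b)).backward()             # contributes 2 * dy1/dw = 4
print(w.grad)                        # tensor([9.])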
import torch
x = torch.tensor([3.], requires_grad=True)
y = torch.pow(x, 2)
grad1 = torch.autograd.grad(y, x, create_graph=True)
print(grad1)
grad2 = torch.autograd.grad(grad1[0], x)
print(grad2)
(tensor([6.], grad_fn=<MulBackward0>),)
(tensor([2.]),)
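
With y = x², the first derivative is 2x = 6 at x = 3 and the second is the constant 2. create_graph=True records the graph of the first autograd.grad call so that it can itself be differentiated. The same pattern works for higher powers; an assumed example with y = x³, where dy/dx = 3x² = 27 and d²y/dx² = 6x = 18 at x = 3:

x = torch.tensor([3.], requires_grad=True)
y = x ** 3
g1 = torch.autograd.grad(y, x, create_graph=True)
g2 = torch.autograd.grad(g1[0], x)
print(g1[0], g2[0])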
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
for i in range(3):
    a = torch.add(x, w)
    b = torch.add(w, 1)
    y = torch.mul(a, b)
    y.backward()
    print(w.grad)
tensor([5.])
tensor([10.])
tensor([15.])
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
for i in range(3):
    a = torch.add(x, w)
    b = torch.add(w, 1)
    y = torch.mul(a, b)
    y.backward()
    print(w.grad)
    w.grad.zero_()
tensor([5.])
tensor([5.])
tensor([5.])
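
Clearing gradients by hand with grad.zero_() is what optimizers do for every parameter at once. A minimal sketch of the usual training-loop pattern (the model, data, and learning rate here are placeholders, not from the original post):

import torch.nn as nn
model = nn.Linear(4, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
inputs, target = torch.rand(8, 4), torch.rand(8, 1)
for _ in range(3):
    optimizer.zero_grad()  # same effect as grad.zero_() on each parameter
    loss = nn.functional.mse_loss(model(inputs), target)
    loss.backward()
    optimizer.step()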
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
a = torch.add(x, w)
b = torch.add(w, 1)
y = torch.mul(a, b)
print(a.requires_grad, b.requires_grad, y.requires_grad)
True True True
# w = torch.tensor([1.], requires_grad=True)
# x = torch.tensor([2.], requires_grad=True)
# a = torch.add(x, w)
# b = torch.add(w, 1)
# y = torch.mul(a, b)
# w.add_(1)  # raises RuntimeError: in-place operation on a leaf tensor that requires grad
w = torch.tensor([1.], requires_grad=True)
x = torch.tensor([2.], requires_grad=True)
a = torch.add(x, w)
b = torch.add(w, 1)
y = torch.mul(a, b)
w = w + 1  # no error: rebinds w to a new tensor
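
w.add_(1) fails because autograd forbids in-place modification of a leaf tensor that requires grad, while w = w + 1 merely rebinds the name to a new tensor. When a leaf really must be updated in place, e.g. a manual gradient-descent step, the standard idiom is to do it under torch.no_grad(); a sketch:

w = torch.tensor([1.], requires_grad=True)
y = (w + 1) ** 2
y.backward()             # dy/dw = 2 * (w + 1) = 4
with torch.no_grad():
    w -= 0.1 * w.grad    # in-place update, invisible to autograd
print(w)                 # tensor([0.6000], requires_grad=True)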
a = torch.tensor([1])
print(id(a), a)
# allocates a new memory address
a = a + torch.tensor([1])  
print(id(a), a)
# in-place operation: the address is unchanged
a += torch.tensor([1])  
print(id(a), a)
3068794948112 tensor([1])
3068845027792 tensor([2])
3068845027792 tensor([3])
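
The augmented assignment a += t lowers to the in-place method a.add_(t), which is why the address stays the same (shown here with a fresh tensor t so a above is untouched):

t = torch.tensor([1])
addr = id(t)
t.add_(torch.tensor([1]))  # explicit in-place equivalent of t += ...
print(id(t) == addr, t)    # True tensor([2])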

# PyTorch statistical functions
print(torch.prod(a))
tensor(3)
import torch
a = torch.rand(2,2)
print(a)
tensor([[0.0157, 0.6872],[0.8758, 0.0657]])
a1 = torch.sum(a)
a2 = torch.sum(a, dim=(0, 1))
a3 = torch.sum(a, dim=0)
a4 = torch.sum(a, dim=1)
print(a1)
print(a2)
print(a3)
print(a4)
tensor(1.6444)
tensor(1.6444)
tensor([0.8915, 0.7529])
tensor([0.7029, 0.9415])
a5 = torch.sum(a, dim=(0, 1), keepdim=True)
a6 = torch.sum(a, dim=(0, ), keepdim=True)
a7 = torch.sum(a, dim=(1, ), keepdim=True)
print(a5)
print(a6)
print(a7)
tensor([[1.6444]])
tensor([[0.8915, 0.7529]])
tensor([[0.7029],[0.9415]])
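
keepdim=True keeps the reduced axis with size 1, so the result broadcasts cleanly against the original tensor; that is what makes it useful in practice, e.g. for row-wise normalization (a small sketch with a fresh tensor m):

m = torch.rand(2, 2)
row_sums = torch.sum(m, dim=1, keepdim=True)  # shape (2, 1)
normalized = m / row_sums                     # broadcasts across columns
print(torch.sum(normalized, dim=1))           # each row now sums to 1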
a8 = torch.mean(a)
a9 = torch.mean(a, dim=(0, 1))
a10 = torch.mean(a, dim=0)
a11 = torch.mean(a, dim=1)
print(a8)
print(a9)
print(a10)
print(a11)
tensor(0.4111)
tensor(0.4111)
tensor([0.4457, 0.3765])
tensor([0.3514, 0.4708])
a12 = torch.mean(a, dim=(0, 1), keepdim=True)
a13 = torch.mean(a, dim=(0, ), keepdim=True)
a14 = torch.mean(a, dim=(1, ), keepdim=True)
print(a12)
print(a13)
print(a14)
tensor([[0.4111]])
tensor([[0.4457, 0.3765]])
tensor([[0.3514],[0.4708]])
a15 = torch.max(a)
a16 = torch.max(a, dim=0)
a17 = torch.max(a, dim=1)
print(a15)
print(a16)
print(a17)
tensor(0.8758)
torch.return_types.max(
values=tensor([0.8758, 0.6872]),
indices=tensor([1, 0]))
torch.return_types.max(
values=tensor([0.6872, 0.8758]),
indices=tensor([1, 0]))
a18 = torch.max(a, 0, keepdim=True)
a19 = torch.max(a, 1, keepdim=True)
print(a18)
print(a19)
torch.return_types.max(
values=tensor([[0.8758, 0.6872]]),
indices=tensor([[1, 0]]))
torch.return_types.max(
values=tensor([[0.6872],[0.8758]]),
indices=tensor([[1],[0]]))
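
torch.max along a dimension returns a (values, indices) named tuple; when only the positions are needed, torch.argmax returns just the index part:

values, indices = torch.max(a, dim=1)
print(indices)                 # tensor([1, 0]), as above
print(torch.argmax(a, dim=1))  # the same indices without the values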
a20 = torch.min(a)
a21 = torch.min(a, dim=0)
a22 = torch.min(a, dim=1)
print(a20)
print(a21)
print(a22)
tensor(0.0157)
torch.return_types.min(
values=tensor([0.0157, 0.0657]),
indices=tensor([0, 1]))
torch.return_types.min(
values=tensor([0.0157, 0.0657]),
indices=tensor([0, 1]))
a23 = torch.min(a, 0, keepdim=True)
a24 = torch.min(a, 1, keepdim=True)
print(a23)
print(a24)
torch.return_types.min(
values=tensor([[0.0157, 0.0657]]),
indices=tensor([[0, 1]]))
torch.return_types.min(
values=tensor([[0.0157],[0.0657]]),
indices=tensor([[0],[1]]))
import torch
a = torch.rand(2,2)
print(a)
tensor([[0.8494, 0.1956],[0.3784, 0.7644]])
print(torch.median(a))
print(torch.median(a, 1))
tensor(0.3784)
torch.return_types.median(
values=tensor([0.1956, 0.3784]),
indices=tensor([1, 0]))
torch.median(a, 1, keepdim=True)
torch.return_types.median(
values=tensor([[0.1956],[0.3784]]),
indices=tensor([[1],[0]]))
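
Note that for an even number of elements torch.median returns the lower of the two middle values rather than their average; torch.quantile gives the interpolated median:

t = torch.tensor([1., 2., 3., 4.])
print(torch.median(t))         # tensor(2.), the lower middle value
print(torch.quantile(t, 0.5))  # tensor(2.5000), the interpolated median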
print(torch.mode(a))
print(torch.mode(a, 0))
torch.return_types.mode(
values=tensor([0.1956, 0.3784]),
indices=tensor([1, 0]))
torch.return_types.mode(
values=tensor([0.3784, 0.1956]),
indices=tensor([1, 0]))
torch.mode(a, 1, keepdim=True)
torch.return_types.mode(
values=tensor([[0.1956],[0.3784]]),
indices=tensor([[1],[0]]))
torch.var(a, 1)
tensor([0.2137, 0.0745])
torch.var(a, 1, unbiased=False)
tensor([0.1069, 0.0372])
torch.var(a, 1, unbiased=False, keepdim=True)
tensor([[0.1069],[0.0372]])
torch.std(a, 1)
tensor([0.4623, 0.2729])
torch.std(a, 1, unbiased=False)
tensor([0.3269, 0.1930])
torch.std(a, 1, unbiased=False, keepdim=True)
tensor([[0.3269],[0.1930]])
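
unbiased=True (the default) applies Bessel's correction, dividing the sum of squared deviations by n - 1 instead of n; with n = 2 per row the unbiased variance is exactly double the biased one, which matches the outputs above. A quick check of both formulas:

t = torch.tensor([1., 2., 3., 4.])
m = torch.mean(t)
print(torch.sum((t - m) ** 2) / (len(t) - 1))  # 1.6667, matches torch.var(t)
print(torch.sum((t - m) ** 2) / len(t))        # 1.2500, matches torch.var(t, unbiased=False)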

# Matrix operations
import torch
a = torch.rand(3,4)
b = torch.rand(4)
print(a)
print(b)
tensor([[0.4101, 0.5598, 0.2409, 0.1562],[0.9438, 0.2779, 0.7730, 0.2810],[0.7016, 0.2579, 0.4324, 0.5111]])
tensor([0.4659, 0.5240, 0.9565, 0.0216])
print(a+b)
tensor([[0.8760, 1.0837, 1.1973, 0.1779],[1.4096, 0.8019, 1.7295, 0.3026],[1.1675, 0.7819, 1.3889, 0.5328]])
print(torch.add(a,b))
tensor([[0.8760, 1.0837, 1.1973, 0.1779],[1.4096, 0.8019, 1.7295, 0.3026],[1.1675, 0.7819, 1.3889, 0.5328]])
c = torch.Tensor(3,4)
print(torch.add(a,b,out=c))
tensor([[0.8760, 1.0837, 1.1973, 0.1779],[1.4096, 0.8019, 1.7295, 0.3026],[1.1675, 0.7819, 1.3889, 0.5328]])
print(b.add(a))
tensor([[0.8760, 1.0837, 1.1973, 0.1779],[1.4096, 0.8019, 1.7295, 0.3026],[1.1675, 0.7819, 1.3889, 0.5328]])
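
b has shape (4,) while a has shape (3, 4), so every elementwise operator here broadcasts b across the rows of a. A quick shape check with fresh tensors (named m and v so the values of a and b above stay untouched):

m = torch.rand(3, 4)
v = torch.rand(4)
print((m + v).shape)                           # torch.Size([3, 4])
print(torch.equal(m + v, m + v.expand(3, 4)))  # True: broadcasting == explicit expansion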
print(a-b)
tensor([[-0.0558,  0.0358, -0.7156,  0.1346],[ 0.4779, -0.2461, -0.1835,  0.2594],[ 0.2358, -0.2661, -0.5240,  0.4895]])
print(torch.sub(a,b))
tensor([[-0.0558,  0.0358, -0.7156,  0.1346],[ 0.4779, -0.2461, -0.1835,  0.2594],[ 0.2358, -0.2661, -0.5240,  0.4895]])
c = torch.Tensor(3,4)
print(torch.sub(a,b,out=c))
tensor([[-0.0558,  0.0358, -0.7156,  0.1346],[ 0.4779, -0.2461, -0.1835,  0.2594],[ 0.2358, -0.2661, -0.5240,  0.4895]])
print(b.sub(a))
tensor([[ 0.0558, -0.0358,  0.7156, -0.1346],[-0.4779,  0.2461,  0.1835, -0.2594],[-0.2358,  0.2661,  0.5240, -0.4895]])
print(a*b)
tensor([[0.1911, 0.2933, 0.2304, 0.0034],[0.4397, 0.1456, 0.7394, 0.0061],[0.3269, 0.1351, 0.4136, 0.0111]])
print(torch.mul(a,b))
tensor([[0.1911, 0.2933, 0.2304, 0.0034],[0.4397, 0.1456, 0.7394, 0.0061],[0.3269, 0.1351, 0.4136, 0.0111]])
c = torch.Tensor(3,4)
print(torch.mul(a,b,out=c))
tensor([[0.1911, 0.2933, 0.2304, 0.0034],[0.4397, 0.1456, 0.7394, 0.0061],[0.3269, 0.1351, 0.4136, 0.0111]])
print(b.mul(a))
tensor([[0.1911, 0.2933, 0.2304, 0.0034],[0.4397, 0.1456, 0.7394, 0.0061],[0.3269, 0.1351, 0.4136, 0.0111]])
print(a/b)
tensor([[ 0.8803,  1.0682,  0.2518,  7.2180],[ 2.0258,  0.5303,  0.8082, 12.9823],[ 1.5061,  0.4922,  0.4521, 23.6140]])
print(torch.div(a,b))
tensor([[ 0.8803,  1.0682,  0.2518,  7.2180],[ 2.0258,  0.5303,  0.8082, 12.9823],[ 1.5061,  0.4922,  0.4521, 23.6140]])
c = torch.Tensor(3,4)
print(torch.div(a,b,out=c))
tensor([[ 0.8803,  1.0682,  0.2518,  7.2180],[ 2.0258,  0.5303,  0.8082, 12.9823],[ 1.5061,  0.4922,  0.4521, 23.6140]])
print(a.div(b))
tensor([[ 0.8803,  1.0682,  0.2518,  7.2180],[ 2.0258,  0.5303,  0.8082, 12.9823],[ 1.5061,  0.4922,  0.4521, 23.6140]])
print(a.pow(2))
print(a**2)
tensor([[0.1682, 0.3133, 0.0580, 0.0244],[0.8907, 0.0772, 0.5975, 0.0790],[0.4923, 0.0665, 0.1870, 0.2612]])
tensor([[0.1682, 0.3133, 0.0580, 0.0244],[0.8907, 0.0772, 0.5975, 0.0790],[0.4923, 0.0665, 0.1870, 0.2612]])
print(a.sqrt())
print(a.rsqrt())
tensor([[0.6404, 0.7482, 0.4908, 0.3953],[0.9715, 0.5271, 0.8792, 0.5301],[0.8376, 0.5078, 0.6576, 0.7149]])
tensor([[1.5615, 1.3366, 2.0376, 2.5300],[1.0294, 1.8970, 1.1374, 1.8865],[1.1938, 1.9691, 1.5207, 1.3987]])
print(torch.log2(a))
print(torch.log10(a))
tensor([[-1.2859, -0.8371, -2.0537, -2.6782],[-0.0835, -1.8475, -0.3714, -1.8313],[-0.5112, -1.9551, -1.2094, -0.9683]])
tensor([[-0.3871, -0.2520, -0.6182, -0.8062],[-0.0251, -0.5561, -0.1118, -0.5513],[-0.1539, -0.5885, -0.3641, -0.2915]])
a = torch.tensor(3.1415)
print(a.floor())
print(a.ceil())
print(a.round())
tensor(3.)
tensor(4.)
tensor(3.)
print(a.trunc())
print(a.frac())
tensor(3.)
tensor(0.1415)
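
trunc and frac split a value into its integer and fractional parts, so adding them back recovers the original; both parts keep the sign of the input:

a = torch.tensor(3.1415)
print(a.trunc() + a.frac())       # tensor(3.1415)
print((-a).trunc(), (-a).frac())  # tensor(-3.) tensor(-0.1415)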