Python Learning 26

Exercise 37:
Problem: sort 10 numbers.

Program analysis: use selection sort. On the first pass, compare the first element with the remaining 9, pick the smallest of the ten, and swap it into the first position; on the next pass do the same with the second element and the remaining 8, and so on. For example, with the input used below, the first pass swaps the minimum value 1 into position 0 in place of the 4.

Code:

if __name__ == '__main__':
    N = 10
    print('Please enter 10 numbers\n')
    l = []
    for i in range(N):
        l.append(int(input('Please enter a number\n')))
    print()
    # Echo the numbers in the order they were entered
    for i in range(N):
        print(l[i])
    print()
    # Selection sort: each pass finds the smallest element in l[i:]
    # and swaps it into position i
    for i in range(N - 1):
        min_idx = i  # use min_idx rather than shadowing the built-in min()
        for j in range(i + 1, N):
            if l[min_idx] > l[j]:
                min_idx = j
        l[i], l[min_idx] = l[min_idx], l[i]
        # Show the list after each pass
        print('After this pass:')
        for k in range(N):  # separate loop variable so the outer i is not reused
            print(l[k])
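
The same per-pass logic can also be wrapped in a reusable function; here is a minimal sketch (the function name selection_sort and the inline test list are my own additions, not part of the original exercise):

def selection_sort(nums):
    """Sort a list in place using selection sort and return it."""
    n = len(nums)
    for i in range(n - 1):
        min_idx = i
        for j in range(i + 1, n):
            if nums[j] < nums[min_idx]:
                min_idx = j
        nums[i], nums[min_idx] = nums[min_idx], nums[i]
    return nums

print(selection_sort([4, 8, 9, 6, 32, 1, 12, 10, 23, 32]))
# [1, 4, 6, 8, 9, 10, 12, 23, 32, 32]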

Result:

Please enter 10 numbers

Please enter a number
4
Please enter a number
8
Please enter a number
9
Please enter a number
6
Please enter a number
32
Please enter a number
1
Please enter a number
12
Please enter a number
10
Please enter a number
23
Please enter a number
32

4
8
9
6
32
1
12
10
23
32

After this pass:
1
8
9
6
32
4
12
10
23
32
After this pass:
1
4
9
6
32
8
12
10
23
32
After this pass:
1
4
6
9
32
8
12
10
23
32
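
The transcript above shows only the first three passes. As a quick cross-check of the final order, Python's built-in sorted() produces the same result (the built-in is used here only for verification; the exercise itself asks for a hand-written sort):

nums = [4, 8, 9, 6, 32, 1, 12, 10, 23, 32]
print(sorted(nums))  # [1, 4, 6, 8, 9, 10, 12, 23, 32, 32]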
