TensorFlow Code Excerpts


import pathlib

import cv2
import tensorflow as tf

# img_height, img_width and batch_size are assumed to be defined earlier.
data_dir = pathlib.Path("D://document//Datas//1233")
train_dataset = tf.keras.preprocessing.image_dataset_from_directory(
    data_dir,
    subset=None,
    image_size=(img_height, img_width),
    batch_size=batch_size)
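The call above loads every image as training data (subset=None). As a sketch, the same API can also carve out a validation split; the 0.2 fraction and the seed below are placeholder values, not from the original code:

# Sketch: reserve 20% of the images for validation (values are illustrative).
val_dataset = tf.keras.preprocessing.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="validation",
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size)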

data_augmentation = tf.keras.Sequential([
    tf.keras.layers.experimental.preprocessing.RandomFlip('horizontal'),
    tf.keras.layers.experimental.preprocessing.RandomRotation(0.08),
])
class_names = train_dataset.class_names

# Write two randomly augmented copies of every training image back into the data directory.
t = 10
for images, labels in train_dataset:
    for image in images:
        for _ in range(2):
            t = t + 1
            # training=True makes the random layers apply outside of model.fit().
            augmented_image = data_augmentation(image, training=True)
            path = "D://document//Datas//1233//" + str(t) + ".jpeg"
            # OpenCV expects BGR, so reverse the channel order before writing.
            cv2.imwrite(path, augmented_image.numpy().astype("uint8")[..., ::-1])
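Writing augmented copies to disk is one option; a minimal alternative sketch (using the same train_dataset and data_augmentation defined above) applies the augmentation on the fly inside the tf.data pipeline instead:

# Sketch: run the augmentation inside the input pipeline instead of saving files to disk.
AUTOTUNE = tf.data.experimental.AUTOTUNE
augmented_dataset = (train_dataset
                     .map(lambda x, y: (data_augmentation(x, training=True), y),
                          num_parallel_calls=AUTOTUNE)
                     .prefetch(AUTOTUNE))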
import seaborn as sns

model = tf.keras.models.load_model('D://document//Datas//Problem_C_Data//1model.h5')  

data_dirs = "D://document//Datas//Problem_C_Data//Unprocessed"
train_dataset2 = tf.keras.preprocessing.image_dataset_from_directory(
    data_dirs,
    subset=None,
    image_size=(img_height, img_width),
    batch_size=100)

image_batch, label_batch = train_dataset2.as_numpy_iterator().next()
predictions = model.predict_on_batch(image_batch).flatten()
#predictions = model.predict(test_dataset)
predictions = tf.nn.sigmoid(predictions)
y_data = []
for i in predictions.numpy():
    y_data.append(i)
# Ground-truth labels for the same batch, used as y_label in the ROC code below.
y_label = label_batch
    





from sklearn.metrics import roc_curve, auc
import matplotlib.pyplot as plt

fpr, tpr, threshold = roc_curve(y_label, y_data)  # compute the false and true positive rates
roc_auc = auc(fpr, tpr)  # compute the AUC value
 
lw = 10
plt.figure(figsize=(10, 10))
plt.plot(fpr, tpr, color='darkorange',
         lw=lw, label='ROC curve (area = %0.2f)' % roc_auc)  # FPR on the x-axis, TPR on the y-axis
plt.plot([0, 1], [0, 1], color='navy', lw=lw, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic example')
plt.legend(loc="lower right")
plt.show()
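The fpr/tpr/threshold arrays returned by roc_curve can also suggest an operating point. A small sketch (my addition, using Youden's J statistic) that picks the threshold maximizing TPR minus FPR:

import numpy as np

# Sketch: choose the threshold that maximizes TPR - FPR (Youden's J).
best_idx = np.argmax(tpr - fpr)
print("best threshold = %.3f (TPR = %.3f, FPR = %.3f)"
      % (threshold[best_idx], tpr[best_idx], fpr[best_idx]))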







data_dirs = "D://document//Datas//Problem_C_Data//Unverified"
train_dataset2 = tf.keras.preprocessing.image_dataset_from_directory(
    data_dirs,
    subset=None,
    image_size=(img_height, img_width),
    batch_size=100)
class_names = train_dataset.class_names

image_batch, label_batch = train_dataset2.as_numpy_iterator().next()
predictions = model.predict_on_batch(image_batch).flatten()
#predictions = model.predict(test_dataset)
predictions = tf.nn.sigmoid(predictions)
t = 0
predictions = tf.where(predictions < 0.5, 0, 1)  # threshold the sigmoid outputs at 0.5
for i in predictions.numpy():
    print(t)
    print("This image most likely belongs to {}, and it really is {}.".format(class_names[i], class_names[label_batch[t]]))
    t = t + 1
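The per-image printout can be summarized as a confusion matrix; a short sketch using the seaborn import from earlier (it assumes label_batch and the thresholded predictions computed in this block):

from sklearn.metrics import confusion_matrix

# Sketch: confusion matrix of the batch, drawn as a seaborn heatmap.
cm = confusion_matrix(label_batch, predictions.numpy())
plt.figure(figsize=(6, 6))
sns.heatmap(cm, annot=True, fmt='d', cmap='Blues',
            xticklabels=class_names, yticklabels=class_names)
plt.xlabel('Predicted label')
plt.ylabel('True label')
plt.show()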
import webbrowser as wb
from pyecharts import options as opts
from pyecharts.charts import Geo
from pyecharts.globals import GeoType

# Positive_ID is a list of [longitude, latitude] pairs collected earlier.
geo = Geo().add_schema(maptype="美国")

# Register each positive sighting as a named custom coordinate.
t = 0
for i in Positive_ID:
    geo.add_coordinate('a' + str(t), i[0], i[1])
    t = t + 1
# The data-pair names must match the registered coordinate names ('a0', 'a1', ...).
attr = [('a' + str(i), 1) for i in range(t)]

geo.add('', attr, type_=GeoType.EFFECT_SCATTER, symbol_size=2)
geo.set_series_opts(label_opts=opts.LabelOpts(is_show=False))
geo.set_global_opts(visualmap_opts=opts.VisualMapOpts(), title_opts=opts.TitleOpts(title="Geo-基本示例"))
render_path = geo.render("geo_base.html")
wb.open_new_tab(render_path)
The data pairs passed to pyecharts take the form ['name', [longitude, latitude, value]], for example ['鄂尔多斯', [109.781327, 39.608266, 12]]; the Positive_ID3 list printed further below follows the same shape.

import webbrowser as wb
from pyecharts import options as opts
from pyecharts.charts import BMap
from pyecharts.globals import ChartType

# ak is a Baidu Maps developer key defined elsewhere; Positive_ID3 is the data list shown below.
(
    BMap(init_opts=opts.InitOpts(width="1400px", height="800px"))
    .add(
        type_=ChartType.SCATTER,
        series_name="pm2.5",
        symbol="roundRect",
        data_pair=Positive_ID3,
        label_opts=opts.LabelOpts(formatter="{b}"),
        itemstyle_opts=opts.ItemStyleOpts(color="purple"),
    )
    .add_schema(
        baidu_ak=ak,
        center=[-121.879440, 47.581927],
        zoom=5,
        is_roam=True,
    )
    .set_global_opts(
        title_opts=opts.TitleOpts(title="BMap-热力图"), visualmap_opts=opts.VisualMapOpts()
    )
    .set_series_opts(
        # Turn labels off so they do not clutter the mark points.
        label_opts=opts.LabelOpts(is_show=False),
        markpoint_opts=opts.MarkPointOpts(
            data=[
                # Place a mark point at a specific coordinate.
                opts.MarkPointItem(coord=[-122.386562, 45.968985], value='hi'),
            ],
        ),
    )
    .render("air_quality_baidu_map.html")
)
wb.open_new_tab("air_quality_baidu_map.html")


[['0', [-120.678818, 47.624576, 2]],
 ['1', [-120.67867, 47.62445, 2]],
 ['2', [-120.333437, 48.361534, 2]],
 ['3', [-120.700545, 47.293294, 2]],
 ['4', [-122.69302, 47.90459, 2]],
 ['5', [-122.387154, 47.56473, 2]],
 ['6', [-122.164895, 47.14447, 2]],
 ['7', [-122.571348, 47.148848, 2]],
 ['8', [-121.549, 47.80399, 2]],
 ['9', [-121.549, 47.80399, 2]],
 ['10', [-122.566734, 47.145673, 2]],
 ['11', [-122.72989, 48.997938, 2]],
 ['12', [-122.69275, 47.65171, 2]],
 ['13', [-122.50978, 47.69012, 2]],
 ['14', [-122.386562, 45.968985, 2]]]

import time
import http.client
import hashlib
import json
import urllib.parse
import random
def baidu_translate(q,fromLang,toLang):
    appid = '**************'  # fill in your appid
    secretKey = '**************'  # fill in your secret key
    httpClient = None
    myurl = '/api/trans/vip/translate'
    # fromLang = 'auto'   # source language
    # toLang = 'en'   # target language
    salt = random.randint(32768, 65536)
    sign = appid + q + str(salt) + secretKey
    sign = hashlib.md5(sign.encode()).hexdigest()
    myurl = (myurl + '?appid=' + appid + '&q=' + urllib.parse.quote(q) + '&from=' + fromLang
             + '&to=' + toLang + '&salt=' + str(salt) + '&sign=' + sign)
    try:
        time.sleep(1)
        httpClient = http.client.HTTPConnection('api.fanyi.baidu.com')
        httpClient.request('GET', myurl)
        # response is an http.client.HTTPResponse object
        response = httpClient.getresponse()
        result_all = response.read().decode("utf-8")
        result = json.loads(result_all)
        dst = str(result["trans_result"][0]["dst"])
        return dst
    except Exception as e:
        print (e)
    finally:
        if httpClient:
            httpClient.close()
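A quick sanity check of the function (it needs a valid appid/secretKey; the sample sentence is only an illustration):

# Example round trip, mirroring how the function is used below.
jp_text = baidu_translate("One dead hornet found near the porch", "en", "jp")
print(jp_text)
print(baidu_translate(jp_text, "jp", "en"))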






path1 = "D://document//Datas//tteexxt//"
if not os.path.exists(path1):
    os.makedirs(path1)
t = 22
for j in sours_date[((sours_date['Lab Status'] == 'Positive ID')&(sours_date['Notes'] != ' '))]['Notes']:
    print(t)
    print(j)
    results = baidu_translate(j,"en","jp")
    print(results)
    results = baidu_translate(results,"jp","en")
    print(results)
    path2 =  path1+str(t)+".txt"
    fh = open(path2, 'w', encoding='utf-8')
    fh.write(results)
    fh.close()
    t = t + 1
Sample output of the loop (index, original note, Japanese translation, back-translated English):

22
One dead wasp seen in Blaine, and suspect flying nearby
ブレーンで見られる1匹の枯れたスズメバチ
One dead hornet found in branes
23
Hornet specimen sent to WSU
スズメバチの標本
Hornet sting
24
This was the colony that was found and destroyed in Nanaimo, BC Sep 18, 2019. There were ~150 live hornets and ~600 unhatched eggs & larvae
これは、紀元前19年9月18日のNanaimoで発見され破壊された植民地である。150匹の生きたスズメバチと
This is a colony discovered and destroyed in Nanaimo on September 18, 19 BC. 150 living Hornets and
25
Specimen collected by citizen scientist and reported to provincial government.
市民科学者によって集められて、地方政府に報告される標本。
Specimens collected by citizen scientists and reported to local government.
26
Insects observed late Sept or early Oct
10月下旬か10月初めに観察された昆虫
Insects observed in late October or early October
27
Spotted at outdoor dining in Birch Bay, posted to WSDA Facebook site
バーチ湾で屋外の食事で発見されて、WSDA Facebook
Found in outdoor meals at birch Bay, wsda Facebook
28
We caught and killed 2, 2nd on September 25th
我々は9月25日に2、2日を捕らえ殺した
We captured and killed two or two days on September
29
doorbell cam image
ドアベルカムイメージ
Door bell cam image
30
Dead hornet in light
デッドホーネット
Dead Hornet
31
Live hornet captured by WSDA staff
WSDAスタッフによって捕えられる生きているスズメバチ
Live hornet captured by wsda staff
32
WSDA submitted for citizen report
WSDAの市民報告書
Citizens' Report