https://github.com/keras-team/keras-applications/releases/download/densenet/densenet201_weights_tf_dim_ordering_tf_kernels.h5
https://github.com/keras-team/keras-applications/releases/download/densenet/densenet201_weights_tf_dim_ordering_tf_kernels_notop.h5
import keras.layers as KL
import keras.models as KM
import keras
from keras.applications.densenet import DenseNet201
import os
# Restrict TensorFlow to GPU 1 (un-comment to enable).
# Fixed typo: was 'CUDA_DEVICES_VISIBLE', which CUDA ignores silently,
# so both GPUs were still claimed (see the device log below).
# os.environ['CUDA_VISIBLE_DEVICES'] = '1'
print(keras.__version__)

# Build a DenseNet201 backbone over a fixed 512x512 RGB input.
input_image = KL.Input(shape=[512, 512, 3], name="input_image")
# include_top=True keeps the ImageNet classification head; the run log shows
# this accepts a 512x512 input tensor (global-average pooling precedes 'fc1000').
densenet201 = DenseNet201(input_tensor=input_image, include_top=True)

# Tap the final classifier output. Other candidate cut points for a
# feature backbone: 'conv5_block1_1_conv', pooling layers, etc.
x = densenet201.get_layer('fc1000').output
# densenet201.summary()  # (typo fixed: was 'summery(0)')

inputs = [input_image]
outputs = [x]
# Wrap as a single-input / single-output model named 'ctpn'.
model = KM.Model(inputs, outputs, name='ctpn')
model.summary()
/home/hlx2/anaconda3/bin/python3.6 "/home/hlx2/OCR/keras-ctpn2 _1/densenet121/taste.py"
/home/hlx2/anaconda3/lib/python3.6/site-packages/h5py/__init__.py:34: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.
from ._conv import register_converters as _register_converters
Using TensorFlow backend.
2.2.5
WARNING:tensorflow:From /home/hlx2/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:541: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.
WARNING:tensorflow:From /home/hlx2/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:66: The name tf.get_default_graph is deprecated. Please use tf.compat.v1.get_default_graph instead.
WARNING:tensorflow:From /home/hlx2/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:4432: The name tf.random_uniform is deprecated. Please use tf.random.uniform instead.
WARNING:tensorflow:From /home/hlx2/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:190: The name tf.get_default_session is deprecated. Please use tf.compat.v1.get_default_session instead.
WARNING:tensorflow:From /home/hlx2/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:197: The name tf.ConfigProto is deprecated. Please use tf.compat.v1.ConfigProto instead.
2020-12-01 16:26:30.386109: I tensorflow/core/platform/cpu_feature_guard.cc:142] Your CPU supports instructions that this TensorFlow binary was not compiled to use: SSE4.1 SSE4.2 AVX AVX2 AVX512F FMA
2020-12-01 16:26:30.394812: I tensorflow/core/platform/profile_utils/cpu_utils.cc:94] CPU Frequency: 3299990000 Hz
2020-12-01 16:26:30.396098: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0x5638ab3a9710 executing computations on platform Host. Devices:
2020-12-01 16:26:30.396134: I tensorflow/compiler/xla/service/service.cc:175] StreamExecutor device (0): <undefined>, <undefined>
2020-12-01 16:26:30.397649: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcuda.so.1
2020-12-01 16:26:30.618893: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1640] Found device 0 with properties:
name: GeForce GTX 1080 Ti major: 6 minor: 1 memoryClockRate(GHz): 1.6575
pciBusID: 0000:17:00.0
2020-12-01 16:26:30.619314: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1640] Found device 1 with properties:
name: GeForce GTX 1080 Ti major: 6 minor: 1 memoryClockRate(GHz): 1.6575
pciBusID: 0000:65:00.0
2020-12-01 16:26:30.619554: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcudart.so.10.1
2020-12-01 16:26:30.620749: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcublas.so.10
2020-12-01 16:26:30.621994: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcufft.so.10
2020-12-01 16:26:30.622271: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcurand.so.10
2020-12-01 16:26:30.623459: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcusolver.so.10
2020-12-01 16:26:30.624025: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcusparse.so.10
2020-12-01 16:26:30.626488: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcudnn.so.7
2020-12-01 16:26:30.627917: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1763] Adding visible gpu devices: 0, 1
2020-12-01 16:26:30.627951: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcudart.so.10.1
2020-12-01 16:26:30.629021: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1181] Device interconnect StreamExecutor with strength 1 edge matrix:
2020-12-01 16:26:30.629033: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1187] 0 1
2020-12-01 16:26:30.629037: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1200] 0: N Y
2020-12-01 16:26:30.629040: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1200] 1: Y N
2020-12-01 16:26:30.630607: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1326] Created TensorFlow device (/job:localhost/replica:0/task:0/device:GPU:0 with 10479 MB memory) -> physical GPU (device: 0, name: GeForce GTX 1080 Ti, pci bus id: 0000:17:00.0, compute capability: 6.1)
2020-12-01 16:26:30.631557: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1326] Created TensorFlow device (/job:localhost/replica:0/task:0/device:GPU:1 with 6795 MB memory) -> physical GPU (device: 1, name: GeForce GTX 1080 Ti, pci bus id: 0000:65:00.0, compute capability: 6.1)
2020-12-01 16:26:30.633249: I tensorflow/compiler/xla/service/service.cc:168] XLA service 0x5638ac3f9c70 executing computations on platform CUDA. Devices:
2020-12-01 16:26:30.633263: I tensorflow/compiler/xla/service/service.cc:175] StreamExecutor device (0): GeForce GTX 1080 Ti, Compute Capability 6.1
2020-12-01 16:26:30.633266: I tensorflow/compiler/xla/service/service.cc:175] StreamExecutor device (1): GeForce GTX 1080 Ti, Compute Capability 6.1
WARNING:tensorflow:From /home/hlx2/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:2041: The name tf.nn.fused_batch_norm is deprecated. Please use tf.compat.v1.nn.fused_batch_norm instead.
WARNING:tensorflow:From /home/hlx2/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:4267: The name tf.nn.max_pool is deprecated. Please use tf.nn.max_pool2d instead.
WARNING:tensorflow:From /home/hlx2/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:4271: The name tf.nn.avg_pool is deprecated. Please use tf.nn.avg_pool2d instead.
Model: "ctpn"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_image (InputLayer) (None, 512, 512, 3) 0
__________________________________________________________________________________________________
zero_padding2d_1 (ZeroPadding2D (None, 518, 518, 3) 0 input_image[0][0]
__________________________________________________________________________________________________
conv1/conv (Conv2D) (None, 256, 256, 64) 9408 zero_padding2d_1[0][0]
__________________________________________________________________________________________________
conv1/bn (BatchNormalization) (None, 256, 256, 64) 256 conv1/conv[0][0]
__________________________________________________________________________________________________
conv1/relu (Activation) (None, 256, 256, 64) 0 conv1/bn[0][0]
__________________________________________________________________________________________________
zero_padding2d_2 (ZeroPadding2D (None, 258, 258, 64) 0 conv1/relu[0][0]
__________________________________________________________________________________________________
pool1 (MaxPooling2D) (None, 128, 128, 64) 0 zero_padding2d_2[0][0]
__________________________________________________________________________________________________
conv2_block1_0_bn (BatchNormali (None, 128, 128, 64) 256 pool1[0][0]
__________________________________________________________________________________________________
conv2_block1_0_relu (Activation (None, 128, 128, 64) 0 conv2_block1_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D) (None, 128, 128, 128 8192 conv2_block1_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, 128, 128, 128 512 conv2_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, 128, 128, 128 0 conv2_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D) (None, 128, 128, 32) 36864 conv2_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_concat (Concatenat (None, 128, 128, 96) 0 pool1[0][0]
conv2_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_0_bn (BatchNormali (None, 128, 128, 96) 384 conv2_block1_concat[0][0]
__________________________________________________________________________________________________
conv2_block2_0_relu (Activation (None, 128, 128, 96) 0 conv2_block2_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_1_conv (Conv2D) (None, 128, 128, 128 12288 conv2_block2_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_1_bn (BatchNormali (None, 128, 128, 128 512 conv2_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_1_relu (Activation (None, 128, 128, 128 0 conv2_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_2_conv (Conv2D) (None, 128, 128, 32) 36864 conv2_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_concat (Concatenat (None, 128, 128, 128 0 conv2_block1_concat[0][0]
conv2_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_0_bn (BatchNormali (None, 128, 128, 128 512 conv2_block2_concat[0][0]
__________________________________________________________________________________________________
conv2_block3_0_relu (Activation (None, 128, 128, 128 0 conv2_block3_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_1_conv (Conv2D) (None, 128, 128, 128 16384 conv2_block3_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_1_bn (BatchNormali (None, 128, 128, 128 512 conv2_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_1_relu (Activation (None, 128, 128, 128 0 conv2_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_2_conv (Conv2D) (None, 128, 128, 32) 36864 conv2_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_concat (Concatenat (None, 128, 128, 160 0 conv2_block2_concat[0][0]
conv2_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block4_0_bn (BatchNormali (None, 128, 128, 160 640 conv2_block3_concat[0][0]
__________________________________________________________________________________________________
conv2_block4_0_relu (Activation (None, 128, 128, 160 0 conv2_block4_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block4_1_conv (Conv2D) (None, 128, 128, 128 20480 conv2_block4_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block4_1_bn (BatchNormali (None, 128, 128, 128 512 conv2_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block4_1_relu (Activation (None, 128, 128, 128 0 conv2_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block4_2_conv (Conv2D) (None, 128, 128, 32) 36864 conv2_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block4_concat (Concatenat (None, 128, 128, 192 0 conv2_block3_concat[0][0]
conv2_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block5_0_bn (BatchNormali (None, 128, 128, 192 768 conv2_block4_concat[0][0]
__________________________________________________________________________________________________
conv2_block5_0_relu (Activation (None, 128, 128, 192 0 conv2_block5_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block5_1_conv (Conv2D) (None, 128, 128, 128 24576 conv2_block5_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block5_1_bn (BatchNormali (None, 128, 128, 128 512 conv2_block5_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block5_1_relu (Activation (None, 128, 128, 128 0 conv2_block5_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block5_2_conv (Conv2D) (None, 128, 128, 32) 36864 conv2_block5_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block5_concat (Concatenat (None, 128, 128, 224 0 conv2_block4_concat[0][0]
conv2_block5_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block6_0_bn (BatchNormali (None, 128, 128, 224 896 conv2_block5_concat[0][0]
__________________________________________________________________________________________________
conv2_block6_0_relu (Activation (None, 128, 128, 224 0 conv2_block6_0_bn[0][0]
__________________________________________________________________________________________________
conv2_block6_1_conv (Conv2D) (None, 128, 128, 128 28672 conv2_block6_0_relu[0][0]
__________________________________________________________________________________________________
conv2_block6_1_bn (BatchNormali (None, 128, 128, 128 512 conv2_block6_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block6_1_relu (Activation (None, 128, 128, 128 0 conv2_block6_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block6_2_conv (Conv2D) (None, 128, 128, 32) 36864 conv2_block6_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block6_concat (Concatenat (None, 128, 128, 256 0 conv2_block5_concat[0][0]
conv2_block6_2_conv[0][0]
__________________________________________________________________________________________________
pool2_bn (BatchNormalization) (None, 128, 128, 256 1024 conv2_block6_concat[0][0]
__________________________________________________________________________________________________
pool2_relu (Activation) (None, 128, 128, 256 0 pool2_bn[0][0]
__________________________________________________________________________________________________
pool2_conv (Conv2D) (None, 128, 128, 128 32768 pool2_relu[0][0]
__________________________________________________________________________________________________
pool2_pool (AveragePooling2D) (None, 64, 64, 128) 0 pool2_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_0_bn (BatchNormali (None, 64, 64, 128) 512 pool2_pool[0][0]
__________________________________________________________________________________________________
conv3_block1_0_relu (Activation (None, 64, 64, 128) 0 conv3_block1_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_1_conv (Conv2D) (None, 64, 64, 128) 16384 conv3_block1_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_1_bn (BatchNormali (None, 64, 64, 128) 512 conv3_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_1_relu (Activation (None, 64, 64, 128) 0 conv3_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_2_conv (Conv2D) (None, 64, 64, 32) 36864 conv3_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_concat (Concatenat (None, 64, 64, 160) 0 pool2_pool[0][0]
conv3_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_0_bn (BatchNormali (None, 64, 64, 160) 640 conv3_block1_concat[0][0]
__________________________________________________________________________________________________
conv3_block2_0_relu (Activation (None, 64, 64, 160) 0 conv3_block2_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_1_conv (Conv2D) (None, 64, 64, 128) 20480 conv3_block2_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_1_bn (BatchNormali (None, 64, 64, 128) 512 conv3_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_1_relu (Activation (None, 64, 64, 128) 0 conv3_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_2_conv (Conv2D) (None, 64, 64, 32) 36864 conv3_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_concat (Concatenat (None, 64, 64, 192) 0 conv3_block1_concat[0][0]
conv3_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_0_bn (BatchNormali (None, 64, 64, 192) 768 conv3_block2_concat[0][0]
__________________________________________________________________________________________________
conv3_block3_0_relu (Activation (None, 64, 64, 192) 0 conv3_block3_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_1_conv (Conv2D) (None, 64, 64, 128) 24576 conv3_block3_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_1_bn (BatchNormali (None, 64, 64, 128) 512 conv3_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_1_relu (Activation (None, 64, 64, 128) 0 conv3_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_2_conv (Conv2D) (None, 64, 64, 32) 36864 conv3_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_concat (Concatenat (None, 64, 64, 224) 0 conv3_block2_concat[0][0]
conv3_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_0_bn (BatchNormali (None, 64, 64, 224) 896 conv3_block3_concat[0][0]
__________________________________________________________________________________________________
conv3_block4_0_relu (Activation (None, 64, 64, 224) 0 conv3_block4_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_1_conv (Conv2D) (None, 64, 64, 128) 28672 conv3_block4_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_1_bn (BatchNormali (None, 64, 64, 128) 512 conv3_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_1_relu (Activation (None, 64, 64, 128) 0 conv3_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_2_conv (Conv2D) (None, 64, 64, 32) 36864 conv3_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_concat (Concatenat (None, 64, 64, 256) 0 conv3_block3_concat[0][0]
conv3_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block5_0_bn (BatchNormali (None, 64, 64, 256) 1024 conv3_block4_concat[0][0]
__________________________________________________________________________________________________
conv3_block5_0_relu (Activation (None, 64, 64, 256) 0 conv3_block5_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block5_1_conv (Conv2D) (None, 64, 64, 128) 32768 conv3_block5_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block5_1_bn (BatchNormali (None, 64, 64, 128) 512 conv3_block5_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block5_1_relu (Activation (None, 64, 64, 128) 0 conv3_block5_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block5_2_conv (Conv2D) (None, 64, 64, 32) 36864 conv3_block5_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block5_concat (Concatenat (None, 64, 64, 288) 0 conv3_block4_concat[0][0]
conv3_block5_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block6_0_bn (BatchNormali (None, 64, 64, 288) 1152 conv3_block5_concat[0][0]
__________________________________________________________________________________________________
conv3_block6_0_relu (Activation (None, 64, 64, 288) 0 conv3_block6_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block6_1_conv (Conv2D) (None, 64, 64, 128) 36864 conv3_block6_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block6_1_bn (BatchNormali (None, 64, 64, 128) 512 conv3_block6_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block6_1_relu (Activation (None, 64, 64, 128) 0 conv3_block6_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block6_2_conv (Conv2D) (None, 64, 64, 32) 36864 conv3_block6_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block6_concat (Concatenat (None, 64, 64, 320) 0 conv3_block5_concat[0][0]
conv3_block6_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block7_0_bn (BatchNormali (None, 64, 64, 320) 1280 conv3_block6_concat[0][0]
__________________________________________________________________________________________________
conv3_block7_0_relu (Activation (None, 64, 64, 320) 0 conv3_block7_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block7_1_conv (Conv2D) (None, 64, 64, 128) 40960 conv3_block7_0_relu[0][0]
__________________________________________________________________________________________________
conv3_block7_1_bn (BatchNormali (None, 64, 64, 128) 512 conv3_block7_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block7_1_relu (Activation (None, 64, 64, 128) 0 conv3_block7_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block7_2_conv (Conv2D) (None, 64, 64, 32) 36864 conv3_block7_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block7_concat (Concatenat (None, 64, 64, 352) 0 conv3_block6_concat[0][0]
conv3_block7_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block8_0_bn (BatchNormali (None, 64, 64, 352) 1408 conv3_block7_concat[0][0]
__________________________________________________________________________________________________
conv3_block8_0_relu (Activation (None, 64, 64, 352) 0 conv3_block8_0_bn[0][0]
__________________________________________________________________________________________________
conv3_block8_1_conv (Conv2D) (None, 64, 64, 128) 45056 conv3_block8_0_relu[0][0]
________________________________________________________