parent 1aca03f6e4
commit bc106bcd5e

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="D:\Python 3.11\python.exe" project-jdk-type="Python SDK" />
</project>

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/train_cancer.iml" filepath="$PROJECT_DIR$/.idea/train_cancer.iml" />
</modules>
</component>
</project>

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

@ -0,0 +1,49 @@
#### *Step 1: Collect the dataset*
- *Files are stored in the picture folder*
  - *benign: benign breast tumor images*
  - *malignant: malignant breast tumor images*
  - *normal: normal (healthy) breast images*
- *70% of the data is used for training and 30% for testing*
#### *Step 2: Process the dataset*
- *(1) Read the images*
- *(2) Split the dataset with train_test_split from sklearn.model_selection*
- *(3) Display the images with plt*
#### *Step 3: Training*
- *Model choice*
  - *Train with the DenseNet201 architecture (proposed by Huang et al., CVPR 2017)*
  - *DenseNet201 stacks 201 convolutional and fully connected layers*
  - *and uses pooling throughout, which suits this task well*
- *Activation function choice*
  - *softmax is used as the output activation (a numeric sketch follows this section)*
  - $$
    Softmax(z_{i})=\frac{e^{z_{i}}}{\sum_{c=1}^{C} e^{z_{c}}}
    $$
    *where $z_i$ is the output of the i-th node and $C$ is the number of output nodes*
- *Loss function choice*
  - *binary cross-entropy, given by*
  - $$
    Loss = -\frac{1}{N} \sum_{i=1}^{N}\left[y_{i}\log(p(y_{i})) + (1-y_{i})\log(1 - p(y_{i}))\right]
    $$
- *Optimizer choice*
  - *the Nadam optimizer*
  - *Nadam extends Adam (itself a combination of RMSprop and momentum) with Nesterov momentum*
  - *and often converges better than plain Adam*
#### *Step 4: Testing*
- *Import an image*
- *Read the image with PIL*
- *Test with an image from test pic*
- *Load the model with tensorflow load_model*
- *Run predictions with predict*
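The two formulas above are easy to sanity-check numerically. Below is a minimal sketch (my addition, not part of the repo) that evaluates the softmax and the binary cross-entropy exactly as written, assuming only numpy:

```python
import numpy as np

def softmax(z):
    # Softmax(z_i) = e^{z_i} / sum_{c=1}^{C} e^{z_c}
    e = np.exp(z - z.max())          # subtract the max for numerical stability
    return e / e.sum()

def binary_cross_entropy(y, p):
    # Loss = -(1/N) * sum_i [ y_i*log(p_i) + (1-y_i)*log(1-p_i) ]
    eps = 1e-12                      # avoid log(0)
    p = np.clip(p, eps, 1 - eps)
    return -np.mean(y * np.log(p) + (1 - y) * np.log(1 - p))

logits = np.array([2.0, -1.0])       # raw outputs of the 2-node head
probs = softmax(logits)              # ~[0.9526, 0.0474]
y_true = np.array([1.0, 0.0])        # one-hot label: benign
print(probs, binary_cross_entropy(y_true, probs))
```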

@ -0,0 +1,10 @@
import os

path = '../picture/malignant'
files = os.listdir(path)
x = 211  # next index for the normalized file name
y = 438  # original numbering of the files to rename
for name in files:
    # rename files whose name contains the old index y to "malignant (x).png"
    if str(y) in name:
        os.rename(os.path.join(path, name),
                  os.path.join(path, "malignant (" + str(x) + ").png"))
        x += 1
        y += 1
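Bulk renames like the script above are destructive, so a dry run can help. This is a hedged sketch of the same logic (my addition, not in the repo) that only prints the planned mapping; uncomment the last line to apply it:

```python
import os

path = '../picture/malignant'        # same folder as the script above
x, y = 211, 438
plan = []
for name in sorted(os.listdir(path)):
    if str(y) in name:               # same matching rule as above
        plan.append((name, "malignant (" + str(x) + ").png"))
        x += 1
        y += 1
for src, dst in plan:
    print(src, '->', dst)            # inspect the mapping first
    # os.rename(os.path.join(path, src), os.path.join(path, dst))
```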

@ -0,0 +1,83 @@
'''
dir  -> image directory
size -> target image size
'''
import os
import cv2
import numpy as np
from PIL import Image
from keras.utils import to_categorical
from sklearn.model_selection import train_test_split

class Loader:
    def __init__(self):
        # relative paths kept misbehaving, so absolute paths are used here
        self.benign_train = np.array(self.data_loader('D:\\pro_of_program\\Python\\train_cancer\\picture\\benign', 224))
        self.malignant_train = np.array(self.data_loader('D:\\pro_of_program\\Python\\train_cancer\\picture\\malignant', 224))
        # note: the "test" sets below are loaded from the same directories as the training sets
        self.malignant_test = np.array(self.data_loader('D:\\pro_of_program\\Python\\train_cancer\\picture\\malignant', 224))
        self.benign_test = np.array(self.data_loader('D:\\pro_of_program\\Python\\train_cancer\\picture\\benign', 224))
        # create labels: 0 marks benign, 1 marks malignant
        self.benign_train_label = np.zeros(len(self.benign_train))
        self.benign_test_label = np.zeros(len(self.benign_test))
        self.malignant_train_label = np.ones(len(self.malignant_train))
        self.malignant_test_label = np.ones(len(self.malignant_test))
        self.x_train = np.concatenate((self.benign_train, self.malignant_train))
        self.y_train = np.concatenate((self.benign_train_label, self.malignant_train_label))
        self.x_test = np.concatenate((self.benign_test, self.malignant_test))
        self.y_test = np.concatenate((self.benign_test_label, self.malignant_test_label))
        # shuffle the training set
        s = np.arange(self.x_train.shape[0])
        np.random.shuffle(s)
        self.x_train = self.x_train[s]
        self.y_train = self.y_train[s]
        # shuffle the test set (the original indexed with the training-set size; fixed to x_test here)
        s = np.arange(self.x_test.shape[0])
        np.random.shuffle(s)
        self.x_test = self.x_test[s]
        self.y_test = self.y_test[s]
        self.y_train = to_categorical(self.y_train, 2)
        self.y_test = to_categorical(self.y_test, 2)
        '''
        parameters
        train_data   => x_train
        train_target => y_train
        test_size    => fraction of samples held out for validation
                        a 30/70 split gave ~85% accuracy, a 20/80 split gave ~89%
        random_state => random seed
        '''
        self.train_of_x, self.val_of_x, self.train_of_y, self.val_of_y = train_test_split(
            self.x_train, self.y_train,
            # test_size=0.3,
            test_size=0.2,
            random_state=11
        )

    def data_loader(self, dir, size):
        IMG = []  # loaded images
        # open an image file, convert it to RGB, and return it as a numpy array
        read = lambda i: np.asarray(Image.open(i).convert("RGB"))
        home_dir = sorted(os.listdir(dir))
        for name in home_dir:
            path = os.path.join(dir, name)
            # skip "_mask" annotation files; keep only the plain png scans
            if "_mask" not in os.path.split(path)[1]:
                img = read(path)
                # source images vary in size (e.g. 562x471); resize to 224x224
                img = cv2.resize(img, (size, size))
                IMG.append(np.array(img))
        return IMG
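A quick smoke test of the loader (my sketch, assuming the picture directories referenced above exist and the module lives at loader_picture/data_loader.py, as the training script imports it):

```python
from loader_picture import data_loader

load = data_loader.Loader()
print(load.train_of_x.shape)   # expected: (n_train, 224, 224, 3)
print(load.train_of_y.shape)   # expected: (n_train, 2), one-hot labels
print(load.val_of_x.shape, load.val_of_y.shape)
```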

@ -0,0 +1,11 @@
import cv2
from PIL import Image
import numpy as np

def load_single(dir, size):
    # open the image, convert it to RGB, resize to size x size, return a numpy array
    read = lambda i: np.asarray(Image.open(i).convert("RGB"))
    img = read(dir)
    return np.array(cv2.resize(img, (size, size)))

@ -0,0 +1,4 @@
node {
  input: "root"
  device: "_tf_keras_sequential"
}

@ -0,0 +1,16 @@
def check(dir_pic, dir_model):
    import numpy as np
    import tensorflow as tf
    from loader_picture import data_single_loader
    pic_test = dir_pic
    # convert the image to a numpy array
    img_test = data_single_loader.load_single(pic_test, 224)
    # the model expects input of shape (1, 224, 224, 3)
    img_test = np.expand_dims(img_test, axis=0)
    # load the trained model
    x = tf.keras.models.load_model(dir_model)
    res = x.predict(img_test)
    np.set_printoptions(suppress=True)
    return "benign: " + str(res[0][0]) + " malignant: " + str(res[0][1])
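A hedged usage example; both paths below are placeholders, not files confirmed to exist in the repo:

```python
# hypothetical image and saved-model paths
result = check('../picture/malignant/malignant (211).png',
               '../train_model/third_model')
print(result)  # e.g. "benign: 0.01 malignant: 0.99"
```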

@ -0,0 +1,30 @@
from loader_picture import data_loader
import numpy as np
from matplotlib import pyplot as plt
from train_model.modeling import reduce_study_rate
from train_model.modeling import modeling
from train_model.data_gen import data_output

# load the images
load = data_loader.Loader()
# build the model
models = modeling.breast_train_test()
# show the model summary
models.model.summary()
# data generator
data = data_output.gen_data()
# learning-rate reduction and checkpoint callbacks
reduces = reduce_study_rate.reduce()
reduces.train()
# train + evaluate
history = models.model.fit(
    data.tr_gen.flow(load.train_of_x, load.train_of_y, batch_size=data.batch),
    # steps_per_epoch must be an integer, hence the floor division
    steps_per_epoch=load.train_of_x.shape[0] // data.batch,
    # train for 20 epochs
    epochs=20,
    validation_data=(load.val_of_x, load.val_of_y),
    callbacks=[reduces.learn_control, reduces.checkpoint]
)
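pyplot is imported above but never used; a minimal sketch (my addition) that plots the learning curves recorded in history:

```python
# plot training and validation accuracy across epochs
plt.plot(history.history['accuracy'], label='train accuracy')
plt.plot(history.history['val_accuracy'], label='val accuracy')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.show()
```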

@ -0,0 +1,647 @@
6.092087723175155e-09 1.0
0.9989112615585327 0.001088793040253222
0.9935709238052368 0.006429135799407959
0.9997281432151794 0.00027184083592146635
3.0335837436723523e-05 0.9999697208404541
0.999747097492218 0.0002529561170376837
0.008596532978117466 0.9914035201072693
0.999934196472168 6.57897544442676e-05
0.9989705085754395 0.0010294488165527582
0.5978875756263733 0.4021124839782715
3.6031291529070586e-05 0.9999639987945557
3.7862635053897975e-06 0.9999961853027344
0.9993938207626343 0.0006061863387003541
8.81914729689015e-06 0.9999911785125732
0.3917083740234375 0.6082916259765625
0.9998810291290283 0.00011889902816619724
0.6820005178451538 0.3179994821548462
0.9987319111824036 0.0012680714717134833
0.00011924829595955089 0.9998807907104492
0.9993540644645691 0.0006459243595600128
0.9996631145477295 0.0003369428450241685
0.48353785276412964 0.5164620876312256
3.497324087220477e-06 0.999996542930603
0.9905140399932861 0.009485905058681965
0.9999887943267822 1.1235452802793588e-05
0.04187602177262306 0.9581239223480225
0.9910897016525269 0.008910246193408966
0.999971866607666 2.8184229449834675e-05
0.9999890327453613 1.0908943295362405e-05
0.8602142333984375 0.1397857666015625
0.9999949932098389 5.041587883169996e-06
0.9978411197662354 0.002158836927264929
0.6723547577857971 0.3276452124118805
0.9998103976249695 0.00018961272144224495
0.9999525547027588 4.740445001516491e-05
1.4190926833634876e-07 0.9999998807907104
0.7048918008804321 0.29510819911956787
1.4167305835144361e-06 0.9999985694885254
0.00013711843348573893 0.9998629093170166
0.9996042847633362 0.00039568531792610884
0.7229845523834229 0.2770155072212219
0.999221682548523 0.000778281013481319
0.00015150148828979582 0.999848484992981
0.9998829364776611 0.00011702731717377901
0.7218467593193054 0.2781532406806946
0.15666170418262482 0.8433382511138916
0.9998588562011719 0.0001410984550602734
0.7082615494728088 0.29173845052719116
0.02286633290350437 0.9771337509155273
0.1373814195394516 0.8626185059547424
0.8673439621925354 0.13265608251094818
0.9999347925186157 6.523412594106048e-05
0.00019901152700185776 0.999800980091095
0.9999953508377075 4.663339950639056e-06
0.9997709393501282 0.00022907997481524944
0.9986693859100342 0.001330674858763814
0.353473961353302 0.646526038646698
0.003105917014181614 0.996894121170044
0.9999948740005493 5.0977191676793154e-06
0.9999982118606567 1.7473138314016978e-06
0.9951081871986389 0.004891766235232353
0.9999829530715942 1.6995192709146068e-05
0.9996659755706787 0.0003340912808198482
3.4974067375515006e-07 0.9999996423721313
0.0729290321469307 0.9270709753036499
0.9704300761222839 0.029569925740361214
1.4645347157227206e-09 1.0
3.1297126952267718e-06 0.9999969005584717
0.6081261038780212 0.39187392592430115
0.44829297065734863 0.5517070293426514
0.7791184186935425 0.2208816111087799
0.7523340582847595 0.24766597151756287
0.025663873180747032 0.9743360877037048
0.9983474016189575 0.0016525561222806573
0.9999810457229614 1.8939881556434557e-05
0.9934049248695374 0.006595127750188112
0.9381039142608643 0.061896029859781265
0.9972034692764282 0.002796591492369771
0.002707625972107053 0.9972923398017883
1.0068716619571205e-05 0.9999899864196777
0.9999748468399048 2.5135010218946263e-05
0.9998923540115356 0.00010767456114990637
0.21499860286712646 0.7850013971328735
0.9999240636825562 7.588239532196894e-05
7.526091962972714e-07 0.9999992847442627
0.9973554611206055 0.0026445514522492886
0.00015752170293126255 0.9998424053192139
0.0067070042714476585 0.993293046951294
0.005396672990173101 0.9946033358573914
0.9996371269226074 0.000362917227903381
0.9676159620285034 0.03238402307033539
0.9100929498672485 0.08990702033042908
0.8950210213661194 0.10497894883155823
0.22162331640720367 0.7783766984939575
0.36174920201301575 0.6382507681846619
0.999675989151001 0.0003239802608732134
0.8803484439849854 0.11965155601501465
0.9999208450317383 7.915231253718957e-05
0.9998499155044556 0.0001500368380220607
0.8224522471427917 0.17754776775836945
0.9999898672103882 1.0138361176359467e-05
0.9995653033256531 0.0004346343921497464
0.9999706745147705 2.9308854209375568e-05
0.9904494881629944 0.009550555609166622
0.5672293305397034 0.43277063965797424
0.9821467995643616 0.01785319484770298
0.9999518394470215 4.8160614824155346e-05
2.017002458387651e-07 0.9999997615814209
0.9127623438835144 0.08723766356706619
0.9998904466629028 0.00010953448509098962
0.9998772144317627 0.0001228056789841503
0.9999266862869263 7.326720515266061e-05
1.6869115881945618e-08 1.0
0.11213619261980057 0.8878637552261353
0.9999957084655762 4.337792688602349e-06
0.751361072063446 0.24863891303539276
0.9999035596847534 9.643758676247671e-05
1.7767119686595834e-07 0.9999998807907104
0.9999873638153076 1.2616713320312556e-05
0.999969482421875 3.0498564228764735e-05
0.9956398010253906 0.004360210616141558
0.6791333556175232 0.32086658477783203
0.5872390866279602 0.412760853767395
8.371390691763736e-09 1.0
0.9992006421089172 0.0007993809995241463
0.9999994039535522 5.464415266942524e-07
2.5259971607738407e-06 0.9999974966049194
0.0002582882298156619 0.9997417330741882
0.8582680225372314 0.14173197746276855
0.8277018666267395 0.1722981035709381
0.0014999492559581995 0.9985001087188721
0.9972853660583496 0.002714633010327816
0.9923239946365356 0.007676038425415754
0.26004090905189514 0.7399591207504272
0.9973554611206055 0.002644606865942478
0.9820063710212708 0.017993640154600143
0.4168517291545868 0.5831482410430908
0.9994215965270996 0.0005783918313682079
0.00042614186531864107 0.9995738863945007
0.9971297383308411 0.002870315220206976
0.9999992847442627 7.468157150469779e-07
0.9985871315002441 0.0014128603506833315
0.9996757507324219 0.00032424350501969457
0.6582497954368591 0.3417501747608185
0.9576846361160278 0.04231531545519829
4.166609505773522e-05 0.9999582767486572
1.7872223452286562e-06 0.9999982118606567
0.9824076294898987 0.01759237051010132
0.4607923626899719 0.5392076969146729
0.9996966123580933 0.0003033890388906002
9.082500037038699e-05 0.9999091625213623
0.9999431371688843 5.6826243962859735e-05
0.5971385836601257 0.4028613269329071
0.9952648878097534 0.004735115449875593
0.9955756664276123 0.004424356389790773
0.9168573021888733 0.08314268290996552
0.9162992835044861 0.08370067924261093
0.6812700033187866 0.31873002648353577
0.08008529990911484 0.9199146628379822
0.9923356175422668 0.007664340082556009
0.9992988109588623 0.0007011212292127311
3.826002739515388e-06 0.9999961853027344
0.997008740901947 0.002991274232044816
0.9996020197868347 0.00039801234379410744
0.9995643496513367 0.00043558195466175675
0.9841747879981995 0.015825193375349045
0.31447848677635193 0.6855215430259705
0.9996471405029297 0.00035283094621263444
0.9999393224716187 6.067020513000898e-05
0.9979230761528015 0.00207689986564219
0.9967340230941772 0.0032659852877259254
0.9981361627578735 0.0018637717003002763
0.16870082914829254 0.8312992453575134
0.9999783039093018 2.1755000489065424e-05
0.4760817289352417 0.5239183306694031
5.6607412261655554e-05 0.9999433755874634
2.6039100703201257e-05 0.9999740123748779
0.8238982558250427 0.17610180377960205
0.9999961853027344 3.803294475801522e-06
0.8306329846382141 0.16936703026294708
0.004989967681467533 0.9950100183486938
0.9999562501907349 4.37545204476919e-05
0.8104994893074036 0.18950051069259644
0.1924203634262085 0.8075796365737915
0.9986788630485535 0.0013210757169872522
0.9925569295883179 0.007443094160407782
0.9999228715896606 7.715148240095004e-05
0.9976639747619629 0.002336043631657958
0.9923255443572998 0.007674520369619131
0.9998101592063904 0.00018985586939379573
0.9999511241912842 4.883933797827922e-05
5.096000313642435e-05 0.9999489784240723
0.9829124212265015 0.017087625339627266
6.052123353583738e-05 0.9999394416809082
0.999498724937439 0.000501279893796891
0.9998984336853027 0.00010158013901673257
0.01065274141728878 0.9893472194671631
0.9580251574516296 0.04197491332888603
5.609523577732034e-05 0.9999438524246216
0.9839915633201599 0.01600835658609867
0.9588726162910461 0.04112736135721207
0.9999382495880127 6.175404269015417e-05
0.00030937488190829754 0.9996906518936157
0.9998912811279297 0.00010867450328078121
0.961337149143219 0.0386628620326519
0.9600986838340759 0.03990129381418228
0.05181041359901428 0.9481896162033081
0.9890230894088745 0.010976864024996758
0.998705267906189 0.0012946996139362454
0.0001094204853870906 0.9998905658721924
0.9997100234031677 0.00028997473418712616
0.04300781711935997 0.9569922089576721
0.9338480830192566 0.06615191698074341
0.9958118200302124 0.00418825400993228
0.9998983144760132 0.00010166210995521396
0.999958872795105 4.1132105252472684e-05
4.890392233392049e-07 0.9999995231628418
0.9992521405220032 0.0007478176848962903
0.6054016351699829 0.3945983648300171
0.9868423342704773 0.013157615438103676
0.9889911413192749 0.011008881963789463
0.9998748302459717 0.00012516767310444266
0.9898144006729126 0.010185537859797478
0.9999963045120239 3.6559715681505622e-06
8.248216545325704e-06 0.999991774559021
7.083381206030026e-05 0.9999291896820068
0.8517681360244751 0.14823181927204132
0.9998573064804077 0.00014268388622440398
0.226741760969162 0.7732582688331604
0.0021568185184150934 0.9978431463241577
0.9999974966049194 2.5507665668556e-06
0.7784472703933716 0.22155267000198364
0.00014429011207539588 0.999855637550354
0.9575188159942627 0.04248115047812462
0.9979748129844666 0.0020251877140253782
7.86535558905399e-11 1.0
0.9999089241027832 9.101148316403851e-05
0.9999138116836548 8.623124449513853e-05
0.9910886287689209 0.008911311626434326
0.9989448189735413 0.0010552076855674386
4.641624684609269e-07 0.9999995231628418
0.7891150116920471 0.21088503301143646
0.9999868869781494 1.3058684089628514e-05
0.9999970197677612 2.941983666460146e-06
0.8689448833465576 0.1310550570487976
0.3376055657863617 0.6623944044113159
0.9999071359634399 9.286151907872409e-05
0.8381814360618591 0.16181853413581848
0.9999415874481201 5.843859617016278e-05
0.00017024714907165617 0.9998297691345215
0.9549890756607056 0.045010898262262344
0.9431204795837402 0.05687952786684036
0.9730234146118164 0.026976628229022026
0.4345017969608307 0.5654981732368469
0.008959585800766945 0.9910404086112976
0.999984622001648 1.534017428639345e-05
4.797702491364519e-13 1.0
0.9999799728393555 2.0020206648041494e-05
0.9994391798973083 0.0005607671337202191
0.003458332037553191 0.9965416789054871
0.9998607635498047 0.00013921862409915775
0.9989172220230103 0.0010828474769368768
0.6554301381111145 0.3445698320865631
0.7769662737846375 0.22303368151187897
0.9932230710983276 0.006776981987059116
0.0023162488359957933 0.9976837635040283
0.46665382385253906 0.5333462357521057
0.9976379871368408 0.002362056402489543
0.9998397827148438 0.00016021626652218401
0.9999264478683472 7.349231600528583e-05
0.9166674613952637 0.08333252370357513
0.999894380569458 0.00010558007488725707
0.9996912479400635 0.00030878346296958625
6.543018571392167e-08 0.9999998807907104
0.5708987712860107 0.42910119891166687
0.9327595829963684 0.06724049150943756
0.9999959468841553 4.0053882912616245e-06
0.9675412774085999 0.03245868906378746
0.11647456884384155 0.8835254907608032
0.15205606818199158 0.847943902015686
0.924543023109436 0.07545702159404755
7.285142783075571e-05 0.9999271631240845
0.9997857213020325 0.0002142655721399933
0.9999442100524902 5.574339593295008e-05
3.4832971778087085e-06 0.999996542930603
0.9999926090240479 7.385258868453093e-06
0.9275205135345459 0.07247941941022873
0.9290695786476135 0.07093050330877304
0.999881386756897 0.00011856786295538768
0.9999496936798096 5.024946585763246e-05
0.9999481439590454 5.185203917790204e-05
0.9999880790710449 1.1961413292738143e-05
0.0003657848574221134 0.9996341466903687
9.216146850121731e-07 0.9999990463256836
0.9996147155761719 0.00038531466270796955
0.03485872969031334 0.965141236782074
0.00018975343846250325 0.9998102784156799
0.9999996423721313 3.3433522617087874e-07
0.9893149137496948 0.010685115121304989
0.014397694729268551 0.9856023192405701
0.6347071528434753 0.36529284715652466
0.9973329305648804 0.002667102962732315
0.999889612197876 0.00011041222751373425
0.9851716160774231 0.014828396029770374
0.02498050220310688 0.9750195741653442
0.8889596462249756 0.11104034632444382
0.9999948740005493 5.113650786370272e-06
0.17990393936634064 0.8200960755348206
0.9314794540405273 0.06852050870656967
0.9999984502792358 1.5780791500219493e-06
0.7367360591888428 0.26326388120651245
4.930030627292581e-06 0.9999951124191284
0.9433773159980774 0.05662274733185768
0.739848792552948 0.2601512372493744
0.006819512695074081 0.993180513381958
0.8011875152587891 0.19881251454353333
0.9301743507385254 0.0698256567120552
0.9873091578483582 0.012690817005932331
0.006474942900240421 0.9935250282287598
0.0854438841342926 0.9145561456680298
0.9875878691673279 0.012412124313414097
0.9999783039093018 2.1693302187486552e-05
0.9971367120742798 0.0028633116744458675
0.8282755017280579 0.17172443866729736
0.8381868004798889 0.1618131399154663
0.9996523857116699 0.0003476418205536902
0.9999303817749023 6.960502651054412e-05
6.0231057432247326e-05 0.9999397993087769
0.9745855927467346 0.02541440911591053
0.8063266277313232 0.19367335736751556
0.9988768696784973 0.0011231729295104742
0.8995218276977539 0.10047809779644012
0.9800066351890564 0.019993405789136887
0.9988716244697571 0.0011284009087830782
0.9986936450004578 0.0013063679216429591
0.9999983310699463 1.6610330249022809e-06
0.9999798536300659 2.0186693291179836e-05
0.08027368038892746 0.9197263717651367
0.9517129063606262 0.048287104815244675
0.015422538854181767 0.984577476978302
0.997600257396698 0.002399696735665202
0.007619958836585283 0.9923800230026245
0.9923267960548401 0.007673161569982767
0.9993860721588135 0.0006139011820778251
0.9609296321868896 0.039070434868335724
0.9367172122001648 0.06328282505273819
0.5607011914253235 0.4392988383769989
5.318564717526897e-07 0.9999995231628418
0.9966511130332947 0.0033489190973341465
0.9173072576522827 0.0826927125453949
2.406881449701359e-08 1.0
0.9999890327453613 1.1017826182069257e-05
0.9950845837593079 0.004915405996143818
0.9972246885299683 0.002775282831862569
0.999876856803894 0.00012309230805840343
0.511687695980072 0.48831233382225037
0.00017383099475409836 0.9998262524604797
0.9997095465660095 0.0002903938584495336
0.04250754043459892 0.957492470741272
0.0825970247387886 0.917402982711792
0.883415699005127 0.11658426374197006
0.9992890357971191 0.0007109928992576897
3.6706478567793965e-06 0.9999963045120239
0.00041957912617363036 0.999580442905426
0.0034337968099862337 0.9965662360191345
0.0025795837864279747 0.9974204301834106
0.9940451383590698 0.0059548188000917435
0.0002426278661005199 0.9997573494911194
0.953295111656189 0.046704936772584915
0.7597107887268066 0.24028916656970978
5.75362435029092e-07 0.9999994039535522
0.999833345413208 0.00016662846610415727
1.0535859962246263e-09 1.0
0.15889044106006622 0.841109573841095
0.8487240076065063 0.15127600729465485
0.9998119473457336 0.00018804952560458332
0.7748852372169495 0.22511470317840576
0.9990695118904114 0.0009304761188104749
0.995189905166626 0.004810101818293333
0.9996241331100464 0.0003758403763640672
0.9999953508377075 4.590831395034911e-06
0.9997206330299377 0.0002793869352899492
0.030857346951961517 0.9691426753997803
0.9999920129776001 7.935722351248842e-06
0.9986991882324219 0.0013007957022637129
0.9632222056388855 0.03677775710821152
0.9999549388885498 4.506126788328402e-05
0.9833811521530151 0.016618827357888222
0.9945966005325317 0.005403310991823673
0.9999295473098755 7.049996929708868e-05
0.8726794719696045 0.1273205429315567
0.5480396151542664 0.45196038484573364
0.9999912977218628 8.708932909939904e-06
3.044563356979779e-07 0.9999996423721313
0.4884990453720093 0.511500895023346
0.9999558925628662 4.410284964251332e-05
0.0013927229447290301 0.998607337474823
0.009765226393938065 0.9902348518371582
0.9685745239257812 0.03142549470067024
0.9254651665687561 0.0745348185300827
0.3337121903896332 0.6662877798080444
0.9999887943267822 1.119600437959889e-05
0.9906603693962097 0.009339618496596813
0.9685745239257812 0.03142549470067024
0.006740563083440065 0.9932593703269958
0.9999961853027344 3.85885050491197e-06
3.629901357271592e-07 0.9999996423721313
0.934252142906189 0.06574781984090805
0.007116943132132292 0.9928830862045288
7.037731393211288e-07 0.9999992847442627
0.999893069267273 0.00010697087418520823
0.9997791647911072 0.00022086691751610488
0.09022868424654007 0.9097712635993958
0.09035655111074448 0.9096434116363525
0.6130415797233582 0.3869584798812866
0.03604685515165329 0.9639530777931213
0.9997678399085999 0.00023215077817440033
0.8518919944763184 0.14810799062252045
0.9921340942382812 0.007865877822041512
0.9982209801673889 0.0017789960838854313
0.4990655183792114 0.5009344816207886
0.9998867511749268 0.00011325316881993786
0.9972509741783142 0.002748970640823245
0.9990136623382568 0.0009862740989774466
0.04258143529295921 0.9574185013771057
0.9979773163795471 0.002022656612098217
0.00170273391995579 0.9982972741127014
0.9992154836654663 0.0007844708161428571
0.9999994039535522 5.988696329950471e-07
0.29590702056884766 0.7040929794311523
0.9998394250869751 0.0001605670986464247
0.9999637603759766 3.619980998337269e-05
0.9999910593032837 8.895711289369501e-06
0.9202988743782043 0.07970114797353745
0.0006385140586644411 0.9993614554405212
0.999945878982544 5.410108860814944e-05
0.9620537161827087 0.03794630244374275
2.8449434466892853e-05 0.9999715089797974
0.008708189241588116 0.9912918210029602
0.999786913394928 0.0002130416687577963
0.9994038343429565 0.0005961833521723747
0.08757160604000092 0.9124283194541931
0.2788959741592407 0.721104085445404
0.9998784065246582 0.0001216340679093264
0.000586844515055418 0.9994131326675415
0.9932354092597961 0.006764642894268036
0.5138823390007019 0.4861176908016205
0.9999340772628784 6.58786520943977e-05
0.9999055862426758 9.441948350286111e-05
0.058314643800258636 0.9416853189468384
3.1827673296902503e-07 0.9999996423721313
0.9062193632125854 0.09378059953451157
0.3755261301994324 0.6244738698005676
0.9990484118461609 0.0009515314595773816
4.8617998515965155e-09 1.0
0.9998499155044556 0.0001500763464719057
0.9525101184844971 0.047489847987890244
8.557045475754421e-06 0.9999914169311523
0.6955783367156982 0.30442163348197937
0.0013446572702378035 0.9986553192138672
0.9748040437698364 0.025195982307195663
0.41111892461776733 0.5888810753822327
0.9934592247009277 0.006540720816701651
0.9999262094497681 7.374441338470206e-05
0.00020276627037674189 0.9997972846031189
0.9937252402305603 0.006274723447859287
0.00010733706585597247 0.9998925924301147
0.9928871989250183 0.007112863473594189
0.0602891631424427 0.9397108554840088
0.9996383190155029 0.00036165796336717904
0.0011588835623115301 0.9988411068916321
0.9999188184738159 8.113295916700736e-05
0.9908154010772705 0.009184620343148708
0.522026002407074 0.47797396779060364
0.9999701976776123 2.9817896574968472e-05
0.9991376399993896 0.0008623311878181994
0.9985169768333435 0.0014830041909590364
0.9996399879455566 0.0003599420888349414
0.9999990463256836 9.134150218415016e-07
0.640516459941864 0.359483540058136
0.9618650674819946 0.038134992122650146
0.9396104216575623 0.06038954481482506
0.999846339225769 0.00015368135063908994
0.9973353743553162 0.0026645760517567396
0.07111228257417679 0.9288877844810486
0.9820839762687683 0.017916034907102585
0.9932542443275452 0.006745814345777035
0.903160572052002 0.09683939814567566
7.4447594755611135e-09 1.0
0.0007499365019612014 0.9992499947547913
0.9993971586227417 0.0006028888747096062
0.5567896366119385 0.4432104229927063
5.036302354710642e-06 0.9999949932098389
0.9275308847427368 0.07246915251016617
1.5821095075807534e-05 0.9999841451644897
0.9999226331710815 7.732493395451456e-05
0.0025079061742872 0.9974920749664307
0.9999935626983643 6.464079433499137e-06
2.063066162349969e-08 1.0
0.999344527721405 0.0006554379360750318
0.999762237071991 0.00023780007904861122
0.5607470273971558 0.43925294280052185
0.004888234660029411 0.9951117634773254
0.9999599456787109 4.00401622755453e-05
0.9981156587600708 0.0018843263387680054
0.9999803304672241 1.9677449017763138e-05
0.008506403304636478 0.9914935827255249
0.9981905817985535 0.0018094099359586835
0.9999939203262329 6.022808065608842e-06
0.9997492432594299 0.0002507257158868015
0.8678835034370422 0.13211651146411896
0.04346703737974167 0.9565330147743225
0.00040210483712144196 0.99959796667099
0.9017825126647949 0.09821751713752747
0.9939233660697937 0.0060766092501580715
5.989947453599598e-07 0.9999994039535522
0.9997304081916809 0.0002695649745874107
3.269677506523294e-07 0.9999996423721313
0.9969833493232727 0.0030166786164045334
0.9517912268638611 0.04820884019136429
0.9999783039093018 2.1741187083534896e-05
9.318217780673876e-05 0.9999067783355713
0.9998562335968018 0.00014380061475094408
0.3769559860229492 0.623043954372406
0.9998469352722168 0.0001530707668280229
0.9999862909317017 1.3655897419084795e-05
0.9993302822113037 0.0006697573116980493
0.15087568759918213 0.8491243124008179
0.3585548996925354 0.6414450407028198
0.9995833039283752 0.0004167572478763759
0.9699705839157104 0.030029406771063805
0.9976567029953003 0.0023432616144418716
0.9995654225349426 0.0004345984198153019
0.9999468326568604 5.319152114680037e-05
0.01776832714676857 0.9822316765785217
0.9999912977218628 8.687638910487294e-06
0.9995306730270386 0.0004693038354162127
0.9969593286514282 0.0030407337471842766
0.7958576083183289 0.20414242148399353
0.7064720988273621 0.2935279309749603
0.9999697208404541 3.022661985596642e-05
3.67343527614139e-05 0.9999632835388184
0.9659992456436157 0.0340007022023201
1.5184506310106372e-06 0.9999984502792358
0.999897837638855 0.00010216181544819847
0.08395311236381531 0.9160469174385071
0.15092000365257263 0.849079966545105
0.9279717206954956 0.07202835381031036
0.9999750852584839 2.48734049819177e-05
0.9203755259513855 0.07962450385093689
4.529081252258038e-06 0.9999954700469971
0.9543004035949707 0.0456995815038681
0.8035893440246582 0.1964106261730194
0.9665433168411255 0.033456698060035706
0.9999411106109619 5.8903253375319764e-05
2.718790312883357e-07 0.9999997615814209
0.22823651134967804 0.7717635035514832
0.9616555571556091 0.03834441676735878
0.9989759922027588 0.0010240200208500028
0.9999855756759644 1.4466210814134683e-05
0.9992571473121643 0.0007428252720274031
4.4184514990774915e-05 0.9999557733535767
0.9813732504844666 0.018626734614372253
0.9996235370635986 0.00037640007212758064
0.45522207021713257 0.5447779297828674
0.9984334111213684 0.0015666189137846231
0.5999354124069214 0.4000645875930786
0.9992015957832336 0.0007983882678672671
1.1163784847667557e-06 0.999998927116394
0.9999932050704956 6.819630016252631e-06
0.9733205437660217 0.026679448783397675
0.9996452331542969 0.0003547593660186976
0.9831674695014954 0.01683255285024643
0.6404561996459961 0.3595438301563263
0.9756172895431519 0.024382784962654114
5.3901224816854665e-08 1.0
0.9999752044677734 2.4776625650702044e-05
6.151201523607597e-05 0.9999384880065918
0.06782747805118561 0.9321725964546204
0.8198803067207336 0.18011964857578278
0.7196502685546875 0.2803496718406677
0.9816163778305054 0.018383584916591644
0.10275167971849442 0.8972483277320862
0.0002488196187186986 0.9997511506080627
8.342409273609519e-05 0.9999165534973145
0.9998685121536255 0.00013146884157322347
0.9984714388847351 0.0015285988338291645
0.9258694648742676 0.07413050532341003
1.120878323490615e-08 1.0
0.9015117287635803 0.09848826378583908
0.0031838531140238047 0.9968162178993225
0.9999969005584717 3.1560648494632915e-06
0.9999675750732422 3.247032145736739e-05
0.999393105506897 0.000606913585215807
0.9986691474914551 0.0013309227069839835
0.8089115619659424 0.19108842313289642
0.9996803998947144 0.0003196683246642351
0.3002210855484009 0.6997789144515991
0.9999908208847046 9.12171890377067e-06
0.9135971665382385 0.08640281111001968
0.9970927238464355 0.0029073168989270926
0.06237632781267166 0.9376236796379089
0.9997960925102234 0.0002039155806414783
0.9999871253967285 1.286438691749936e-05
0.8027787804603577 0.19722121953964233
1.0446061793345507e-08 1.0
0.9908953905105591 0.009104611352086067
0.3253200054168701 0.6746799945831299
0.999521017074585 0.0004789357481058687
7.699640036662458e-08 0.9999998807907104
0.9891263246536255 0.010873646475374699
0.9063988327980042 0.09360110759735107
0.7809692025184631 0.21903081238269806
0.999873161315918 0.0001268410123884678
0.008283156901597977 0.9917168021202087
0.9999409914016724 5.894730566069484e-05
0.9855729937553406 0.014427030459046364
0.9714321494102478 0.02856782265007496
0.9999531507492065 4.679941412177868e-05
0.9885430335998535 0.01145696360617876
0.989084780216217 0.01091520581394434
0.6579383015632629 0.34206169843673706
0.993756890296936 0.006243090145289898
3.4057687781086088e-09 1.0
0.9917623996734619 0.008237648755311966
0.9420074224472046 0.05799262225627899
0.9658148288726807 0.03418519347906113
0.962051510810852 0.03794848546385765
0.9975982308387756 0.0024017158430069685
0.725468099117279 0.27453184127807617
0.9995902180671692 0.0004097826895304024
0.027873143553733826 0.972126841545105
0.9987267851829529 0.0012732003815472126
0.9999923706054688 7.612093213538174e-06
0.9996806383132935 0.00031932350248098373
0.9975622892379761 0.0024377263616770506
0.9914565682411194 0.008543377742171288
0.850246012210846 0.14975394308567047
0.00613927049562335 0.9938607215881348
0.04651568830013275 0.953484296798706
0.8069931268692017 0.19300685822963715
0.011340790428221226 0.9886592030525208
0.5788864493370056 0.4211135506629944
0.06980060786008835 0.930199384689331
0.9999834299087524 1.6547259292565286e-05
0.9645374417304993 0.03546259179711342
0.9999163150787354 8.368112321477383e-05

@ -0,0 +1,23 @@
from keras.preprocessing.image import ImageDataGenerator

class gen_data:
    def __init__(self):
        # batch size per training step
        # a batch of about 20 was guessed to work well here;
        # too large a batch led to overfitting in our runs
        # self.batch = 20
        # self.batch = 16
        self.batch = 32
        # data-augmentation generator provided by keras
        '''
        zoom_range      range of random zoom
        rotation_range  maximum random rotation angle for augmentation
        horizontal_flip randomly flip images horizontally
        vertical_flip   randomly flip images vertically
        '''
        self.tr_gen = ImageDataGenerator(
            zoom_range=2,
            rotation_range=90,
            horizontal_flip=True,
            vertical_flip=True
        )
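A small sketch (my addition) to eyeball what the augmentation produces; the random arrays stand in for the 224x224 RGB images that Loader builds:

```python
import numpy as np
from matplotlib import pyplot as plt

gen = gen_data()
x = np.random.randint(0, 255, (4, 224, 224, 3)).astype('float32')  # stand-in images
y = np.zeros((4, 2))                                               # dummy one-hot labels
batch_x, batch_y = next(gen.tr_gen.flow(x, y, batch_size=4))
for k in range(4):
    plt.subplot(1, 4, k + 1)
    plt.imshow(batch_x[k].astype('uint8'))
    plt.axis('off')
plt.show()
```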

@ -0,0 +1,10 @@
import tensorflow as tf

a = tf.test.is_built_with_cuda()  # check whether TensorFlow was built with CUDA
b = tf.test.is_gpu_available(
    cuda_only=False,
    min_cuda_compute_capability=None
)  # check whether a GPU is available
print(a)  # True means CUDA is available
print(b)  # True means a GPU is available
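tf.test.is_gpu_available is deprecated in recent TensorFlow releases; a sketch of the replacement API:

```python
import tensorflow as tf

# the non-deprecated way to detect visible GPUs
gpus = tf.config.list_physical_devices('GPU')
print(len(gpus) > 0)  # True means at least one GPU is visible
print(gpus)           # e.g. [PhysicalDevice(name='/physical_device:GPU:0', ...)]
```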

@ -0,0 +1,45 @@
from keras.models import Sequential
from keras import layers
from keras.applications import DenseNet201
from keras.optimizers import Nadam

class breast_train_test:
    def __init__(self):
        # sequential container for the model
        self.model = Sequential()
        # DenseNet201: a 201-layer convolutional network
        net = DenseNet201(
            # start from ImageNet-pretrained weights
            weights='imagenet',
            # drop the ImageNet classification head
            include_top=False,
            # 224 x 224 images with 3 (RGB) channels
            input_shape=(224, 224, 3)
        )
        # learning rate 0.0001
        study_rate = 10**(-4)
        self.build(net, study_rate)

    '''
    resnet      the DenseNet201 base network (the parameter name is historical)
    study_rate  learning rate
    '''
    def build(self, resnet, study_rate):
        self.model.add(resnet)
        # GlobalAveragePooling2D averages each channel's feature map,
        # collapsing (H, W, C) down to one value per channel
        self.model.add(layers.GlobalAveragePooling2D())
        # Dropout: randomly zero activations, keep probability 0.5
        self.model.add(layers.Dropout(0.5))
        # BatchNormalization: normalize the previous layer's activations per batch
        self.model.add(layers.BatchNormalization())
        # Dense head: 2 outputs with the softmax activation derived in the notes
        self.model.add(layers.Dense(2, activation='softmax'))
        self.model.compile(
            # binary cross-entropy, as given in the notes
            loss="binary_crossentropy",
            # Nadam optimizer, as given in the notes
            optimizer=Nadam(learning_rate=study_rate),
            # evaluation metric
            metrics=['accuracy']
        )
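A quick shape check (my sketch; instantiating the class downloads the ImageNet weights on first run):

```python
import numpy as np

wrapper = breast_train_test()
dummy = np.zeros((1, 224, 224, 3), dtype='float32')  # one blank RGB image
probs = wrapper.model.predict(dummy)
print(probs.shape)  # (1, 2): [P(benign), P(malignant)], rows sum to 1
```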

@ -0,0 +1,24 @@
# learning-rate reduction
from keras.callbacks import ReduceLROnPlateau, ModelCheckpoint

class reduce:
    def train(self):
        # lower the learning rate when val_accuracy stops improving
        self.learn_control = ReduceLROnPlateau(
            monitor='val_accuracy',
            patience=5,
            verbose=1,
            factor=0.2,
            min_lr=1e-7
        )
        path = "D:\\pro_of_program\\Python\\train_cancer\\train_model\\third_model"
        # save the model, keeping only the best checkpoint
        self.checkpoint = ModelCheckpoint(
            filepath=path,
            monitor='val_accuracy',
            verbose=1,
            save_best_only=True,
            mode='max'
        )
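Because save_best_only=True keeps only the best-scoring checkpoint at path, the saved model can be restored directly; a hedged sketch (the exact on-disk format depends on the Keras version):

```python
import tensorflow as tf

# restore the best checkpoint written by the ModelCheckpoint above
best = tf.keras.models.load_model(
    "D:\\pro_of_program\\Python\\train_cancer\\train_model\\third_model"
)
best.summary()
```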

@ -0,0 +1,24 @@
import numpy as np
import cv2
from PIL import Image
import tensorflow as tf

def load_single(dir, size):
    # open the image, convert it to RGB, resize, and return a numpy array
    read = lambda i: np.asarray(Image.open(i).convert("RGB"))
    img = read(dir)
    return np.array(cv2.resize(img, (size, size)))

def check(dir_pic, dir_model):
    pic_test = dir_pic
    # convert the image to a numpy array
    img_test = load_single(pic_test, 224)
    # the model expects input of shape (1, 224, 224, 3)
    img_test = np.expand_dims(img_test, axis=0)
    # load the trained model
    x = tf.keras.models.load_model(dir_model)
    res = x.predict(img_test)
    np.set_printoptions(suppress=True)
    return f'{res[0][0] * 100 : .4f}%,{res[0][1] * 100 : .4f}%'
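Usage mirrors the earlier check(), but returns formatted percentages; the paths below are placeholders:

```python
# hypothetical image and saved-model paths
print(check('../picture/benign/benign (1).png',
            '../train_model/third_model'))
# e.g. " 98.7654%, 1.2346%"  (benign%, malignant%)
```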

@ -0,0 +1,30 @@
from loader_picture import data_loader
import numpy as np
from matplotlib import pyplot as plt
from train_model.modeling import reduce_study_rate
from train_model.modeling import modeling
from train_model.data_gen import data_output
# 载入图片
load = data_loader.Loader()
# 获取模型
models = modeling.breast_train_test()
# 展现模型
models.model.summary()
# data
data = data_output.gen_data()
# 降低学习率
reduces = reduce_study_rate.reduce()
reduces.train()
# 训练+评估
history = models.model.fit(
data.tr_gen.flow(load.train_of_x , load.train_of_y , batch_size=data.batch),
steps_per_epoch = load.train_of_x.shape[0] / data.batch,
# 训练30次
epochs=50,
validation_data=(load.val_of_x , load.val_of_y),
callbacks=[reduces.learn_control , reduces.checkpoint]
)

File diff suppressed because one or more lines are too long
