# tensorflow
import tensorflow.keras as keras
import tensorflow as tf
# image processing
from tensorflow.keras.preprocessing.image import ImageDataGenerator, load_img
from tensorflow.keras.layers import Input, Dense, Activation, GlobalAveragePooling2D, Dropout
# model / neural network
from tensorflow.keras import layers
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.applications.efficientnet_v2 import EfficientNetV2M
from tensorflow.keras.applications import InceptionResNetV2, EfficientNetB0, EfficientNetV2L, ResNet152V2
import tensorboard
import os, math, datetime, random
import numpy as np
from PIL import Image, ImageEnhance
os.environ["CUDA_VISIBLE_DEVICES"] = "1"  # pin this run to GPU index 1
learning_rate = 0.001
dropout_rate = 0.4
N_EPOCHS = 50
N_BATCH = 32
img_height, img_width = 160, 160
data_dir = '/train/'
image_gen_train = ImageDataGenerator(
    validation_split=0.2,                # same split as the validation generator, required for subset="training"
    featurewise_center=True,             # needs dataset statistics, see note below
    featurewise_std_normalization=True,
    rotation_range=20,
    width_shift_range=0.1,
    height_shift_range=0.1,
    horizontal_flip=True,
    rescale=1./255,
    brightness_range=[0.2, 1.0]
)
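# NOTE (sketch, not in the original post): featurewise_center / featurewise_std_normalization do
# nothing on their own; the generator has to be fitted on a sample of training images first so it
# can estimate the dataset mean and std. `sample_images` is a hypothetical NumPy array of images.
# image_gen_train.fit(sample_images)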
image_gen_val = ImageDataGenerator(
    validation_split=0.2,
    rescale=1./255
)
train_ds = image_gen_train.flow_from_directory(
    directory=data_dir,
    subset="training",
    shuffle=True,
    target_size=(img_height, img_width),
    batch_size=N_BATCH,
    class_mode='binary',
    color_mode='rgb'
)
val_ds = image_gen_val.flow_from_directory(
    directory=data_dir,
    subset="validation",
    shuffle=True,
    target_size=(img_height, img_width),
    batch_size=N_BATCH,
    class_mode='binary',
    color_mode='rgb'
)
class_names = train_ds.class_indices.keys()
print(class_names)
print(len(class_names))
def Custom_model():
    input_tensor = Input(shape=(img_height, img_width, 3))
    base_model = ResNet152V2(input_tensor=input_tensor, include_top=False, weights='imagenet')
    base_model.trainable = False  # freeze the backbone; only the new classification head is trained
    x = base_model.output
    x = GlobalAveragePooling2D()(x)
    x = Dropout(dropout_rate)(x)
    x = Dense(512, activation=tf.nn.relu6)(x)  # 'relu6' is not a registered string activation in tf.keras 2.x
    x = Dropout(dropout_rate)(x)
    x = Dense(256, activation=tf.nn.relu6)(x)
    x = Dropout(dropout_rate)(x)
    output = Dense(1, activation='sigmoid', kernel_regularizer='l2')(x)
    model = Model(inputs=input_tensor, outputs=output)
    return model
model = Custom_model()
model.compile(optimizer=tf.keras.optimizers.AdamW(learning_rate=learning_rate, beta_1=0.9, beta_2=0.999),
              loss=tf.keras.losses.BinaryCrossentropy(),
              metrics=['accuracy'])  # name must match the 'val_accuracy' monitored by ModelCheckpoint
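# Optional fine-tuning sketch (an assumption, not part of the original run): once the new head has
# converged, the frozen ResNet backbone can be unfrozen and the model recompiled with a much lower
# learning rate for a few additional epochs.
# for layer in model.layers:
#     layer.trainable = True
# model.compile(optimizer=tf.keras.optimizers.AdamW(learning_rate=1e-5),
#               loss=tf.keras.losses.BinaryCrossentropy(), metrics=['accuracy'])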
total_sample=train_ds.n
#Callbacks
#tensorboard
log_dir = "logs/fit/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
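# The logged run can then be inspected in a browser with: tensorboard --logdir logs/fit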
#Learning rate Scheduler
def scheduler(epoch, lr):
    # if epoch < 10:
    if epoch < 5:
        return lr
    else:
        return lr * tf.math.exp(-0.1)
lr_callback = tf.keras.callbacks.LearningRateScheduler(scheduler)
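# With exp(-0.1) ≈ 0.905, this schedule shrinks the learning rate by roughly 9.5% per epoch after epoch 5.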
checkpoint_filepath = "/tmp/training_checkpoints"
model_checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
    filepath=checkpoint_filepath,
    save_weights_only=True,
    monitor='val_accuracy',
    mode='max',
    save_best_only=True)
history = model.fit(
    train_ds,
    steps_per_epoch=train_ds.samples // N_BATCH,
    epochs=N_EPOCHS,
    validation_data=val_ds,
    validation_steps=val_ds.samples // N_BATCH,
    callbacks=[tensorboard_callback, lr_callback, model_checkpoint_callback],
    use_multiprocessing=True, workers=8
)
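# Optional sketch: restore the best weights tracked by the checkpoint callback before saving,
# instead of keeping the final-epoch weights (assumes the checkpoint was actually written).
# model.load_weights(checkpoint_filepath)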
# Save the entire model as a SavedModel.
model.save('saved_model/my_model')
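# Inference sketch (an assumption, not in the original post): reload the SavedModel and score a
# single image; preprocessing must match training (160x160 RGB, rescaled to [0, 1]).
# 'example.jpg' is a hypothetical file path.
# loaded = tf.keras.models.load_model('saved_model/my_model')
# img = load_img('example.jpg', target_size=(img_height, img_width))
# x = np.expand_dims(np.asarray(img, dtype=np.float32) / 255.0, axis=0)
# prob = loaded.predict(x)[0][0]  # sigmoid output: probability of the positive class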