# Rough end-to-end training workflow for an AI model (tutorial script)
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
# Load the dataset (replace 'your_dataset.csv' with your actual file).
data = pd.read_csv('your_dataset.csv')
import tensorflow as tf

# Split features/target and hold out a test set.  The original snippet
# referenced X_train/X_test without ever defining them (NameError) and
# never used the imported train_test_split/StandardScaler; the split
# and scaling below supply that missing preprocessing.
X = data.drop('target_column', axis=1)
y = data['target_column']
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42)

# Standardize features: fit on the training set ONLY to avoid leaking
# test-set statistics into training.
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Define the model: two 64-unit relu hidden layers, one sigmoid output
# unit for binary classification.
model = tf.keras.models.Sequential([
    tf.keras.layers.Dense(64, activation='relu',
                          input_shape=(X_train.shape[1],)),
    tf.keras.layers.Dense(64, activation='relu'),
    tf.keras.layers.Dense(1, activation='sigmoid'),
])

# Compile: Adam optimizer with binary cross-entropy loss.
model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['accuracy'])

# Train; 20% of the training data is held back for validation.
history = model.fit(X_train, y_train,
                    epochs=50,
                    batch_size=32,
                    validation_split=0.2,
                    verbose=1)

# Evaluate on the untouched test set.
test_loss, test_accuracy = model.evaluate(X_test, y_test, verbose=0)
print(f"Test Accuracy: {test_accuracy}")
from keras.callbacks import EarlyStopping
# Early-stopping callback: halt when validation loss stops improving
# for 10 consecutive epochs and keep the best weights seen.
early_stopping = EarlyStopping(monitor='val_loss', patience=10,
                               restore_best_weights=True)

# Rebuild the network WITH L2 regularization baked into the hidden
# layers.  The original code called model.add(...) AFTER training,
# which appends a 64-unit relu layer behind the sigmoid output and
# silently breaks the classifier's output shape; regularizers must be
# part of the architecture before training.
model = tf.keras.models.Sequential([
    tf.keras.layers.Dense(64, activation='relu',
                          kernel_regularizer=tf.keras.regularizers.l2(0.01),
                          input_shape=(X_train.shape[1],)),
    tf.keras.layers.Dense(64, activation='relu',
                          kernel_regularizer=tf.keras.regularizers.l2(0.01)),
    tf.keras.layers.Dense(1, activation='sigmoid'),
])

# Compile with an explicit learning rate, then train with early stopping.
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
              loss='binary_crossentropy',
              metrics=['accuracy'])
history = model.fit(X_train, y_train,
                    epochs=100,
                    batch_size=32,
                    validation_split=0.2,
                    callbacks=[early_stopping],
                    verbose=1)

# Final evaluation on the held-out test set.
test_loss, test_accuracy = model.evaluate(X_test, y_test)
print(f"Final Test Accuracy: {test_accuracy}")
# Persist the trained model in HDF5 format.
model.save('my_model.h5')
# Alternative export formats (e.g. SavedModel):
# tf.saved_model.save(model, 'saved_model')

# Convert to TensorFlow Lite and write the flatbuffer to disk.
lite_converter = tf.lite.TFLiteConverter.from_keras_model(model)
lite_bytes = lite_converter.convert()
with open('model.tflite', 'wb') as out_file:
    out_file.write(lite_bytes)
# Fine-tune the trained model on freshly collected data.
incoming = pd.read_csv('new_data.csv')
labels_new = incoming['target_column']
features_new = incoming.drop('target_column', axis=1)

# Apply the SAME standardization that the training features received.
# NOTE(review): `scaler` is never defined earlier in this file as
# posted — it must be the StandardScaler fitted on the training data;
# fitting a fresh scaler here would shift the feature distribution.
features_new = scaler.transform(features_new)

# Continue training (fine-tune) the existing model on the new samples.
model.fit(features_new, labels_new, epochs=10)
# Original article: https://blog.csdn.net/qq_73252299/article/details/144302770
# Disclaimer: this article was reposted from an online resource; if it infringes
# the original author's rights, contact the site for removal. More content at
# zxcms.com.