UAV/app_plugin.py

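"""UAV image preprocessing and ODM reconstruction plugin.

Pipeline: extract GPS data from the images, keep the largest DBSCAN cluster of
GPS points, divide the survey area into overlapping grids, copy each grid's
images into its own ODM project, run the grids through the ODM monitor, then
merge the orthophotos and, in 3D mode, convert the OBJ models.
"""
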
import os
import shutil
from dataclasses import dataclass
from typing import Dict, Tuple
import psutil
import pandas as pd
from filter.cluster_filter import GPSCluster
from utils.directory_manager import DirectoryManager
from utils.odm_monitor import ODMProcessMonitor
from utils.gps_extractor import GPSExtractor
from utils.grid_divider import GridDivider
from utils.logger import setup_logger
from utils.visualizer import FilterVisualizer
from post_pro.merge_tif import MergeTif
from post_pro.conv_obj import ConvertOBJ


@dataclass
class ProcessConfig:
    """Preprocessing configuration."""
    image_dir: str
    output_dir: str
    # Clustering filter parameters
    cluster_eps: float = 0.01
    cluster_min_samples: int = 5

    # Grid division parameters
    grid_overlap: float = 0.05
    grid_size: float = 500
    mode: str = "三维模式"  # "3D mode"; this value is compared verbatim elsewhere, so it is left untranslated


class ODM_Plugin:
    def __init__(self, config):
        self.config = config

        # Initialize the directory manager
        self.dir_manager = DirectoryManager(config)
        # Clean and recreate the output directory
        self.dir_manager.clean_output_dir()
        self.dir_manager.setup_output_dirs()
        # Check available disk space
        self.dir_manager.check_disk_space()

        # Initialize the remaining components
        self.logger = setup_logger(config.output_dir)
        self.gps_points = pd.DataFrame(columns=["file", "lat", "lon"])
        self.odm_monitor = ODMProcessMonitor(
            config.output_dir, mode=config.mode)
        self.visualizer = FilterVisualizer(config.output_dir)

    def extract_gps(self) -> pd.DataFrame:
        """Extract GPS data from the input images."""
        self.logger.info("Starting GPS data extraction")
        extractor = GPSExtractor(self.config.image_dir)
        self.gps_points = extractor.extract_all_gps()
        self.logger.info(f"Extracted {len(self.gps_points)} GPS points")
        return self.gps_points

    def cluster(self):
        """Cluster the GPS points with DBSCAN and keep only the largest cluster."""
        previous_points = self.gps_points.copy()
        clusterer = GPSCluster(
            self.gps_points,
            eps=self.config.cluster_eps,
            min_samples=self.config.cluster_min_samples
        )
        self.clustered_points = clusterer.fit()
        self.gps_points = clusterer.get_cluster_stats(self.clustered_points)
        self.visualizer.visualize_filter_step(
            self.gps_points, previous_points, "1-Clustering")

    def divide_grids(self) -> Dict[tuple, pd.DataFrame]:
        """Divide the GPS points into grids.

        Returns:
            grid_points: mapping from grid index to the GPS points it contains
        """
        grid_divider = GridDivider(
            overlap=self.config.grid_overlap,
            grid_size=self.config.grid_size,
            output_dir=self.config.output_dir
        )
        grids, grid_points = grid_divider.adjust_grid_size_and_overlap(
            self.gps_points
        )
        grid_divider.visualize_grids(self.gps_points, grids)
        if len(grids) >= 20:
            self.logger.warning(
                "The number of grids has reached 20 or more; the partitioning needs manual adjustment")

        return grid_points

    def copy_images(self, grid_points: Dict[tuple, pd.DataFrame]):
        """Copy the images of each grid into its project folder."""
        self.logger.info("Starting to copy image files")
        for grid_id, points in grid_points.items():
            output_dir = os.path.join(
                self.config.output_dir,
                f"grid_{grid_id[0]}_{grid_id[1]}",
                "project",
                "images"
            )
            os.makedirs(output_dir, exist_ok=True)
            # points is a DataFrame, so iterate over its rows rather than its column names
            for _, point in points.iterrows():
                src = os.path.join(self.config.image_dir, point["file"])
                dst = os.path.join(output_dir, point["file"])
                shutil.copy(src, dst)
            self.logger.info(
                f"Grid ({grid_id[0]},{grid_id[1]}) contains {len(points)} images")

    def merge_tif(self, grid_lt):
        """Merge the orthophoto products of all grids."""
        self.logger.info("Starting to merge all imagery products")
        merger = MergeTif(self.config.output_dir)
        merger.merge_orthophoto(grid_lt)

    def convert_obj(self, grid_lt):
        """Convert the OBJ models of all grids."""
        self.logger.info("Starting OBJ model conversion")
        converter = ConvertOBJ(self.config.output_dir)
        converter.convert_grid_obj(grid_lt)

    def post_process(self, successful_grid_lt: list, grid_points: Dict[tuple, pd.DataFrame]):
        """Post-processing: merge the products of the successful grids and, in 3D mode, convert the OBJ models."""
        if len(successful_grid_lt) < len(grid_points):
            self.logger.warning(
                f"{len(grid_points) - len(successful_grid_lt)} grid(s) failed to process; "
                f"only the {len(successful_grid_lt)} successfully processed grid(s) will be merged"
            )

        self.merge_tif(successful_grid_lt)
        if self.config.mode == "三维模式":  # "3D mode"
            self.convert_obj(successful_grid_lt)

    def process(self):
        """Run the complete pipeline: preprocessing, per-grid reconstruction, and post-processing."""
        try:
            self.extract_gps()
            self.cluster()
            grid_points = self.divide_grids()
            self.copy_images(grid_points)
            self.logger.info("Preprocessing finished")

            successful_grid_lt = self.odm_monitor.process_all_grids(grid_points)

            self.post_process(successful_grid_lt, grid_points)
            self.logger.info("Reconstruction finished")

        except Exception as e:
            self.logger.error(f"Error during processing: {str(e)}", exc_info=True)
            raise
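

# A minimal usage sketch, assuming hypothetical "path/to/..." directories;
# adjust image_dir and output_dir to the actual dataset layout before running.
if __name__ == "__main__":
    config = ProcessConfig(
        image_dir="path/to/images",   # folder of geotagged UAV photos (hypothetical path)
        output_dir="path/to/output",  # working directory; cleaned and recreated on startup (hypothetical path)
        mode="三维模式",              # "3D mode": OBJ models are converted after the orthophoto merge
    )
    plugin = ODM_Plugin(config)
    plugin.process()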