# UAV/utils/odm_monitor.py
import os
import logging
import subprocess
from typing import Dict, Tuple
import pandas as pd
import numpy as np
from osgeo import gdal
class ODMError(Exception):
    """Base class for all ODM-processing errors raised by this module.

    Gives callers a single type to catch while keeping the specific
    subclasses below fully backward compatible.
    """
    pass


class NotOverlapError(ODMError):
    """Raised when ODM reports insufficient image overlap."""
    pass


class DockerNotRunError(ODMError):
    """Raised when the Docker daemon is not running or unreachable."""
    pass


class DockerShareError(ODMError):
    """Raised when Docker cannot access (share) the data/output directory."""
    pass
class ODMProcessMonitor:
    """Runs OpenDroneMap (ODM) in Docker per grid and validates the output.

    Attributes:
        output_dir: root directory holding one working directory per grid
            (``grid_<x>_<y>``).
        mode: processing mode; "快拼模式" (fast-stitch) skips 3D products.
    """

    def __init__(self, output_dir: str, mode: str = "快拼模式"):
        self.output_dir = output_dir
        self.logger = logging.getLogger('UAV_Preprocess.ODMMonitor')
        self.mode = mode

    def _check_success(self, grid_dir: str) -> bool:
        """Check whether ODM finished successfully for one grid.

        Checks, in order:
        1. the required output folders exist
        2. the orthophoto file exists and is at least 1 MB
        3. the raster opens, is mostly non-empty, and is not constant

        Args:
            grid_dir: working directory of the grid (contains ``project/``).

        Returns:
            True when the orthophoto looks valid, False otherwise.
        """
        # Required output folders; non-fast modes must also have produced
        # texturing and georeferencing results.
        success_markers = ['odm_orthophoto']
        if self.mode != "快拼模式":
            success_markers.extend(['odm_texturing', 'odm_georeferencing'])
        if not all(os.path.exists(os.path.join(grid_dir, 'project', marker)) for marker in success_markers):
            self.logger.error("必要的文件夹未生成")
            return False

        ortho_path = os.path.join(
            grid_dir, 'project', 'odm_orthophoto', 'odm_orthophoto.original.tif')
        if not os.path.exists(ortho_path):
            self.logger.error("正射影像文件未生成")
            return False

        # Reject obviously truncated outputs (< 1 MB).
        file_size_mb = os.path.getsize(ortho_path) / (1024 * 1024)
        if file_size_mb < 1:
            self.logger.error(f"正射影像文件过小: {file_size_mb:.2f}MB")
            return False

        ds = None
        try:
            ds = gdal.Open(ortho_path)
            if ds is None:
                self.logger.error("无法打开正射影像文件")
                return False
            band = ds.GetRasterBand(1)
            # approx_ok=False, force=True: compute exact statistics now.
            stats = band.GetStatistics(False, True)
            if stats is None:
                self.logger.error("无法获取影像统计信息")
                return False
            min_val, max_val, _mean, _std = stats

            # Ratio of no-data pixels; above 50% the mosaic is mostly empty.
            no_data_value = band.GetNoDataValue()
            array = band.ReadAsArray()
            if no_data_value is not None:
                no_data_ratio = np.sum(array == no_data_value) / array.size
            else:
                no_data_ratio = 0
            if no_data_ratio > 0.5:
                self.logger.error(f"正射影像空值比例过高: {no_data_ratio:.2%}")
                return False

            # An (almost) constant image is considered invalid output.
            if max_val - min_val < 1:
                self.logger.error("正射影像可能无效:像素值范围过小")
                return False
            return True
        except Exception as e:
            self.logger.error(f"检查正射影像时发生错误: {str(e)}")
            return False
        finally:
            # Bug fix: the dataset was previously released only on the success
            # path; drop the reference on every path so GDAL closes the file.
            ds = None

    def _build_docker_command(self, grid_dir: str, fast_mode: bool,
                              produce_dem: bool, use_lowest_quality: bool) -> str:
        """Assemble the ``docker run`` command line for one grid."""
        # NOTE(review): the path is interpolated into a shell string
        # (shell=True); this is injection-prone if grid_dir is ever
        # user-controlled.  ``-ti`` also assumes a TTY is attached.
        docker_command = (
            f"docker run --gpus all -ti --rm "
            f"-v {grid_dir}:/datasets "
            f"opendronemap/odm:gpu "
            f"--project-path /datasets project "
            f"--max-concurrency 15 "
            f"--force-gps "
        )
        if use_lowest_quality:
            docker_command += "--feature-quality lowest "
        docker_command += "--orthophoto-resolution 10 "
        if produce_dem:
            docker_command += "--dsm --dtm "
        if fast_mode:
            docker_command += "--fast-orthophoto --skip-3dmodel "
        docker_command += "--rerun-all"
        return docker_command

    def _raise_for_known_errors(self, stdout: str, stderr: str) -> None:
        """Scan the tail of ODM output and raise a typed error on known failures.

        Raises:
            DockerNotRunError: Docker daemon is not running / unreachable.
            DockerShareError: Docker cannot access the mounted directory.
            NotOverlapError: ODM reported insufficient image overlap.
        """
        # Bug fix: daemon-connectivity and permission errors are written to
        # stderr, so inspect the last lines of BOTH streams, not just stdout.
        last_lines = []
        for text in (stdout, stderr):
            lines = text.strip().split('\n')
            last_lines.extend(line.lower() for line in lines[-10:])

        if any("docker not run" in line or "docker daemon" in line
               or "cannot connect to the docker daemon" in line
               for line in last_lines):
            raise DockerNotRunError("Docker服务未启动")
        if any("not share" in line or "permission denied" in line
               or "access is denied" in line for line in last_lines):
            raise DockerShareError("Docker无法访问目录")
        if any("not overlap" in line for line in last_lines):
            raise NotOverlapError("检测到图像重叠度不足错误")

    def run_odm_with_monitor(self, grid_dir: str, grid_id: tuple, fast_mode: bool = True, produce_dem: bool = False) -> Tuple[bool, str]:
        """Run ODM for one grid, retrying up to three times on failure.

        On the first "not overlap" failure the ``--feature-quality lowest``
        flag is dropped and the run is retried at normal quality.

        Args:
            grid_dir: working directory mounted into the ODM container.
            grid_id: (x, y) identifier of the grid, used for logging.
            fast_mode: use ``--fast-orthophoto`` and skip the 3D model.
            produce_dem: also produce DSM/DTM (incompatible with fast_mode).

        Returns:
            (success, error_message); error_message is "" on success.
        """
        if produce_dem and fast_mode:
            self.logger.error("快拼模式下无法生成DEM请调整生产参数")
            return False, "快拼模式下无法生成DEM请调整生产参数"

        self.logger.info(f"开始处理网格 ({grid_id[0]},{grid_id[1]})")

        # Docker on Windows wants forward slashes and a lower-case drive
        # letter; the normalization is loop-invariant, so do it once.
        grid_dir = grid_dir[0].lower() + grid_dir[1:].replace('\\', '/')

        max_retries = 3
        use_lowest_quality = True  # start with the cheapest feature quality

        for current_try in range(1, max_retries + 1):
            self.logger.info(f"{current_try} 次尝试处理网格 ({grid_id[0]},{grid_id[1]})")
            try:
                docker_command = self._build_docker_command(
                    grid_dir, fast_mode, produce_dem, use_lowest_quality)
                self.logger.info(docker_command)
                result = subprocess.run(
                    docker_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                stdout = result.stdout.decode('utf-8')
                stderr = result.stderr.decode('utf-8')
                self.logger.error(f"==========stderr==========: {stderr}")

                # Raise DockerNotRunError / DockerShareError / NotOverlapError
                # if the output tail matches a known failure signature.
                self._raise_for_known_errors(stdout, stderr)

                if self._check_success(grid_dir):
                    self.logger.info(f"网格 ({grid_id[0]},{grid_id[1]}) 处理成功")
                    return True, ""
                if current_try < max_retries:
                    self.logger.warning(f"网格处理失败,准备第 {current_try + 1} 次重试")
                else:
                    self.logger.error(f"网格 ({grid_id[0]},{grid_id[1]}) 处理失败,已达到最大重试次数")
                    return False, f"网格 ({grid_id[0]},{grid_id[1]}) 处理失败,已重试{max_retries}"
            except NotOverlapError:
                if use_lowest_quality:
                    self.logger.warning("检测到'not overlap'错误移除lowest quality参数后重试")
                    use_lowest_quality = False
                    continue
                self.logger.error("即使移除lowest quality参数后仍然出现'not overlap'错误")
                return False, "图像重叠度不足"
            except DockerNotRunError:
                self.logger.error("Docker服务未启动")
                return False, "Docker没有启动请启动Docker"
            except DockerShareError:
                self.logger.error("Docker无法访问目录")
                return False, "Docker无法访问数据目录或输出目录请检查目录权限和共享设置"
        return False, f"网格 ({grid_id[0]},{grid_id[1]}) 处理失败"

    def process_all_grids(self, grid_points: Dict[tuple, pd.DataFrame], produce_dem: bool) -> Dict[tuple, pd.DataFrame]:
        """Run ODM for every grid and collect the ones that succeeded.

        Args:
            grid_points: mapping of grid id -> image-point data for that grid.
            produce_dem: whether ODM should also produce DSM/DTM products.

        Returns:
            The subset of ``grid_points`` whose grids processed successfully.

        Raises:
            Exception: if every grid failed (nothing to continue with).
        """
        self.logger.info("开始执行网格处理")
        successful_grid_points = {}
        failed_grids = []
        for grid_id, points in grid_points.items():
            grid_dir = os.path.join(
                self.output_dir, f'grid_{grid_id[0]}_{grid_id[1]}'
            )
            try:
                success, error_msg = self.run_odm_with_monitor(
                    grid_dir=grid_dir,
                    grid_id=grid_id,
                    fast_mode=(self.mode == "快拼模式"),
                    produce_dem=produce_dem
                )
                if success:
                    successful_grid_points[grid_id] = points
                else:
                    self.logger.error(f"网格 ({grid_id[0]},{grid_id[1]}) 处理失败: {error_msg}")
                    failed_grids.append((grid_id, error_msg))
            except Exception as e:
                # A single bad grid must not abort the batch; record and move on.
                error_msg = str(e)
                self.logger.error(f"处理网格 ({grid_id[0]},{grid_id[1]}) 时发生异常: {error_msg}")
                failed_grids.append((grid_id, error_msg))

        # Summarize the batch outcome.
        total_grids = len(grid_points)
        failed_count = len(failed_grids)
        success_count = len(successful_grid_points)
        self.logger.info(f"网格处理完成。总计: {total_grids}, 成功: {success_count}, 失败: {failed_count}")
        if failed_grids:
            self.logger.error("失败的网格:")
            for grid_id, error_msg in failed_grids:
                self.logger.error(f"网格 ({grid_id[0]},{grid_id[1]}): {error_msg}")
        if not successful_grid_points:
            raise Exception("所有网格处理都失败,无法继续处理")
        return successful_grid_points