Below are C++/OpenCV code examples for texture-structure decomposition, using two classical approaches: guided filtering and total variation (TV). Both follow the additive model I = S + T, where S is the smooth, edge-preserving structure layer and T is the high-frequency texture (residual) layer.
1. Guided filter method
```cpp
#include <opencv2/opencv.hpp>
#include <opencv2/ximgproc.hpp>
#include <iostream>
using namespace cv;
using namespace std;
using namespace cv::ximgproc;
// Texture-structure decomposition via guided filtering
void textureStructureSeparationGuidedFilter(const Mat& src, Mat& structure, Mat& texture,
                                            int radius = 15, double eps = 0.01) {
    // Convert to floating point in [0, 1] for processing
    Mat srcFloat;
    src.convertTo(srcFloat, CV_32F, 1.0 / 255.0);
    // Initialize the output matrices
    structure = Mat::zeros(srcFloat.size(), srcFloat.type());
    texture = Mat::zeros(srcFloat.size(), srcFloat.type());
    // For color images, process each channel separately
    if (srcFloat.channels() == 3) {
        vector<Mat> srcChannels, structChannels, textureChannels;
        split(srcFloat, srcChannels);
        for (int i = 0; i < 3; i++) {
            Mat channelStruct;
            // Guided filter with the full color image as the guide
            guidedFilter(srcFloat, srcChannels[i], channelStruct, radius, eps);
            structChannels.push_back(channelStruct);
            // Texture layer = residual of the channel
            Mat channelTexture = srcChannels[i] - channelStruct;
            textureChannels.push_back(channelTexture);
        }
        // Merge the channels back
        merge(structChannels, structure);
        merge(textureChannels, texture);
    } else {
        // Grayscale image: the image guides itself
        guidedFilter(srcFloat, srcFloat, structure, radius, eps);
        texture = srcFloat - structure;
    }
}
// Display helper: scale a [0, 1] float image back to 0-255 for imshow
void displayNormalized(const string& windowName, const Mat& img) {
    Mat display;
    if (img.channels() == 3) {
        img.convertTo(display, CV_8U, 255.0);
    } else {
        // Single-channel image: apply a color map for easier visual inspection
        Mat temp;
        img.convertTo(temp, CV_8U, 255.0);
        applyColorMap(temp, display, COLORMAP_JET);
    }
    imshow(windowName, display);
}
int main() {
    // Load the input image
    Mat src = imread("texture_image.jpg");
    if (src.empty()) {
        cout << "Failed to load image!" << endl;
        return -1;
    }
    Mat structure, texture;
    // Run the texture-structure separation
    textureStructureSeparationGuidedFilter(src, structure, texture);
    // Show the results
    namedWindow("Original", WINDOW_AUTOSIZE);
    namedWindow("Structure", WINDOW_AUTOSIZE);
    namedWindow("Texture", WINDOW_AUTOSIZE);
    imshow("Original", src);
    displayNormalized("Structure", structure);
    displayNormalized("Texture", texture + Scalar::all(0.5)); // shift every channel so zero maps to mid gray
    waitKey(0);
    // Save the results
    Mat structureDisplay, textureDisplay;
    structure.convertTo(structureDisplay, CV_8U, 255.0);
    texture.convertTo(textureDisplay, CV_8U, 255.0, 128.0); // offset by 128 so negative values stay visible
    imwrite("structure.jpg", structureDisplay);
    imwrite("texture.jpg", textureDisplay);
    return 0;
}
```
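Because the texture layer is defined as the residual, structure + texture reconstructs the input exactly (up to floating-point error), which makes a convenient sanity check. A minimal sketch, assuming the `textureStructureSeparationGuidedFilter` function above is in scope:
```cpp
#include <opencv2/opencv.hpp>
#include <iostream>
using namespace cv;
using namespace std;

// Sanity check: the decomposition is exact by construction.
void checkReconstruction(const Mat& src) {
    Mat structure, texture;
    textureStructureSeparationGuidedFilter(src, structure, texture);
    Mat srcFloat;
    src.convertTo(srcFloat, CV_32F, 1.0 / 255.0);
    Mat reconstructed = structure + texture;
    // Maximum absolute difference should be on the order of float rounding error
    double err = norm(srcFloat, reconstructed, NORM_INF);
    cout << "max |input - (structure + texture)| = " << err << endl;
}
```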
2. Total variation (TV) method
```cpp
#include <opencv2/opencv.hpp>
#include <iostream>
using namespace cv;
using namespace std;
// Texture-structure separation via total variation (iterative gradient descent)
void textureStructureSeparationTV(const Mat& src, Mat& structure, Mat& texture,
                                  int iterations = 100, double lambda = 0.1) {
    // Convert to floating point in [0, 1]
    Mat srcFloat;
    src.convertTo(srcFloat, CV_32F, 1.0 / 255.0);
    // Initialize the structure layer with the input image
    srcFloat.copyTo(structure);
    // Iterative optimization (simplified scheme: Sobel used as a rough derivative operator)
    for (int iter = 0; iter < iterations; iter++) {
        Mat gradientX, gradientY;
        // Gradients in x and y
        Sobel(structure, gradientX, CV_32F, 1, 0, 3);
        Sobel(structure, gradientY, CV_32F, 0, 1, 3);
        // Gradient magnitude
        Mat gradientMag;
        magnitude(gradientX, gradientY, gradientMag);
        // Avoid division by zero (Scalar::all so every channel gets the offset)
        gradientMag += Scalar::all(1e-6);
        // Divergence of the normalized gradient field
        Mat divX, divY;
        Sobel(gradientX / gradientMag, divX, CV_32F, 1, 0, 3);
        Sobel(gradientY / gradientMag, divY, CV_32F, 0, 1, 3);
        Mat divergence = divX + divY;
        // Gradient-descent update: the TV term smooths, the data term keeps fidelity
        structure = structure + 0.1 * divergence + lambda * (srcFloat - structure);
        // Clamp to the valid range
        structure = cv::max(structure, 0.0);
        structure = cv::min(structure, 1.0);
        if (iter % 20 == 0) {
            cout << "Iteration: " << iter << "/" << iterations << endl;
        }
    }
    // Texture layer = residual
    texture = srcFloat - structure;
}
// Simplified separation using a bilateral filter as the structure extractor
void textureStructureSeparationBilateral(const Mat& src, Mat& structure, Mat& texture,
                                         int d = 15, double sigmaColor = 25, double sigmaSpace = 25) {
    Mat srcFloat;
    src.convertTo(srcFloat, CV_32F, 1.0 / 255.0);
    // Filter the 8-bit input so sigmaColor keeps its usual 0-255 interpretation,
    // then bring the result into the same [0, 1] float range as srcFloat
    Mat filtered;
    bilateralFilter(src, filtered, d, sigmaColor, sigmaSpace);
    filtered.convertTo(structure, CV_32F, 1.0 / 255.0);
    // Texture layer = residual
    texture = srcFloat - structure;
}
```
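If you would rather not hand-roll the TV iteration, OpenCV's photo module ships cv::denoise_TVL1, which solves a TV-L1 model on 8-bit single-channel observations. A minimal sketch for grayscale input (the lambda and iteration values here are illustrative, not tuned):
```cpp
#include <opencv2/opencv.hpp>
#include <opencv2/photo.hpp>
#include <vector>
using namespace cv;

// Structure layer via OpenCV's built-in TV-L1 solver (8-bit, single channel only)
void tvl1Separation(const Mat& gray8u, Mat& structure8u, Mat& texture8u,
                    double lambda = 1.0, int niters = 30) {
    // denoise_TVL1 takes a vector of observations; one image is enough here
    denoise_TVL1(std::vector<Mat>{gray8u}, structure8u, lambda, niters);
    // The residual can be negative, so compute it in a signed type and shift for display
    Mat diff;
    subtract(gray8u, structure8u, diff, noArray(), CV_16S);
    diff.convertTo(texture8u, CV_8U, 1.0, 128.0);
}
```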
3. Complete demo program
```cpp
#include <opencv2/opencv.hpp>
#include <opencv2/ximgproc.hpp>
#include <iostream>
using namespace cv;
using namespace std;
using namespace cv::ximgproc;
class TextureStructureSeparator {
private:
    int method_;  // 0: Guided Filter, 1: Bilateral, 2: TV

public:
    TextureStructureSeparator(int method = 0) : method_(method) {}

    void setMethod(int method) { method_ = method; }

    void separate(const Mat& src, Mat& structure, Mat& texture) {
        switch (method_) {
            case 0:
                guidedFilterSeparation(src, structure, texture);
                break;
            case 1:
                bilateralFilterSeparation(src, structure, texture);
                break;
            case 2:
                tvSeparation(src, structure, texture);
                break;
            default:
                guidedFilterSeparation(src, structure, texture);
        }
    }

private:
    void guidedFilterSeparation(const Mat& src, Mat& structure, Mat& texture) {
        Mat srcFloat;
        src.convertTo(srcFloat, CV_32F, 1.0 / 255.0);
        structure = Mat::zeros(srcFloat.size(), srcFloat.type());
        if (srcFloat.channels() == 3) {
            vector<Mat> srcChannels, structChannels;
            split(srcFloat, srcChannels);
            for (int i = 0; i < 3; i++) {
                Mat channelStruct;
                guidedFilter(srcFloat, srcChannels[i], channelStruct, 15, 0.01);
                structChannels.push_back(channelStruct);
            }
            merge(structChannels, structure);
        } else {
            guidedFilter(srcFloat, srcFloat, structure, 15, 0.01);
        }
        texture = srcFloat - structure;
    }

    void bilateralFilterSeparation(const Mat& src, Mat& structure, Mat& texture) {
        Mat srcFloat;
        src.convertTo(srcFloat, CV_32F, 1.0 / 255.0);
        // Filter the 8-bit input so the sigma values keep their usual 0-255 scale
        Mat filtered;
        bilateralFilter(src, filtered, 15, 25, 25);
        filtered.convertTo(structure, CV_32F, 1.0 / 255.0);
        texture = srcFloat - structure;
    }

    void tvSeparation(const Mat& src, Mat& structure, Mat& texture) {
        Mat srcFloat;
        src.convertTo(srcFloat, CV_32F, 1.0 / 255.0);
        srcFloat.copyTo(structure);
        // Simplified TV gradient descent
        for (int iter = 0; iter < 50; iter++) {
            Mat gradX, gradY;
            Sobel(structure, gradX, CV_32F, 1, 0, 3);
            Sobel(structure, gradY, CV_32F, 0, 1, 3);
            Mat gradMag;
            magnitude(gradX, gradY, gradMag);
            gradMag += Scalar::all(1e-6);
            Mat divX, divY;
            Sobel(gradX / gradMag, divX, CV_32F, 1, 0, 3);
            Sobel(gradY / gradMag, divY, CV_32F, 0, 1, 3);
            Mat divergence = divX + divY;
            // TV term smooths, data term pulls back toward the input
            structure += 0.05 * divergence + 0.1 * (srcFloat - structure);
            structure = cv::max(structure, 0.0);
            structure = cv::min(structure, 1.0);
        }
        texture = srcFloat - structure;
    }
};
// Build a side-by-side panel of original / structure / texture for display
Mat createResultPanel(const Mat& original, const Mat& structure, const Mat& texture) {
    int panelWidth = original.cols * 3;
    int panelHeight = original.rows;
    Mat panel(panelHeight, panelWidth, CV_8UC3);
    // Convert each layer to 8-bit BGR for display
    Mat origDisplay, structDisplay, textureDisplay;
    if (original.channels() == 1) {
        cvtColor(original, origDisplay, COLOR_GRAY2BGR);
    } else {
        origDisplay = original.clone();
    }
    structure.convertTo(structDisplay, CV_8U, 255.0);
    if (structDisplay.channels() == 1) {
        cvtColor(structDisplay, structDisplay, COLOR_GRAY2BGR);
    }
    texture.convertTo(textureDisplay, CV_8U, 255.0, 128.0);
    if (textureDisplay.channels() == 1) {
        cvtColor(textureDisplay, textureDisplay, COLOR_GRAY2BGR);
    }
    // Stitch the three images into one panel
    Rect origROI(0, 0, original.cols, original.rows);
    Rect structROI(original.cols, 0, original.cols, original.rows);
    Rect textureROI(original.cols * 2, 0, original.cols, original.rows);
    origDisplay.copyTo(panel(origROI));
    structDisplay.copyTo(panel(structROI));
    textureDisplay.copyTo(panel(textureROI));
    // Add labels
    putText(panel, "Original", Point(10, 30), FONT_HERSHEY_SIMPLEX, 1, Scalar(255, 255, 255), 2);
    putText(panel, "Structure", Point(original.cols + 10, 30), FONT_HERSHEY_SIMPLEX, 1, Scalar(255, 255, 255), 2);
    putText(panel, "Texture", Point(original.cols * 2 + 10, 30), FONT_HERSHEY_SIMPLEX, 1, Scalar(255, 255, 255), 2);
    return panel;
}
int main() {
    // Load the input image
    Mat src = imread("texture_image.jpg");
    if (src.empty()) {
        cout << "Failed to load image, falling back to a synthetic test image..." << endl;
        // Build a test image that combines structure and texture
        src = Mat(400, 400, CV_8UC3);
        for (int i = 0; i < src.rows; i++) {
            for (int j = 0; j < src.cols; j++) {
                // Structure: low-frequency sinusoidal gradient
                float structure = sin(j * 0.02f) * 0.3f + 0.5f;
                // Texture: high-frequency noise
                float texture = (rand() % 100 - 50) * 0.005f;
                float value = max(0.0f, min(1.0f, structure + texture));
                uchar v = saturate_cast<uchar>(value * 255.0f);
                src.at<Vec3b>(i, j) = Vec3b(v, v, v);
            }
        }
    }
    TextureStructureSeparator separator(0);  // guided filter method
    Mat structure, texture;
    // Run the separation
    separator.separate(src, structure, texture);
    // Build the result panel
    Mat resultPanel = createResultPanel(src, structure, texture);
    // Show the result
    namedWindow("Texture-Structure Separation", WINDOW_AUTOSIZE);
    imshow("Texture-Structure Separation", resultPanel);
    cout << "Press any key to continue..." << endl;
    waitKey(0);
    // Save the panel
    imwrite("separation_result.jpg", resultPanel);
    // Try all three methods and save each result
    vector<string> methods = {"guided_filter", "bilateral_filter", "tv"};
    for (int i = 0; i < 3; i++) {
        separator.setMethod(i);
        separator.separate(src, structure, texture);
        Mat panel = createResultPanel(src, structure, texture);
        string filename = "result_" + methods[i] + ".jpg";
        imwrite(filename, panel);
        cout << "Saved: " << filename << endl;
    }
    return 0;
}
```
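A common use of the decomposition is detail enhancement: amplify the texture layer and add it back onto the structure layer. A minimal sketch, assuming `structure` and `texture` are CV_32F layers in [0, 1] as produced above (the boost factor is just an example value):
```cpp
#include <opencv2/opencv.hpp>
using namespace cv;

// Recombine the layers with an amplified texture component.
// 'structure' and 'texture' are assumed to be CV_32F layers in [0, 1].
Mat enhanceDetail(const Mat& structure, const Mat& texture, float boost = 2.0f) {
    Mat enhanced = structure + boost * texture;  // amplify high-frequency detail
    Mat out;
    enhanced.convertTo(out, CV_8U, 255.0);       // convertTo saturates to [0, 255]
    return out;
}
```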
Build instructions
To build these examples you need OpenCV with the ximgproc module (part of opencv_contrib, required for guidedFilter) and must link against the OpenCV libraries:
```bash
# Build with g++
g++ -std=c++11 texture_separation.cpp -o texture_separation \
    `pkg-config --cflags --libs opencv4`
# Or use CMake (see below)
```
CMakeLists.txt:
```cmake
cmake_minimum_required(VERSION 3.10)
project(TextureSeparation)
find_package(OpenCV REQUIRED)
include_directories(${OpenCV_INCLUDE_DIRS})
add_executable(texture_separation texture_separation.cpp)
target_link_libraries(texture_separation ${OpenCV_LIBS})
```
Key parameters (a small parameter-sweep sketch follows this list)
1. Guided filter:
· radius: filter radius; larger values smooth more aggressively
· eps: regularization term; smaller values preserve edges better
2. Bilateral filter:
· d: filter diameter
· sigmaColor: standard deviation in color space
· sigmaSpace: standard deviation in coordinate space
3. TV method:
· iterations: number of iterations
· lambda: weight balancing data fidelity against smoothing
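To get a feel for how the guided-filter parameters trade detail against smoothness, it helps to sweep them and compare the resulting structure layers. A minimal sketch, assuming `textureStructureSeparationGuidedFilter` from section 1 is compiled into the same program (the file name and parameter grid are just examples):
```cpp
#include <opencv2/opencv.hpp>
#include <opencv2/ximgproc.hpp>
#include <iostream>
using namespace cv;
using namespace std;

int main() {
    Mat src = imread("texture_image.jpg");
    if (src.empty()) return -1;
    // Larger radius / eps -> smoother structure, more detail pushed into the texture layer
    for (int radius : {4, 8, 16, 32}) {
        for (double eps : {0.001, 0.01, 0.1}) {
            Mat structure, texture;
            textureStructureSeparationGuidedFilter(src, structure, texture, radius, eps);
            Mat out;
            structure.convertTo(out, CV_8U, 255.0);
            imwrite("structure_r" + to_string(radius) + "_e" + to_string(eps) + ".jpg", out);
        }
    }
    return 0;
}
```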
This C++ implementation offers several classical algorithms for texture-structure separation; pick the method that best fits your images and tune its parameters accordingly.