您的位置:首页 > 其它

关于视频后期美白的一些事

2015-06-01 14:53 260 查看
前段时间因为项目需要,研究了下关于视频美白方面的问题。在这方面,视频上的实时效果也比较多了,比如360相机以及其他一些相机自带的美颜功能,当然图片上的就更多了,最具代表性的就是美图秀秀。

有必要说明一下,现在所说的美白严格意义上已经不单单是“白”了,其一键功能里面包括了对皮肤的嫩滑处理,而且在一些扩展功能比较强的美白功能中还带有去皱、瘦脸等功能。但一般而言,当前的美白主要是由磨皮和增亮两个步骤完成。接下来就两步大致说一说:

1、磨皮功能:磨皮算法主要是通过图像处理中的滤波算法来实现的。现在能够实现磨皮功能的算法算是多种多样了,各有其特色。具体到我这里,我重点了解了两种,一种是传统的高斯磨皮,另一个是网上比较多的双边滤波(传送门:http://www.cnblogs.com/Imageshop/p/3293300.html)。其实双边滤波和高斯滤波挺像的,不过是加上了边缘的判定,具有一定的包边效果,算是一种改进吧。

在项目实践中,说实话我觉得这两个滤波效果差不太多,在参数合适的时候都能取得不错的嫩滑效果。但是也同时都面临着两个问题:第一是效率问题,我在实验中直接实现两个算法,结果很卡,即使我的视频分辨率都已经是480*480了,大致时间耗费是视频长度的1.5倍。后来即使是使用了分离滤波,即横竖滤波分离进行,效率也没有提高太多。第二个问题网上很少有人提及,但却是我头疼了很久至今没有太好解决办法的,那就是磨皮中出现的面部橡胶感。当面部一部分区域的亮度和颜色度相近的时候(这种情况在面部还是比较常见的),磨皮后这些区域由于色度亮度更接近造成了一种类似橡胶的感觉,从而降低了处理后的真实性。

针对上面两个问题,说一下最后找到的办法吧,不太成熟,仅做借鉴。效率问题最后是通过opengl解决的,因为调研发现滤波算法在cpu上的处理耗时即使是在低分辨率的情况下也很高,而且随着分辨率的增加更是大幅增加。同时由于cpu还要处理其他上层以及逻辑上的一些运算,处理滤波的效率就会更低,更不用说在移动端了。橡胶感的问题其实并没有从根本上解决,最后使用的方法是较好的增亮算法+合适的磨皮参数。

2、美白功能:美白的算法现在也是多种多样,我在项目中实验了线性、指数等等多种调节方法,效果都不是特别满意。究其原因一是调节算法效果比较生硬,二是即使调节算法还不错但需要对输入图片进行分析来计算参数,这也是一个重要的算法(这里给出一篇当时参考的亮度调节的博文http://blog.csdn.net/maozefa/article/details/4493395)。后来还测试过直方图均衡调整亮度,也曾经实验着用直方图分析+直方图规定化的方法来分析现有数据(这个另拉一篇总结)。在实践了多种方法后,最后采用的是一种基于色温调节的增亮算法。

最后,给出相应的磨皮和增亮的shader编码:

增亮:

// Brightening ("whitening") fragment shader.
// Pipeline: RGB -> YCrCb, brighten luma with an empirical curve, back to RGB,
// then two Photoshop-style "overlay" blends: a fixed whitening pass followed by
// a color-temperature pass controlled by the `temperature` uniform.
NSString *const kGPUImageWhiteFragmentShaderString =
SHADER_STRING
(
 uniform sampler2D inputImageTexture;
 varying highp vec2 textureCoordinate;

 // Mix factor for the warm-filter pass, supplied by the host code
 // (the CPU side defaults to a color temperature of 5000 and normalizes it
 // before upload - TODO confirm against the filter class that sets it).
 uniform lowp float temperature;

 // Blend color for the base whitening overlay pass.
 const lowp vec3 whiteFilter = vec3(0.1, 0.1, 0.1);
 //save for light whiting
 const lowp vec3 warmFilter = vec3(0.0, 0.78, 0.92);

 void main()
 {
     lowp vec4 source = texture2D(inputImageTexture, textureCoordinate);

     // RGB -> YCrCb so luma can be brightened without shifting chroma.
     highp float y  = source.r * 0.299 + source.g * 0.587 + source.b * 0.114;
     highp float cr = (source.r - y) * 0.713 + 0.5;
     highp float cb = (source.b - y) * 0.564 + 0.5;

     // Work on luma in the 0..255 domain expected by the curve below.
     highp float gray = y * 255.0;

     //gray = gray + (gray - 128.0)*cv + 0.5;

     // Empirical brightening curve; the denominator stays positive over
     // [0, 255], so the result cannot go negative.
     gray = gray / (0.896865160897715 + 0.0032021590610318 * gray - 0.0442923728433528 * sqrt(gray));
     // Clamp to the displayable range. (The original `gray < 256.0 ? gray : 255.0`
     // let values in [255.0, 256.0) through, pushing y slightly above 1.0.)
     gray = min(gray, 255.0);
     y = gray / 255.0;

     // YCrCb -> RGB with the brightened luma.
     highp vec3 rgb;
     rgb.r = y + 1.403 * (cr - 0.5);
     rgb.g = y - 0.344 * (cb - 0.5) - 0.714 * (cr - 0.5);
     rgb.b = y + 1.773 * (cb - 0.5);

     // Per-channel "overlay" blend against whiteFilter.
     lowp vec3 whiteprocessed = vec3(
         (rgb.r < 0.5 ? (2.0 * rgb.r * whiteFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - whiteFilter.r))),
         //adjusting temperature
         (rgb.g < 0.5 ? (2.0 * rgb.g * whiteFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - whiteFilter.g))),
         (rgb.b < 0.5 ? (2.0 * rgb.b * whiteFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - whiteFilter.b))));
     // Negative mix factor extrapolates AWAY from the darkening overlay
     // result, i.e. it lightens the image.
     lowp vec3 balancewhite = mix(rgb, whiteprocessed, -0.4756);

     // Second overlay pass against warmFilter, faded in by `temperature`.
     lowp vec3 temperprocessed = vec3(
         (balancewhite.r < 0.5 ? (2.0 * balancewhite.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - balancewhite.r) * (1.0 - warmFilter.r))),
         (balancewhite.g < 0.5 ? (2.0 * balancewhite.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - balancewhite.g) * (1.0 - warmFilter.g))),
         (balancewhite.b < 0.5 ? (2.0 * balancewhite.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - balancewhite.b) * (1.0 - warmFilter.b))));
     lowp vec3 balanceresult = mix(balancewhite, temperprocessed, temperature);

     // Alpha is passed through untouched.
     gl_FragColor = vec4(balanceresult, source.a);
 }
 );

//temperature = 5000 default

磨皮:

// Vertex shader for the skin-polish blur: passes the position through and
// precomputes the nine texture coordinates the fragment stage will sample,
// spaced along the direction given by (texelWidthOffset, texelHeightOffset).
NSString *const kGPUImageSkinPolishBlurVertexShaderString =
SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;

 const int GAUSSIAN_SAMPLES = 9;

 uniform float texelWidthOffset;
 uniform float texelHeightOffset;

 varying vec2 textureCoordinate;
 varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];

 void main()
 {
     gl_Position = position;
     textureCoordinate = inputTextureCoordinate.xy;

     // Tap offsets run from -4 to +4 around the central texel; doing this
     // here keeps the per-fragment work down to plain texture fetches.
     vec2 texelStep = vec2(texelWidthOffset, texelHeightOffset);

     for (int tap = 0; tap < GAUSSIAN_SAMPLES; tap++)
     {
         int offsetFromCenter = tap - ((GAUSSIAN_SAMPLES - 1) / 2);
         blurCoordinates[tap] = inputTextureCoordinate.xy + float(offsetFromCenter) * texelStep;
     }
 }
 );

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
// Bilateral-style smoothing ("skin polish") fragment shader: a 9-tap blur
// (coordinates precomputed by the vertex stage) where each tap's Gaussian
// weight is attenuated by the tap's color distance from the central texel,
// so flat skin regions are smoothed while edges are preserved.
// The hard-coded weights correspond to spatial sigma = 5.
// NOTE: the original listing was missing the closing `#endif` for the
// conditional above, which would break everything after this constant.
NSString *const kGPUImageSkinPolishFilterFragmentShaderString =
SHADER_STRING
(
 uniform sampler2D inputImageTexture;

 const lowp int GAUSSIAN_SAMPLES = 9;

 varying highp vec2 textureCoordinate;
 varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];

 // Scales the color distance before it is clamped to [0, 1]; larger values
 // zero out taps across edges faster, i.e. stronger edge preservation.
 uniform mediump float distanceNormalizationFactor;

 void main()
 {
     lowp vec4 centralColor;
     lowp float gaussianWeightTotal;
     lowp vec4 sum;
     lowp vec4 sampleColor;
     lowp float distanceFromCentralColor;
     lowp float gaussianWeight;

     /////////////////////////9 spacesigma=5////////////////////

     // Central tap (index 4) is taken at full weight.
     centralColor = texture2D(inputImageTexture, blurCoordinates[4]);
     gaussianWeightTotal = 0.188493;
     sum = centralColor * 0.188493;

     // Each remaining tap: weight = gaussian * (1 - normalized color distance).
     sampleColor = texture2D(inputImageTexture, blurCoordinates[0]);
     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
     gaussianWeight = 0.017261 * (1.0 - distanceFromCentralColor);
     gaussianWeightTotal += gaussianWeight;
     sum += sampleColor * gaussianWeight;

     sampleColor = texture2D(inputImageTexture, blurCoordinates[1]);
     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
     gaussianWeight = 0.095437 * (1.0 - distanceFromCentralColor);
     gaussianWeightTotal += gaussianWeight;
     sum += sampleColor * gaussianWeight;

     sampleColor = texture2D(inputImageTexture, blurCoordinates[2]);
     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
     gaussianWeight = 0.142134 * (1.0 - distanceFromCentralColor);
     gaussianWeightTotal += gaussianWeight;
     sum += sampleColor * gaussianWeight;

     sampleColor = texture2D(inputImageTexture, blurCoordinates[3]);
     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
     gaussianWeight = 0.150921 * (1.0 - distanceFromCentralColor);
     gaussianWeightTotal += gaussianWeight;
     sum += sampleColor * gaussianWeight;

     sampleColor = texture2D(inputImageTexture, blurCoordinates[5]);
     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
     gaussianWeight = 0.150921 * (1.0 - distanceFromCentralColor);
     gaussianWeightTotal += gaussianWeight;
     sum += sampleColor * gaussianWeight;

     sampleColor = texture2D(inputImageTexture, blurCoordinates[6]);
     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
     gaussianWeight = 0.142134 * (1.0 - distanceFromCentralColor);
     gaussianWeightTotal += gaussianWeight;
     sum += sampleColor * gaussianWeight;

     sampleColor = texture2D(inputImageTexture, blurCoordinates[7]);
     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
     gaussianWeight = 0.095437 * (1.0 - distanceFromCentralColor);
     gaussianWeightTotal += gaussianWeight;
     sum += sampleColor * gaussianWeight;

     sampleColor = texture2D(inputImageTexture, blurCoordinates[8]);
     distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
     gaussianWeight = 0.017261 * (1.0 - distanceFromCentralColor);
     gaussianWeightTotal += gaussianWeight;
     sum += sampleColor * gaussianWeight;

     // Renormalize by the accumulated weight so overall brightness is kept.
     gl_FragColor = sum / gaussianWeightTotal;
 }
 );
#endif

该记录的差不多了,最后啰嗦一句,个人感觉关于图像/视频的美化其实是个很值得研究的东西,能做出来和做得好真的是差距特别大。
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: