This Codelab is a sample based on the ArkTS declarative development paradigm and walks through how image editing is implemented. The sample covers image decoding and display, cropping, rotation, color adjustment (brightness, opacity, and saturation), and encoding the edited image back to a file.
Before starting this Codelab, set up the development environment. This sample uses the RK3568 development board as an example; follow the steps below:
[Obtain the OpenHarmony system version]: standard system solution (binary), using the 3.2 Release version as an example.
Set up the flashing (burning) environment.
Set up the development environment.
This Codelab explains only the core code; the complete code is provided on Gitee.
├──entry/src/main/ets                      // Code area
│  ├──common
│  │  └──constant
│  │     └──CommonConstant.ets             // Constants class
│  ├──entryability
│  │  └──EntryAbility.ts                   // Local entry ability
│  ├──pages
│  │  └──HomePage.ets                      // Local home page
│  ├──utils
│  │  ├──AdjustUtil.ets                    // Adjustment utilities
│  │  ├──CropUtil.ets                      // Crop utilities
│  │  ├──DecodeUtil.ets                    // Decoding utilities
│  │  ├──DrawingUtils.ets                  // Canvas drawing utilities
│  │  ├──EncodeUtil.ets                    // Encoding utilities
│  │  ├──LoggerUtil.ets                    // Logging utility
│  │  ├──MathUtils.ets                     // Coordinate conversion utilities
│  │  └──OpacityUtil.ets                   // Opacity adjustment utilities
│  ├──view
│  │  ├──AdjustContentView.ets             // Color adjustment view
│  │  └──ImageSelect.ets                   // Canvas selection box implementation
│  ├──viewmodel
│  │  ├──CropShow.ets                      // Selection box display controller
│  │  ├──CropType.ets                      // Crop-ratio types
│  │  ├──IconListViewModel.ets             // Icon data
│  │  ├──ImageEditCrop.ets                 // Image editing operation class
│  │  ├──ImageFilterCrop.ets               // Image operation collector
│  │  ├──ImageSizeItem.ets                 // Image size
│  │  ├──Line.ets                          // Line wrapper
│  │  ├──MessageItem.ets                   // Message wrapper for worker threads
│  │  ├──OptionViewModel.ets               // Image processing wrapper
│  │  ├──PixelMapWrapper.ets               // PixelMap wrapper
│  │  ├──Point.ets                         // Point wrapper
│  │  ├──Ratio.ets                         // Ratio wrapper
│  │  ├──Rect.ets                          // Rectangle wrapper
│  │  └──ScreenManager.ts                  // Screen size calculation utility
│  └──workers
│     ├──AdjustBrightnessWork.ts           // Asynchronous brightness adjustment
│     └──AdjustSaturationWork.ts           // Asynchronous saturation adjustment
└──entry/src/main/resources                // Resource files
In this section, we decode an image and display the decoded result, as shown in the figure.
Before editing, the image must be loaded first; in this sample, loading starts in the aboutToAppear lifecycle callback. The implementation is as follows.
// HomePage.ets
aboutToAppear() {
  // Load and decode the image before the page appears.
  this.pixelInit();
  ...
}

build() {
  Column() {
    ...
    Column() {
      // Show the Canvas selection box while cropping.
      if (this.isCrop && this.showCanvas && this.statusBar > 0) {
        if (this.isSaveFresh) {
          ImageSelect({
            statusBar: this.statusBar
          })
        }
        ...
      } else {
        // Otherwise display the current PixelMap.
        if (this.isPixelMapChange) {
          Image(this.pixelMap)
            .scale({ x: this.imageScale, y: this.imageScale, z: 1 })
            .objectFit(ImageFit.None)
        }
        ...
      }
    }
    ...
  }
  ...
}
async getResourceFd(filename: string) {
  const resourceMgr = getContext(this).resourceManager;
  const context = getContext(this);
  if (filename === CommonConstants.RAW_FILE_NAME) {
    // Read the bundled media resource and copy it into the application cache directory.
    let imageBuffer = await resourceMgr.getMediaContent($r("app.media.ic_low"));
    let filePath = context.cacheDir + '/' + filename;
    let file = fs.openSync(filePath, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE);
    fs.writeSync(file.fd, imageBuffer.buffer);
    fs.copyFileSync(filePath, context.cacheDir + '/' + CommonConstants.RAW_FILE_NAME_TEST);
    return file.fd;
  } else {
    let filePath = context.cacheDir + '/' + filename;
    let file = fs.openSync(filePath, fs.OpenMode.READ_WRITE | fs.OpenMode.CREATE);
    return file.fd;
  }
}
async getPixelMap(fileName: string) {
  const fd = await this.getResourceFd(fileName);
  // Create an image source from the file descriptor and decode it into an editable PixelMap.
  const imageSourceApi = image.createImageSource(fd);
  if (!imageSourceApi) {
    Logger.error(TAG, 'Failed to create imageSourceApi!');
    return;
  }
  const pixelMap = await imageSourceApi.createPixelMap({
    editable: true
  });
  return pixelMap;
}
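The pixelInit method called in aboutToAppear is not shown in this excerpt. A minimal sketch, assuming it simply loads the raw file through getPixelMap and triggers a UI refresh (RAW_FILE_NAME and the state flag are taken from the surrounding code; the actual body may differ):

// HomePage.ets (sketch, not the original implementation)
async pixelInit() {
  // Decode the bundled image into an editable PixelMap.
  this.pixelMap = await this.getPixelMap(CommonConstants.RAW_FILE_NAME);
  // Toggle the state flag so the Image component re-renders with the new PixelMap.
  this.isPixelMapChange = !this.isPixelMapChange;
}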
This section implements cropping, rotation, and color adjustment (only brightness, opacity, and saturation are covered here).
Cropping: select part of the image and crop it to generate a new image.
Rotation: rotate the image by different angles to generate a new image.
Color adjustment: in this Codelab, brightness, opacity, and saturation are adjusted using the RGB and HSV color models.
RGB: the most familiar color space, consisting of red (R), green (G), and blue (B).
HSV: describes color in terms of hue (H), saturation (S), and value/brightness (V).
// AdjustUtil.ets
// Convert RGB to HSV.
function rgb2hsv(red: number, green: number, blue: number) {
  let hsvH: number = 0, hsvS: number = 0, hsvV: number = 0;
  const rgbR: number = colorTransform(red);
  const rgbG: number = colorTransform(green);
  const rgbB: number = colorTransform(blue);
  const maxValue = Math.max(rgbR, Math.max(rgbG, rgbB));
  const minValue = Math.min(rgbR, Math.min(rgbG, rgbB));
  hsvV = maxValue * CommonConstants.CONVERT_INT;
  if (maxValue === 0) {
    hsvS = 0;
  } else {
    hsvS = Number((1 - minValue / maxValue).toFixed(CommonConstants.DECIMAL_TWO)) * CommonConstants.CONVERT_INT;
  }
  // Guard against division by zero when all channels are equal (gray).
  if (maxValue === minValue) {
    hsvH = 0;
  } else if (maxValue === rgbR && rgbG >= rgbB) {
    hsvH = Math.floor(CommonConstants.ANGLE_60 * ((rgbG - rgbB) / (maxValue - minValue)));
  } else if (maxValue === rgbR && rgbG < rgbB) {
    hsvH = Math.floor(CommonConstants.ANGLE_60 * ((rgbG - rgbB) / (maxValue - minValue)) + CommonConstants.ANGLE_360);
  } else if (maxValue === rgbG) {
    hsvH = Math.floor(CommonConstants.ANGLE_60 * ((rgbB - rgbR) / (maxValue - minValue)) + CommonConstants.ANGLE_120);
  } else {
    hsvH = Math.floor(CommonConstants.ANGLE_60 * ((rgbR - rgbG) / (maxValue - minValue)) + CommonConstants.ANGLE_240);
  }
  return [hsvH, hsvS, hsvV];
}
// Convert HSV to RGB.
function hsv2rgb(hue: number, saturation: number, value: number) {
  let rgbR: number = 0, rgbG: number = 0, rgbB: number = 0;
  if (saturation === 0) {
    // Achromatic (gray): all channels share the same value.
    rgbR = rgbG = rgbB = Math.round((value * CommonConstants.COLOR_LEVEL_MAX) / CommonConstants.CONVERT_INT);
    return [rgbR, rgbG, rgbB];
  }
  const cxmC = (value * saturation) / (CommonConstants.CONVERT_INT * CommonConstants.CONVERT_INT);
  const cxmX = cxmC * (1 - Math.abs((hue / CommonConstants.ANGLE_60) % CommonConstants.MOD_2 - 1));
  const cxmM = (value - cxmC * CommonConstants.CONVERT_INT) / CommonConstants.CONVERT_INT;
  // Determine which 60-degree sector the hue falls into.
  const hsvHRange = Math.floor(hue / CommonConstants.ANGLE_60);
  switch (hsvHRange) {
    case AngelRange.ANGEL_0_60:
      rgbR = (cxmC + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbG = (cxmX + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbB = (0 + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      break;
    case AngelRange.ANGEL_60_120:
      rgbR = (cxmX + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbG = (cxmC + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbB = (0 + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      break;
    case AngelRange.ANGEL_120_180:
      rgbR = (0 + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbG = (cxmC + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbB = (cxmX + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      break;
    case AngelRange.ANGEL_180_240:
      rgbR = (0 + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbG = (cxmX + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbB = (cxmC + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      break;
    case AngelRange.ANGEL_240_300:
      rgbR = (cxmX + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbG = (0 + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbB = (cxmC + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      break;
    case AngelRange.ANGEL_300_360:
      rgbR = (cxmC + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbG = (0 + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      rgbB = (cxmX + cxmM) * CommonConstants.COLOR_LEVEL_MAX;
      break;
    default:
      break;
  }
  return [
    Math.round(rgbR),
    Math.round(rgbG),
    Math.round(rgbB)
  ];
}
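As a quick sanity check of the two conversions, the snippet below round-trips pure red. It assumes colorTransform normalizes an 8-bit channel to the range [0, 1] and that CONVERT_INT is 100 (i.e. S and V are expressed as percentages); the constants in the actual project may differ.

// Hypothetical usage example (not part of the sample project).
const [h, s, v] = rgb2hsv(255, 0, 0);   // expected: [0, 100, 100]
const [r, g, b] = hsv2rgb(h, s, v);     // expected: [255, 0, 0]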
Note: the current crop feature cuts the image directly with the PixelMap crop capability, so the effects of successive crops stack; this will be improved later by adding a selection box.
// HomePage.ets
cropImage(index: CropType) {
  // Map the selected crop type to a crop ratio.
  this.currentCropIndex = index;
  switch (this.currentCropIndex) {
    case CropType.ORIGINAL_IMAGE:
      this.cropRatio = CropRatioType.RATIO_TYPE_FREE;
      break;
    case CropType.SQUARE:
      this.cropRatio = CropRatioType.RATIO_TYPE_1_1;
      break;
    case CropType.BANNER:
      this.cropRatio = CropRatioType.RATIO_TYPE_4_3;
      break;
    case CropType.RECTANGLE:
      this.cropRatio = CropRatioType.RATIO_TYPE_16_9;
      break;
    default:
      this.cropRatio = CropRatioType.RATIO_TYPE_FREE;
      break;
  }
}
// ImageFilterCrop.ets
cropImage(pixelMap: PixelMapWrapper, realCropRect: RectF, callback: () => void) {
  let offWidth = realCropRect.getWidth();
  let offHeight = realCropRect.getHeight();
  if (pixelMap.pixelMap !== undefined) {
    // Convert the crop rectangle from vp to px and crop the PixelMap in place.
    pixelMap.pixelMap.crop({
      size: { height: vp2px(offHeight), width: vp2px(offWidth) },
      x: vp2px(realCropRect.left),
      y: vp2px(realCropRect.top)
    }, callback);
  }
}
// HomePage.ets
rotateImage(rotateType: RotateType) {
  if (rotateType === RotateType.CLOCKWISE) {
    try {
      if (this.pixelMap !== undefined) {
        // Rotate the PixelMap clockwise, then refresh the displayed image.
        this.pixelMap.rotate(CommonConstants.CLOCK_WISE)
          .then(() => {
            this.flushPixelMapNew();
          })
      }
    } catch (error) {
      Logger.error(TAG, `there is an error in the rotate process: ${error?.code}`);
    }
  }
  if (rotateType === RotateType.ANTI_CLOCK) {
    try {
      if (this.pixelMap !== undefined) {
        // Rotate the PixelMap counterclockwise, then refresh the displayed image.
        this.pixelMap.rotate(CommonConstants.ANTI_CLOCK)
          .then(() => {
            this.flushPixelMapNew();
          })
      }
    } catch (error) {
      Logger.error(TAG, `there is an error in the rotate process: ${error?.code}`);
    }
  }
}
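flushPixelMapNew and the rotation constants are not shown in this excerpt. A minimal sketch, assuming CLOCK_WISE and ANTI_CLOCK are angles in degrees (for example 90 and -90) and that the refresh only toggles the state flag read by the Image component:

// HomePage.ets (sketch, not the original implementation)
flushPixelMapNew() {
  // rotate() modifies the PixelMap in place, so only a re-render is needed.
  this.isPixelMapChange = !this.isPixelMapChange;
}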
Note: the current brightness adjustment is implemented at the UI level as a simple example, without a refined optimization algorithm. The adjusted image may show some color distortion.
// AdjustContentView.ets
// Read the PixelMap into a buffer, post it to the worker, and refresh the UI with the returned data.
postToWorker(type: AdjustId, value: number, workerName: string) {
  let sliderValue = type === AdjustId.BRIGHTNESS ? this.brightnessLastSlider : this.saturationLastSlider;
  try {
    let workerInstance = new worker.ThreadWorker(workerName);
    const bufferArray = new ArrayBuffer(this.pixelMap.getPixelBytesNumber());
    this.pixelMap.readPixelsToBuffer(bufferArray).then(() => {
      let message = new MessageItem(bufferArray, sliderValue, value);
      workerInstance.postMessage(message);
      if (this.postState) {
        this.deviceListDialogController.open();
      }
      this.postState = false;
      workerInstance.onmessage = (event: MessageEvents) => {
        this.updatePixelMap(event);
      };
      if (type === AdjustId.BRIGHTNESS) {
        this.brightnessLastSlider = Math.round(value);
      } else {
        this.saturationLastSlider = Math.round(value);
      }
      workerInstance.onexit = () => {
        if (workerInstance !== undefined) {
          workerInstance.terminate();
        }
      }
    });
  } catch (error) {
    Logger.error(`Create worker instance failed, error message: ${JSON.stringify(error)}`);
  }
}
// AdjustBrightnessWork.ts
// Worker thread processing.
import worker, { MessageEvents } from '@ohos.worker';
import { adjustImageValue } from '../utils/AdjustUtil';

const workerPort = worker.workerPort;

workerPort.onmessage = function (event: MessageEvents) {
  // Unpack the MessageItem sent from the UI thread.
  let bufferArray = event.data.buf;
  let last = event.data.last;
  let cur = event.data.cur;
  // Adjust the brightness (HSV value) and post the result back.
  let buffer = adjustImageValue(bufferArray, last, cur);
  workerPort.postMessage(buffer);
  workerPort.close();
}
// AdjustUtil.ets
// Scale-factor calculation: adjust the V (value) component of each pixel.
export function adjustImageValue(bufferArray: ArrayBuffer, last: number, cur: number) {
  return execColorInfo(bufferArray, last, cur, HSVIndex.VALUE);
}
// OpacityUtil.ets
export async function adjustOpacity(pixelMap: PixelMap, value: number) {
  if (!pixelMap) {
    return;
  }
  const newPixelMap = pixelMap;
  // opacity() expects a ratio in [0, 1], so normalize the slider value first.
  await newPixelMap.opacity(value / CommonConstants.SLIDER_MAX);
  return newPixelMap;
}
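updatePixelMap, which consumes the buffer posted back by the worker, is not shown in this excerpt. A minimal sketch, assuming the worker returns the processed ArrayBuffer and that the dialog opened in postToWorker is closed once the result arrives (the actual body may differ):

// AdjustContentView.ets (sketch, not the original implementation)
async updatePixelMap(event: MessageEvents) {
  const buffer = event.data as ArrayBuffer;
  // Write the processed pixels back into the PixelMap.
  await this.pixelMap.writeBufferToPixels(buffer);
  // Close the progress dialog; the bound Image component then re-renders.
  this.deviceListDialogController.close();
}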
Note: the current saturation adjustment is implemented at the UI level as a simple example, without a refined optimization algorithm. The adjusted image may show some color distortion.
// AdjustContentView.ets
// Read the PixelMap into a buffer, post it to the worker, and refresh the UI with the returned data.
postToWorker(type: AdjustId, value: number, workerName: string) {
  let sliderValue = type === AdjustId.BRIGHTNESS ? this.brightnessLastSlider : this.saturationLastSlider;
  try {
    let workerInstance = new worker.ThreadWorker(workerName);
    const bufferArray = new ArrayBuffer(this.pixelMap.getPixelBytesNumber());
    this.pixelMap.readPixelsToBuffer(bufferArray).then(() => {
      let message = new MessageItem(bufferArray, sliderValue, value);
      workerInstance.postMessage(message);
      if (this.postState) {
        this.deviceListDialogController.open();
      }
      this.postState = false;
      workerInstance.onmessage = (event: MessageEvents) => {
        this.updatePixelMap(event);
      };
      if (type === AdjustId.BRIGHTNESS) {
        this.brightnessLastSlider = Math.round(value);
      } else {
        this.saturationLastSlider = Math.round(value);
      }
      workerInstance.onexit = () => {
        if (workerInstance !== undefined) {
          workerInstance.terminate();
        }
      }
    });
  } catch (error) {
    Logger.error(`Create worker instance failed, error message: ${JSON.stringify(error)}`);
  }
}
// AdjustSaturationWork.ts
// Worker thread processing.
import worker, { MessageEvents } from '@ohos.worker';
import { adjustSaturation } from '../utils/AdjustUtil';

const workerPort = worker.workerPort;

workerPort.onmessage = function (event: MessageEvents) {
  // Unpack the MessageItem sent from the UI thread.
  let bufferArray = event.data.buf;
  let last = event.data.last;
  let cur = event.data.cur;
  // Adjust the saturation and post the result back.
  let buffer = adjustSaturation(bufferArray, last, cur);
  workerPort.postMessage(buffer);
  workerPort.close();
}
// AdjustUtil.ets
// Scale-factor calculation: adjust the S (saturation) component of each pixel.
export function adjustSaturation(bufferArray: ArrayBuffer, last: number, cur: number) {
  return execColorInfo(bufferArray, last, cur, HSVIndex.SATURATION);
}
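Both adjustImageValue and adjustSaturation delegate to execColorInfo, which is not shown in this excerpt. A minimal sketch of the idea, assuming 8-bit RGBA pixels, that HSVIndex maps HUE/SATURATION/VALUE to indices 0/1/2 of the [h, s, v] array, and that the chosen component is scaled by the ratio of the current to the previous slider value (the real implementation may use different constants and scaling):

// AdjustUtil.ets (sketch, not the original implementation)
function execColorInfo(bufferArray: ArrayBuffer, last: number, cur: number, hsvIndex: HSVIndex) {
  const pixels = new Uint8Array(bufferArray);
  const BYTES_PER_PIXEL = 4; // R, G, B, A
  for (let i = 0; i < pixels.length; i += BYTES_PER_PIXEL) {
    // Convert the pixel to HSV, scale the selected component, and convert back.
    const hsv = rgb2hsv(pixels[i], pixels[i + 1], pixels[i + 2]);
    // Clamp to the 0-100 range used by rgb2hsv/hsv2rgb above.
    hsv[hsvIndex] = Math.min(100, hsv[hsvIndex] * (cur / last));
    const rgb = hsv2rgb(hsv[0], hsv[1], hsv[2]);
    pixels[i] = rgb[0];
    pixels[i + 1] = rgb[1];
    pixels[i + 2] = rgb[2];
  }
  return bufferArray;
}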
After processing, the image bitmap is still in its decoded state and must be packed and encoded into the target format. This section explains the encoding process.
// ImageSelect.ets
async encode(pixelMap: PixelMap | undefined) {
  if (pixelMap === undefined) {
    return;
  }
  const newPixelMap = pixelMap;
  // Pack the image.
  const imagePackerApi = image.createImagePacker();
  const packOptions: image.PackingOption = {
    format: CommonConstants.ENCODE_FORMAT,
    quality: CommonConstants.ENCODE_QUALITY
  }
  const imageData = await imagePackerApi.packing(newPixelMap, packOptions);
  Logger.info(TAG, `imageData's length is ${imageData.byteLength}`);
  // Get the public album directory.
  const context = getContext(this);
  const media = mediaLibrary.getMediaLibrary(context);
  const publicPath = await media.getPublicDirectory(mediaLibrary.DirectoryType.DIR_IMAGE);
  const currentTime = new Date().getTime();
  // Create the image asset.
  const imageAssetInfo = await media.createAsset(
    mediaLibrary.MediaType.IMAGE,
    `${CommonConstants.IMAGE_PREFIX}_${currentTime}${CommonConstants.IMAGE_FORMAT}`,
    publicPath
  );
  const imageFd = await imageAssetInfo.open(CommonConstants.ENCODE_FILE_PERMISSION);
  await fs.write(imageFd, imageData);
  // Release resources.
  await imageAssetInfo.close(imageFd);
  imagePackerApi.release();
  await media.release();
}
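A hypothetical call site: the save action could pass the edited PixelMap to encode. Note that writing to the media library with mediaLibrary.createAsset generally requires the ohos.permission.WRITE_MEDIA permission to be declared in the module configuration. The button label resource below is an assumed name, not from the sample project.

// Hypothetical usage example (not part of the sample project).
Button($r('app.string.save'))
  .onClick(async () => {
    // Pack the current PixelMap and save it to the public album.
    await this.encode(this.pixelMap);
  })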