HarmonyOS NEXT Development: A Media Operations Roundup
Selecting an Image and Uploading It
ets
import fs from '@ohos.file.fs';
// For the utils methods used below, see 《鸿蒙NEXT开发2:一些工具的包装》
import { requestPermissions } from '../../utils/premissions';
import { selectPicture } from '../../utils/mediaHelper';
import { uploadFile } from '../../utils/uploadUtils';
@Entry
@Component
struct UploadFromAlbumPage {
  @Styles
  commonText() {
    .width(200)
    .padding(10)
    .margin(20)
    .border({ width: 0.5, color: '#ff38f84b', radius: 15 })
  }

  build() {
    Row() {
      Column() {
        Text("Tap to open the album and upload")
          .fontSize(20)
          .fontWeight(FontWeight.Bold)
          .commonText()
          .onClick(() => {
            this.openAndUpload();
          })
      }
      .width('100%')
    }
    .height('100%')
  }

  async openAndUpload() {
    try {
      const context = getContext();
      const hasPermission = await requestPermissions(['ohos.permission.READ_IMAGEVIDEO']);
      if (hasPermission) {
        const path = await selectPicture();
        if (path) {
          // Copy the selected picture into the app cache directory before uploading
          let newPath = context.cacheDir + "/ceshi.jpg";
          const file = fs.openSync(path, fs.OpenMode.READ_WRITE);
          fs.copyFileSync(file.fd, newPath);
          fs.closeSync(file);
          // Replace the URL with your own upload endpoint
          uploadFile(newPath, "http://10.88.27.41:8011/common/upload", [{ name: "files", value: "files" }]);
        }
      }
    } catch (e) {
      console.log(JSON.stringify(e));
    }
  }
}
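The `selectPicture()` helper above comes from the wrapped utils referenced at the top and is not shown in this post. A minimal sketch of what such a helper might look like, assuming it wraps the system `PhotoViewPicker` and returns the first selected URI:

ts
import picker from '@ohos.file.picker';

// Hypothetical implementation of the selectPicture() helper used above:
// opens the system photo picker and resolves with the first selected image URI.
export async function selectPicture(): Promise<string | undefined> {
  const options = new picker.PhotoSelectOptions();
  options.MIMEType = picker.PhotoViewMIMETypes.IMAGE_TYPE; // images only
  options.maxSelectNumber = 1;                             // pick a single image
  const photoPicker = new picker.PhotoViewPicker();
  const result = await photoPicker.select(options);
  return result.photoUris[0];
}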
Saving Canvas Content as an Image
ts
// Save the canvas content to the album via the system picker.
// Requires: import picker from '@ohos.file.picker';
//           import fs from '@ohos.file.fs';
//           import { buffer } from '@kit.ArkTS';
saveImage() {
  // Destination URI of the saved file
  let uri = '';
  try {
    let photoSaveOptions = new picker.PhotoSaveOptions();
    // Default file name for the saved picture
    photoSaveOptions.newFileNames = ['test.png'];
    let photoPicker = new picker.PhotoViewPicker();
    // Bring up the system save dialog
    photoPicker.save(photoSaveOptions).then((photoSaveResult) => {
      uri = photoSaveResult[0];
      // Get the base64 string of the canvas content
      let imageStr = this.context.toDataURL().split(',')[1];
      // Open the target file
      let file = fs.openSync(uri, fs.OpenMode.READ_WRITE);
      // Decode the base64 string into an ArrayBuffer
      const decodeBuffer = buffer.from(imageStr, 'base64').buffer;
      // Write the bytes to the file
      fs.writeSync(file.fd, decodeBuffer);
      // Close the file
      fs.closeSync(file);
    }).catch((err: Error) => {
      console.error(err + '');
    });
  } catch (e) {
    console.error(e);
  }
}
// ...
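The method above reads pixels through `this.context`, the page's `CanvasRenderingContext2D`. A minimal sketch of the surrounding component (component and drawing content are assumptions, not from the original):

ets
@Entry
@Component
struct CanvasSavePage {
  // The rendering context that saveImage() reads from via toDataURL()
  private settings: RenderingContextSettings = new RenderingContextSettings(true);
  private context: CanvasRenderingContext2D = new CanvasRenderingContext2D(this.settings);

  build() {
    Column() {
      Canvas(this.context)
        .width('100%')
        .height(300)
        .onReady(() => {
          // Draw something so there is content to save
          this.context.fillRect(50, 50, 100, 100);
        })
      Button('Save').onClick(() => this.saveImage())
    }
  }

  saveImage() { /* as above */ }
}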
Saving an Image Directly to the Album
TIP
ACL: ohos.permission.WRITE_IMAGEVIDEO (a restricted permission that must be granted via the Access Control List)
ts
// Check/request the ohos.permission.WRITE_IMAGEVIDEO permission before calling this.
// Requires: import photoAccessHelper from '@ohos.file.photoAccessHelper';
//           import { common } from '@kit.AbilityKit';
async function addAsset(context: common.Context, fileUri: string, mediaType: MediaType) {
  let photoType: photoAccessHelper.PhotoType = photoAccessHelper.PhotoType.IMAGE;
  // The extension is whatever follows the last dot in the URI
  let extension: string = fileUri.split(".")[fileUri.split(".").length - 1].toLowerCase();
  if (mediaType === MediaType.image) {
    photoType = photoAccessHelper.PhotoType.IMAGE;
  } else if (mediaType === MediaType.video) {
    photoType = photoAccessHelper.PhotoType.VIDEO;
  }
  Log.d(TAG, `fileUri: ${fileUri}`);
  Log.d(TAG, `extension: ${extension}`);
  let assetChangeRequest = photoAccessHelper.MediaAssetChangeRequest.createAssetRequest(context, photoType, extension);
  // Use the resource type that matches the asset type
  const resourceType = photoType === photoAccessHelper.PhotoType.VIDEO
    ? photoAccessHelper.ResourceType.VIDEO_RESOURCE
    : photoAccessHelper.ResourceType.IMAGE_RESOURCE;
  assetChangeRequest.addResource(resourceType, fileUri);
  let phAccessHelper = photoAccessHelper.getPhotoAccessHelper(context);
  await phAccessHelper.applyChanges(assetChangeRequest);
}
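The `MediaType` enum and the `Log`/`TAG` helpers come from the surrounding project and are not defined in this excerpt. A sketch of the assumed enum plus a hypothetical call site that requests the ACL permission at runtime first:

ts
import abilityAccessCtrl from '@ohos.abilityAccessCtrl';

// Assumed shape of the project's MediaType enum (not shown in the original)
enum MediaType {
  image,
  video,
}

// Hypothetical helper: request the permission, then save a sandbox file to the album
async function saveToAlbum(context: common.UIAbilityContext, fileUri: string) {
  const atManager = abilityAccessCtrl.createAtManager();
  const result = await atManager.requestPermissionsFromUser(context, ['ohos.permission.WRITE_IMAGEVIDEO']);
  if (result.authResults[0] === 0) { // 0 means granted
    await addAsset(context, fileUri, MediaType.image);
  }
}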
Compositing a Watermark with an Offscreen Canvas
ts
/**
 * Composite a watermark onto an image via an offscreen canvas.
 * Note: this is a method of a class that provides `ability` (the UIAbility)
 * and `checkPermissionAndSaveFile`.
 * @param img the base image
 * @param waterMarkImg the watermark image
 * @param alignment optional placement map with keys "x", "y" (-1/0/1) and "scale"
 * @returns
 */
async createImageAndSave(img: image.ImageSource, waterMarkImg: image.ImageSource, alignment?: Map<string, number>) {
  const info = await img.getImageInfo();
  Log.d(TAG, "image info fetched: " + JSON.stringify(info.size));
  // Canvas sizes are in vp while image sizes are in px, hence px2vp()
  const offCanvas: OffscreenCanvas = new OffscreenCanvas(px2vp(info.size.width), px2vp(info.size.height));
  const settings: RenderingContextSettings = new RenderingContextSettings(true);
  let offContext = offCanvas.getContext("2d", settings);
  Log.d(TAG, "canvas created");
  const imgPixi = await img.createPixelMap();
  Log.d(TAG, "base pixel map created");
  offContext.drawImage(imgPixi, 0, 0);
  if (alignment) {
    const waterMarkImgPixi = await waterMarkImg.createPixelMap();
    Log.d(TAG, "watermark pixel map created");
    const scale = alignment.get("scale") || 1;
    await waterMarkImgPixi.scale(scale, scale);
    let x = alignment.get("x") || 0;
    let y = alignment.get("y") || 0;
    const markInfo = await waterMarkImg.getImageInfo();
    if (x === -1) {
      // left
      x = 0;
    } else if (x === 0) {
      // center
      x = offCanvas.width / 2 - (px2vp(markInfo.size.width) * scale) / 2;
    } else if (x === 1) {
      // right
      x = offCanvas.width - px2vp(markInfo.size.width) * scale;
    } else {
      x = 0;
    }
    if (y === -1) {
      // top
      y = 0;
    } else if (y === 0) {
      // center
      y = offCanvas.height / 2 - (px2vp(markInfo.size.height) * scale) / 2;
    } else if (y === 1) {
      // bottom
      y = offCanvas.height - px2vp(markInfo.size.height) * scale;
    } else {
      y = offCanvas.height - px2vp(markInfo.size.height) * scale;
    }
    offContext.drawImage(waterMarkImgPixi, x, y);
  }
  Log.d(TAG, "offscreen image drawn");
  const pixelMap = offContext.getPixelMap(0, 0, offCanvas.width, offCanvas.height);
  // Pack the composited pixel map into a JPEG file in the temp directory
  const imagePackerApi = image.createImagePacker();
  const packOpts: image.PackingOption = { format: "image/jpeg", quality: 98 };
  const path: string = this.ability?.context.tempDir + "/pixel_map.jpg";
  const file = fs.openSync(path, fs.OpenMode.CREATE | fs.OpenMode.READ_WRITE);
  await imagePackerApi.packToFile(pixelMap, file.fd, packOpts);
  fs.closeSync(file);
  await this.checkPermissionAndSaveFile(path);
}
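A hypothetical call site, assuming the source images live in the app sandbox and the enclosing class is instantiated as `helper`:

ts
// Hypothetical usage: watermark bottom-right at half size
const imgSource = image.createImageSource(context.filesDir + '/photo.jpg');
const markSource = image.createImageSource(context.filesDir + '/logo.png');
const alignment = new Map<string, number>([
  ['x', 1],       // right
  ['y', 1],       // bottom
  ['scale', 0.5]  // draw the watermark at 50%
]);
await helper.createImageAndSave(imgSource, markSource, alignment);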
Reading an Image from rawfile
ts
// `context` here is the ability context; `path` is relative to resources/rawfile
const fileData = this.context.resourceManager.getRawFileContentSync(path);
const buffer = fileData?.buffer;
const source: image.ImageSource = image.createImageSource(buffer);
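From the `ImageSource` you can then decode a `PixelMap`, for example to bind it to an `Image` component. A small continuation sketch:

ts
// Decode the rawfile-backed source into a PixelMap for display
const pixelMap: image.PixelMap = await source.createPixelMap();
// e.g. render it with: Image(this.pixelMap)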
Converting Between ArrayBuffer and PixelMap
ts
// Decode an encoded image buffer (e.g. JPEG/PNG bytes) into a PixelMap
export async function bufferToPixelMap(buf: ArrayBuffer): Promise<PixelMap> {
  let source = image.createImageSource(buf);
  return await source.createPixelMap();
}

// Read a PixelMap's raw pixel bytes into an ArrayBuffer
export async function pixelMapToBuffer(pixel: PixelMap): Promise<ArrayBuffer> {
  let buf = new ArrayBuffer(pixel.getPixelBytesNumber());
  await pixel.readPixelsToBuffer(buf);
  return buf;
}
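Note that the two helpers are not inverses: `bufferToPixelMap` expects encoded image bytes, while `pixelMapToBuffer` emits raw uncompressed pixels. A usage sketch, where `jpegBytes` is an assumed ArrayBuffer of encoded JPEG data:

ts
// jpegBytes: ArrayBuffer containing an encoded JPEG (assumption)
const pm = await bufferToPixelMap(jpegBytes);
const rawPixels = await pixelMapToBuffer(pm); // raw pixel bytes, not JPEG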
Saving a PixelMap to an Image File
ts
export async function pixelMapToImageFile(
  pixelMap: PixelMap,
  path: string,
  packOpts: image.PackingOption = {
    format: "image/jpeg",
    quality: 90,
  }
) {
  const imagePackerApi = image.createImagePacker();
  const file = fs.openSync(path, fs.OpenMode.CREATE | fs.OpenMode.READ_WRITE);
  await imagePackerApi.packToFile(pixelMap, file.fd, packOpts);
  // Release the packer and close the file descriptor
  await imagePackerApi.release();
  fs.closeSync(file);
}
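A hypothetical call, writing to the app cache directory:

ts
// Assumes `context` is a UIAbilityContext and `pm` an existing PixelMap
await pixelMapToImageFile(pm, context.cacheDir + '/out.jpg');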
Getting Video/Audio Information
Two ways to obtain video/audio metadata: AVMetadataExtractor and AVPlayer.
ts
import { Log } from "@ohos/flutter_ohos";
import fs from "@ohos.file.fs";
import media from "@ohos.multimedia.media";
import { common } from "@kit.AbilityKit";
import { TAG } from "./Const";
interface AVPlayerMediaInfo {
  track_index: number;
  track_type: number;
  codec_mime: string;
  duration: number;
  bitrate: number;
  width: number;
  height: number;
  frame_rate: number;
  channel_count: number;
  sample_rate: number;
}
export class Unity {
  context: common.UIAbilityContext;

  constructor(context: common.UIAbilityContext) {
    this.context = context;
  }

  /**
   * https://developer.huawei.com/consumer/cn/doc/harmonyos-guides-V5/avmetadataextractor-0000001813575908-V5
   * @param path absolute path of the media file in the app sandbox
   * @returns basic metadata plus the file size
   */
  async getMediaInfoJson(path: string): Promise<Record<string, string | boolean | number>> {
    Log.i(TAG, `getMediaInfoJson ` + path);
    let file = fs.openSync(path, fs.OpenMode.READ_ONLY);
    let fileSize: number = fs.statSync(path).size;
    let avMetadataExtractor: media.AVMetadataExtractor = await media.createAVMetadataExtractor();
    avMetadataExtractor.fdSrc = { fd: file.fd };
    let metadata = await avMetadataExtractor.fetchMetadata();
    Log.d(TAG, `get meta data, ${JSON.stringify(metadata)}`);
    await avMetadataExtractor.release();
    fs.closeSync(file);
    const title = metadata.title;
    const author = metadata.author;
    const duration = metadata.duration ? Number(metadata.duration) : 0;
    const width = metadata.videoWidth ? Number(metadata.videoWidth) : 0;
    const height = metadata.videoHeight ? Number(metadata.videoHeight) : 0;
    return {
      path: path,
      title: title,
      author: author,
      width: width,
      height: height,
      duration: duration,
      filesize: fileSize,
    } as Record<string, string | boolean | number>;
  }
  /**
   * AVMetadataExtractor does not expose every field (e.g. bitrate, frame rate),
   * so this variant reads the track descriptions via AVPlayer instead.
   * @param _path absolute path of the media file in the app sandbox
   * @returns track-level info for the first video track
   */
  getMediaInfoJsonByAVPlayer(_path: string): Promise<AVPlayerMediaInfo> {
    return new Promise(async (resolve) => {
      // Create the AVPlayer instance
      let avPlayer: media.AVPlayer = await media.createAVPlayer();
      avPlayer.on("stateChange", async (state: string, reason: media.StateChangeReason) => {
        if (state === "initialized") {
          avPlayer.prepare();
        } else if (state === "prepared") {
          const trackList = await avPlayer.getTrackDescription();
          Log.d(TAG, JSON.stringify(trackList));
          // Pick the first video track
          const videoTrack = trackList.find((track) => (track["codec_mime"] as string).indexOf("video") > -1)!;
          const r = {
            track_index: videoTrack["track_index"] as number,
            track_type: videoTrack["track_type"] as number,
            codec_mime: videoTrack["codec_mime"] as string,
            duration: avPlayer.duration,
            bitrate: videoTrack["bitrate"] as number,
            width: videoTrack["width"] as number,
            height: videoTrack["height"] as number,
            frame_rate: videoTrack["frame_rate"] as number,
            channel_count: videoTrack["channel_count"] as number,
            sample_rate: videoTrack["sample_rate"] as number,
          } as AVPlayerMediaInfo;
          avPlayer.stop(); // stop playback
          avPlayer.release();
          resolve(r);
        } else if (state === "playing") {
          avPlayer.stop(); // stop playback
          avPlayer.release();
        }
      });
      let fdPath = "fd://";
      // With the Stage model, sandbox paths come from UIAbilityContext, e.g.:
      // let pathDir = this.context.filesDir;
      // let path = pathDir + _path;
      // Open the file to get an fd; assigning `url` triggers the "initialized" state
      Log.d(TAG, `get meta data 2 path, ${_path}`);
      let file = await fs.open(_path);
      fdPath = fdPath + "" + file.fd;
      Log.d(TAG, `get meta data 2 fdPath, ${fdPath}`);
      avPlayer.url = fdPath;
    });
  }
}
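A hypothetical call site from a UIAbility, assuming `demo.mp4` already exists in the sandbox:

ts
// Assumes `this.context` is the UIAbilityContext and the file exists
const unity = new Unity(this.context);
const basicInfo = await unity.getMediaInfoJson(this.context.filesDir + '/demo.mp4');
const trackInfo = await unity.getMediaInfoJsonByAVPlayer(this.context.filesDir + '/demo.mp4');
console.log(JSON.stringify(basicInfo), JSON.stringify(trackInfo));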