// Image conversion helpers for Cocos Creator 2.x:
// Texture2D <-> base64 <-> Blob, and node capture via an offscreen camera.
export module ImageConver {
export function ImageToBase64(Texture2D: cc.Texture2D): string {
let image: ImageBitmap = Texture2D["_image"];
let cv: HTMLCanvasElement = document.createElement("canvas");
let context: CanvasRenderingContext2D = cv.getContext("2d");
cv.width = image.width || 128;
cv.height = image.height || 128;
context.drawImage(image, 0, 0, image.width || 128, image.height || 128);
let DataURL: string = cv.toDataURL();
return DataURL;
}
export function Base64toBlob(b64Data: any, contentType: string = "image/png", sliceSize: number = 512): Blob {
let byteCharacters: string = atob(b64Data.substring(b64Data.indexOf(",") + 1));
const byteArrays: any[] = [];
for (let offset: number = 0; offset < byteCharacters.length; offset += sliceSize) {
const slice: string = byteCharacters.slice(offset, offset + sliceSize);
const byteNumbers: any[] = new Array(slice.length);
for (let i: number = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i);
}
const byteArray: Uint8Array = new Uint8Array(byteNumbers);
byteArrays.push(byteArray);
}
const blob: Blob = new Blob(byteArrays, { type: contentType });
return blob;
}
export function BlobToImageNode(BlobData: Blob, node: cc.Node): void {
let reader: FileReader = new FileReader();
reader.onloadend = function (): void {
ImageConver.Base64ToImageNode(reader.result + "", node);
};
reader.readAsDataURL(BlobData);
}
export function Base64ToImageNode(base64: string, node: cc.Node): void {
let image: HTMLImageElement = new Image();
image.src = base64;
image.onload = function (): void {
let texture: cc.Texture2D = new cc.Texture2D();
texture.initWithElement(image);
texture.handleLoadedTexture();
let spriteFrame: cc.SpriteFrame = new cc.SpriteFrame(texture);
let sprite: cc.Sprite = node.addComponent(cc.Sprite);
sprite.spriteFrame = spriteFrame;
return;
};
}
export function Node2Base64(nodeCapture: cc.Node): string {
let nodeCamera: cc.Node = new cc.Node();
nodeCamera.parent = cc.find("Canvas");
let camera: cc.Camera = nodeCamera.addComponent(cc.Camera);
let position: cc.Vec2 = nodeCapture.getPosition();
let width: number = nodeCapture.width;
let height: number = nodeCapture.height;
// 当 alignWithScreen 为 true 的时候,摄像机会自动将视窗大小调整为整个屏幕的大小。如果想要完全自由地控制摄像机,则需要将 alignWithScreen 设置为 false。v2.2.1 新增)
camera.alignWithScreen = false;
// 设置摄像机的投影模式是正交true还是透视false模式
camera.ortho = true;
// 摄像机在正交投影模式下的视窗大小。该属性在 alignWithScreen 设置为 false 时生效。
camera.orthoSize = height / 2;
let texture: cc.RenderTexture = new cc.RenderTexture();
// 如果截图内容中不包含 Mask 组件,可以不用传递第三个参数
texture.initWithSize(width, height);
// 如果设置了 targetTexture那么摄像机渲染的内容不会输出到屏幕上而是会渲染到 targetTexture 上。
camera.targetTexture = texture;
// 创建画布
let canvas: HTMLCanvasElement = document.createElement("canvas");
canvas.width = width;
canvas.height = height;
let ctx: CanvasRenderingContext2D = canvas.getContext("2d");
nodeCapture.setPosition(cc.Vec2.ZERO);
// 渲染一次摄像机,即更新一次内容到 RenderTexture 中
camera.render(nodeCapture);
nodeCapture.setPosition(position);
// 从 render texture 读取像素数据,数据类型为 RGBA 格式的 Uint8Array 数组。
// 默认每次调用此函数会生成一个大小为 (长 x 高 x 4 的 Uint8Array。
let data: Uint8Array = texture.readPixels();
// write the render data
// PNG 中 1 像素 = 32 bitRGBA1 byte = 8 bit所以 1 像素 = 4 byte
// 每行 width 像素,即 width * 4 字节
let rowBytes: number = width * 4;
for (let row: number = 0; row < height; row++) {
// RenderTexture 得到的纹理是上下翻转的
let srow: number = height - 1 - row;
let imageData: ImageData = ctx.createImageData(width, 1);
let start: number = srow * width * 4;
for (let i: number = 0; i < rowBytes; i++) {
imageData.data[i] = data[start + i];
}
ctx.putImageData(imageData, 0, row);
}
let dataURL: string = canvas.toDataURL("image/png");
return dataURL;
}
}