Compare commits: master...improve_pn

3 commits:
- 75e2d9e359
- 6592517122
- bd953a6287
@@ -43,7 +43,8 @@ import {
 import {
   APP_NAME,
   CURSOR_TYPE,
-  DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT,
+  DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_JPG,
+  DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_OTHER,
   DEFAULT_UI_OPTIONS,
   DEFAULT_VERTICAL_ALIGN,
   DRAGGING_THRESHOLD,
@@ -222,6 +223,7 @@ import {
 } from "../data/blob";
 import {
   getInitializedImageElements,
+  hasTransparentPixels,
   loadHTMLImageElement,
   normalizeSVG,
   updateImageCache as _updateImageCache,
@@ -4001,20 +4003,30 @@ class App extends React.Component<AppProps, AppState> {
     const existingFileData = this.files[fileId];
     if (!existingFileData?.dataURL) {
       try {
-        imageFile = await resizeImageFile(
-          imageFile,
-          DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT,
-        );
+        if (!(await hasTransparentPixels(imageFile))) {
+          const _imageFile = await resizeImageFile(imageFile, {
+            maxWidthOrHeight: DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_JPG,
+            outputType: MIME_TYPES.jpg,
+          });
+          if (_imageFile.size > MAX_ALLOWED_FILE_BYTES) {
+            imageFile = await resizeImageFile(imageFile, {
+              maxWidthOrHeight: DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_OTHER,
+              outputType: MIME_TYPES.jpg,
+            });
+          } else {
+            imageFile = _imageFile;
+          }
+        } else {
+          imageFile = await resizeImageFile(imageFile, {
+            maxWidthOrHeight: DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_OTHER,
+          });
+        }
       } catch (error: any) {
         console.error("error trying to resize image file on insertion", error);
       }

       if (imageFile.size > MAX_ALLOWED_FILE_BYTES) {
-        throw new Error(
-          t("errors.fileTooBig", {
-            maxSize: `${Math.trunc(MAX_ALLOWED_FILE_BYTES / 1024 / 1024)}MB`,
-          }),
-        );
+        throw new Error(t("errors.fileTooBig"));
       }
     }
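Reviewer note: the new insertion branch above boils down to one decision. If the image has no (semi)transparent pixels, it is re-encoded as JPEG at the generous `_JPG` limit, falling back to the smaller `_OTHER` limit when the result still exceeds `MAX_ALLOWED_FILE_BYTES`; otherwise it is only downscaled in its original format so the alpha channel survives. Below is a minimal standalone sketch of that flow. It is illustrative only: `pickCompressedImage` is not part of this diff, and it assumes the `resizeImageFile` / `hasTransparentPixels` signatures introduced in this branch.

```ts
// Illustrative helper, not part of the diff: the same decision flow as above.
// Assumes the constants and helpers are imported from "../constants",
// "../data/blob" and "../element/image", as App.tsx does.
const pickCompressedImage = async (imageFile: File): Promise<File> => {
  if (await hasTransparentPixels(imageFile)) {
    // alpha present: keep the original format, only downscale
    return resizeImageFile(imageFile, {
      maxWidthOrHeight: DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_OTHER,
    });
  }
  // fully opaque: JPEG re-encoding is usually much smaller, so allow a
  // larger maximum dimension on the first attempt
  const jpeg = await resizeImageFile(imageFile, {
    maxWidthOrHeight: DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_JPG,
    outputType: MIME_TYPES.jpg,
  });
  // if the large JPEG still exceeds the upload limit, downscale harder
  return jpeg.size > MAX_ALLOWED_FILE_BYTES
    ? resizeImageFile(imageFile, {
        maxWidthOrHeight: DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_OTHER,
        outputType: MIME_TYPES.jpg,
      })
    : jpeg;
};
```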
@@ -4113,7 +4125,9 @@ class App extends React.Component<AppProps, AppState> {
     // https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Basic_User_Interface/Using_URL_values_for_the_cursor_property
     const cursorImageSizePx = 96;

-    const imagePreview = await resizeImageFile(imageFile, cursorImageSizePx);
+    const imagePreview = await resizeImageFile(imageFile, {
+      maxWidthOrHeight: cursorImageSizePx,
+    });

     let previewDataURL = await getDataURL(imagePreview);
@@ -162,7 +162,8 @@ export const MAX_DECIMALS_FOR_SVG_EXPORT = 2;
 export const EXPORT_SCALES = [1, 2, 3];
 export const DEFAULT_EXPORT_PADDING = 10; // px

-export const DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT = 1440;
+export const DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_JPG = 10000;
+export const DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_OTHER = 1440;

 export const ALLOWED_IMAGE_MIME_TYPES = [
   MIME_TYPES.png,
@@ -237,7 +237,11 @@ export const dataURLToFile = (dataURL: DataURL, filename = "") => {

 export const resizeImageFile = async (
   file: File,
-  maxWidthOrHeight: number,
+  opts: {
+    /** undefined indicates auto */
+    outputType?: typeof MIME_TYPES["jpg"];
+    maxWidthOrHeight: number;
+  },
 ): Promise<File> => {
   // SVG files shouldn't and can't be resized
   if (file.type === MIME_TYPES.svg) {
@@ -257,6 +261,16 @@ export const resizeImageFile = async (
     pica: pica({ features: ["js", "wasm"] }),
   });

+  if (opts.outputType) {
+    const { outputType } = opts;
+    reduce._create_blob = function (env) {
+      return this.pica.toBlob(env.out_canvas, outputType, 0.8).then((blob) => {
+        env.out_blob = blob;
+        return env;
+      });
+    };
+  }
+
   const fileType = file.type;

   if (!isSupportedImageFile(file)) {
@@ -264,9 +278,11 @@ export const resizeImageFile = async (
   }

   return new File(
-    [await reduce.toBlob(file, { max: maxWidthOrHeight })],
+    [await reduce.toBlob(file, { max: opts.maxWidthOrHeight })],
     file.name,
-    { type: fileType },
+    {
+      type: fileType,
+    },
   );
 };
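The signature change makes all call sites pass an options object, and the optional `outputType` hooks image-blob-reduce's `_create_blob` step so pica encodes the output canvas as JPEG at quality 0.8 instead of the source type. A hedged usage sketch, mirroring the call sites elsewhere in this diff:

```ts
// downscale only, keeping the original MIME type (outputType omitted = auto)
const scaled = await resizeImageFile(file, { maxWidthOrHeight: 1440 });

// downscale and transcode to JPEG via the overridden _create_blob step
const asJpeg = await resizeImageFile(file, {
  maxWidthOrHeight: DEFAULT_MAX_IMAGE_WIDTH_OR_HEIGHT_JPG,
  outputType: MIME_TYPES.jpg,
});
```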
@@ -1,8 +1,11 @@
-import decodePng from "png-chunks-extract";
+import extractPngChunks from "png-chunks-extract";
 import tEXt from "png-chunk-text";
 import encodePng from "png-chunks-encode";
 import { stringToBase64, encode, decode, base64ToString } from "./encode";
 import { EXPORT_DATA_TYPES, MIME_TYPES } from "../constants";
+import { PngChunk } from "../types";

+export { extractPngChunks };
+
 // -----------------------------------------------------------------------------
 // PNG
@@ -28,7 +31,9 @@ const blobToArrayBuffer = (blob: Blob): Promise<ArrayBuffer> => {
 export const getTEXtChunk = async (
   blob: Blob,
 ): Promise<{ keyword: string; text: string } | null> => {
-  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
+  const chunks = extractPngChunks(
+    new Uint8Array(await blobToArrayBuffer(blob)),
+  );
   const metadataChunk = chunks.find((chunk) => chunk.name === "tEXt");
   if (metadataChunk) {
     return tEXt.decode(metadataChunk.data);
@@ -36,6 +41,28 @@ export const getTEXtChunk = async (
   return null;
 };

+export const findPngChunk = (
+  chunks: PngChunk[],
+  name: PngChunk["name"],
+  /** this makes the search stop before the IDAT chunk (before which most
+   * metadata chunks reside). This is a perf optimization. */
+  breakBeforeIDAT = true,
+) => {
+  let i = 0;
+  const len = chunks.length;
+  while (i < len) {
+    const chunk = chunks[i];
+    if (chunk.name === name) {
+      return chunk;
+    }
+    if (breakBeforeIDAT && chunk.name === "IDAT") {
+      return null;
+    }
+    i++;
+  }
+  return null;
+};
+
 export const encodePngMetadata = async ({
   blob,
   metadata,
@@ -43,7 +70,9 @@ export const encodePngMetadata = async ({
   blob: Blob;
   metadata: string;
 }) => {
-  const chunks = decodePng(new Uint8Array(await blobToArrayBuffer(blob)));
+  const chunks = extractPngChunks(
+    new Uint8Array(await blobToArrayBuffer(blob)),
+  );

   const metadataChunk = tEXt.encode(
     MIME_TYPES.excalidraw,
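`findPngChunk` does a linear scan over the chunks returned by `png-chunks-extract` and, by default, gives up once it reaches `IDAT`, since the metadata chunks it is meant to locate (per the inline comment, most of them sit before the image data). A small usage sketch; the `blob` value is assumed:

```ts
// does this PNG declare a palette? (search stops early at IDAT by default)
const chunks = extractPngChunks(new Uint8Array(await blob.arrayBuffer()));
const plte = findPngChunk(chunks, "PLTE"); // PngChunk | null
```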
@@ -3,6 +3,7 @@
 // -----------------------------------------------------------------------------

 import { MIME_TYPES, SVG_NS } from "../constants";
+import { getDataURL } from "../data/blob";
 import { t } from "../i18n";
 import { AppClassProperties, DataURL, BinaryFiles } from "../types";
 import { isInitializedImageElement } from "./typeChecks";
@@ -109,3 +110,81 @@ export const normalizeSVG = async (SVGString: string) => {
     return svg.outerHTML;
   }
 };
+
+/**
+ * To improve perf, uses `createImageBitmap` if available. But there are
+ * quality issues across browsers, so don't use this API where quality matters.
+ */
+export const speedyImageToCanvas = async (imageFile: Blob | File) => {
+  let imageSrc: HTMLImageElement | ImageBitmap;
+  if (
+    typeof ImageBitmap !== "undefined" &&
+    ImageBitmap.prototype &&
+    ImageBitmap.prototype.close &&
+    window.createImageBitmap
+  ) {
+    imageSrc = await window.createImageBitmap(imageFile);
+  } else {
+    imageSrc = await loadHTMLImageElement(await getDataURL(imageFile));
+  }
+  const { width, height } = imageSrc;
+
+  const canvas = document.createElement("canvas");
+  canvas.height = height;
+  canvas.width = width;
+  const context = canvas.getContext("2d")!;
+  context.drawImage(imageSrc, 0, 0, width, height);
+
+  if (typeof ImageBitmap !== "undefined" && imageSrc instanceof ImageBitmap) {
+    imageSrc.close();
+  }
+
+  return { canvas, context, width, height };
+};
+
+/**
+ * Does its best at figuring out if an image (PNG) has any (semi)transparent
+ * pixels. If not PNG, always returns false.
+ */
+export const hasTransparentPixels = async (imageFile: Blob | File) => {
+  if (imageFile.type !== MIME_TYPES.png) {
+    return false;
+  }
+
+  const { findPngChunk, extractPngChunks } = await import("../data/image");
+
+  const buffer = await imageFile.arrayBuffer();
+  const chunks = extractPngChunks(new Uint8Array(buffer));
+
+  // early exit if tRNS not found and IHDR states no support for alpha
+  // -----------------------------------------------------------------------
+
+  const IHDR = findPngChunk(chunks, "IHDR");
+
+  if (
+    IHDR &&
+    IHDR.data[9] !== 4 &&
+    IHDR.data[9] !== 6 &&
+    !findPngChunk(chunks, "tRNS")
+  ) {
+    return false;
+  }
+
+  // otherwise loop through pixels to check if there's any actually
+  // (semi)transparent pixel
+  // -----------------------------------------------------------------------
+
+  const { width, height, context } = await speedyImageToCanvas(imageFile);
+  {
+    const { data } = context.getImageData(0, 0, width, height);
+    const len = data.byteLength;
+    let i = 3;
+    while (i <= len) {
+      if (data[i] !== 255) {
+        return true;
+      }
+      i += 4;
+    }
+  }
+  return false;
+};
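For context on the early exit in `hasTransparentPixels`: byte 9 of the IHDR chunk data is the PNG colour type, and only types 4 (greyscale + alpha) and 6 (truecolour + alpha) carry a built-in alpha channel; greyscale, truecolour and indexed PNGs can only become transparent through a `tRNS` chunk. When neither condition holds, the per-pixel scan is skipped entirely. A hedged restatement of that check as a standalone predicate (illustrative; `mayHaveAlpha` is not part of the diff):

```ts
// Illustrative: the same early-exit logic as a standalone predicate.
const mayHaveAlpha = (chunks: PngChunk[]): boolean => {
  const IHDR = findPngChunk(chunks, "IHDR");
  if (!IHDR) {
    // malformed/unexpected PNG: fall through to the pixel scan, as the diff does
    return true;
  }
  const colourType = IHDR.data[9];
  return colourType === 4 || colourType === 6 || !!findPngChunk(chunks, "tRNS");
};
```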
@@ -199,11 +199,7 @@ export const encodeFilesForUpload = async ({
     });

     if (buffer.byteLength > maxBytes) {
-      throw new Error(
-        t("errors.fileTooBig", {
-          maxSize: `${Math.trunc(maxBytes / 1024 / 1024)}MB`,
-        }),
-      );
+      throw new Error(t("errors.fileTooBig"));
     }

     processedFiles.push({
src/global.d.ts (vendored, 17 lines changed)

@@ -1,3 +1,5 @@
+// import type {PngChunk} from "./types";
+
 // eslint-disable-next-line @typescript-eslint/no-unused-vars
 interface Document {
   fonts?: {
@@ -54,8 +56,6 @@ type NonOptional<T> = Exclude<T, undefined>;

 // PNG encoding/decoding
 // -----------------------------------------------------------------------------
-type TEXtChunk = { name: "tEXt"; data: Uint8Array };
-
 declare module "png-chunk-text" {
   function encode(
     name: string,
@@ -64,11 +64,11 @@ declare module "png-chunk-text" {
   function decode(data: Uint8Array): { keyword: string; text: string };
 }
 declare module "png-chunks-encode" {
-  function encode(chunks: TEXtChunk[]): Uint8Array;
+  function encode(chunks: import("./types").PngChunk[]): Uint8Array;
   export = encode;
 }
 declare module "png-chunks-extract" {
-  function extract(buffer: Uint8Array): TEXtChunk[];
+  function extract(buffer: Uint8Array): import("./types").PngChunk[];
   export = extract;
 }
 // -----------------------------------------------------------------------------
@@ -111,10 +111,17 @@ interface Uint8Array {

 // https://github.com/nodeca/image-blob-reduce/issues/23#issuecomment-783271848
 declare module "image-blob-reduce" {
-  import { PicaResizeOptions } from "pica";
+  import { PicaResizeOptions, Pica } from "pica";
   namespace ImageBlobReduce {
     interface ImageBlobReduce {
       toBlob(file: File, options: ImageBlobReduceOptions): Promise<Blob>;
+      _create_blob(
+        this: { pica: Pica },
+        env: {
+          out_canvas: HTMLCanvasElement;
+          out_blob: Blob;
+        },
+      ): Promise<any>;
     }

     interface ImageBlobReduceStatic {
@@ -169,7 +169,7 @@
   "errors": {
     "unsupportedFileType": "Unsupported file type.",
     "imageInsertError": "Couldn't insert image. Try again later...",
-    "fileTooBig": "File is too big. Maximum allowed size is {{maxSize}}.",
+    "fileTooBig": "File is too big.",
     "svgImageInsertError": "Couldn't insert SVG image. The SVG markup looks invalid.",
     "invalidSVGString": "Invalid SVG."
   },
src/types.ts (23 lines changed)

@@ -396,3 +396,26 @@ export type ExcalidrawImperativeAPI = {
   ready: true;
   id: string;
 };
+
+export type PngChunk = {
+  name:
+    | "PLTE"
+    | "IHDR"
+    | "IDAT"
+    | "IEND"
+    | "cHRM"
+    | "gAMA"
+    | "iCCP"
+    | "sRGB"
+    | "sBIT"
+    | "bKGD"
+    | "hIST"
+    | "tRNS"
+    | "pHYs"
+    | "sPLT"
+    | "tIME"
+    | "iTXt"
+    | "tEXt"
+    | "zTXt";
+  data: Uint8Array;
+};
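The `PngChunk` union covers the standard critical and ancillary chunk names, which is what lets the ambient `png-chunks-extract` / `png-chunks-encode` declarations above and `findPngChunk` type-check chunk-name comparisons without casts. A tiny illustrative check (the `chunks` value is assumed):

```ts
declare const chunks: PngChunk[];
// "IHDR" narrows against the union, so a typo like "IHDRR" is a compile error
const ihdr = chunks.find((chunk) => chunk.name === "IHDR");
```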