Mirror of https://github.com/denoland/deno.git
commit 9c31afae0a
Hajime-san, 2025-01-20 23:27:15 +01:00 (committed by GitHub)
27 changed files with 1800 additions and 304 deletions

Cargo.lock (generated)

@ -646,9 +646,9 @@ dependencies = [
[[package]]
name = "bytemuck"
-version = "1.15.0"
+version = "1.17.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15"
+checksum = "773d90827bc3feecfb67fab12e24de0749aad83c74b9504ecde46237b5cd24e2"
[[package]]
name = "byteorder"
@ -656,6 +656,12 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "byteorder-lite"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495"
[[package]]
name = "bytes"
version = "1.9.0"
@ -1490,10 +1496,14 @@ dependencies = [
name = "deno_canvas"
version = "0.55.0"
dependencies = [
+"bytemuck",
"deno_core",
"deno_error",
+"deno_terminal 0.2.0",
"deno_webgpu",
"image",
+"lcms2",
+"num-traits",
"serde",
"thiserror 2.0.3",
]
@ -3046,6 +3056,12 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "dunce"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
[[package]]
name = "dyn-clone"
version = "1.0.17"
@ -3691,6 +3707,16 @@ dependencies = [
"polyval", "polyval",
] ]
[[package]]
name = "gif"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fb2d69b19215e18bb912fa30f7ce15846e301408695e44e0ef719f1da9e19f2"
dependencies = [
"color_quant",
"weezl",
]
[[package]]
name = "gimli"
version = "0.29.0"
@ -4449,15 +4475,29 @@ dependencies = [
[[package]]
name = "image"
-version = "0.24.9"
+version = "0.25.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5690139d2f55868e080017335e4b94cb7414274c74f1669c84fb5feba2c9f69d"
+checksum = "bc144d44a31d753b02ce64093d532f55ff8dc4ebf2ffb8a63c0dda691385acae"
dependencies = [
"bytemuck",
-"byteorder",
+"byteorder-lite",
"color_quant",
+"gif",
+"image-webp",
"num-traits",
"png",
+"zune-core",
+"zune-jpeg",
]
+[[package]]
+name = "image-webp"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e031e8e3d94711a9ccb5d6ea357439ef3dcbed361798bd4071dc4d9793fbe22f"
+dependencies = [
+"byteorder-lite",
+"quick-error 2.0.1",
]
[[package]]
@ -4778,6 +4818,29 @@ dependencies = [
"spin",
]
[[package]]
name = "lcms2"
version = "6.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "680ec3fa42c36e0af9ca02f20a3742a82229c7f1ee0e6754294de46a80be6f74"
dependencies = [
"bytemuck",
"foreign-types",
"lcms2-sys",
]
[[package]]
name = "lcms2-sys"
version = "4.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "593265f9a3172180024fb62580ee31348f31be924b19416da174ebb7fb623d2e"
dependencies = [
"cc",
"dunce",
"libc",
"pkg-config",
]
[[package]]
name = "libc"
version = "0.2.168"
@ -5372,9 +5435,9 @@ dependencies = [
[[package]]
name = "num-traits"
-version = "0.2.18"
+version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
dependencies = [
"autocfg",
"libm",
@ -6139,6 +6202,12 @@ version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quick-error"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
[[package]]
name = "quick-junit"
version = "0.3.6"
@ -6472,7 +6541,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00"
dependencies = [ dependencies = [
"hostname", "hostname",
"quick-error", "quick-error 1.2.3",
] ]
[[package]]
@ -8887,6 +8956,12 @@ dependencies = [
"rustls-pki-types",
]
[[package]]
name = "weezl"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082"
[[package]]
name = "wgpu-core"
version = "0.21.1"
@ -9583,3 +9658,18 @@ dependencies = [
"cc", "cc",
"pkg-config", "pkg-config",
] ]
[[package]]
name = "zune-core"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a"
[[package]]
name = "zune-jpeg"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16099418600b4d8f028622f73ff6e3deaabdff330fb9a2a131dea781ee8b0768"
dependencies = [
"zune-core",
]

ext/canvas/01_image.js

@ -1,7 +1,7 @@
// Copyright 2018-2025 the Deno authors. MIT license.
import { internals, primordials } from "ext:core/mod.js";
-import { op_image_decode_png, op_image_process } from "ext:core/ops";
+import { op_create_image_bitmap } from "ext:core/ops";
import * as webidl from "ext:deno_webidl/00_webidl.js";
import { DOMException } from "ext:deno_web/01_dom_exception.js";
import { createFilteredInspectProxy } from "ext:deno_console/01_console.js";
@ -11,13 +11,11 @@ const {
ObjectPrototypeIsPrototypeOf,
Symbol,
SymbolFor,
-TypeError,
TypedArrayPrototypeGetBuffer,
Uint8Array,
-MathCeil,
-PromiseResolve,
PromiseReject,
RangeError,
+ArrayPrototypeJoin,
} = primordials;
import {
_data,
@ -164,6 +162,12 @@ function createImageBitmap(
options = undefined,
) {
const prefix = "Failed to execute 'createImageBitmap'";
// Add new entries here when implementing support for additional ImageBitmapSource types.
const imageBitmapSources = [
"Blob",
"ImageData",
"ImageBitmap",
];
// Overload: createImageBitmap(image [, options ])
if (arguments.length < 3) {
@ -184,6 +188,7 @@ function createImageBitmap(
"Argument 6", "Argument 6",
); );
// 1.
if (sw === 0) {
return PromiseReject(new RangeError("sw has to be greater than 0"));
}
@ -193,6 +198,7 @@ function createImageBitmap(
}
}
// 2.
if (options.resizeWidth === 0) {
return PromiseReject(
new DOMException(
@ -204,7 +210,7 @@ function createImageBitmap(
if (options.resizeHeight === 0) {
return PromiseReject(
new DOMException(
-"options.resizeWidth has to be greater than 0",
+"options.resizeHeight has to be greater than 0",
"InvalidStateError",
),
);
@ -212,139 +218,143 @@ function createImageBitmap(
const imageBitmap = webidl.createBranded(ImageBitmap); const imageBitmap = webidl.createBranded(ImageBitmap);
if (ObjectPrototypeIsPrototypeOf(ImageDataPrototype, image)) { // 3.
const processedImage = processImage( const isBlob = ObjectPrototypeIsPrototypeOf(BlobPrototype, image);
image[_data], const isImageData = ObjectPrototypeIsPrototypeOf(ImageDataPrototype, image);
image[_width], const isImageBitmap = ObjectPrototypeIsPrototypeOf(
image[_height], ImageBitmapPrototype,
sxOrOptions, image,
sy, );
sw, if (!isBlob && !isImageData && !isImageBitmap) {
sh, return PromiseReject(
options, new DOMException(
`${prefix}: The provided value for 'image' is not of type '(${
ArrayPrototypeJoin(imageBitmapSources, " or ")
})'`,
"InvalidStateError",
),
); );
imageBitmap[_bitmapData] = processedImage.data;
imageBitmap[_width] = processedImage.outputWidth;
imageBitmap[_height] = processedImage.outputHeight;
return PromiseResolve(imageBitmap);
} }
if (ObjectPrototypeIsPrototypeOf(BlobPrototype, image)) {
return (async () => { // 4.
const data = await image.arrayBuffer(); return (async () => {
const mimetype = sniffImage(image.type); //
if (mimetype !== "image/png") { // For performance reasons, the arguments passed to op are represented as numbers that don't need to be serialized.
throw new DOMException( //
`Unsupported type '${image.type}'`,
"InvalidStateError", let width = 0;
let height = 0;
// If the image doesn't have a MIME type, mark it as 0.
let mimeType = 0;
let imageBitmapSource, buf;
if (isBlob) {
imageBitmapSource = 0;
buf = new Uint8Array(await image.arrayBuffer());
const mimeTypeString = sniffImage(image.type);
if (mimeTypeString === "image/png") {
mimeType = 1;
} else if (mimeTypeString === "image/jpeg") {
mimeType = 2;
} else if (mimeTypeString === "image/gif") {
mimeType = 3;
} else if (mimeTypeString === "image/bmp") {
mimeType = 4;
} else if (mimeTypeString === "image/x-icon") {
mimeType = 5;
} else if (mimeTypeString === "image/webp") {
mimeType = 6;
} else if (mimeTypeString === "") {
return PromiseReject(
new DOMException(
`The MIME type of the source image is not specified\n
hint: When you want to get a "Blob" from "fetch", make sure the response goes through a file server that returns the appropriate content-type response header,
and specify the URL to that file server, e.g. "await (await fetch('http://localhost:8000/sample.png')).blob()".
Alternatively, if you are reading a local file using 'Deno.readFile' etc.,
set the appropriate MIME type, e.g. "new Blob([await Deno.readFile('sample.png')], { type: 'image/png' })".\n`,
"InvalidStateError",
),
);
} else {
return PromiseReject(
new DOMException(
`The MIME type ${mimeTypeString} of the source image is not a supported format\n
info: The following MIME types are supported:
docs: https://mimesniff.spec.whatwg.org/#image-type-pattern-matching-algorithm\n`,
"InvalidStateError",
),
); );
} }
const { data: imageData, width, height } = op_image_decode_png( } else if (isImageData) {
new Uint8Array(data), width = image[_width];
); height = image[_height];
const processedImage = processImage( imageBitmapSource = 1;
imageData, buf = new Uint8Array(TypedArrayPrototypeGetBuffer(image[_data]));
width, } else if (isImageBitmap) {
height, width = image[_width];
sxOrOptions, height = image[_height];
sy, imageBitmapSource = 2;
sw, buf = new Uint8Array(TypedArrayPrototypeGetBuffer(image[_bitmapData]));
sh, }
options,
);
imageBitmap[_bitmapData] = processedImage.data;
imageBitmap[_width] = processedImage.outputWidth;
imageBitmap[_height] = processedImage.outputHeight;
return imageBitmap;
})();
} else {
return PromiseReject(new TypeError("Invalid or unsupported image value"));
}
}
function processImage(input, width, height, sx, sy, sw, sh, options) { // If those options are not provided, assign 0 to mean undefined(None).
let sourceRectangle; const _sx = typeof sxOrOptions === "number" ? sxOrOptions : 0;
const _sy = sy ?? 0;
const _sw = sw ?? 0;
const _sh = sh ?? 0;
if ( // If those options are not provided, assign 0 to mean undefined(None).
sx !== undefined && sy !== undefined && sw !== undefined && sh !== undefined const resizeWidth = options.resizeWidth ?? 0;
) { const resizeHeight = options.resizeHeight ?? 0;
sourceRectangle = [
[sx, sy],
[sx + sw, sy],
[sx + sw, sy + sh],
[sx, sy + sh],
];
} else {
sourceRectangle = [
[0, 0],
[width, 0],
[width, height],
[0, height],
];
}
const widthOfSourceRect = sourceRectangle[1][0] - sourceRectangle[0][0];
const heightOfSourceRect = sourceRectangle[3][1] - sourceRectangle[0][1];
let outputWidth; // If the imageOrientation option is set "from-image" or not set, assign 0.
if (options.resizeWidth !== undefined) { const imageOrientation = options.imageOrientation === "flipY" ? 1 : 0;
outputWidth = options.resizeWidth;
} else if (options.resizeHeight !== undefined) {
outputWidth = MathCeil(
(widthOfSourceRect * options.resizeHeight) / heightOfSourceRect,
);
} else {
outputWidth = widthOfSourceRect;
}
let outputHeight; // If the premultiplyAlpha option is "default" or not set, assign 0.
if (options.resizeHeight !== undefined) { let premultiplyAlpha = 0;
outputHeight = options.resizeHeight; if (options.premultiplyAlpha === "premultiply") {
} else if (options.resizeWidth !== undefined) { premultiplyAlpha = 1;
outputHeight = MathCeil( } else if (options.premultiplyAlpha === "none") {
(heightOfSourceRect * options.resizeWidth) / widthOfSourceRect, premultiplyAlpha = 2;
); }
} else {
outputHeight = heightOfSourceRect;
}
if (options.colorSpaceConversion === "none") { // If the colorSpaceConversion option is "default" or not set, assign 0.
throw new TypeError( const colorSpaceConversion = options.colorSpaceConversion === "none"
"Cannot create image: invalid colorSpaceConversion option, 'none' is not supported", ? 1
); : 0;
}
/* // If the resizeQuality option is "low" or not set, assign 0.
* The cropping works differently than the spec specifies: let resizeQuality = 0;
* The spec states to create an infinite surface and place the top-left corner if (options.resizeQuality === "pixelated") {
* of the image a 0,0 and crop based on sourceRectangle. resizeQuality = 1;
* } else if (options.resizeQuality === "medium") {
* We instead create a surface the size of sourceRectangle, and position resizeQuality = 2;
* the image at the correct location, which is the inverse of the x & y of } else if (options.resizeQuality === "high") {
* sourceRectangle's top-left corner. resizeQuality = 3;
*/ }
const data = op_image_process(
new Uint8Array(TypedArrayPrototypeGetBuffer(input)), const processedImage = op_create_image_bitmap(
{ buf,
width, width,
height, height,
surfaceWidth: widthOfSourceRect, _sx,
surfaceHeight: heightOfSourceRect, _sy,
inputX: sourceRectangle[0][0] * -1, // input_x _sw,
inputY: sourceRectangle[0][1] * -1, // input_y _sh,
outputWidth, imageOrientation,
outputHeight, premultiplyAlpha,
resizeQuality: options.resizeQuality, colorSpaceConversion,
flipY: options.imageOrientation === "flipY", resizeWidth,
premultiply: options.premultiplyAlpha === "default" resizeHeight,
? null resizeQuality,
: (options.premultiplyAlpha === "premultiply"), imageBitmapSource,
}, mimeType,
); );
imageBitmap[_bitmapData] = processedImage[0];
return { imageBitmap[_width] = processedImage[1];
data, imageBitmap[_height] = processedImage[2];
outputWidth, return imageBitmap;
outputHeight, })();
};
} }
function getBitmapData(imageBitmap) {

ext/canvas/Cargo.toml

@ -14,9 +14,18 @@ description = "OffscreenCanvas implementation for Deno"
path = "lib.rs" path = "lib.rs"
[dependencies] [dependencies]
bytemuck = "1.17.1"
deno_core.workspace = true deno_core.workspace = true
deno_error.workspace = true deno_error.workspace = true
deno_terminal.workspace = true
deno_webgpu.workspace = true deno_webgpu.workspace = true
image = { version = "0.24.7", default-features = false, features = ["png"] } image = { version = "0.25.4", default-features = false, features = ["png", "jpeg", "bmp", "ico", "webp", "gif"] }
# NOTE: The qcms is a color space conversion crate which parses ICC profiles that used in Gecko,
# however it supports only 8-bit color depth currently.
# https://searchfox.org/mozilla-central/rev/f09e3f9603a08b5b51bf504846091579bc2ff531/gfx/qcms/src/transform.rs#130-137
# It seems to be failed to build for aarch64-unknown-linux-gnu with pkg-config.
# https://github.com/kornelski/rust-lcms2-sys/blob/b8e9c3efcf266b88600318fb519c073b9ebb61b7/README.md#L26
lcms2 = { version = "6.1.0", features = ["static"] }
num-traits = { version = "0.2.19" }
serde = { workspace = true, features = ["derive"] } serde = { workspace = true, features = ["derive"] }
thiserror.workspace = true thiserror.workspace = true

ext/canvas/README.md

@ -1,3 +1,32 @@
# deno_canvas

Extension that implements various OffscreenCanvas related APIs.
## Image processing architecture in Rust
```mermaid
flowchart LR
Input["input binary<br/>( &[u8] )"]
II["intermediate image<br/>( DynamicImage )"]
Ops["processing pixel<br/>( ImageBuffer< P, S > )"]
Output["output binary<br/>( Box<[u8]> )"]
Input --> II
II --> Ops --> II
II --> Output
```
The image processing architecture on the Rust side relies on the structure of
the [image](https://github.com/image-rs/image) crate.\
If the input is image binary data, it is converted to an intermediate image
(`DynamicImage` in `image`) using a decoder that corresponds to its image
format.\
Once converted to an intermediate image, it can be processed in various ways,
for example with the pixel processing operations from
[imageops](https://github.com/image-rs/image?tab=readme-ov-file#image-processing-functions)
supplied by `image`.\
Alternatively, you can implement your own pixel processing operation by
following
[the implementation of imageops shown here](https://github.com/image-rs/image/blob/4afe9572b5c867cf4d07cd88107e8c49354de9f3/src/imageops/colorops.rs#L156-L182)
or the [image_ops.rs module](./image_ops.rs).\
Any bit depth supported by `image` can be handled with generics in the pixel
processing layer.

ext/canvas/image_ops.rs (new file, 609 lines)

@ -0,0 +1,609 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use bytemuck::cast_slice;
use bytemuck::cast_slice_mut;
use image::ColorType;
use image::DynamicImage;
use image::GenericImageView;
use image::ImageBuffer;
use image::Luma;
use image::LumaA;
use image::Pixel;
use image::Primitive;
use image::Rgb;
use image::Rgba;
use lcms2::PixelFormat;
use lcms2::Pod;
use lcms2::Profile;
use lcms2::Transform;
use num_traits::NumCast;
use num_traits::SaturatingMul;
use crate::CanvasError;
pub(crate) trait PremultiplyAlpha {
fn premultiply_alpha(&self) -> Self;
}
impl<T: Primitive> PremultiplyAlpha for LumaA<T> {
fn premultiply_alpha(&self) -> Self {
let max_t = T::DEFAULT_MAX_VALUE;
let mut pixel = [self.0[0], self.0[1]];
let alpha_index = pixel.len() - 1;
let alpha = pixel[alpha_index];
let normalized_alpha = alpha.to_f32().unwrap() / max_t.to_f32().unwrap();
if normalized_alpha == 0.0 {
return LumaA([pixel[0], pixel[alpha_index]]);
}
for rgb in pixel.iter_mut().take(alpha_index) {
*rgb = NumCast::from((rgb.to_f32().unwrap() * normalized_alpha).round())
.unwrap()
}
LumaA([pixel[0], pixel[alpha_index]])
}
}
impl<T: Primitive> PremultiplyAlpha for Rgba<T> {
fn premultiply_alpha(&self) -> Self {
let max_t = T::DEFAULT_MAX_VALUE;
let mut pixel = [self.0[0], self.0[1], self.0[2], self.0[3]];
let alpha_index = pixel.len() - 1;
let alpha = pixel[alpha_index];
let normalized_alpha = alpha.to_f32().unwrap() / max_t.to_f32().unwrap();
if normalized_alpha == 0.0 {
return Rgba([pixel[0], pixel[1], pixel[2], pixel[alpha_index]]);
}
for rgb in pixel.iter_mut().take(alpha_index) {
*rgb = NumCast::from((rgb.to_f32().unwrap() * normalized_alpha).round())
.unwrap()
}
Rgba([pixel[0], pixel[1], pixel[2], pixel[alpha_index]])
}
}
fn process_premultiply_alpha<I, P, S>(image: &I) -> ImageBuffer<P, Vec<S>>
where
I: GenericImageView<Pixel = P>,
P: Pixel<Subpixel = S> + PremultiplyAlpha + 'static,
S: Primitive + 'static,
{
let (width, height) = image.dimensions();
let mut out = ImageBuffer::new(width, height);
for (x, y, pixel) in image.pixels() {
let pixel = pixel.premultiply_alpha();
out.put_pixel(x, y, pixel);
}
out
}
/// Premultiply the alpha channel of the image.
pub(crate) fn premultiply_alpha(
image: DynamicImage,
) -> Result<DynamicImage, CanvasError> {
match image {
DynamicImage::ImageLumaA8(image) => {
Ok(process_premultiply_alpha(&image).into())
}
DynamicImage::ImageLumaA16(image) => {
Ok(process_premultiply_alpha(&image).into())
}
DynamicImage::ImageRgba8(image) => {
Ok(process_premultiply_alpha(&image).into())
}
DynamicImage::ImageRgba16(image) => {
Ok(process_premultiply_alpha(&image).into())
}
DynamicImage::ImageRgb32F(_) => {
Err(CanvasError::UnsupportedColorType(image.color()))
}
DynamicImage::ImageRgba32F(_) => {
Err(CanvasError::UnsupportedColorType(image.color()))
}
// If the image does not have an alpha channel, return the image as is.
_ => Ok(image),
}
}
pub(crate) trait UnpremultiplyAlpha {
/// To determine whether the image has premultiplied alpha, check whether any of the
/// R/G/B channel values exceeds the alpha channel value; a premultiplied RGBA value never does.\
/// https://www.w3.org/TR/webgpu/#color-spaces
fn is_premultiplied_alpha(&self) -> bool;
fn unpremultiply_alpha(&self) -> Self;
}
impl<T: Primitive + SaturatingMul + Ord> UnpremultiplyAlpha for Rgba<T> {
fn is_premultiplied_alpha(&self) -> bool {
let max_t = T::DEFAULT_MAX_VALUE;
let pixel = [self.0[0], self.0[1], self.0[2]];
let alpha_index = self.0.len() - 1;
let alpha = self.0[alpha_index];
match pixel.iter().max() {
Some(rgb_max) => rgb_max < &max_t.saturating_mul(&alpha),
// usually doesn't reach here
None => false,
}
}
fn unpremultiply_alpha(&self) -> Self {
let max_t = T::DEFAULT_MAX_VALUE;
let mut pixel = [self.0[0], self.0[1], self.0[2], self.0[3]];
let alpha_index = pixel.len() - 1;
let alpha = pixel[alpha_index];
for rgb in pixel.iter_mut().take(alpha_index) {
*rgb = NumCast::from(
(rgb.to_f32().unwrap()
/ (alpha.to_f32().unwrap() / max_t.to_f32().unwrap()))
.round(),
)
.unwrap();
}
Rgba([pixel[0], pixel[1], pixel[2], pixel[alpha_index]])
}
}
impl<T: Primitive + SaturatingMul + Ord> UnpremultiplyAlpha for LumaA<T> {
fn is_premultiplied_alpha(&self) -> bool {
let max_t = T::DEFAULT_MAX_VALUE;
let pixel = [self.0[0]];
let alpha_index = self.0.len() - 1;
let alpha = self.0[alpha_index];
pixel[0] < max_t.saturating_mul(&alpha)
}
fn unpremultiply_alpha(&self) -> Self {
let max_t = T::DEFAULT_MAX_VALUE;
let mut pixel = [self.0[0], self.0[1]];
let alpha_index = pixel.len() - 1;
let alpha = pixel[alpha_index];
for rgb in pixel.iter_mut().take(alpha_index) {
*rgb = NumCast::from(
(rgb.to_f32().unwrap()
/ (alpha.to_f32().unwrap() / max_t.to_f32().unwrap()))
.round(),
)
.unwrap();
}
LumaA([pixel[0], pixel[alpha_index]])
}
}
fn is_premultiplied_alpha<I, P, S>(image: &I) -> bool
where
I: GenericImageView<Pixel = P>,
P: Pixel<Subpixel = S> + UnpremultiplyAlpha + 'static,
S: Primitive + 'static,
{
image
.pixels()
.any(|(_, _, pixel)| pixel.is_premultiplied_alpha())
}
fn process_unpremultiply_alpha<I, P, S>(image: &I) -> ImageBuffer<P, Vec<S>>
where
I: GenericImageView<Pixel = P>,
P: Pixel<Subpixel = S> + UnpremultiplyAlpha + 'static,
S: Primitive + 'static,
{
let (width, height) = image.dimensions();
let mut out = ImageBuffer::new(width, height);
for (x, y, pixel) in image.pixels() {
let pixel = pixel.unpremultiply_alpha();
out.put_pixel(x, y, pixel);
}
out
}
/// Invert the premultiplied alpha channel of the image.
pub(crate) fn unpremultiply_alpha(
image: DynamicImage,
) -> Result<DynamicImage, CanvasError> {
match image {
DynamicImage::ImageLumaA8(image) => Ok(if is_premultiplied_alpha(&image) {
process_unpremultiply_alpha(&image).into()
} else {
image.into()
}),
DynamicImage::ImageLumaA16(image) => {
Ok(if is_premultiplied_alpha(&image) {
process_unpremultiply_alpha(&image).into()
} else {
image.into()
})
}
DynamicImage::ImageRgba8(image) => Ok(if is_premultiplied_alpha(&image) {
process_unpremultiply_alpha(&image).into()
} else {
image.into()
}),
DynamicImage::ImageRgba16(image) => Ok(if is_premultiplied_alpha(&image) {
process_unpremultiply_alpha(&image).into()
} else {
image.into()
}),
DynamicImage::ImageRgb32F(_) => {
Err(CanvasError::UnsupportedColorType(image.color()))
}
DynamicImage::ImageRgba32F(_) => {
Err(CanvasError::UnsupportedColorType(image.color()))
}
// If the image does not have an alpha channel, return the image as is.
_ => Ok(image),
}
}
pub(crate) trait SliceToPixel {
fn slice_to_pixel(pixel: &[u8]) -> Self;
}
impl<T: Primitive + Pod> SliceToPixel for Luma<T> {
fn slice_to_pixel(pixel: &[u8]) -> Self {
let pixel: &[T] = cast_slice(pixel);
let pixel = [pixel[0]];
Luma(pixel)
}
}
impl<T: Primitive + Pod> SliceToPixel for LumaA<T> {
fn slice_to_pixel(pixel: &[u8]) -> Self {
let pixel: &[T] = cast_slice(pixel);
let pixel = [pixel[0], pixel[1]];
LumaA(pixel)
}
}
impl<T: Primitive + Pod> SliceToPixel for Rgb<T> {
fn slice_to_pixel(pixel: &[u8]) -> Self {
let pixel: &[T] = cast_slice(pixel);
let pixel = [pixel[0], pixel[1], pixel[2]];
Rgb(pixel)
}
}
impl<T: Primitive + Pod> SliceToPixel for Rgba<T> {
fn slice_to_pixel(pixel: &[u8]) -> Self {
let pixel: &[T] = cast_slice(pixel);
let pixel = [pixel[0], pixel[1], pixel[2], pixel[3]];
Rgba(pixel)
}
}
pub(crate) trait TransformColorProfile {
fn transform_color_profile<P, S>(
&mut self,
transformer: &Transform<u8, u8>,
) -> P
where
P: Pixel<Subpixel = S> + SliceToPixel + 'static,
S: Primitive + 'static;
}
macro_rules! impl_transform_color_profile {
($type:ty) => {
impl TransformColorProfile for $type {
fn transform_color_profile<P, S>(
&mut self,
transformer: &Transform<u8, u8>,
) -> P
where
P: Pixel<Subpixel = S> + SliceToPixel + 'static,
S: Primitive + 'static,
{
let mut pixel = cast_slice_mut(self.0.as_mut_slice());
transformer.transform_in_place(&mut pixel);
P::slice_to_pixel(&pixel)
}
}
};
}
impl_transform_color_profile!(Luma<u8>);
impl_transform_color_profile!(Luma<u16>);
impl_transform_color_profile!(LumaA<u8>);
impl_transform_color_profile!(LumaA<u16>);
impl_transform_color_profile!(Rgb<u8>);
impl_transform_color_profile!(Rgb<u16>);
impl_transform_color_profile!(Rgba<u8>);
impl_transform_color_profile!(Rgba<u16>);
fn process_icc_profile_conversion<I, P, S>(
image: &I,
color: ColorType,
input_icc_profile: Profile,
output_icc_profile: Profile,
) -> Result<ImageBuffer<P, Vec<S>>, CanvasError>
where
I: GenericImageView<Pixel = P>,
P: Pixel<Subpixel = S> + SliceToPixel + TransformColorProfile + 'static,
S: Primitive + 'static,
{
let (width, height) = image.dimensions();
let mut out = ImageBuffer::new(width, height);
let pixel_format = match color {
ColorType::L8 => Ok(PixelFormat::GRAY_8),
ColorType::L16 => Ok(PixelFormat::GRAY_16),
ColorType::La8 => Ok(PixelFormat::GRAYA_8),
ColorType::La16 => Ok(PixelFormat::GRAYA_16),
ColorType::Rgb8 => Ok(PixelFormat::RGB_8),
ColorType::Rgb16 => Ok(PixelFormat::RGB_16),
ColorType::Rgba8 => Ok(PixelFormat::RGBA_8),
ColorType::Rgba16 => Ok(PixelFormat::RGBA_16),
_ => Err(CanvasError::UnsupportedColorType(color)),
}?;
let transformer = Transform::new(
&input_icc_profile,
pixel_format,
&output_icc_profile,
pixel_format,
output_icc_profile.header_rendering_intent(),
)
.map_err(CanvasError::Lcms)?;
for (x, y, mut pixel) in image.pixels() {
let pixel = pixel.transform_color_profile(&transformer);
out.put_pixel(x, y, pixel);
}
Ok(out)
}
/// Convert the color space of the image from the ICC profile to sRGB.
pub(crate) fn to_srgb_from_icc_profile(
image: DynamicImage,
icc_profile: Option<Vec<u8>>,
) -> Result<DynamicImage, CanvasError> {
match icc_profile {
// If there is no color profile information, return the image as is.
None => Ok(image),
Some(icc_profile) => match Profile::new_icc(&icc_profile) {
// If the color profile information is invalid, return the image as is.
Err(_) => Ok(image),
Ok(icc_profile) => {
let srgb_icc_profile = Profile::new_srgb();
let color = image.color();
match image {
DynamicImage::ImageLuma8(image) => Ok(
process_icc_profile_conversion(
&image,
color,
icc_profile,
srgb_icc_profile,
)?
.into(),
),
DynamicImage::ImageLuma16(image) => Ok(
process_icc_profile_conversion(
&image,
color,
icc_profile,
srgb_icc_profile,
)?
.into(),
),
DynamicImage::ImageLumaA8(image) => Ok(
process_icc_profile_conversion(
&image,
color,
icc_profile,
srgb_icc_profile,
)?
.into(),
),
DynamicImage::ImageLumaA16(image) => Ok(
process_icc_profile_conversion(
&image,
color,
icc_profile,
srgb_icc_profile,
)?
.into(),
),
DynamicImage::ImageRgb8(image) => Ok(
process_icc_profile_conversion(
&image,
color,
icc_profile,
srgb_icc_profile,
)?
.into(),
),
DynamicImage::ImageRgb16(image) => Ok(
process_icc_profile_conversion(
&image,
color,
icc_profile,
srgb_icc_profile,
)?
.into(),
),
DynamicImage::ImageRgba8(image) => Ok(
process_icc_profile_conversion(
&image,
color,
icc_profile,
srgb_icc_profile,
)?
.into(),
),
DynamicImage::ImageRgba16(image) => Ok(
process_icc_profile_conversion(
&image,
color,
icc_profile,
srgb_icc_profile,
)?
.into(),
),
DynamicImage::ImageRgb32F(_) => {
Err(CanvasError::UnsupportedColorType(image.color()))
}
DynamicImage::ImageRgba32F(_) => {
Err(CanvasError::UnsupportedColorType(image.color()))
}
_ => Err(CanvasError::UnsupportedColorType(image.color())),
}
}
},
}
}
/// Create an image buffer from raw bytes.
fn process_image_buffer_from_raw_bytes<P, S>(
width: u32,
height: u32,
buffer: &[u8],
bytes_per_pixel: usize,
) -> ImageBuffer<P, Vec<S>>
where
P: Pixel<Subpixel = S> + SliceToPixel + 'static,
S: Primitive + 'static,
{
let mut out = ImageBuffer::new(width, height);
for (index, buffer) in buffer.chunks_exact(bytes_per_pixel).enumerate() {
let pixel = P::slice_to_pixel(buffer);
out.put_pixel(index as u32, index as u32, pixel);
}
out
}
pub(crate) fn create_image_from_raw_bytes(
width: u32,
height: u32,
buffer: &[u8],
) -> Result<DynamicImage, CanvasError> {
let total_pixels = (width * height) as usize;
// avoid dividing by zero
let bytes_per_pixel = buffer
.len()
.checked_div(total_pixels)
.ok_or(CanvasError::InvalidSizeZero(width, height))?;
// map the bytes-per-pixel value to the image's color type
// https://github.com/image-rs/image/blob/2c986d353333d2604f0c3f1fcef262cc763c0001/src/color.rs#L38-L49
match bytes_per_pixel {
1 => Ok(DynamicImage::ImageLuma8(
process_image_buffer_from_raw_bytes(
width,
height,
buffer,
bytes_per_pixel,
),
)),
2 => Ok(
// NOTE: ImageLumaA8 also has 2 bytes per pixel.
DynamicImage::ImageLuma16(process_image_buffer_from_raw_bytes(
width,
height,
buffer,
bytes_per_pixel,
)),
),
3 => Ok(DynamicImage::ImageRgb8(
process_image_buffer_from_raw_bytes(
width,
height,
buffer,
bytes_per_pixel,
),
)),
4 => Ok(
// NOTE: ImageLumaA16 also has 4 bytes per pixel.
DynamicImage::ImageRgba8(process_image_buffer_from_raw_bytes(
width,
height,
buffer,
bytes_per_pixel,
)),
),
6 => Ok(DynamicImage::ImageRgb16(
process_image_buffer_from_raw_bytes(
width,
height,
buffer,
bytes_per_pixel,
),
)),
8 => Ok(DynamicImage::ImageRgba16(
process_image_buffer_from_raw_bytes(
width,
height,
buffer,
bytes_per_pixel,
),
)),
12 => Err(CanvasError::UnsupportedColorType(ColorType::Rgb32F)),
16 => Err(CanvasError::UnsupportedColorType(ColorType::Rgba32F)),
_ => Err(CanvasError::UnsupportedColorType(ColorType::L8)),
}
}
#[cfg(test)]
mod tests {
use image::Rgba;
use super::*;
#[test]
fn test_premultiply_alpha() {
let rgba = Rgba::<u8>([255, 128, 0, 128]);
let rgba = rgba.premultiply_alpha();
assert_eq!(rgba, Rgba::<u8>([128, 64, 0, 128]));
let rgba = Rgba::<u8>([255, 255, 255, 255]);
let rgba = rgba.premultiply_alpha();
assert_eq!(rgba, Rgba::<u8>([255, 255, 255, 255]));
}
#[test]
fn test_unpremultiply_alpha() {
let rgba = Rgba::<u8>([127, 0, 0, 127]);
let rgba = rgba.unpremultiply_alpha();
assert_eq!(rgba, Rgba::<u8>([255, 0, 0, 127]));
}
#[test]
fn test_process_image_buffer_from_raw_bytes() {
let buffer = &[255, 255, 0, 0, 0, 0, 255, 255];
let color = ColorType::Rgba16;
let bytes_per_pixel = color.bytes_per_pixel() as usize;
let image = DynamicImage::ImageRgba16(process_image_buffer_from_raw_bytes(
1,
1,
buffer,
bytes_per_pixel,
))
.to_rgba16();
assert_eq!(image.get_pixel(0, 0), &Rgba::<u16>([65535, 0, 0, 65535]));
}
}


@ -42,7 +42,7 @@ type ResizeQuality = "high" | "low" | "medium" | "pixelated";
 * used to create an `ImageBitmap`.
 *
 * @category Canvas */
-type ImageBitmapSource = Blob | ImageData;
+type ImageBitmapSource = Blob | ImageData | ImageBitmap;
/**
 * The options of {@linkcode createImageBitmap}.

ext/canvas/lib.rs

@ -2,158 +2,49 @@
use std::path::PathBuf;
-use deno_core::op2;
-use deno_core::ToJsBuffer;
-use image::imageops::FilterType;
+mod image_ops;
+mod op_create_image_bitmap;
use image::ColorType;
-use image::ImageDecoder;
-use image::Pixel;
-use image::RgbaImage;
-use serde::Deserialize;
-use serde::Serialize;
+use op_create_image_bitmap::op_create_image_bitmap;
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum CanvasError {
/// Image formats with 32-bit depth are currently not supported for the following reasons:
/// - OpenEXR, for example, is not covered by the spec.
/// - JPEG XL is supported by WebKit, but it cannot be called a standard today.
///   https://github.com/whatwg/mimesniff/issues/143
#[class(type)]
-#[error("Color type '{0:?}' not supported")]
+#[error("Unsupported color type and bit depth: '{0:?}'")]
UnsupportedColorType(ColorType),
#[class("DOMExceptionInvalidStateError")]
#[error("Cannot decode image '{0}'")]
InvalidImage(image::ImageError),
#[class("DOMExceptionInvalidStateError")]
#[error("The chunk data is not big enough with the specified width: {0} and height: {1}")]
NotBigEnoughChunk(u32, u32),
#[class("DOMExceptionInvalidStateError")]
#[error("The width: {0} or height: {1} could not be zero")]
InvalidSizeZero(u32, u32),
#[class(generic)]
#[error(transparent)]
Lcms(#[from] lcms2::Error),
#[class(generic)]
#[error(transparent)]
Image(#[from] image::ImageError),
}
#[derive(Debug, Deserialize)] impl CanvasError {
#[serde(rename_all = "snake_case")] /// Convert an [`image::ImageError`] to an [`CanvasError::InvalidImage`].
enum ImageResizeQuality { fn image_error_to_invalid_image(error: image::ImageError) -> Self {
Pixelated, CanvasError::InvalidImage(error)
Low,
Medium,
High,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ImageProcessArgs {
width: u32,
height: u32,
surface_width: u32,
surface_height: u32,
input_x: i64,
input_y: i64,
output_width: u32,
output_height: u32,
resize_quality: ImageResizeQuality,
flip_y: bool,
premultiply: Option<bool>,
}
#[op2]
#[serde]
fn op_image_process(
#[buffer] buf: &[u8],
#[serde] args: ImageProcessArgs,
) -> ToJsBuffer {
let view =
RgbaImage::from_vec(args.width, args.height, buf.to_vec()).unwrap();
let surface = if !(args.width == args.surface_width
&& args.height == args.surface_height
&& args.input_x == 0
&& args.input_y == 0)
{
let mut surface = RgbaImage::new(args.surface_width, args.surface_height);
image::imageops::overlay(&mut surface, &view, args.input_x, args.input_y);
surface
} else {
view
};
let filter_type = match args.resize_quality {
ImageResizeQuality::Pixelated => FilterType::Nearest,
ImageResizeQuality::Low => FilterType::Triangle,
ImageResizeQuality::Medium => FilterType::CatmullRom,
ImageResizeQuality::High => FilterType::Lanczos3,
};
let mut image_out = image::imageops::resize(
&surface,
args.output_width,
args.output_height,
filter_type,
);
if args.flip_y {
image::imageops::flip_vertical_in_place(&mut image_out);
} }
// ignore 9.
if let Some(premultiply) = args.premultiply {
let is_not_premultiplied = image_out.pixels().any(|pixel| {
(pixel.0[0].max(pixel.0[1]).max(pixel.0[2])) > (255 * pixel.0[3])
});
if premultiply {
if is_not_premultiplied {
for pixel in image_out.pixels_mut() {
let alpha = pixel.0[3];
pixel.apply_without_alpha(|channel| {
(channel as f32 * (alpha as f32 / 255.0)) as u8
})
}
}
} else if !is_not_premultiplied {
for pixel in image_out.pixels_mut() {
let alpha = pixel.0[3];
pixel.apply_without_alpha(|channel| {
(channel as f32 / (alpha as f32 / 255.0)) as u8
})
}
}
}
image_out.to_vec().into()
}
#[derive(Debug, Serialize)]
struct DecodedPng {
data: ToJsBuffer,
width: u32,
height: u32,
}
#[op2]
#[serde]
fn op_image_decode_png(
#[buffer] buf: &[u8],
) -> Result<DecodedPng, CanvasError> {
let png = image::codecs::png::PngDecoder::new(buf)?;
let (width, height) = png.dimensions();
// TODO(@crowlKats): maybe use DynamicImage https://docs.rs/image/0.24.7/image/enum.DynamicImage.html ?
if png.color_type() != ColorType::Rgba8 {
return Err(CanvasError::UnsupportedColorType(png.color_type()));
}
// read_image will assert that the buffer is the correct size, so we need to fill it with zeros
let mut png_data = vec![0_u8; png.total_bytes() as usize];
png.read_image(&mut png_data)?;
Ok(DecodedPng {
data: png_data.into(),
width,
height,
})
} }
deno_core::extension!(
deno_canvas,
deps = [deno_webidl, deno_web, deno_webgpu],
-ops = [op_image_process, op_image_decode_png],
+ops = [op_create_image_bitmap],
lazy_loaded_esm = ["01_image.js"],
);

ext/canvas/op_create_image_bitmap.rs (new file)

@ -0,0 +1,557 @@
// Copyright 2018-2025 the Deno authors. MIT license.
use std::io::BufReader;
use std::io::Cursor;
use deno_core::op2;
use deno_core::JsBuffer;
use deno_core::ToJsBuffer;
use image::codecs::bmp::BmpDecoder;
use image::codecs::gif::GifDecoder;
use image::codecs::ico::IcoDecoder;
use image::codecs::jpeg::JpegDecoder;
use image::codecs::png::PngDecoder;
use image::codecs::webp::WebPDecoder;
use image::imageops::overlay;
use image::imageops::FilterType;
use image::metadata::Orientation;
use image::DynamicImage;
use image::ImageDecoder;
use image::RgbaImage;
use crate::image_ops::create_image_from_raw_bytes;
use crate::image_ops::premultiply_alpha as process_premultiply_alpha;
use crate::image_ops::to_srgb_from_icc_profile;
use crate::image_ops::unpremultiply_alpha;
use crate::CanvasError;
#[derive(Debug, PartialEq)]
enum ImageBitmapSource {
Blob,
ImageData,
ImageBitmap,
}
#[derive(Debug, PartialEq)]
enum ImageOrientation {
FlipY,
FromImage,
}
#[derive(Debug, PartialEq)]
enum PremultiplyAlpha {
Default,
Premultiply,
None,
}
#[derive(Debug, PartialEq)]
enum ColorSpaceConversion {
Default,
None,
}
#[derive(Debug, PartialEq)]
enum ResizeQuality {
Pixelated,
Low,
Medium,
High,
}
#[derive(Debug, PartialEq)]
enum MimeType {
NoMatch,
Png,
Jpeg,
Gif,
Bmp,
Ico,
Webp,
}
type DecodeBitmapDataReturn =
(DynamicImage, u32, u32, Option<Orientation>, Option<Vec<u8>>);
fn decode_bitmap_data(
buf: &[u8],
width: u32,
height: u32,
image_bitmap_source: &ImageBitmapSource,
mime_type: MimeType,
) -> Result<DecodeBitmapDataReturn, CanvasError> {
let (image, width, height, orientation, icc_profile) =
match image_bitmap_source {
ImageBitmapSource::Blob => {
//
// About the animated image
// > Blob .4
// > ... If this is an animated image, imageBitmap's bitmap data must only be taken from
// > the default image of the animation (the one that the format defines is to be used when animation is
// > not supported or is disabled), or, if there is no such image, the first frame of the animation.
// https://html.spec.whatwg.org/multipage/imagebitmap-and-animations.html
//
// See also browser implementations (the Gecko and WebKit implementations are hard to read):
// https://source.chromium.org/chromium/chromium/src/+/bdbc054a6cabbef991904b5df9066259505cc686:third_party/blink/renderer/platform/image-decoders/image_decoder.h;l=175-189
//
let (image, orientation, icc_profile) = match mime_type {
MimeType::Png => {
// If PngDecoder decodes an animated image, it returns the default image if one is set, or the first frame if not.
let mut decoder = PngDecoder::new(BufReader::new(Cursor::new(buf)))
.map_err(CanvasError::image_error_to_invalid_image)?;
let orientation = decoder.orientation()?;
let icc_profile = decoder.icc_profile()?;
(
DynamicImage::from_decoder(decoder)
.map_err(CanvasError::image_error_to_invalid_image)?,
orientation,
icc_profile,
)
}
MimeType::Jpeg => {
let mut decoder =
JpegDecoder::new(BufReader::new(Cursor::new(buf)))
.map_err(CanvasError::image_error_to_invalid_image)?;
let orientation = decoder.orientation()?;
let icc_profile = decoder.icc_profile()?;
(
DynamicImage::from_decoder(decoder)
.map_err(CanvasError::image_error_to_invalid_image)?,
orientation,
icc_profile,
)
}
MimeType::Gif => {
// The GifDecoder decodes the first frame.
let mut decoder = GifDecoder::new(BufReader::new(Cursor::new(buf)))
.map_err(CanvasError::image_error_to_invalid_image)?;
let orientation = decoder.orientation()?;
let icc_profile = decoder.icc_profile()?;
(
DynamicImage::from_decoder(decoder)
.map_err(CanvasError::image_error_to_invalid_image)?,
orientation,
icc_profile,
)
}
MimeType::Bmp => {
let mut decoder = BmpDecoder::new(BufReader::new(Cursor::new(buf)))
.map_err(CanvasError::image_error_to_invalid_image)?;
let orientation = decoder.orientation()?;
let icc_profile = decoder.icc_profile()?;
(
DynamicImage::from_decoder(decoder)
.map_err(CanvasError::image_error_to_invalid_image)?,
orientation,
icc_profile,
)
}
MimeType::Ico => {
let mut decoder = IcoDecoder::new(BufReader::new(Cursor::new(buf)))
.map_err(CanvasError::image_error_to_invalid_image)?;
let orientation = decoder.orientation()?;
let icc_profile = decoder.icc_profile()?;
(
DynamicImage::from_decoder(decoder)
.map_err(CanvasError::image_error_to_invalid_image)?,
orientation,
icc_profile,
)
}
MimeType::Webp => {
// The WebPDecoder decodes the first frame.
let mut decoder =
WebPDecoder::new(BufReader::new(Cursor::new(buf)))
.map_err(CanvasError::image_error_to_invalid_image)?;
let orientation = decoder.orientation()?;
let icc_profile = decoder.icc_profile()?;
(
DynamicImage::from_decoder(decoder)
.map_err(CanvasError::image_error_to_invalid_image)?,
orientation,
icc_profile,
)
}
// This arm is unreachable: for a Blob source, the JavaScript side has already rejected a missing or unsupported MIME type.
MimeType::NoMatch => unreachable!(),
};
let width = image.width();
let height = image.height();
(image, width, height, Some(orientation), icc_profile)
}
ImageBitmapSource::ImageData => {
// > 4.12.5.1.15 Pixel manipulation
// > imagedata.data
// > Returns the one-dimensional array containing the data in RGBA order, as integers in the range 0 to 255.
// https://html.spec.whatwg.org/multipage/canvas.html#pixel-manipulation
let image = match RgbaImage::from_raw(width, height, buf.into()) {
Some(image) => image.into(),
None => {
return Err(CanvasError::NotBigEnoughChunk(width, height));
}
};
(image, width, height, None, None)
}
ImageBitmapSource::ImageBitmap => {
let image = create_image_from_raw_bytes(width, height, buf)?;
(image, width, height, None, None)
}
};
Ok((image, width, height, orientation, icc_profile))
}
/// The spec is not clear about how the color space conversion should be handled.
///
/// Interpreting the spec text in light of existing implementations and wpt results gives the following behavior.
///
/// Let val be the value of the colorSpaceConversion member of options, then run these substeps:
/// 1. If val is "default", convert to the sRGB color space.
/// 2. If val is "none", use the decoded image data as is.
///
/// related issue in whatwg
/// https://github.com/whatwg/html/issues/10578
///
/// reference in wpt
/// https://github.com/web-platform-tests/wpt/blob/d575dc75ede770df322fbc5da3112dcf81f192ec/html/canvas/element/manual/imagebitmap/createImageBitmap-colorSpaceConversion.html#L18
/// https://wpt.live/html/canvas/element/manual/imagebitmap/createImageBitmap-colorSpaceConversion.html
fn apply_color_space_conversion(
image: DynamicImage,
icc_profile: Option<Vec<u8>>,
color_space_conversion: &ColorSpaceConversion,
) -> Result<DynamicImage, CanvasError> {
match color_space_conversion {
// return the decoded image as is.
ColorSpaceConversion::None => Ok(image),
ColorSpaceConversion::Default => {
to_srgb_from_icc_profile(image, icc_profile)
}
}
}
fn apply_premultiply_alpha(
image: DynamicImage,
image_bitmap_source: &ImageBitmapSource,
premultiply_alpha: &PremultiplyAlpha,
) -> Result<DynamicImage, CanvasError> {
match premultiply_alpha {
// 1.
PremultiplyAlpha::Default => Ok(image),
// https://html.spec.whatwg.org/multipage/canvas.html#convert-from-premultiplied
// 2.
PremultiplyAlpha::Premultiply => process_premultiply_alpha(image),
// 3.
PremultiplyAlpha::None => {
// NOTE: It's not clear how to handle the case of ImageData.
// https://issues.chromium.org/issues/339759426
// https://github.com/whatwg/html/issues/5365
if *image_bitmap_source == ImageBitmapSource::ImageData {
return Ok(image);
}
unpremultiply_alpha(image)
}
}
}
#[derive(Debug, PartialEq)]
struct ParsedArgs {
resize_width: Option<u32>,
resize_height: Option<u32>,
sx: Option<i32>,
sy: Option<i32>,
sw: Option<i32>,
sh: Option<i32>,
image_orientation: ImageOrientation,
premultiply_alpha: PremultiplyAlpha,
color_space_conversion: ColorSpaceConversion,
resize_quality: ResizeQuality,
image_bitmap_source: ImageBitmapSource,
mime_type: MimeType,
}
#[allow(clippy::too_many_arguments)]
fn parse_args(
sx: i32,
sy: i32,
sw: i32,
sh: i32,
image_orientation: u8,
premultiply_alpha: u8,
color_space_conversion: u8,
resize_width: u32,
resize_height: u32,
resize_quality: u8,
image_bitmap_source: u8,
mime_type: u8,
) -> ParsedArgs {
let resize_width = if resize_width == 0 {
None
} else {
Some(resize_width)
};
let resize_height = if resize_height == 0 {
None
} else {
Some(resize_height)
};
let sx = if sx == 0 { None } else { Some(sx) };
let sy = if sy == 0 { None } else { Some(sy) };
let sw = if sw == 0 { None } else { Some(sw) };
let sh = if sh == 0 { None } else { Some(sh) };
// The wildcard arms below are unreachable; the values are validated on the JavaScript side.
let image_orientation = match image_orientation {
0 => ImageOrientation::FromImage,
1 => ImageOrientation::FlipY,
_ => unreachable!(),
};
let premultiply_alpha = match premultiply_alpha {
0 => PremultiplyAlpha::Default,
1 => PremultiplyAlpha::Premultiply,
2 => PremultiplyAlpha::None,
_ => unreachable!(),
};
let color_space_conversion = match color_space_conversion {
0 => ColorSpaceConversion::Default,
1 => ColorSpaceConversion::None,
_ => unreachable!(),
};
let resize_quality = match resize_quality {
0 => ResizeQuality::Low,
1 => ResizeQuality::Pixelated,
2 => ResizeQuality::Medium,
3 => ResizeQuality::High,
_ => unreachable!(),
};
let image_bitmap_source = match image_bitmap_source {
0 => ImageBitmapSource::Blob,
1 => ImageBitmapSource::ImageData,
2 => ImageBitmapSource::ImageBitmap,
_ => unreachable!(),
};
let mime_type = match mime_type {
0 => MimeType::NoMatch,
1 => MimeType::Png,
2 => MimeType::Jpeg,
3 => MimeType::Gif,
4 => MimeType::Bmp,
5 => MimeType::Ico,
6 => MimeType::Webp,
_ => unreachable!(),
};
ParsedArgs {
resize_width,
resize_height,
sx,
sy,
sw,
sh,
image_orientation,
premultiply_alpha,
color_space_conversion,
resize_quality,
image_bitmap_source,
mime_type,
}
}
#[op2]
#[serde]
#[allow(clippy::too_many_arguments)]
pub(super) fn op_create_image_bitmap(
#[buffer] buf: JsBuffer,
width: u32,
height: u32,
sx: i32,
sy: i32,
sw: i32,
sh: i32,
image_orientation: u8,
premultiply_alpha: u8,
color_space_conversion: u8,
resize_width: u32,
resize_height: u32,
resize_quality: u8,
image_bitmap_source: u8,
mime_type: u8,
) -> Result<(ToJsBuffer, u32, u32), CanvasError> {
let ParsedArgs {
resize_width,
resize_height,
sx,
sy,
sw,
sh,
image_orientation,
premultiply_alpha,
color_space_conversion,
resize_quality,
image_bitmap_source,
mime_type,
} = parse_args(
sx,
sy,
sw,
sh,
image_orientation,
premultiply_alpha,
color_space_conversion,
resize_width,
resize_height,
resize_quality,
image_bitmap_source,
mime_type,
);
// 6. Switch on image:
let (image, width, height, orientation, icc_profile) =
decode_bitmap_data(&buf, width, height, &image_bitmap_source, mime_type)?;
// crop bitmap data
// 2.
#[rustfmt::skip]
let source_rectangle: [[i32; 2]; 4] =
if let (Some(sx), Some(sy), Some(sw), Some(sh)) = (sx, sy, sw, sh) {
[
[sx, sy],
[sx + sw, sy],
[sx + sw, sy + sh],
[sx, sy + sh]
]
} else {
[
[0, 0],
[width as i32, 0],
[width as i32, height as i32],
[0, height as i32],
]
};
/*
* The cropping works differently than the spec specifies:
* The spec states to create an infinite surface and place the top-left corner
* of the image at (0, 0) and crop based on sourceRectangle.
*
* We instead create a surface the size of sourceRectangle, and position
* the image at the correct location, which is the inverse of the x & y of
* sourceRectangle's top-left corner.
*/
let input_x = -(source_rectangle[0][0] as i64);
let input_y = -(source_rectangle[0][1] as i64);
let surface_width = (source_rectangle[1][0] - source_rectangle[0][0]) as u32;
let surface_height = (source_rectangle[3][1] - source_rectangle[0][1]) as u32;
// 3.
let output_width = if let Some(resize_width) = resize_width {
resize_width
} else if let Some(resize_height) = resize_height {
(surface_width * resize_height).div_ceil(surface_height)
} else {
surface_width
};
// 4.
let output_height = if let Some(resize_height) = resize_height {
resize_height
} else if let Some(resize_width) = resize_width {
(surface_height * resize_width).div_ceil(surface_width)
} else {
surface_height
};
// 5.
let image = if !(width == surface_width
&& height == surface_height
&& input_x == 0
&& input_y == 0)
{
let mut surface =
DynamicImage::new(surface_width, surface_height, image.color());
overlay(&mut surface, &image, input_x, input_y);
surface
} else {
image
};
// 7.
let filter_type = match resize_quality {
ResizeQuality::Pixelated => FilterType::Nearest,
ResizeQuality::Low => FilterType::Triangle,
ResizeQuality::Medium => FilterType::CatmullRom,
ResizeQuality::High => FilterType::Lanczos3,
};
// should use resize_exact
// https://github.com/image-rs/image/issues/1220#issuecomment-632060015
let mut image = image.resize_exact(output_width, output_height, filter_type);
// 8.
let image = match image_bitmap_source {
ImageBitmapSource::Blob => {
// Note: According to browser behavior and wpt results, if the Exif data contains an image orientation,
// that rotation is applied before the imageOrientation option is honored.
// This is currently documented in MDN but not stated in the spec.
// https://github.com/mdn/content/pull/34366
// SAFETY: The orientation is always Some if the image is from a Blob.
let orientation = orientation.unwrap();
DynamicImage::apply_orientation(&mut image, orientation);
match image_orientation {
ImageOrientation::FlipY => image.flipv(),
ImageOrientation::FromImage => image,
}
}
ImageBitmapSource::ImageData | ImageBitmapSource::ImageBitmap => {
match image_orientation {
ImageOrientation::FlipY => image.flipv(),
ImageOrientation::FromImage => image,
}
}
};
// 9.
let image =
apply_color_space_conversion(image, icc_profile, &color_space_conversion)?;
// 10.
let image =
apply_premultiply_alpha(image, &image_bitmap_source, &premultiply_alpha)?;
Ok((image.into_bytes().into(), output_width, output_height))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_args() {
let parsed_args = parse_args(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
assert_eq!(
parsed_args,
ParsedArgs {
resize_width: None,
resize_height: None,
sx: None,
sy: None,
sw: None,
sh: None,
image_orientation: ImageOrientation::FromImage,
premultiply_alpha: PremultiplyAlpha::Default,
color_space_conversion: ColorSpaceConversion::Default,
resize_quality: ResizeQuality::Low,
image_bitmap_source: ImageBitmapSource::Blob,
mime_type: MimeType::NoMatch,
}
);
}
}


@ -395,6 +395,10 @@ const ImageTypePatternTable = [
/**
 * Ref: https://mimesniff.spec.whatwg.org/#image-type-pattern-matching-algorithm
 * NOTE: Some browsers support implementation-defined image formats.
 * For example, the AVIF image format is supported by all browsers today,
 * but its standardization seems to be slow going.
 * See: https://github.com/whatwg/mimesniff/issues/143
 * @param {Uint8Array} input
 * @returns {string | undefined}
 */


@ -361,6 +361,12 @@ core.registerErrorBuilder(
return new DOMException(msg, "DataError"); return new DOMException(msg, "DataError");
}, },
); );
core.registerErrorBuilder(
"DOMExceptionInvalidStateError",
function DOMExceptionInvalidStateError(msg) {
return new DOMException(msg, "InvalidStateError");
},
);
function runtimeStart(
denoVersion,

Binary files (contents not shown):

- unnamed binary file (After: 233 B)
- unnamed binary file (After: 271 B)
- tests/testdata/image/1x1-3f-animated.gif, new file (After: 126 B)
- unnamed binary file (After: 188 B)
- tests/testdata/image/1x1-red16.png, new file (After: 77 B)
- tests/testdata/image/1x1-red32f.exr, new file
- tests/testdata/image/1x1-red8.bmp, new file (After: 126 B)
- tests/testdata/image/1x1-red8.gif, new file (After: 49 B)
- tests/testdata/image/1x1-red8.ico, new file (After: 95 B)
- tests/testdata/image/1x1-red8.jpeg, new file (After: 631 B)
- tests/testdata/image/1x1-red8.png, new file (After: 73 B)
- tests/testdata/image/1x1-red8.webp, new file (After: 34 B)
- unnamed binary file (Before: 109 B, After: 86 B)
- tests/testdata/image/squares_6.jpg, new file (After: 1.2 KiB)
- unnamed binary file (After: 5.2 KiB)

@ -1,6 +1,8 @@
// Copyright 2018-2025 the Deno authors. MIT license.
-import { assertEquals } from "./test_util.ts";
+import { assertEquals, assertRejects } from "./test_util.ts";
+const prefix = "tests/testdata/image";
function generateNumberedData(n: number): Uint8ClampedArray {
return new Uint8ClampedArray(
@ -19,6 +21,21 @@ Deno.test(async function imageBitmapDirect() {
);
});
Deno.test(async function imageBitmapReceivesImageBitmap() {
const imageData = new Blob(
[await Deno.readFile(`${prefix}/1x1-red16.png`)],
{ type: "image/png" },
);
const imageBitmap1 = await createImageBitmap(imageData);
const imageBitmap2 = await createImageBitmap(imageBitmap1);
assertEquals(
// @ts-ignore: Deno[Deno.internal].core allowed
Deno[Deno.internal].getBitmapData(imageBitmap1),
// @ts-ignore: Deno[Deno.internal].core allowed
Deno[Deno.internal].getBitmapData(imageBitmap2),
);
});
Deno.test(async function imageBitmapCrop() {
const data = generateNumberedData(3 * 3);
const imageData = new ImageData(data, 3, 3);
@ -37,8 +54,8 @@ Deno.test(async function imageBitmapCropPartialNegative() {
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 1
]));
});
@ -49,11 +66,11 @@ Deno.test(async function imageBitmapCropGreater() {
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 4, 0, 0, 1, 5, 0, 0, 1, 6, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 7, 0, 0, 1, 8, 0, 0, 1, 9, 0, 0, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
]));
});
@ -68,36 +85,310 @@ Deno.test(async function imageBitmapScale() {
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([
1, 0, 0, 1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 3, 0, 0, 1,
1, 0, 0, 1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 3, 0, 0, 1,
1, 0, 0, 1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 3, 0, 0, 1,
1, 0, 0, 1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 3, 0, 0, 1,
1, 0, 0, 1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 3, 0, 0, 1
]));
});
Deno.test("imageOrientation", async (t) => {
await t.step('"ImageData" imageOrientation: "flipY"', async () => {
const data = generateNumberedData(9);
const imageData = new ImageData(data, 3, 3);
const imageBitmap = await createImageBitmap(imageData, {
imageOrientation: "flipY",
});
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([
7, 0, 0, 1, 8, 0, 0, 1, 9, 0, 0, 1,
4, 0, 0, 1, 5, 0, 0, 1, 6, 0, 0, 1,
1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1,
]));
});
const imageData = new Blob(
[await Deno.readFile(`${prefix}/squares_6.jpg`)],
{ type: "image/jpeg" },
);
const WIDTH = 320;
const CHANNELS = 3;
const TARGET_PIXEL_X = 40;
const START = TARGET_PIXEL_X * WIDTH * CHANNELS;
const END = START + CHANNELS;
// reference:
// https://github.com/web-platform-tests/wpt/blob/a1f4bbf4c6e1a9a861a145a34cd097ea260b5a49/html/canvas/element/manual/imagebitmap/createImageBitmap-exif-orientation.html#L30
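// (With WIDTH = 320 and CHANNELS = 3, START = 40 * 320 * 3 = 38400 and END = 38403,
// i.e. a single 3-byte RGB triplet of the decoded data. "from-image", the default
// imageOrientation, applies the JPEG's EXIF orientation before that pixel is read,
// and the "flipY" step below additionally mirrors the oriented image vertically.)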
await t.step('"Blob" imageOrientation: "from-image"', async () => {
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
const targetPixel = Deno[Deno.internal].getBitmapData(imageBitmap).slice(
START,
END,
);
assertEquals(targetPixel, new Uint8Array([253, 0, 0]));
});
// reference:
// https://github.com/web-platform-tests/wpt/blob/a1f4bbf4c6e1a9a861a145a34cd097ea260b5a49/html/canvas/element/manual/imagebitmap/createImageBitmap-exif-orientation.html#L55
await t.step('"Blob" imageOrientation: "flipY"', async () => {
const imageBitmap = await createImageBitmap(imageData, {
imageOrientation: "flipY",
});
// @ts-ignore: Deno[Deno.internal].core allowed
const targetPixel = Deno[Deno.internal].getBitmapData(imageBitmap).slice(
START,
END,
);
assertEquals(targetPixel, new Uint8Array([253, 127, 127]));
});
});
Deno.test("imageBitmapPremultiplyAlpha", async (t) => {
const imageData = new ImageData(
new Uint8ClampedArray([
255,
255,
0,
153,
]),
1,
1,
);
await t.step('"ImageData" premultiplyAlpha: "default"', async () => {
const imageBitmap = await createImageBitmap(imageData, {
premultiplyAlpha: "default",
});
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([
255, 255, 0, 153,
]));
});
await t.step('"ImageData" premultiplyAlpha: "premultiply"', async () => {
const imageBitmap = await createImageBitmap(imageData, {
premultiplyAlpha: "premultiply",
});
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([
153, 153, 0, 153
]));
});
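// ("premultiply" scales each color channel by alpha / 255, so 255 * 153 / 255 = 153
// and [255, 255, 0, 153] becomes [153, 153, 0, 153]; "none" leaves the channels untouched.)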
await t.step('"ImageData" premultiplyAlpha: "none"', async () => {
const imageBitmap = await createImageBitmap(imageData, {
premultiplyAlpha: "none",
});
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([
255, 255, 0, 153,
]));
});
await t.step('"Blob" premultiplyAlpha: "none"', async () => {
const imageData = new Blob(
[await Deno.readFile(`${prefix}/2x2-transparent8.png`)],
{ type: "image/png" },
);
const imageBitmap = await createImageBitmap(imageData, {
premultiplyAlpha: "none",
});
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([
255, 0, 0, 255, 0, 255, 0, 255,
0, 0, 255, 255, 255, 0, 0, 127
]));
});
});
Deno.test("imageBitmapFromBlob", async (t) => {
await t.step("8-bit png", async () => {
const imageData = new Blob(
[await Deno.readFile(`${prefix}/1x1-red8.png`)],
{ type: "image/png" },
);
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255]));
});
await t.step("16-bit png", async () => {
const imageData = new Blob(
[await Deno.readFile(`${prefix}/1x1-red16.png`)],
{ type: "image/png" },
);
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap),
// deno-fmt-ignore
new Uint8Array(
[
255, 255, // R
0, 0, // G
0, 0, // B
255, 255 // A
]
)
);
});
await t.step("8-bit jpeg", async () => {
const imageData = new Blob(
[await Deno.readFile(`${prefix}/1x1-red8.jpeg`)],
{ type: "image/jpeg" },
);
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([254, 0, 0]));
});
await t.step("8-bit bmp", async () => {
const imageData = new Blob(
[await Deno.readFile(`${prefix}/1x1-red8.bmp`)],
{ type: "image/bmp" },
);
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255]));
});
await t.step("8-bit gif", async () => {
const imageData = new Blob(
[await Deno.readFile(`${prefix}/1x1-red8.gif`)],
{ type: "image/gif" },
);
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255]));
});
await t.step("8-bit webp", async () => {
const imageData = new Blob(
[await Deno.readFile(`${prefix}/1x1-red8.webp`)],
{ type: "image/webp" },
);
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255]));
});
await t.step("8-bit ico", async () => {
const imageData = new Blob(
[await Deno.readFile(`${prefix}/1x1-red8.ico`)],
{ type: "image/x-icon" },
);
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255]));
});
await t.step("flotat-32-bit exr", async () => {
// image/x-exr is a known mimetype for OpenEXR
// https://www.digipres.org/formats/sources/fdd/formats/#fdd000583
const imageData = new Blob([
await Deno.readFile(`${prefix}/1x1-red32f.exr`),
], { type: "image/x-exr" });
await assertRejects(() => createImageBitmap(imageData), DOMException);
});
});
Deno.test("imageBitmapFromBlobAnimatedImage", async (t) => {
await t.step("animated png has a default image", async () => {
// the chunk of animated apng is below (2 frames, 1x1, 8-bit, RGBA), default [255, 0, 0, 255] image
// [ 0, 255, 0, 255,
// 0, 0, 255, 255 ]
const imageData = new Blob([
await Deno.readFile(`${prefix}/1x1-2f-animated-has-def.png`),
], { type: "image/png" });
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255]));
});
await t.step("animated png does not have any default image", async () => {
// the chunk of animated apng is below (3 frames, 1x1, 8-bit, RGBA)
// [ 255, 0, 0, 255,
// 0, 255, 0, 255,
// 0, 0, 255, 255 ]
const imageData = new Blob([
await Deno.readFile(`${prefix}/1x1-3f-animated-no-def.png`),
], { type: "image/png" });
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255]));
});
await t.step("animated webp", async () => {
// the chunk of animated webp is below (3 frames, 1x1, 8-bit, RGBA)
//
// [ 255, 0, 0, 127,
// 0, 255, 0, 127,
// 0, 0, 255, 127 ]
const imageData = new Blob([
await Deno.readFile(
`${prefix}/1x1-3f-lossless-animated-semi-transparent.webp`,
),
], { type: "image/webp" });
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 127]));
});
await t.step("animated gif", async () => {
// the chunk of animated gif is below (3 frames, 1x1, 8-bit, RGBA)
// [ 255, 0, 0, 255,
// 0, 255, 0, 255,
// 0, 0, 255, 255 ]
const imageData = new Blob([
await Deno.readFile(`${prefix}/1x1-3f-animated.gif`),
], { type: "image/gif" });
const imageBitmap = await createImageBitmap(imageData);
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
assertEquals(Deno[Deno.internal].getBitmapData(imageBitmap), new Uint8Array([255, 0, 0, 255]));
});
});
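// Taken together these cases match the expected createImageBitmap behavior for
// animated inputs: use the APNG default image when one is present, otherwise fall
// back to the first frame (likewise for animated GIF and WebP).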
/**
* extract the high byte of each 16-bit sample stored in a Uint8Array
*/
function extractHighBytes(array: Uint8Array): Uint8Array {
const highBytes = new Uint8Array(array.length / 2);
for (let i = 0, j = 1; i < array.length; i++, j += 2) {
highBytes[i] = array[j];
}
return highBytes;
}
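// Usage note: extractHighBytes keeps the second byte of every two-byte pair, e.g.
// extractHighBytes(new Uint8Array([0x34, 0x12, 0x78, 0x56])) yields Uint8Array [0x12, 0x56];
// the colorspace tests below use it to compare only the high byte of each 16-bit channel.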
Deno.test("imageBitmapFromBlobColorspaceConversion", async (t) => {
// reference:
// https://github.com/web-platform-tests/wpt/blob/d575dc75ede770df322fbc5da3112dcf81f192ec/html/canvas/element/manual/imagebitmap/createImageBitmap-colorSpaceConversion.html#L18
// https://wpt.fyi/results/html/canvas/element/manual/imagebitmap/createImageBitmap-colorSpaceConversion.html?label=experimental&label=master&aligned
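// Roughly: "none" is expected to hand back the PNG's raw, unconverted sample values,
// while "default" lets the implementation honor the embedded wide-gamut ICC profile
// (here landing on plain sRGB red), which is what the WPT expectations above encode.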
await t.step('"Blob" colorSpaceConversion: "none"', async () => {
const imageData = new Blob([
await Deno.readFile(`${prefix}/wide-gamut-pattern.png`),
], { type: "image/png" });
const imageBitmap = await createImageBitmap(imageData, {
colorSpaceConversion: "none",
});
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
const firstPixel = extractHighBytes(Deno[Deno.internal].getBitmapData(imageBitmap)).slice(0, 4);
// picking the high bytes of the first pixel
assertEquals(firstPixel, new Uint8Array([123, 0, 27, 255]));
});
await t.step('"Blob" colorSpaceConversion: "default"', async () => {
const imageData = new Blob([
await Deno.readFile(`${prefix}/wide-gamut-pattern.png`),
], { type: "image/png" });
const imageBitmap = await createImageBitmap(imageData, {
colorSpaceConversion: "default",
});
// @ts-ignore: Deno[Deno.internal].core allowed
// deno-fmt-ignore
const firstPixel = extractHighBytes(Deno[Deno.internal].getBitmapData(imageBitmap)).slice(0, 4);
// picking the high bytes of the first pixel
assertEquals(firstPixel, new Uint8Array([255, 0, 0, 255]));
});
});