Allow APNG with reductions disabled (#511)
andrews05 authored Jul 5, 2023
1 parent d81236f commit 75a0f0d
Showing 7 changed files with 134 additions and 31 deletions.
4 changes: 3 additions & 1 deletion src/headers.rs
@@ -86,7 +86,9 @@ pub enum StripChunks {

impl StripChunks {
/// List of chunks that will be kept when using the `Safe` option
pub const KEEP_SAFE: [[u8; 4]; 4] = [*b"cICP", *b"iCCP", *b"sRGB", *b"pHYs"];
pub const KEEP_SAFE: [[u8; 4]; 7] = [
*b"cICP", *b"iCCP", *b"sRGB", *b"pHYs", *b"acTL", *b"fcTL", *b"fdAT",
];

pub(crate) fn keep(&self, name: &[u8; 4]) -> bool {
match &self {
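
With acTL, fcTL and fdAT added to KEEP_SAFE, the Safe strip mode no longer discards the animation control chunks. A minimal usage sketch of what this enables (assuming the public strip field on Options and the optimize_from_memory entry point exercised in the tests below; --strip safe is the CLI equivalent):

use oxipng::{optimize_from_memory, Options, StripChunks};

fn main() -> Result<(), oxipng::PngError> {
    // "animated.png" is a placeholder path; any APNG will do.
    let apng_bytes = std::fs::read("animated.png").expect("read input");
    let mut opts = Options::default();
    // Safe keeps acTL/fcTL/fdAT, so the animation survives optimization;
    // StripChunks::All would drop them and produce a plain PNG.
    opts.strip = StripChunks::Safe;
    let optimized = optimize_from_memory(&apng_bytes, &opts)?;
    std::fs::write("animated-optimized.png", optimized).expect("write output");
    Ok(())
}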
61 changes: 56 additions & 5 deletions src/lib.rs
@@ -32,6 +32,7 @@ use crate::png::PngImage;
use crate::reduction::*;
use log::{debug, info, trace, warn};
use rayon::prelude::*;
use std::borrow::Cow;
use std::fmt;
use std::fs::{copy, File, Metadata};
use std::io::{stdin, stdout, BufWriter, Read, Write};
@@ -388,7 +389,7 @@ impl RawImage {
/// Create an optimized png from the raw image data using the options provided
pub fn create_optimized_png(&self, opts: &Options) -> PngResult<Vec<u8>> {
let deadline = Arc::new(Deadline::new(opts.timeout));
let mut png = optimize_raw(self.png.clone(), opts, deadline, None)
let mut png = optimize_raw(self.png.clone(), opts, deadline.clone(), None)
.ok_or_else(|| PngError::new("Failed to optimize input data"))?;

// Process aux chunks
@@ -398,7 +399,7 @@ impl RawImage {
.filter(|c| opts.strip.keep(&c.name))
.cloned()
.collect();
postprocess_chunks(&mut png, opts, &self.png.ihdr);
postprocess_chunks(&mut png, opts, deadline, &self.png.ihdr);

Ok(png.output())
}
@@ -564,17 +565,30 @@ fn optimize_png(
debug!(" IDAT size = {} bytes", idat_original_size);
debug!(" File size = {} bytes", file_original_size);

// Check for APNG by presence of acTL chunk
let opts = if png.aux_chunks.iter().any(|c| &c.name == b"acTL") {
warn!("APNG detected, disabling all reductions");
let mut opts = opts.to_owned();
opts.interlace = None;
opts.bit_depth_reduction = false;
opts.color_type_reduction = false;
opts.palette_reduction = false;
opts.grayscale_reduction = false;
Cow::Owned(opts)
} else {
Cow::Borrowed(opts)
};
let max_size = if opts.force {
None
} else {
Some(png.estimated_output_size())
};
if let Some(new_png) = optimize_raw(raw.clone(), opts, deadline, max_size) {
if let Some(new_png) = optimize_raw(raw.clone(), &opts, deadline.clone(), max_size) {
png.raw = new_png.raw;
png.idat_data = new_png.idat_data;
}

postprocess_chunks(png, opts, &raw.ihdr);
postprocess_chunks(png, &opts, deadline, &raw.ihdr);

let output = png.output();

@@ -844,7 +858,12 @@ fn report_format(prefix: &str, png: &PngImage) {
}

/// Perform cleanup of certain chunks from the `PngData` object, after optimization has been completed
fn postprocess_chunks(png: &mut PngData, opts: &Options, orig_ihdr: &IhdrData) {
fn postprocess_chunks(
png: &mut PngData,
opts: &Options,
deadline: Arc<Deadline>,
orig_ihdr: &IhdrData,
) {
if let Some(iccp_idx) = png.aux_chunks.iter().position(|c| &c.name == b"iCCP") {
// See if we can replace an iCCP chunk with an sRGB chunk
let may_replace_iccp = opts.strip != StripChunks::None && opts.strip.keep(b"sRGB");
Expand Down Expand Up @@ -897,6 +916,38 @@ fn postprocess_chunks(png: &mut PngData, opts: &Options, orig_ihdr: &IhdrData) {
!invalid
});
}

// Find fdAT chunks and attempt to recompress them
// Note if there are multiple fdATs per frame then decompression will fail and nothing will change
let mut fdat: Vec<_> = png
.aux_chunks
.iter_mut()
.filter(|c| &c.name == b"fdAT")
.collect();
if !fdat.is_empty() {
let buffer_size = orig_ihdr.raw_data_size();
fdat.par_iter_mut()
.with_max_len(1)
.enumerate()
.for_each(|(i, c)| {
if deadline.passed() || c.data.len() <= 4 {
return;
}
if let Ok(mut data) = deflate::inflate(&c.data[4..], buffer_size).and_then(|data| {
let max_size = AtomicMin::new(Some(c.data.len() - 5));
opts.deflate.deflate(&data, &max_size)
}) {
debug!(
"Recompressed fdAT #{:<2}: {} ({} bytes decrease)",
i,
c.data.len(),
c.data.len() - 4 - data.len()
);
c.data.truncate(4);
c.data.append(&mut data);
}
})
}
}

/// Check if an image was already optimized prior to oxipng's operations
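
The APNG branch in optimize_png borrows the caller's Options untouched in the common case and clones them only when reductions have to be switched off. A standalone sketch of that Cow pattern (generic Settings type for illustration, not oxipng's actual structs):

use std::borrow::Cow;

#[derive(Clone, Debug)]
struct Settings {
    reductions: bool,
}

// Borrow the caller's settings as-is; clone and override only when an APNG
// forces reductions off, so the frame data keeps matching the IHDR format.
fn effective(settings: &Settings, is_apng: bool) -> Cow<'_, Settings> {
    if is_apng {
        let mut owned = settings.clone();
        owned.reductions = false;
        Cow::Owned(owned)
    } else {
        Cow::Borrowed(settings)
    }
}

fn main() {
    let base = Settings { reductions: true };
    println!("plain PNG: {:?}", effective(&base, false));
    println!("APNG:      {:?}", effective(&base, true));
}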
2 changes: 1 addition & 1 deletion src/main.rs
@@ -120,7 +120,7 @@ fn main() {
)
.arg(
Arg::new("strip")
.help("Strip metadata objects ['safe', 'all', or comma-separated list]")
.help("Strip metadata objects ['safe', 'all', or comma-separated list]\nCAUTION: stripping 'all' will convert APNGs to standard PNGs")
.long("strip")
.value_name("mode")
.conflicts_with("strip-safe"),
33 changes: 26 additions & 7 deletions src/png/mod.rs
@@ -7,6 +7,7 @@ use crate::interlace::{deinterlace_image, interlace_image, Interlacing};
use crate::Options;
use bitvec::bitarr;
use libdeflater::{CompressionLvl, Compressor};
use log::warn;
use rgb::ComponentSlice;
use rustc_hash::FxHashMap;
use std::fs::File;
@@ -93,8 +94,16 @@ impl PngData {
let mut aux_chunks: Vec<Chunk> = Vec::new();
while let Some(chunk) = parse_next_chunk(byte_data, &mut byte_offset, opts.fix_errors)? {
match &chunk.name {
b"IDAT" => idat_data.extend_from_slice(chunk.data),
b"acTL" => return Err(PngError::APNGNotSupported),
b"IDAT" => {
if idat_data.is_empty() {
// Keep track of where the first IDAT sits relative to other chunks
aux_chunks.push(Chunk {
name: chunk.name,
data: Vec::new(),
})
}
idat_data.extend_from_slice(chunk.data);
}
b"IHDR" | b"PLTE" | b"tRNS" => {
key_chunks.insert(chunk.name, chunk.data.to_owned());
}
@@ -104,6 +113,10 @@ impl PngData {
name: chunk.name,
data: chunk.data.to_owned(),
})
} else if chunk.name == *b"acTL" {
warn!(
"Stripping animation data from APNG - image will become standard PNG"
);
}
}
}
@@ -165,9 +178,10 @@ impl PngData {
ihdr_data.write_all(&[0]).ok(); // Filter method -- 5-way adaptive filtering
ihdr_data.write_all(&[self.raw.ihdr.interlaced as u8]).ok();
write_png_block(b"IHDR", &ihdr_data, &mut output);
// Ancillary chunks
for chunk in self
.aux_chunks
// Ancillary chunks - split into those that come before IDAT and those that come after
let mut aux_split = self.aux_chunks.split(|c| &c.name == b"IDAT");
let aux_pre = aux_split.next().unwrap();
for chunk in aux_pre
.iter()
.filter(|c| !(&c.name == b"bKGD" || &c.name == b"hIST" || &c.name == b"tRNS"))
{
@@ -202,15 +216,20 @@ impl PngData {
_ => {}
}
// Special ancillary chunks that need to come after PLTE but before IDAT
for chunk in self
.aux_chunks
for chunk in aux_pre
.iter()
.filter(|c| &c.name == b"bKGD" || &c.name == b"hIST" || &c.name == b"tRNS")
{
write_png_block(&chunk.name, &chunk.data, &mut output);
}
// IDAT data
write_png_block(b"IDAT", &self.idat_data, &mut output);
// Ancillary chunks that come after IDAT
for aux_post in aux_split {
for chunk in aux_post {
write_png_block(&chunk.name, &chunk.data, &mut output);
}
}
// Stream end
write_png_block(b"IEND", &[], &mut output);

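
output() now splits the auxiliary chunk list around the empty IDAT placeholder recorded during parsing, so chunks that originally followed the image data (an APNG's later fcTL/fdAT frames) are written back after IDAT rather than before it. A simplified sketch of that split, using plain strings in place of Chunk structs:

fn main() {
    // The empty placeholder marks where the first IDAT sat relative to the
    // other chunks; split() partitions the list into pre- and post-IDAT groups.
    let aux_chunks = ["acTL", "fcTL", "IDAT", "fcTL", "fdAT"];
    let mut parts = aux_chunks.split(|name| *name == "IDAT");
    let before_idat = parts.next().unwrap(); // ["acTL", "fcTL"] - written before the image data
    let after_idat: Vec<_> = parts.flatten().collect(); // ["fcTL", "fdAT"] - written after it
    println!("before IDAT: {:?}", before_idat);
    println!("after IDAT:  {:?}", after_idat);
}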
18 changes: 18 additions & 0 deletions src/rayon.rs
@@ -26,6 +26,12 @@ pub trait IntoParallelRefIterator<'data> {
fn par_iter(&'data self) -> Self::Iter;
}

pub trait IntoParallelRefMutIterator<'data> {
type Iter: ParallelIterator<Item = Self::Item>;
type Item: Send + 'data;
fn par_iter_mut(&'data mut self) -> Self::Iter;
}

impl<I: IntoIterator> IntoParallelIterator for I
where
I::Item: Send,
@@ -50,6 +56,18 @@ where
}
}

impl<'data, I: 'data + ?Sized> IntoParallelRefMutIterator<'data> for I
where
&'data mut I: IntoParallelIterator,
{
type Iter = <&'data mut I as IntoParallelIterator>::Iter;
type Item = <&'data mut I as IntoParallelIterator>::Item;

fn par_iter_mut(&'data mut self) -> Self::Iter {
self.into_par_iter()
}
}

impl<I: Iterator> ParallelIterator for I {}

#[allow(dead_code)]
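
The new IntoParallelRefMutIterator mirrors rayon's trait of the same name, so the fdAT recompression loop in lib.rs compiles unchanged when oxipng is built without real rayon and this single-threaded shim is used instead. An illustrative call site (written against real rayon; under the shim the same call just forwards to a plain mutable iterator):

use rayon::prelude::*;

fn main() {
    let mut sizes = vec![10usize, 20, 30];
    // With rayon this runs in parallel; with the shim it degrades to iter_mut().
    sizes.par_iter_mut().for_each(|s| *s += 1);
    assert_eq!(sizes, vec![11, 21, 31]);
}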
41 changes: 27 additions & 14 deletions src/sanity_checks.rs
@@ -1,15 +1,14 @@
use image::{DynamicImage, GenericImageView, ImageFormat, Pixel};
use image::{codecs::png::PngDecoder, *};
use log::{error, warn};
use std::io::Cursor;

/// Validate that the output png data still matches the original image
pub fn validate_output(output: &[u8], original_data: &[u8]) -> bool {
let (old_png, new_png) = rayon::join(
let (old_frames, new_frames) = rayon::join(
|| load_png_image_from_memory(original_data),
|| load_png_image_from_memory(output),
);

match (new_png, old_png) {
match (new_frames, old_frames) {
(Err(new_err), _) => {
error!("Failed to read output image for validation: {}", new_err);
false
@@ -21,26 +20,40 @@ pub fn validate_output(output: &[u8], original_data: &[u8]) -> bool {
warn!("Failed to read input image for validation: {}", old_err);
true
}
(Ok(new_png), Ok(old_png)) => images_equal(&old_png, &new_png),
(Ok(new_frames), Ok(old_frames)) if new_frames.len() != old_frames.len() => false,
(Ok(new_frames), Ok(old_frames)) => {
for (a, b) in old_frames.iter().zip(new_frames) {
if !images_equal(&a, &b) {
return false;
}
}
true
}
}
}

/// Loads a PNG image from memory to a [DynamicImage]
fn load_png_image_from_memory(png_data: &[u8]) -> Result<DynamicImage, image::ImageError> {
let mut reader = image::io::Reader::new(Cursor::new(png_data));
reader.set_format(ImageFormat::Png);
reader.no_limits();
reader.decode()
/// Loads a PNG image from memory to frames of [RgbaImage]
fn load_png_image_from_memory(png_data: &[u8]) -> Result<Vec<RgbaImage>, image::ImageError> {
let decoder = PngDecoder::new(png_data)?;
if decoder.is_apng() {
decoder
.apng()
.into_frames()
.map(|f| f.map(|f| f.into_buffer()))
.collect()
} else {
DynamicImage::from_decoder(decoder).map(|i| vec![i.into_rgba8()])
}
}

/// Compares images pixel by pixel for equivalent content
fn images_equal(old_png: &DynamicImage, new_png: &DynamicImage) -> bool {
fn images_equal(old_png: &RgbaImage, new_png: &RgbaImage) -> bool {
let a = old_png.pixels().filter(|x| {
let p = x.2.channels();
let p = x.channels();
!(p.len() == 4 && p[3] == 0)
});
let b = new_png.pixels().filter(|x| {
let p = x.2.channels();
let p = x.channels();
!(p.len() == 4 && p[3] == 0)
});
a.eq(b)
6 changes: 3 additions & 3 deletions tests/lib.rs
@@ -30,7 +30,7 @@ fn optimize_from_memory_apng() {
in_file.read_to_end(&mut in_file_buf).unwrap();

let result = oxipng::optimize_from_memory(&in_file_buf, &Options::default());
assert!(result.is_err());
assert!(result.is_ok());
}

#[test]
@@ -58,9 +58,9 @@ fn optimize_apng() {
let result = oxipng::optimize(
&"tests/files/apng_file.png".into(),
&OutFile::Path(None),
&Options::default(),
&Options::from_preset(0),
);
assert!(result.is_err());
assert!(result.is_ok());
}

#[test]