From 43f13d1cb315dd42fb8c2332b530228e55fdf391 Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Fri, 8 Sep 2023 09:42:32 -0400 Subject: [PATCH 01/13] Initial work on palette changer script This script allows a user to change Freedoom's palette, while also changing any graphics that use the old palette to use the new colours --- scripts/update-palette | 166 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 166 insertions(+) create mode 100755 scripts/update-palette diff --git a/scripts/update-palette b/scripts/update-palette new file mode 100755 index 00000000..243ad0fa --- /dev/null +++ b/scripts/update-palette @@ -0,0 +1,166 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: BSD-3-Clause +# +# update-palette - Update the palettes of all PNG graphics +# +# All of the PNGs in Freedoom are paletted, and the palettes of each PNG match +# the colours in the PLAYPAL lump. If a user wants to make changes to the +# palette, they would have to update the palette in all of Freedoom's graphics +# for consistency. +# +# This script takes a new PLAYPAL as an argument, compares the old and new +# palettes, and modifies every paletted PNG file in the repo so that the new +# colours are used. + +import argparse +from collections import namedtuple +from itertools import zip_longest +from shutil import copy +import struct +import os +from zlib import crc32 + +PngChunk = namedtuple("PNGChunk", "type data") + +PNG_SIGNATURE = b"\x89PNG\r\n\x1a\n" + +# Parse the command line arguments, and return a dict with the arguments +def parse_args(): + parser = argparse.ArgumentParser("update-palette") + parser.add_argument("palette", help="The new palette to use") + parser.add_argument( + "--dir", "-d", help=( + "The directory to recursively process. " + "Defaults to working directory"), + default=os.getcwd()) + args = parser.parse_args() + return args + + +# https://docs.python.org/3/library/itertools.html#itertools-recipes +def grouper(iterable, n, *, incomplete='fill', fillvalue=None): + "Collect data into non-overlapping fixed-length chunks or blocks" + # grouper('ABCDEFG', 3, fillvalue='x') --> ABC DEF Gxx + # grouper('ABCDEFG', 3, incomplete='strict') --> ABC DEF ValueError + # grouper('ABCDEFG', 3, incomplete='ignore') --> ABC DEF + args = [iter(iterable)] * n + if incomplete == 'fill': + return zip_longest(*args, fillvalue=fillvalue) + if incomplete == 'strict': + return zip(*args, strict=True) + if incomplete == 'ignore': + return zip(*args) + else: + raise ValueError('Expected fill, strict, or ignore') + + +# Compare the old palette and the new palette, and return a dict with the +# differences. +def compare_palettes(new_palette): + old_to_new = {} + old_palette = "lumps/playpal/playpal-base.lmp" + with open(old_palette, "rb") as handle: + old_palette = handle.read() + if len(old_palette) < 768: + raise ValueError("Old palette is too short!") + old_palette = list(grouper(old_palette[:768], 3, incomplete='strict')) + with open(new_palette, "rb") as handle: + new_palette = handle.read() + if len(new_palette) < 768: + raise ValueError("New palette is too short!") + new_palette = list(grouper(new_palette[:768], 3, incomplete='strict')) + for index, zipper in enumerate(zip(old_palette, new_palette)): + old_colour, new_colour = zipper + if old_colour != new_colour: + # Index is unnecessary (I hope)? + # old_to_new[(index, old_colour)] = new_colour + old_to_new[old_colour] = new_colour + return old_to_new + + +# "Stolen" from the map-color-index script +# Process a directory recursively for PNG files. 
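+# colour_map maps each replaced (r, g, b) tuple from the old palette to its
+# replacement colour in the new palette. Every *.png found under args_dir is
+# handed to process_png, which rewrites the file in place when its palette
+# contains any of the replaced colours.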
+def process_dir(colour_map, args_dir): + pngs_changed_count = 0 + pngs_examined_count = 0 + + for dirpath, dirnames, filenames in os.walk(args_dir): + for png_base in filenames: + if not png_base.lower().endswith(".png"): + continue + png_path = os.path.join(dirpath, png_base) + pngs_examined_count += 1 + if process_png(colour_map, png_path): + pngs_changed_count += 1 + + +# Process a PNG file in place. +def process_png(colour_map, png_path): + + # Read a chunk from the PNG file + def read_png_chunk(png_file): + chunk_len = png_file.read(4) + if chunk_len == b"": return None # End of file + chunk_len = struct.unpack("!I", chunk_len)[0] + chunk_type = png_file.read(4) + chunk_data = png_file.read(chunk_len) + chunk_crc = png_file.read(4) + chunk_crc = struct.unpack("!I", chunk_crc)[0] + # crc = crc32(chunk_type) + # crc = crc32(chunk_data, crc) + # if crc != chunk_crc: + # return None + return PngChunk(chunk_type, chunk_data) + + # Change the old colours to the new colours + def maybe_modify_plte(plte_data): + nonlocal colour_map + modified = False + colours = list(grouper(plte_data, 3, incomplete='strict')) + for index, colour in enumerate(colours): + if colour in colour_map: + modified = True + colours[index] = colour_map[colour] + colours = b"".join(map(bytes, colours)) + return modified, colours + + # Read the PNG file + chunks = [] + with open(png_path, "rb") as handle: + if handle.read(8) != PNG_SIGNATURE: + print("{0} is not a valid PNG file!".format(png_path)) + return False + while chunk := read_png_chunk(handle): + chunks.append(chunk) + + # Modify the PLTE chunk, if necessary + plte_modified = False + for index, chunk in enumerate(chunks): + if chunk.type == b"PLTE": + chunk_name = chunk.type + plte_modified, chunk_data = maybe_modify_plte(chunk.data) + chunks[index] = PngChunk(chunk_name, chunk_data) + + # Write the modified PNG file + if plte_modified: + with open(png_path, "wb") as handle: + handle.write(PNG_SIGNATURE) + for chunk in chunks: + chunk_crc = crc32(chunk.type) + chunk_crc = crc32(chunk.data, chunk_crc) + chunk_crc = struct.pack("!I", chunk_crc) + chunk_len = struct.pack("!I", len(chunk.data)) + handle.write(chunk_len) + handle.write(chunk.type) + handle.write(chunk.data) + handle.write(chunk_crc) + + return True + + +if __name__ == "__main__": + args = parse_args() + comparison = compare_palettes(args.palette) + process_dir(comparison, args.dir) + # Replace old playpal-base.lmp + copy(args.palette, "lumps/playpal/playpal-base.lmp") From c679130360316bf06ba511574820f39511f9867a Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Sun, 10 Sep 2023 06:43:51 -0400 Subject: [PATCH 02/13] Make update-palette smarter about replacing colors Do not update a colour if all of its duplicates have not been replaced Attempt to check whether the IDAT(s) contain any modified colours before re-writing it --- scripts/update-palette | 167 +++++++++++++++++++++++++++++++---------- 1 file changed, 128 insertions(+), 39 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index 243ad0fa..35c10f35 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -14,10 +14,12 @@ import argparse from collections import namedtuple +from functools import reduce from itertools import zip_longest -from shutil import copy import struct import os +from os.path import dirname, join +from sys import argv from zlib import crc32 PngChunk = namedtuple("PNGChunk", "type data") @@ -31,8 +33,10 @@ def parse_args(): parser.add_argument( "--dir", "-d", help=( "The directory to 
recursively process. " - "Defaults to working directory"), - default=os.getcwd()) + "Defaults to repository root directory."), + default=join(dirname(argv[0]), "..")) + parser.add_argument( + "--dry-run", help="Do not modify any PNGs", action='store_true') args = parser.parse_args() return args @@ -54,48 +58,87 @@ def grouper(iterable, n, *, incomplete='fill', fillvalue=None): raise ValueError('Expected fill, strict, or ignore') + # Compare the old palette and the new palette, and return a dict with the # differences. def compare_palettes(new_palette): - old_to_new = {} old_palette = "lumps/playpal/playpal-base.lmp" + + # Read both palettes into a more usable format with open(old_palette, "rb") as handle: old_palette = handle.read() if len(old_palette) < 768: raise ValueError("Old palette is too short!") old_palette = list(grouper(old_palette[:768], 3, incomplete='strict')) + with open(new_palette, "rb") as handle: new_palette = handle.read() if len(new_palette) < 768: raise ValueError("New palette is too short!") new_palette = list(grouper(new_palette[:768], 3, incomplete='strict')) + + # Given a colour palette and a dict, return a dict with the indexes of + # each colour. This function is meant to be used with functools.reduce. + def get_duplicate_colours(value, index_colour): + index, colour = index_colour + value.setdefault(colour, []).append(index) + return value + + # Scan the old palette for duplicate colours + old_palette_duplicates = reduce( + get_duplicate_colours, enumerate(old_palette), {}) + # Eliminate unique colours + old_palette_duplicates = dict(filter( + # kv[1] is the value, and it's unique if it's length is 1 + lambda kv: None if len(kv[1]) == 1 else kv, + old_palette_duplicates.items() + )) + + # Map old indices to new colours for now. + old_to_new = {} for index, zipper in enumerate(zip(old_palette, new_palette)): old_colour, new_colour = zipper if old_colour != new_colour: - # Index is unnecessary (I hope)? - # old_to_new[(index, old_colour)] = new_colour - old_to_new[old_colour] = new_colour + old_to_new[index] = new_colour + + # Does the new palette replace ALL instances of a duplicate colour in the + # old palette? If so, map the old colour to the first replacement in the + # new palette, but if not, leave the old colour unchanged. + replaced = {} + for colour, indices in old_palette_duplicates.items(): + if all(map(lambda i: i in old_to_new.keys(), indices)): + replaced[colour] = old_to_new[min(indices)] + else: + replaced[colour] = None + + # Replace the keys in old_to_new, which are indices, with the old palette + # colours they correspond to. This way, we have a colour-to-colour dict. + old_to_new = dict(filter(lambda kv: kv[1], map( + lambda iv: (old_palette[iv[0]], + replaced.get(old_palette[iv[0]], iv[1])), + old_to_new.items() + ))) return old_to_new # "Stolen" from the map-color-index script # Process a directory recursively for PNG files. -def process_dir(colour_map, args_dir): +def process_dir(colour_map, dir, dry_run, palette): pngs_changed_count = 0 pngs_examined_count = 0 - for dirpath, dirnames, filenames in os.walk(args_dir): + for dirpath, dirnames, filenames in os.walk(dir): for png_base in filenames: if not png_base.lower().endswith(".png"): continue png_path = os.path.join(dirpath, png_base) pngs_examined_count += 1 - if process_png(colour_map, png_path): + if process_png(colour_map, png_path, dry_run): pngs_changed_count += 1 # Process a PNG file in place. 
-def process_png(colour_map, png_path): +def process_png(colour_map, png_path, dry): # Read a chunk from the PNG file def read_png_chunk(png_file): @@ -112,18 +155,6 @@ def process_png(colour_map, png_path): # return None return PngChunk(chunk_type, chunk_data) - # Change the old colours to the new colours - def maybe_modify_plte(plte_data): - nonlocal colour_map - modified = False - colours = list(grouper(plte_data, 3, incomplete='strict')) - for index, colour in enumerate(colours): - if colour in colour_map: - modified = True - colours[index] = colour_map[colour] - colours = b"".join(map(bytes, colours)) - return modified, colours - # Read the PNG file chunks = [] with open(png_path, "rb") as handle: @@ -133,27 +164,80 @@ def process_png(colour_map, png_path): while chunk := read_png_chunk(handle): chunks.append(chunk) - # Modify the PLTE chunk, if necessary + # Change the old colours to the new colours + modified_palette_colours = set() + def maybe_modify_plte(plte_data): + nonlocal colour_map + nonlocal modified_palette_colours + modified = False + colours = list(grouper(plte_data, 3, incomplete='strict')) + for index, colour in enumerate(colours): + if colour in colour_map: + modified = True + modified_palette_colours.add(index) + colours[index] = colour_map[colour] + colours = b"".join(map(bytes, colours)) + return modified, colours + + def is_colour_changed(bit_depth): + def split_byte(byte): + if bit_depth == 1: + indices = [ + byte & 0x80, byte & 0x40, byte & 0x20, byte & 0x10, + byte & 0x08, byte & 0x04, byte & 0x02, byte & 0x01 + ] + elif bit_depth == 2: + # 1 + 2 = 3 = 0x03 + # 4 + 8 = 12 = 0x0C + indices = [byte & 0xC0, byte & 0x30, byte & 0x0C, byte & 0x03] + elif bit_depth == 4: + # Nibbles + indices = [byte & 0xF0, byte & 0x0F] + elif bit_depth == 8: + indices = [byte] + return indices + def colour_changed(byte): + return any(map( + lambda b: b in modified_palette_colours, split_byte(byte))) + return colour_changed + + # Modify the PLTE chunk, if necessary, and check if the PLTE modifications + # affect the IDAT chunk. 
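+    # The file is only rewritten when both the PLTE was modified and the IDAT
+    # data appears to reference at least one of the modified palette entries.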
plte_modified = False + idat_modified = False + is_paletted = False for index, chunk in enumerate(chunks): - if chunk.type == b"PLTE": + if chunk.type == b"IHDR": + width, height, bit_depth, colour_type = struct.unpack_from( + "!IIBB", chunk.data + ) + is_paletted = colour_type == 3 and ( + bit_depth == 1 or bit_depth == 2 or + bit_depth == 4 or bit_depth == 8 + ) + if is_paletted and chunk.type == b"PLTE": chunk_name = chunk.type plte_modified, chunk_data = maybe_modify_plte(chunk.data) chunks[index] = PngChunk(chunk_name, chunk_data) + if plte_modified and chunk.type == b"IDAT": + if any(map(is_colour_changed(bit_depth), chunk.data)): + idat_modified = True # Write the modified PNG file - if plte_modified: - with open(png_path, "wb") as handle: - handle.write(PNG_SIGNATURE) - for chunk in chunks: - chunk_crc = crc32(chunk.type) - chunk_crc = crc32(chunk.data, chunk_crc) - chunk_crc = struct.pack("!I", chunk_crc) - chunk_len = struct.pack("!I", len(chunk.data)) - handle.write(chunk_len) - handle.write(chunk.type) - handle.write(chunk.data) - handle.write(chunk_crc) + if plte_modified and idat_modified: + print("{} was changed".format(png_path)) + if not dry: + with open(png_path, "wb") as handle: + handle.write(PNG_SIGNATURE) + for chunk in chunks: + chunk_crc = crc32(chunk.type) + chunk_crc = crc32(chunk.data, chunk_crc) + chunk_crc = struct.pack("!I", chunk_crc) + chunk_len = struct.pack("!I", len(chunk.data)) + handle.write(chunk_len) + handle.write(chunk.type) + handle.write(chunk.data) + handle.write(chunk_crc) return True @@ -161,6 +245,11 @@ def process_png(colour_map, png_path): if __name__ == "__main__": args = parse_args() comparison = compare_palettes(args.palette) - process_dir(comparison, args.dir) + process_dir(comparison, **vars(args)) # Replace old playpal-base.lmp - copy(args.palette, "lumps/playpal/playpal-base.lmp") + if not args.dry_run: + with (open(args.palette, "rb") as new_palfile, + open("lumps/playpal/playpal-base.lmp", "wb") as old_palfile): + # Only copy the first 768 bytes of the new palette to playpal-base + new_pal = new_palfile.read(768) + old_palfile.write(new_pal) From 724573e065842bfa2b3fe53efb44775c981aa0a5 Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Sun, 10 Sep 2023 07:23:17 -0400 Subject: [PATCH 03/13] Use pypng to make it easier to process IDAT chunks Properly processing the IDAT chunks in a PNG file requires decompression, decoding, de-filtering, and de-interlacing. Using pypng takes care of this for the developer. --- scripts/update-palette | 39 +++++++++++++-------------------------- 1 file changed, 13 insertions(+), 26 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index 35c10f35..27fb6591 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -11,6 +11,9 @@ # This script takes a new PLAYPAL as an argument, compares the old and new # palettes, and modifies every paletted PNG file in the repo so that the new # colours are used. +# +# Dependencies: +# - pypng: https://gitlab.com/drj11/pypng (retrieved Sept 10 2023) import argparse from collections import namedtuple @@ -18,7 +21,8 @@ from functools import reduce from itertools import zip_longest import struct import os -from os.path import dirname, join +from os.path import dirname, realpath, join, normpath +import png from sys import argv from zlib import crc32 @@ -34,7 +38,7 @@ def parse_args(): "--dir", "-d", help=( "The directory to recursively process. 
" "Defaults to repository root directory."), - default=join(dirname(argv[0]), "..")) + default=normpath(join(dirname(realpath(argv[0])), ".."))) parser.add_argument( "--dry-run", help="Do not modify any PNGs", action='store_true') args = parser.parse_args() @@ -179,27 +183,9 @@ def process_png(colour_map, png_path, dry): colours = b"".join(map(bytes, colours)) return modified, colours - def is_colour_changed(bit_depth): - def split_byte(byte): - if bit_depth == 1: - indices = [ - byte & 0x80, byte & 0x40, byte & 0x20, byte & 0x10, - byte & 0x08, byte & 0x04, byte & 0x02, byte & 0x01 - ] - elif bit_depth == 2: - # 1 + 2 = 3 = 0x03 - # 4 + 8 = 12 = 0x0C - indices = [byte & 0xC0, byte & 0x30, byte & 0x0C, byte & 0x03] - elif bit_depth == 4: - # Nibbles - indices = [byte & 0xF0, byte & 0x0F] - elif bit_depth == 8: - indices = [byte] - return indices - def colour_changed(byte): - return any(map( - lambda b: b in modified_palette_colours, split_byte(byte))) - return colour_changed + def is_colour_changed(colour_index): + nonlocal modified_palette_colours + return colour_index in modified_palette_colours # Modify the PLTE chunk, if necessary, and check if the PLTE modifications # affect the IDAT chunk. @@ -219,9 +205,10 @@ def process_png(colour_map, png_path, dry): chunk_name = chunk.type plte_modified, chunk_data = maybe_modify_plte(chunk.data) chunks[index] = PngChunk(chunk_name, chunk_data) - if plte_modified and chunk.type == b"IDAT": - if any(map(is_colour_changed(bit_depth), chunk.data)): - idat_modified = True + if plte_modified: + png_reader = png.Reader(filename=png_path) + width, height, values, info = png_reader.read_flat() + idat_modified = any(map(is_colour_changed, values)) # Write the modified PNG file if plte_modified and idat_modified: From 042c463cc8752496d91722f2a493fb48c8c9f1db Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Sun, 10 Sep 2023 10:22:55 -0400 Subject: [PATCH 04/13] Fix a few nitpicks with update-palette Don't just "return True" at the end of process_png. Use relpath for more readable paths --- scripts/update-palette | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index 27fb6591..fa59cdc2 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -21,7 +21,7 @@ from functools import reduce from itertools import zip_longest import struct import os -from os.path import dirname, realpath, join, normpath +from os.path import dirname, relpath, realpath, join, normpath import png from sys import argv from zlib import crc32 @@ -137,12 +137,12 @@ def process_dir(colour_map, dir, dry_run, palette): continue png_path = os.path.join(dirpath, png_base) pngs_examined_count += 1 - if process_png(colour_map, png_path, dry_run): + if process_png(colour_map, png_path, dry_run, dir): pngs_changed_count += 1 # Process a PNG file in place. 
-def process_png(colour_map, png_path, dry): +def process_png(colour_map, png_path, dry, dir): # Read a chunk from the PNG file def read_png_chunk(png_file): @@ -211,8 +211,8 @@ def process_png(colour_map, png_path, dry): idat_modified = any(map(is_colour_changed, values)) # Write the modified PNG file - if plte_modified and idat_modified: - print("{} was changed".format(png_path)) + if idat_modified: + print("{} was changed".format(relpath(png_path, start=dir))) if not dry: with open(png_path, "wb") as handle: handle.write(PNG_SIGNATURE) @@ -226,7 +226,7 @@ def process_png(colour_map, png_path, dry): handle.write(chunk.data) handle.write(chunk_crc) - return True + return idat_modified if __name__ == "__main__": From 6a213da702211ad6529ed32850d0f71c8087e7fb Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Sat, 16 Sep 2023 12:03:42 -0400 Subject: [PATCH 05/13] Initial work on PNG decoding without pypng Most of the features a PNG decoder should have, except for deinterlacing, are implemented. --- scripts/update-palette | 148 ++++++++++++++++++++++++++++++++++++----- 1 file changed, 133 insertions(+), 15 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index fa59cdc2..c6c403d4 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -12,19 +12,20 @@ # palettes, and modifies every paletted PNG file in the repo so that the new # colours are used. # -# Dependencies: +# Optional dependencies: # - pypng: https://gitlab.com/drj11/pypng (retrieved Sept 10 2023) import argparse from collections import namedtuple from functools import reduce -from itertools import zip_longest +from itertools import chain, starmap, tee, zip_longest +from math import floor, ceil import struct import os from os.path import dirname, relpath, realpath, join, normpath -import png +# import png from sys import argv -from zlib import crc32 +import zlib PngChunk = namedtuple("PNGChunk", "type data") @@ -62,6 +63,11 @@ def grouper(iterable, n, *, incomplete='fill', fillvalue=None): raise ValueError('Expected fill, strict, or ignore') +# https://docs.python.org/3.8/library/itertools.html#itertools-recipes +def flatten(list_of_lists): + "Flatten one level of nesting" + return chain.from_iterable(list_of_lists) + # Compare the old palette and the new palette, and return a dict with the # differences. @@ -141,6 +147,109 @@ def process_dir(colour_map, dir, dry_run, palette): pngs_changed_count += 1 +# https://docs.python.org/3.8/library/itertools.html#itertools-recipes +def pairwise(iterable): + "s -> (s0,s1), (s1,s2), (s2, s3), ..." 
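+    # e.g. pairwise(range(0, 12, 4)) -> (0, 4), (4, 8); used below to turn
+    # scanline start offsets into (begin, end) slices of the filtered data.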
+ a, b = tee(iterable) + next(b, None) + return zip(a, b) + + +def process_idat( + all_idats, width, bit_depth, compression, filtering, interlacing + ): + + def mask_bits(bit_depth): + base_bit_mask = reduce(lambda a, b: a | (1 << b), range(bit_depth), 0) + bit_mask = base_bit_mask + shift = 0 + while bit_mask: + yield bit_mask, shift + bit_mask = (bit_mask << bit_depth) & 0xFF + shift += bit_depth + + def paeth_predictor(a_byte, b_byte, c_byte): + # Paeth predictor of x: + paeth = a_byte + b_byte - c_byte + pa = abs(paeth - a_byte) + pb = abs(paeth - b_byte) + pc = abs(paeth - c_byte) + if pa <= pb and pa <= pc: + predictor = a_byte + elif pb <= pc: + predictor = b_byte + else: + predictor = c_byte + return predictor + + def defilter(data, width, bit_depth): + stride = ceil(width / 8 * bit_depth) + filtered_stride = stride + 1 + bgn_end = pairwise(range(0, len(data), filtered_stride)) + lines = [] + for bgn, end in bgn_end: + prev_bgn = bgn - filtered_stride + 1 + filtered = data[bgn:end] + filter_type = filtered[0] + scanline = tuple(chain((0,), filtered[1:])) + if prev_bgn >= 0: + prev_scanline = tuple(chain((0,), lines[-1])) + else: + prev_scanline = tuple([0 for _ in range(filtered_stride)]) + # c b + # a x + reconstructed = [0] + for byte_index in range(1, len(scanline)): + unfilters = { # Filter type to reconstruction function + # No filter + 0: lambda fx, ra, rb, rc: fx % 256, + # Sub + 1: lambda fx, ra, rb, rc: (fx + ra) % 256, + # Up + 2: lambda fx, ra, rb, rc: (fx + rb) % 256, + # Average + 3: lambda fx, ra, rb, rc: \ + (fx + floor((ra + rb) / 2)) % 256, + # Paeth + 4: lambda fx, ra, rb, rc: \ + (fx + paeth_predictor(ra, rb, rc)) % 256 + } + x_byte = scanline[byte_index] + a_byte = reconstructed[byte_index - 1] + b_byte = prev_scanline[byte_index] + c_byte = prev_scanline[byte_index - 1] + unfilter = unfilters[filter_type] + reconstructed.append(unfilter(x_byte, a_byte, b_byte, c_byte)) + lines.append(reconstructed) + # Remove zeros from scanlines in lines list + return bytes(flatten(map(lambda l: l[1:], lines))) + + # Expand the packed bytes into colour or index samples + def expand_bytes(data, bit_depth): + if bit_depth not in {1,2,4,8}: + return None + + # No expansion necessary + if bit_depth == 8: + return data + + return bytes(flatten(map( + lambda b: bytes(starmap( + lambda mask, shift: (b & mask) >> shift, + mask_bits(bit_depth))), + data))) + + if compression == 0: + data = zlib.decompress(all_idats) + if filtering == 0: + data = defilter(data, width, bit_depth) + if interlacing != 0: + print("WARNING! This image is interlaced! De-interlacing is not" + "implemented yet!") + data = expand_bytes(data, bit_depth) + return data + + # Process a PNG file in place. 
def process_png(colour_map, png_path, dry, dir): @@ -153,8 +262,8 @@ def process_png(colour_map, png_path, dry, dir): chunk_data = png_file.read(chunk_len) chunk_crc = png_file.read(4) chunk_crc = struct.unpack("!I", chunk_crc)[0] - # crc = crc32(chunk_type) - # crc = crc32(chunk_data, crc) + # crc = zlib.crc32(chunk_type) + # crc = zlib.crc32(chunk_data, crc) # if crc != chunk_crc: # return None return PngChunk(chunk_type, chunk_data) @@ -192,23 +301,32 @@ def process_png(colour_map, png_path, dry, dir): plte_modified = False idat_modified = False is_paletted = False + all_idat = [] for index, chunk in enumerate(chunks): if chunk.type == b"IHDR": - width, height, bit_depth, colour_type = struct.unpack_from( - "!IIBB", chunk.data + (width, height, bit_depth, colour_type, + compression, filtering, interlacing) = struct.unpack_from( + "!IIBBBBB", chunk.data ) is_paletted = colour_type == 3 and ( bit_depth == 1 or bit_depth == 2 or bit_depth == 4 or bit_depth == 8 ) - if is_paletted and chunk.type == b"PLTE": + elif is_paletted and chunk.type == b"PLTE": chunk_name = chunk.type plte_modified, chunk_data = maybe_modify_plte(chunk.data) chunks[index] = PngChunk(chunk_name, chunk_data) + elif chunk.type == b"IDAT": + all_idat.append(chunk.data) if plte_modified: - png_reader = png.Reader(filename=png_path) - width, height, values, info = png_reader.read_flat() + all_idat = bytes(flatten(all_idat)) + values = process_idat(all_idat, width, bit_depth, + compression, filtering, interlacing) + # png_reader = png.Reader(filename=png_path) + # width, height, values, info = png_reader.read_flat() idat_modified = any(map(is_colour_changed, values)) + del values + del all_idat # Write the modified PNG file if idat_modified: @@ -217,8 +335,8 @@ def process_png(colour_map, png_path, dry, dir): with open(png_path, "wb") as handle: handle.write(PNG_SIGNATURE) for chunk in chunks: - chunk_crc = crc32(chunk.type) - chunk_crc = crc32(chunk.data, chunk_crc) + chunk_crc = zlib.crc32(chunk.type) + chunk_crc = zlib.crc32(chunk.data, chunk_crc) chunk_crc = struct.pack("!I", chunk_crc) chunk_len = struct.pack("!I", len(chunk.data)) handle.write(chunk_len) @@ -235,8 +353,8 @@ if __name__ == "__main__": process_dir(comparison, **vars(args)) # Replace old playpal-base.lmp if not args.dry_run: - with (open(args.palette, "rb") as new_palfile, - open("lumps/playpal/playpal-base.lmp", "wb") as old_palfile): + with open(args.palette, "rb") as new_palfile, \ + open("lumps/playpal/playpal-base.lmp", "wb") as old_palfile: # Only copy the first 768 bytes of the new palette to playpal-base new_pal = new_palfile.read(768) old_palfile.write(new_pal) From ae5e4b0e58c8aa6efb4083e7d2f173f9cdf9d410 Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Sat, 16 Sep 2023 14:06:17 -0400 Subject: [PATCH 06/13] Fix discrepancies between pypng and my decoder --- scripts/update-palette | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index c6c403d4..ff454263 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -156,7 +156,7 @@ def pairwise(iterable): def process_idat( - all_idats, width, bit_depth, compression, filtering, interlacing + data, width, bit_depth, compression, filtering, interlacing ): def mask_bits(bit_depth): @@ -193,12 +193,13 @@ def process_idat( filter_type = filtered[0] scanline = tuple(chain((0,), filtered[1:])) if prev_bgn >= 0: - prev_scanline = tuple(chain((0,), lines[-1])) + # Reconstructed scanlines in lines list all start with a zero + 
prev_scanline = tuple(lines[-1]) else: prev_scanline = tuple([0 for _ in range(filtered_stride)]) # c b # a x - reconstructed = [0] + reconstructed = bytearray(1) for byte_index in range(1, len(scanline)): unfilters = { # Filter type to reconstruction function # No filter @@ -240,7 +241,7 @@ def process_idat( data))) if compression == 0: - data = zlib.decompress(all_idats) + data = zlib.decompress(data) if filtering == 0: data = defilter(data, width, bit_depth) if interlacing != 0: From 12836941ac14c514f77208a318fdd0af0bed44cc Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Sun, 17 Sep 2023 21:01:40 -0400 Subject: [PATCH 07/13] Use older grouper function for Python 3.8 support --- scripts/update-palette | 25 ++++++++----------------- 1 file changed, 8 insertions(+), 17 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index ff454263..7d91f5d4 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -46,21 +46,12 @@ def parse_args(): return args -# https://docs.python.org/3/library/itertools.html#itertools-recipes -def grouper(iterable, n, *, incomplete='fill', fillvalue=None): - "Collect data into non-overlapping fixed-length chunks or blocks" - # grouper('ABCDEFG', 3, fillvalue='x') --> ABC DEF Gxx - # grouper('ABCDEFG', 3, incomplete='strict') --> ABC DEF ValueError - # grouper('ABCDEFG', 3, incomplete='ignore') --> ABC DEF +# https://docs.python.org/3.8/library/itertools.html#itertools-recipes +def grouper(iterable, n, fillvalue=None): + "Collect data into fixed-length chunks or blocks" + # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx" args = [iter(iterable)] * n - if incomplete == 'fill': - return zip_longest(*args, fillvalue=fillvalue) - if incomplete == 'strict': - return zip(*args, strict=True) - if incomplete == 'ignore': - return zip(*args) - else: - raise ValueError('Expected fill, strict, or ignore') + return zip_longest(*args, fillvalue=fillvalue) # https://docs.python.org/3.8/library/itertools.html#itertools-recipes @@ -79,13 +70,13 @@ def compare_palettes(new_palette): old_palette = handle.read() if len(old_palette) < 768: raise ValueError("Old palette is too short!") - old_palette = list(grouper(old_palette[:768], 3, incomplete='strict')) + old_palette = list(grouper(old_palette[:768], 3)) with open(new_palette, "rb") as handle: new_palette = handle.read() if len(new_palette) < 768: raise ValueError("New palette is too short!") - new_palette = list(grouper(new_palette[:768], 3, incomplete='strict')) + new_palette = list(grouper(new_palette[:768], 3)) # Given a colour palette and a dict, return a dict with the indexes of # each colour. This function is meant to be used with functools.reduce. @@ -284,7 +275,7 @@ def process_png(colour_map, png_path, dry, dir): nonlocal colour_map nonlocal modified_palette_colours modified = False - colours = list(grouper(plte_data, 3, incomplete='strict')) + colours = list(grouper(plte_data, 3)) for index, colour in enumerate(colours): if colour in colour_map: modified = True From 48fa66354a70e47faa2e18523ebdd2f6bfd31ccb Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Fri, 29 Sep 2023 18:05:31 -0400 Subject: [PATCH 08/13] Add description, and remove directory argument Add a description of the script to the ArgumentParser Remove the directory argument, since it has limited use, and is a potential security flaw Rename "dir" argument to "directory", since "dir" is a built-in function. 
--- scripts/update-palette | 34 ++++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index 7d91f5d4..91381d6a 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -33,15 +33,24 @@ PNG_SIGNATURE = b"\x89PNG\r\n\x1a\n" # Parse the command line arguments, and return a dict with the arguments def parse_args(): - parser = argparse.ArgumentParser("update-palette") + parser = argparse.ArgumentParser( + "update-palette", + description="This script takes a new palette, compares the new " + "palette with the old one, and scans and updates images in the repo " + "which use the colours that were replaced in the new palette." + ) parser.add_argument("palette", help="The new palette to use") + # This is a potential vulnerability, and besides, it doesn't make + # sense to update the palette for images which aren't in the repo. + # parser.add_argument( + # "--dir", "-d", help=( + # "The directory to recursively process. " + # "Defaults to repository root directory."), + # default=normpath(join(dirname(realpath(argv[0])), ".."))) parser.add_argument( - "--dir", "-d", help=( - "The directory to recursively process. " - "Defaults to repository root directory."), - default=normpath(join(dirname(realpath(argv[0])), ".."))) - parser.add_argument( - "--dry-run", help="Do not modify any PNGs", action='store_true') + "-d", "--dry-run", + help="Do not modify any PNGs, just show which ones would be modified", + action='store_true') args = parser.parse_args() return args @@ -124,17 +133,18 @@ def compare_palettes(new_palette): # "Stolen" from the map-color-index script # Process a directory recursively for PNG files. -def process_dir(colour_map, dir, dry_run, palette): +def process_dir(colour_map, dry_run, palette): + directory = normpath(join(dirname(realpath(argv[0])), "..")) pngs_changed_count = 0 pngs_examined_count = 0 - for dirpath, dirnames, filenames in os.walk(dir): + for dirpath, dirnames, filenames in os.walk(directory): for png_base in filenames: if not png_base.lower().endswith(".png"): continue png_path = os.path.join(dirpath, png_base) pngs_examined_count += 1 - if process_png(colour_map, png_path, dry_run, dir): + if process_png(colour_map, png_path, dry_run, directory): pngs_changed_count += 1 @@ -243,7 +253,7 @@ def process_idat( # Process a PNG file in place. -def process_png(colour_map, png_path, dry, dir): +def process_png(colour_map, png_path, dry, directory): # Read a chunk from the PNG file def read_png_chunk(png_file): @@ -322,7 +332,7 @@ def process_png(colour_map, png_path, dry, dir): # Write the modified PNG file if idat_modified: - print("{} was changed".format(relpath(png_path, start=dir))) + print("{} was changed".format(relpath(png_path, start=directory))) if not dry: with open(png_path, "wb") as handle: handle.write(PNG_SIGNATURE) From 7b8e82f76146251ed54ffe84963df8123078fe29 Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Fri, 29 Sep 2023 20:48:52 -0400 Subject: [PATCH 09/13] Replace all hard-coded paths with os.path.joins --- scripts/update-palette | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index 91381d6a..17d60c9d 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -71,8 +71,8 @@ def flatten(list_of_lists): # Compare the old palette and the new palette, and return a dict with the # differences. 
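+# The returned dict maps each replaced colour in the old palette, as an
+# (r, g, b) tuple, to its replacement colour in the new palette.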
-def compare_palettes(new_palette): - old_palette = "lumps/playpal/playpal-base.lmp" +def compare_palettes(directory, new_palette): + old_palette = join(directory, "lumps", "playpal", "playpal-base.lmp") # Read both palettes into a more usable format with open(old_palette, "rb") as handle: @@ -133,8 +133,7 @@ def compare_palettes(new_palette): # "Stolen" from the map-color-index script # Process a directory recursively for PNG files. -def process_dir(colour_map, dry_run, palette): - directory = normpath(join(dirname(realpath(argv[0])), "..")) +def process_dir(colour_map, dry_run, directory, palette): pngs_changed_count = 0 pngs_examined_count = 0 @@ -351,12 +350,15 @@ def process_png(colour_map, png_path, dry, directory): if __name__ == "__main__": args = parse_args() - comparison = compare_palettes(args.palette) - process_dir(comparison, **vars(args)) + directory = normpath(join(dirname(realpath(argv[0])), "..")) + comparison = compare_palettes(directory, args.palette) + process_dir(comparison, directory=directory, **vars(args)) # Replace old playpal-base.lmp if not args.dry_run: + playpal_base_path = ( + join(directory, "lumps", "playpal", "playpal-base.lmp")) with open(args.palette, "rb") as new_palfile, \ - open("lumps/playpal/playpal-base.lmp", "wb") as old_palfile: + open(playpal_base_path, "wb") as old_palfile: # Only copy the first 768 bytes of the new palette to playpal-base new_pal = new_palfile.read(768) old_palfile.write(new_pal) From 648e0657d2e3d71491c219101cb4c3e9ae3f7b76 Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Mon, 2 Oct 2023 00:39:45 -0400 Subject: [PATCH 10/13] Address some comments by @selliott512 in #1108 --- scripts/update-palette | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index 17d60c9d..4c695fb3 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -37,7 +37,10 @@ def parse_args(): "update-palette", description="This script takes a new palette, compares the new " "palette with the old one, and scans and updates images in the repo " - "which use the colours that were replaced in the new palette." + "which use the colours that were replaced in the new palette. This " + "script only supports indexed-colour PNGs. You may need to run 'make " + "fix-deutex-pngs' before running this script in order to palettize " + "any true-colour PNGs in the repo." ) parser.add_argument("palette", help="The new palette to use") # This is a potential vulnerability, and besides, it doesn't make @@ -74,15 +77,22 @@ def flatten(list_of_lists): def compare_palettes(directory, new_palette): old_palette = join(directory, "lumps", "playpal", "playpal-base.lmp") + # The "new" palette is the old palette? + old_pal_full_path = relpath(normpath(old_palette), directory) + new_pal_full_path = relpath(normpath(new_palette), directory) + if old_pal_full_path == new_pal_full_path: + raise ValueError("You're trying to replace the old palette with " + "itself! 
Try another palette.") + # Read both palettes into a more usable format with open(old_palette, "rb") as handle: - old_palette = handle.read() + old_palette = handle.read(768) if len(old_palette) < 768: raise ValueError("Old palette is too short!") old_palette = list(grouper(old_palette[:768], 3)) with open(new_palette, "rb") as handle: - new_palette = handle.read() + new_palette = handle.read(768) if len(new_palette) < 768: raise ValueError("New palette is too short!") new_palette = list(grouper(new_palette[:768], 3)) @@ -357,8 +367,10 @@ if __name__ == "__main__": if not args.dry_run: playpal_base_path = ( join(directory, "lumps", "playpal", "playpal-base.lmp")) + # The old/new palette being the same is checked in compare_palettes with open(args.palette, "rb") as new_palfile, \ open(playpal_base_path, "wb") as old_palfile: # Only copy the first 768 bytes of the new palette to playpal-base new_pal = new_palfile.read(768) + # Palette length is checked in compare_palettes old_palfile.write(new_pal) From a32b7917b8a642f75415edbb06b38f19ba08c6cf Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Fri, 3 Nov 2023 18:17:48 -0400 Subject: [PATCH 11/13] Use pypng instead of using a custom PNG decoder Freedoom needs a very good reason to have a custom PNG decoder, but there is no good reason to write one. Furthermore, this may make it easier to write code to modify true-colour PNGs. However, because of issues I'm having (see comments), the code is currently disabled. --- scripts/update-palette | 266 +++++++++++++---------------------------- 1 file changed, 83 insertions(+), 183 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index 4c695fb3..d7b49ecb 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -12,23 +12,19 @@ # palettes, and modifies every paletted PNG file in the repo so that the new # colours are used. # -# Optional dependencies: +# Dependencies: # - pypng: https://gitlab.com/drj11/pypng (retrieved Sept 10 2023) import argparse -from collections import namedtuple from functools import reduce -from itertools import chain, starmap, tee, zip_longest -from math import floor, ceil +from itertools import zip_longest import struct import os from os.path import dirname, relpath, realpath, join, normpath -# import png +import png from sys import argv import zlib -PngChunk = namedtuple("PNGChunk", "type data") - PNG_SIGNATURE = b"\x89PNG\r\n\x1a\n" # Parse the command line arguments, and return a dict with the arguments @@ -66,12 +62,6 @@ def grouper(iterable, n, fillvalue=None): return zip_longest(*args, fillvalue=fillvalue) -# https://docs.python.org/3.8/library/itertools.html#itertools-recipes -def flatten(list_of_lists): - "Flatten one level of nesting" - return chain.from_iterable(list_of_lists) - - # Compare the old palette and the new palette, and return a dict with the # differences. def compare_palettes(directory, new_palette): @@ -157,136 +147,10 @@ def process_dir(colour_map, dry_run, directory, palette): pngs_changed_count += 1 -# https://docs.python.org/3.8/library/itertools.html#itertools-recipes -def pairwise(iterable): - "s -> (s0,s1), (s1,s2), (s2, s3), ..." 
- a, b = tee(iterable) - next(b, None) - return zip(a, b) - - -def process_idat( - data, width, bit_depth, compression, filtering, interlacing - ): - - def mask_bits(bit_depth): - base_bit_mask = reduce(lambda a, b: a | (1 << b), range(bit_depth), 0) - bit_mask = base_bit_mask - shift = 0 - while bit_mask: - yield bit_mask, shift - bit_mask = (bit_mask << bit_depth) & 0xFF - shift += bit_depth - - def paeth_predictor(a_byte, b_byte, c_byte): - # Paeth predictor of x: - paeth = a_byte + b_byte - c_byte - pa = abs(paeth - a_byte) - pb = abs(paeth - b_byte) - pc = abs(paeth - c_byte) - if pa <= pb and pa <= pc: - predictor = a_byte - elif pb <= pc: - predictor = b_byte - else: - predictor = c_byte - return predictor - - def defilter(data, width, bit_depth): - stride = ceil(width / 8 * bit_depth) - filtered_stride = stride + 1 - bgn_end = pairwise(range(0, len(data), filtered_stride)) - lines = [] - for bgn, end in bgn_end: - prev_bgn = bgn - filtered_stride + 1 - filtered = data[bgn:end] - filter_type = filtered[0] - scanline = tuple(chain((0,), filtered[1:])) - if prev_bgn >= 0: - # Reconstructed scanlines in lines list all start with a zero - prev_scanline = tuple(lines[-1]) - else: - prev_scanline = tuple([0 for _ in range(filtered_stride)]) - # c b - # a x - reconstructed = bytearray(1) - for byte_index in range(1, len(scanline)): - unfilters = { # Filter type to reconstruction function - # No filter - 0: lambda fx, ra, rb, rc: fx % 256, - # Sub - 1: lambda fx, ra, rb, rc: (fx + ra) % 256, - # Up - 2: lambda fx, ra, rb, rc: (fx + rb) % 256, - # Average - 3: lambda fx, ra, rb, rc: \ - (fx + floor((ra + rb) / 2)) % 256, - # Paeth - 4: lambda fx, ra, rb, rc: \ - (fx + paeth_predictor(ra, rb, rc)) % 256 - } - x_byte = scanline[byte_index] - a_byte = reconstructed[byte_index - 1] - b_byte = prev_scanline[byte_index] - c_byte = prev_scanline[byte_index - 1] - unfilter = unfilters[filter_type] - reconstructed.append(unfilter(x_byte, a_byte, b_byte, c_byte)) - lines.append(reconstructed) - # Remove zeros from scanlines in lines list - return bytes(flatten(map(lambda l: l[1:], lines))) - - # Expand the packed bytes into colour or index samples - def expand_bytes(data, bit_depth): - if bit_depth not in {1,2,4,8}: - return None - - # No expansion necessary - if bit_depth == 8: - return data - - return bytes(flatten(map( - lambda b: bytes(starmap( - lambda mask, shift: (b & mask) >> shift, - mask_bits(bit_depth))), - data))) - - if compression == 0: - data = zlib.decompress(data) - if filtering == 0: - data = defilter(data, width, bit_depth) - if interlacing != 0: - print("WARNING! This image is interlaced! De-interlacing is not" - "implemented yet!") - data = expand_bytes(data, bit_depth) - return data - - # Process a PNG file in place. 
def process_png(colour_map, png_path, dry, directory): - - # Read a chunk from the PNG file - def read_png_chunk(png_file): - chunk_len = png_file.read(4) - if chunk_len == b"": return None # End of file - chunk_len = struct.unpack("!I", chunk_len)[0] - chunk_type = png_file.read(4) - chunk_data = png_file.read(chunk_len) - chunk_crc = png_file.read(4) - chunk_crc = struct.unpack("!I", chunk_crc)[0] - # crc = zlib.crc32(chunk_type) - # crc = zlib.crc32(chunk_data, crc) - # if crc != chunk_crc: - # return None - return PngChunk(chunk_type, chunk_data) - # Read the PNG file - chunks = [] - with open(png_path, "rb") as handle: - if handle.read(8) != PNG_SIGNATURE: - print("{0} is not a valid PNG file!".format(png_path)) - return False - while chunk := read_png_chunk(handle): - chunks.append(chunk) + png_reader = png.Reader(filename=png_path) # Change the old colours to the new colours modified_palette_colours = set() @@ -294,66 +158,102 @@ def process_png(colour_map, png_path, dry, directory): nonlocal colour_map nonlocal modified_palette_colours modified = False - colours = list(grouper(plte_data, 3)) + + # A pypng palette colour is 3 elements long, or 4 if the image has + # transparent colours. + if len(plte_data[0]) > 3: + alphas = list(map(lambda co: co[3], plte_data)) + else: + alphas = [None] * len(plte_data) + + colours = list(map(lambda co: co[0:3], plte_data)) + # Scan the palette for colours that should be changed, and change them + # according to colour_map for index, colour in enumerate(colours): if colour in colour_map: modified = True modified_palette_colours.add(index) colours[index] = colour_map[colour] - colours = b"".join(map(bytes, colours)) + # pypng needs the palette as an iterable of 3-tuples for opaque + # paletted images, or 4-tuples for transparent paletted images + colours = list(map(lambda c, a: (*c,) if a is None else (*c, a), + colours, alphas)) return modified, colours - def is_colour_changed(colour_index): + def is_paletted_colour_changed(row): nonlocal modified_palette_colours - return colour_index in modified_palette_colours + return any(map(lambda colour_index: \ + colour_index in modified_palette_colours, row)) - # Modify the PLTE chunk, if necessary, and check if the PLTE modifications + def maybe_modify_truecolour_image(rows, channels, transparent_colour): + nonlocal colour_map + modified = False + + def maybe_modify_row(row): + nonlocal colour_map + nonlocal channels + nonlocal transparent_colour + nonlocal modified + def maybe_modify_colour(co): + nonlocal colour_map + nonlocal transparent_colour + nonlocal modified + # Seperate RGB and alpha + co_rgb = co[0:3] + co_alpha = co[3] if len(co) > 3 else None + # Get the new colour from the colour map + new_colour = ( + colour_map.get(co_rgb, co_rgb) + if co_rgb != transparent_colour else co_rgb + ) + if co_alpha is not None: + new_colour = (*new_colour, co_alpha) + modified = new_colour == co + return new_colour + pixels = map(maybe_modify_colour, + grouper(row, channels)) + pixels = bytearray(b"".join(map(bytes, pixels))) + return pixels + + new_rows = list(map(maybe_modify_row, rows)) + + return modified, new_rows + + # Modify the PLTE chunk if necessary, and check if the PLTE modifications # affect the IDAT chunk. 
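+    # pypng's read() takes care of zlib decompression, de-filtering and bit
+    # unpacking, so each row below is treated as a flat sequence of palette
+    # indices for paletted images.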
plte_modified = False idat_modified = False - is_paletted = False - all_idat = [] - for index, chunk in enumerate(chunks): - if chunk.type == b"IHDR": - (width, height, bit_depth, colour_type, - compression, filtering, interlacing) = struct.unpack_from( - "!IIBBBBB", chunk.data - ) - is_paletted = colour_type == 3 and ( - bit_depth == 1 or bit_depth == 2 or - bit_depth == 4 or bit_depth == 8 - ) - elif is_paletted and chunk.type == b"PLTE": - chunk_name = chunk.type - plte_modified, chunk_data = maybe_modify_plte(chunk.data) - chunks[index] = PngChunk(chunk_name, chunk_data) - elif chunk.type == b"IDAT": - all_idat.append(chunk.data) - if plte_modified: - all_idat = bytes(flatten(all_idat)) - values = process_idat(all_idat, width, bit_depth, - compression, filtering, interlacing) - # png_reader = png.Reader(filename=png_path) - # width, height, values, info = png_reader.read_flat() - idat_modified = any(map(is_colour_changed, values)) - del values - del all_idat + width, height, rows, info = png_reader.read() + # "rows" is an iterator, so it needs to be a list so that it can be + # modified if the image is true-colour, or scanned if the image is + # paletted + rows = list(rows) + grayscale = info.get("greyscale") + is_paletted = info.get("palette") + bit_depth = info.get("bitdepth") + transparent_colour = info.get("transparent") + has_alpha = info.get("alpha", False) + if is_paletted: + plte_modified, new_palette = maybe_modify_plte(info["palette"]) + if plte_modified: + idat_modified = any(map(is_paletted_colour_changed, rows)) + # This code doesn't seem to work properly; it modifies plssa0.png, but it + # should not modify bpaka0.png + # + # elif not grayscale: + # new_palette = None + # idat_modified, rows = maybe_modify_truecolour_image( + # rows, info.get("planes"), transparent_colour) # Write the modified PNG file if idat_modified: print("{} was changed".format(relpath(png_path, start=directory))) if not dry: - with open(png_path, "wb") as handle: - handle.write(PNG_SIGNATURE) - for chunk in chunks: - chunk_crc = zlib.crc32(chunk.type) - chunk_crc = zlib.crc32(chunk.data, chunk_crc) - chunk_crc = struct.pack("!I", chunk_crc) - chunk_len = struct.pack("!I", len(chunk.data)) - handle.write(chunk_len) - handle.write(chunk.type) - handle.write(chunk.data) - handle.write(chunk_crc) + with open(png_path, "wb") as png_file: + png_writer = png.Writer( + width, height, bitdepth=bit_depth, palette=new_palette, + alpha=has_alpha, transparent=transparent_colour) + png_writer.write(png_file, rows) return idat_modified From 2da59702c2e1d8935ea8e25ea6103e926e2aee30 Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Sat, 4 Nov 2023 15:19:30 -0400 Subject: [PATCH 12/13] Fix bugs with true colour PNG support --- scripts/update-palette | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index d7b49ecb..82e68ae5 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -33,10 +33,7 @@ def parse_args(): "update-palette", description="This script takes a new palette, compares the new " "palette with the old one, and scans and updates images in the repo " - "which use the colours that were replaced in the new palette. This " - "script only supports indexed-colour PNGs. You may need to run 'make " - "fix-deutex-pngs' before running this script in order to palettize " - "any true-colour PNGs in the repo." + "which use the colours that were replaced in the new palette." 
) parser.add_argument("palette", help="The new palette to use") # This is a potential vulnerability, and besides, it doesn't make @@ -208,7 +205,7 @@ def process_png(colour_map, png_path, dry, directory): ) if co_alpha is not None: new_colour = (*new_colour, co_alpha) - modified = new_colour == co + modified = modified or new_colour != co return new_colour pixels = map(maybe_modify_colour, grouper(row, channels)) @@ -233,17 +230,15 @@ def process_png(colour_map, png_path, dry, directory): bit_depth = info.get("bitdepth") transparent_colour = info.get("transparent") has_alpha = info.get("alpha", False) + channels = info.get("planes") if is_paletted: plte_modified, new_palette = maybe_modify_plte(info["palette"]) if plte_modified: idat_modified = any(map(is_paletted_colour_changed, rows)) - # This code doesn't seem to work properly; it modifies plssa0.png, but it - # should not modify bpaka0.png - # - # elif not grayscale: - # new_palette = None - # idat_modified, rows = maybe_modify_truecolour_image( - # rows, info.get("planes"), transparent_colour) + elif not grayscale: + new_palette = None + idat_modified, rows = maybe_modify_truecolour_image( + rows, channels, transparent_colour) # Write the modified PNG file if idat_modified: @@ -252,7 +247,8 @@ def process_png(colour_map, png_path, dry, directory): with open(png_path, "wb") as png_file: png_writer = png.Writer( width, height, bitdepth=bit_depth, palette=new_palette, - alpha=has_alpha, transparent=transparent_colour) + alpha=has_alpha, transparent=transparent_colour, + greyscale=grayscale) png_writer.write(png_file, rows) return idat_modified From 8aa60e87e9d0d650a475b5c7c6dbf91c27ba3790 Mon Sep 17 00:00:00 2001 From: Kevin Caccamo Date: Fri, 17 Nov 2023 15:59:12 -0500 Subject: [PATCH 13/13] Initial work on grayscale PNG support for update-palette --- scripts/update-palette | 79 +++++++++++++++++++++++++++++++----------- 1 file changed, 59 insertions(+), 20 deletions(-) diff --git a/scripts/update-palette b/scripts/update-palette index 82e68ae5..1e665715 100755 --- a/scripts/update-palette +++ b/scripts/update-palette @@ -16,16 +16,25 @@ # - pypng: https://gitlab.com/drj11/pypng (retrieved Sept 10 2023) import argparse +import array from functools import reduce -from itertools import zip_longest +from itertools import cycle, zip_longest import struct +from operator import eq import os from os.path import dirname, relpath, realpath, join, normpath import png from sys import argv import zlib -PNG_SIGNATURE = b"\x89PNG\r\n\x1a\n" +# Misc. utility functions +# https://docs.python.org/3.8/library/itertools.html#itertools-recipes +def grouper(iterable, n, fillvalue=None): + "Collect data into fixed-length chunks or blocks" + # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx" + args = [iter(iterable)] * n + return zip_longest(*args, fillvalue=fillvalue) + # Parse the command line arguments, and return a dict with the arguments def parse_args(): @@ -51,14 +60,6 @@ def parse_args(): return args -# https://docs.python.org/3.8/library/itertools.html#itertools-recipes -def grouper(iterable, n, fillvalue=None): - "Collect data into fixed-length chunks or blocks" - # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx" - args = [iter(iterable)] * n - return zip_longest(*args, fillvalue=fillvalue) - - # Compare the old palette and the new palette, and return a dict with the # differences. 
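+# Besides the colour-to-colour dict, this also builds gray_map, which maps
+# each 8-bit grayscale level to a replacement colour whenever the nearest
+# grayscale entry in the palette was changed.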
def compare_palettes(directory, new_palette): @@ -115,8 +116,38 @@ def compare_palettes(directory, new_palette): for colour, indices in old_palette_duplicates.items(): if all(map(lambda i: i in old_to_new.keys(), indices)): replaced[colour] = old_to_new[min(indices)] - else: - replaced[colour] = None + print("replaced", replaced) + + # Find the closest colour in the old palette for each grayscale colour + def is_grayscale(colour): + return all(map(eq, colour[1], cycle((colour[1][0],)))) + # def to_grayscale(colour): + # return colour[0] if is_grayscale(colour) else colour + grayscale_colours_in_palette = dict( + map(lambda g: (g[0], g[1][0]), + filter(is_grayscale, enumerate(new_palette))) + ) + def closest_grayscale(grayscale): + nonlocal grayscale_colours_in_palette + distances = sorted(map( + lambda kv: (kv[0], abs(kv[1] - grayscale)), + grayscale_colours_in_palette.items() + ), key=lambda kv: kv[1]) + closest = distances[0][1] + distances = sorted( + filter(lambda kv: kv[1] == closest, distances), + key=lambda kv: kv[0]) + return distances[0][0] + # This is a map from grayscale to palette index + gray_map = array.array('B', map(closest_grayscale, range(256))) + # Turn it into a grayscale to colour map, in case any grayscale colours + # were changed in the new palette + gray_map = dict( + map(lambda g: (g[0], old_to_new[g[1]]), + filter( + lambda g: g[1] in old_to_new, + enumerate(gray_map)))) + print(len(gray_map), gray_map) # Replace the keys in old_to_new, which are indices, with the old palette # colours they correspond to. This way, we have a colour-to-colour dict. @@ -125,12 +156,12 @@ def compare_palettes(directory, new_palette): replaced.get(old_palette[iv[0]], iv[1])), old_to_new.items() ))) - return old_to_new + return old_to_new, gray_map # "Stolen" from the map-color-index script # Process a directory recursively for PNG files. -def process_dir(colour_map, dry_run, directory, palette): +def process_dir(colour_map, gray_map, dry_run, directory, palette): pngs_changed_count = 0 pngs_examined_count = 0 @@ -140,12 +171,12 @@ def process_dir(colour_map, dry_run, directory, palette): continue png_path = os.path.join(dirpath, png_base) pngs_examined_count += 1 - if process_png(colour_map, png_path, dry_run, directory): + if process_png(colour_map, gray_map, png_path, dry_run, directory): pngs_changed_count += 1 # Process a PNG file in place. -def process_png(colour_map, png_path, dry, directory): +def process_png(colour_map, gray_map, png_path, dry, directory): # Read the PNG file png_reader = png.Reader(filename=png_path) @@ -216,8 +247,9 @@ def process_png(colour_map, png_path, dry, directory): return modified, new_rows - # Modify the PLTE chunk if necessary, and check if the PLTE modifications - # affect the IDAT chunk. + def maybe_modify_grayscale_image(rows, channels): + pass + plte_modified = False idat_modified = False width, height, rows, info = png_reader.read() @@ -232,6 +264,8 @@ def process_png(colour_map, png_path, dry, directory): has_alpha = info.get("alpha", False) channels = info.get("planes") if is_paletted: + # Modify the PLTE chunk if necessary, and check if the PLTE + # modifications affect the colours used in the IDAT chunk. 
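+        # For paletted images the decoded rows hold palette indices, so one
+        # pass over them is enough to tell whether a remapped entry is used.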
plte_modified, new_palette = maybe_modify_plte(info["palette"]) if plte_modified: idat_modified = any(map(is_paletted_colour_changed, rows)) @@ -239,6 +273,8 @@ def process_png(colour_map, png_path, dry, directory): new_palette = None idat_modified, rows = maybe_modify_truecolour_image( rows, channels, transparent_colour) + else: + idat_modified, rows = maybe_modify_grayscale_image(rows, channels) # Write the modified PNG file if idat_modified: @@ -257,8 +293,11 @@ def process_png(colour_map, png_path, dry, directory): if __name__ == "__main__": args = parse_args() directory = normpath(join(dirname(realpath(argv[0])), "..")) - comparison = compare_palettes(directory, args.palette) - process_dir(comparison, directory=directory, **vars(args)) + colour_map, gray_map = compare_palettes(directory, args.palette) + print("The rest of the script is disabled for development right now") + exit(0) + + process_dir(colour_map, gray_map, directory=directory, **vars(args)) # Replace old playpal-base.lmp if not args.dry_run: playpal_base_path = (