Encoding with either of the two palettes is now possible. The current best-fit code consistently favors the b&w palette.

mmcwilliams 2024-12-24 00:30:31 -05:00
parent 7a366525af
commit 60881319ab
8 changed files with 399 additions and 24 deletions

common.py (new file, +168 lines)

@@ -0,0 +1,168 @@
import cv2
import numpy as np

from math import sqrt

def convert_color (color, color_space_a, color_space_b) :
    pixel = np.zeros([1, 1, 3], dtype=np.uint8)
    if color_space_a == 'RGB' :
        pixel = cv2.cvtColor(pixel, cv2.COLOR_BGR2RGB)
    elif color_space_a == 'LAB' :
        pixel = cv2.cvtColor(pixel, cv2.COLOR_BGR2LAB)
    elif color_space_a == 'HSV' :
        pixel = cv2.cvtColor(pixel, cv2.COLOR_BGR2HSV)
    #default is BGR
    pixel[:] = color
    if color_space_a == 'RGB' and color_space_b == 'BGR' :
        b = cv2.COLOR_RGB2BGR
    elif color_space_a == 'BGR' and color_space_b == 'RGB' :
        b = cv2.COLOR_BGR2RGB
    elif color_space_a == 'RGB' and color_space_b == 'LAB' :
        b = cv2.COLOR_RGB2LAB
    elif color_space_a == 'LAB' and color_space_b == 'RGB' :
        b = cv2.COLOR_LAB2RGB
    elif color_space_a == 'BGR' and color_space_b == 'LAB' :
        b = cv2.COLOR_BGR2LAB
    elif color_space_a == 'LAB' and color_space_b == 'BGR' :
        b = cv2.COLOR_LAB2BGR
    elif color_space_a == 'HSV' and color_space_b == 'LAB' :
        #OpenCV has no direct HSV<->LAB codes, so route through BGR first
        pixel = cv2.cvtColor(pixel, cv2.COLOR_HSV2BGR)
        b = cv2.COLOR_BGR2LAB
    elif color_space_a == 'LAB' and color_space_b == 'HSV' :
        pixel = cv2.cvtColor(pixel, cv2.COLOR_LAB2BGR)
        b = cv2.COLOR_BGR2HSV
    elif color_space_a == 'RGB' and color_space_b == 'HSV' :
        b = cv2.COLOR_RGB2HSV
    elif color_space_a == 'HSV' and color_space_b == 'RGB' :
        b = cv2.COLOR_HSV2RGB
    elif color_space_a == 'BGR' and color_space_b == 'HSV' :
        b = cv2.COLOR_BGR2HSV
    elif color_space_a == 'HSV' and color_space_b == 'BGR' :
        b = cv2.COLOR_HSV2BGR
    elif color_space_a == 'RGB' and color_space_b == 'RGB' :
        b = None
    elif color_space_a == 'BGR' and color_space_b == 'BGR' :
        b = None
    elif color_space_a == 'LAB' and color_space_b == 'LAB' :
        b = None
    elif color_space_a == 'HSV' and color_space_b == 'HSV' :
        b = None
    if b is not None :
        cvt = cv2.cvtColor(pixel, b)
    else :
        cvt = pixel
    return cvt[0, 0]

def closest_color (colors, color):
    colors = np.array(colors)
    color = np.array(color)
    distances = np.sqrt(np.sum((colors - color) ** 2, axis=1))
    index_of_smallest = np.where(distances == np.amin(distances))
    smallest_distance = colors[index_of_smallest]
    return smallest_distance[0]

# Works for RGB, BGR, LAB and HSV(?)
def closest_color_euclidean (colors, color) :
    #print(len(colors))
    mDist = float('inf')
    mIdx = -1
    color = [float(i) for i in list(color)]
    for idx, comp in enumerate(colors) :
        comp = [float(i) for i in list(comp)]
        dist = euclidean_distance(comp[0], comp[1], comp[2], color[0], color[1], color[2])
        #print(f'{color} -> {comp} = {dist}')
        if dist < mDist :
            mDist = dist
            mIdx = idx
    return colors[mIdx], mDist

def closest_color_weighted_euclidean (colors, color, space) :
    #print(len(colors))
    mDist = float('inf')
    mIdx = -1
    color = [float(i) for i in list(color)]
    for idx, comp in enumerate(colors) :
        comp = [float(i) for i in list(comp)]
        if space == 'BGR' :
            dist = weighted_euclidean_distance(comp[2], comp[1], comp[0], color[2], color[1], color[0])
        elif space == 'RGB' :
            dist = weighted_euclidean_distance(comp[0], comp[1], comp[2], color[0], color[1], color[2])
        else :
            raise Exception(f'closest_color_weighted_euclidean does not support color space {space}')
        #print(f'{color} -> {comp} = {dist}')
        if dist < mDist :
            mDist = dist
            mIdx = idx
    return colors[mIdx], mDist

def create_colored_image (width, height, bgr_color):
    image = np.zeros((height, width, 3), np.uint8)
    image[:] = bgr_color
    return image

def remove_from_list (l, item) :
    new_array = []
    for i in l :
        if not list_match(i, item) :
            new_array.append(i)
    return new_array

def list_match (a, b) :
    for i in range(len(a)) :
        if a[i] != b[i] :
            return False
    return True

def rgb_euclidean_distance (rgba, rgbb) :
    return euclidean_distance(rgba[0], rgba[1], rgba[2], rgbb[0], rgbb[1], rgbb[2])

def bgr_euclidean_distance (bgra, bgrb) :
    return euclidean_distance(bgra[2], bgra[1], bgra[0], bgrb[2], bgrb[1], bgrb[0])

def numpy_distance (r1, g1, b1, r2, g2, b2) :
    p0 = np.array([r1, g1, b1])
    p1 = np.array([r2, g2, b2])
    d = np.linalg.norm(p0 - p1)
    return d
    #return sqrt(pow(abs(r1-r2), 2) + pow(abs(g1-g2), 2) + pow(abs(b1-b2), 2))

def euclidean_distance (r1, g1, b1, r2, g2, b2):
    return sqrt((r2 - r1)**2 + (g2 - g1)**2 + (b2 - b1)**2)

def weighted_euclidean_distance (r1, g1, b1, r2, g2, b2) :
    R = 0.30
    G = 0.59
    B = 0.11
    #print(type(r1))
    return sqrt( ((r2-r1) * R)**2 + ((g2-g1) * G)**2 + ((b2-b1) * B)**2 )

def to_luma (color, space) :
    color = [float(i) for i in list(color)]
    if space == 'RGB' :
        return rgb_to_luma(color[0], color[1], color[2])
    elif space == 'BGR' :
        return rgb_to_luma(color[2], color[1], color[0])

def rgb_to_luma (r, g, b) :
    R = 0.2126
    G = 0.7152
    B = 0.0722
    return (r * R) + (g * G) + (b * B)

def convertScale(img, alpha, beta):
    """Add bias and gain to an image with saturation arithmetics. Unlike
    cv2.convertScaleAbs, it does not take an absolute value, which would lead to
    nonsensical results (e.g., a pixel at 44 with alpha = 3 and beta = -210
    becomes 78 with OpenCV, when in fact it should become 0).
    """
    new_img = img * alpha + beta
    new_img[new_img < 0] = 0
    new_img[new_img > 255] = 255
    return new_img.astype(np.uint8)
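A quick usage sketch for these helpers (illustration only, not part of the commit; the 4-entry palette and pixel value below are made up): closest_color_weighted_euclidean expects the palette and the pixel in the same channel order, convert_color round-trips a single color through OpenCV, and rgb_to_luma applies Rec. 709 weights.

from common import closest_color_weighted_euclidean, convert_color, rgb_to_luma

palette = [[255, 0, 0], [0, 255, 0], [0, 0, 255], [255, 255, 255]]  # hypothetical RGB palette
pixel = [200, 180, 20]                                              # made-up RGB pixel

closest, dist = closest_color_weighted_euclidean(palette, pixel, 'RGB')
print(closest, dist)                       # nearest palette entry and its weighted distance
print(convert_color(pixel, 'RGB', 'LAB'))  # the same color expressed in 8-bit LAB
print(rgb_to_luma(*pixel))                 # Rec. 709 luma of the pixel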

View File

@@ -1,5 +1,7 @@
import cv2
from argparse import ArgumentParser
import json

from common import convert_color, closest_color, create_colored_image, remove_from_list, closest_color_euclidean, closest_color_weighted_euclidean, euclidean_distance, weighted_euclidean_distance

parser = ArgumentParser()
@@ -8,18 +10,69 @@ parser.add_argument('output', help='Output bin')
args = parser.parse_args()

with open('palette16.json', 'r') as file:
    palette16 = json.load(file)

with open('palettegray.json', 'r') as file:
    palettegray = json.load(file)

def find_array_index(arrays, target):
    return next((i for i, arr in enumerate(arrays) if arr == target), -1)

#all 256 ordered pairs of palette indexes; the list index of pair [a, b] is a * 16 + b
palette = []
for a in range(16) :
    for b in range(16) :
        palette.append([ a, b ])
#print(palette)
#exit()

file_path = args.output

img = cv2.imread(args.input, cv2.IMREAD_GRAYSCALE)
img = cv2.imread(args.input, cv2.IMREAD_COLOR) #cv2.COLOR_BGR2RGB is a cvtColor code, not an imread flag
height, width, depth = img.shape
height, width = img.shape

#old grayscale path removed by this commit:
vals = []
for i in range(height):
    for j in range(width):
        k = img[i, j]
        vals.append(k)

def compare (p) :
    score = 0
    pair = [0, 0] #pre-filled so the first flush reserves vals[0] for the palette flag
    vals = []
    for i in range(height):
        for j in range(width):
            k = img[i, j]
            closest, dist = closest_color_weighted_euclidean(p, k, 'RGB')
            score += dist
            #print(closest)
            index = find_array_index(p, closest)
            if len(pair) == 2 :
                b = find_array_index(palette, pair)
                if b == -1 :
                    print(f'{pair}')
                vals.append(b)
                pair = []
            if len(pair) < 2 :
                pair.append(index)
    if len(pair) == 1 :
        pair.append(0)
    if len(pair) == 2 :
        vals.append(find_array_index(palette, pair))
    return score, vals

score16, vals16 = compare(palette16['palette'])
scoregray, valsgray = compare(palettegray['palette'])

print(f'{scoregray} vs. {score16 / 3}')
if scoregray < (score16 / 3) :
    vals = valsgray
    vals[0] = 0
    print('B&W')
else :
    vals = vals16
    vals[0] = 1
    print('Color')

print(f'{len(vals)} bytes')

with open(file_path, "wb") as file:
    file.write(bytes(vals))
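For reference, the stream written above can be expanded back into pixels the same way the browser viewer does: byte 0 is the palette flag (0 = b&w, 1 = color) and every later byte is the index of an ordered pair of palette entries, i.e. a * 16 + b. A minimal decode sketch under those assumptions; the 'frame.bin' and 'decoded.png' paths are placeholders, and the 19x34 frame size comes from the viewer and the ffmpeg crop elsewhere in this commit.

import json
import numpy as np
import cv2

WIDTH, HEIGHT = 19, 34

with open('palette16.json', 'r') as f:
    color_palette = json.load(f)['palette']
with open('palettegray.json', 'r') as f:
    gray_palette = json.load(f)['palette']

with open('frame.bin', 'rb') as f:
    data = f.read()

palette = gray_palette if data[0] == 0 else color_palette

pixels = []
for byte in data[1:]:
    pixels.append(palette[byte // 16])  # first palette index of the pair
    pixels.append(palette[byte % 16])   # second palette index of the pair

img = np.array(pixels, dtype=np.uint8).reshape(HEIGHT, WIDTH, 3)
cv2.imwrite('decoded.png', img[:, :, ::-1])  # palette entries read as RGB; OpenCV writes BGR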

palette16.json (new file, +20 lines)

@@ -0,0 +1,20 @@
{
    "palette" : [
        [255, 0, 0],
        [0, 255, 0],
        [0, 0, 255],
        [255, 255, 0],
        [255, 0, 255],
        [0, 255, 255],
        [255, 128, 0],
        [128, 0, 255],
        [0, 255, 128],
        [255, 255, 255],
        [0, 0, 0],
        [128, 128, 128],
        [128, 64, 0],
        [0, 128, 128],
        [255, 192, 203],
        [64, 128, 0]
    ]
}

palettegray.json (new file, +20 lines)

@@ -0,0 +1,20 @@
{
    "palette": [
        [0, 0, 0],
        [17, 17, 17],
        [34, 34, 34],
        [51, 51, 51],
        [68, 68, 68],
        [85, 85, 85],
        [102, 102, 102],
        [119, 119, 119],
        [136, 136, 136],
        [153, 153, 153],
        [170, 170, 170],
        [187, 187, 187],
        [204, 204, 204],
        [221, 221, 221],
        [238, 238, 238],
        [255, 255, 255]
    ]
}
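Both palette files hold exactly 16 entries, which is what makes the byte packing line up with the viewer: the encoder's list of [a, b] index pairs and the viewer's list of color pairs are built with the same nested loops, so the list index of a pair is always a * 16 + b. A small sanity check of that arithmetic (illustration only):

pairs = []
for a in range(16):
    for b in range(16):
        pairs.append([a, b])

for byte, (a, b) in enumerate(pairs):
    assert byte == a * 16 + b                 # pair's list index equals the packed byte value
    assert [byte // 16, byte % 16] == [a, b]  # and division/modulo unpack it again
print('pair indexing round-trips for all 256 byte values')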

View File

@@ -37,7 +37,46 @@ var __generator = (this && this.__generator) || function (thisArg, body) {
var Content = /** @class */ (function () {
    function Content() {
        this.width = 19;
        this.height = 14;
        this.height = 34;
        this.size = 324;
        this.palette16 = [
            [255, 0, 0],
            [0, 255, 0],
            [0, 0, 255],
            [255, 255, 0],
            [255, 0, 255],
            [0, 255, 255],
            [255, 128, 0],
            [128, 0, 255],
            [0, 255, 128],
            [255, 255, 255],
            [0, 0, 0],
            [128, 128, 128],
            [128, 64, 0],
            [0, 128, 128],
            [255, 192, 203],
            [64, 128, 0]
        ];
        this.palettegray = [
            [0, 0, 0],
            [17, 17, 17],
            [34, 34, 34],
            [51, 51, 51],
            [68, 68, 68],
            [85, 85, 85],
            [102, 102, 102],
            [119, 119, 119],
            [136, 136, 136],
            [153, 153, 153],
            [170, 170, 170],
            [187, 187, 187],
            [204, 204, 204],
            [221, 221, 221],
            [238, 238, 238],
            [255, 255, 255]
        ];
        this.palette = [];
        this.gray = [];
        var dpr = window.devicePixelRatio;
        this.displayCanvas = document.getElementById('display');
        this.displayCtx = this.displayCanvas.getContext('2d');
@@ -46,6 +85,16 @@ var Content = /** @class */ (function () {
        //document.getElementById('video') as HTMLCanvasElement;
        this.videoCanvas = document.createElement('canvas');
        this.videoCtx = this.videoCanvas.getContext('2d');
        for (var a = 0; a < this.palette16.length; a++) {
            for (var b = 0; b < this.palette16.length; b++) {
                this.palette.push([this.palette16[a], this.palette16[b]]);
            }
        }
        for (var a = 0; a < this.palettegray.length; a++) {
            for (var b = 0; b < this.palettegray.length; b++) {
                this.gray.push([this.palettegray[a], this.palettegray[b]]);
            }
        }
        this.startCamera();
    }
    Content.prototype.startCamera = function () {
@@ -102,8 +151,8 @@ var Content = /** @class */ (function () {
        catch (err) {
            //
        }
        if (code !== null && code.binaryData.length === 266) {
            document.querySelector('#data').innerText = code.binaryData.toString();
        if (code !== null && code.binaryData.length === this.size) {
            //(document.querySelector('#data') as HTMLDivElement).innerText = code.binaryData.toString();
            //console.dir(code);
            this.display(code.binaryData);
        }
@@ -112,12 +161,18 @@ var Content = /** @class */ (function () {
        }
    };
    Content.prototype.display = function (data) {
        for (this.i = 0; this.i < data.length; this.i++) {
        var pos = 0;
        var palette = data[0] === 0 ? this.gray : this.palette;
        for (this.i = 1; this.i < data.length; this.i++) {
            this.val = data[this.i];
            this.x = this.i % this.width;
            this.y = Math.floor(this.i / this.width);
            this.displayCtx.fillStyle = "rgba(".concat(this.val, ",").concat(this.val, ",").concat(this.val, ",1.0)");
            this.displayCtx.fillRect(this.x, this.y, 1, 1);
            for (var _i = 0, _a = palette[this.val]; _i < _a.length; _i++) {
                var color = _a[_i];
                this.x = pos % this.width;
                this.y = Math.floor(pos / this.width);
                this.displayCtx.fillStyle = "rgba(".concat(color[0], ",").concat(color[1], ",").concat(color[2], ",1.0)");
                this.displayCtx.fillRect(this.x, this.y, 1, 1);
                pos += 1;
            }
        }
    };
    return Content;

File diff suppressed because one or more lines are too long

View File

@@ -1,6 +1,7 @@
class Content {
    private width : number = 19;
    private height : number = 14;
    private height : number = 34;
    private size : number = 324;
    private x : number;
    private y : number;
@@ -15,6 +16,47 @@ class Content {
    private video : HTMLVideoElement;
    private palette16 : number[][] = [
        [255, 0, 0],
        [0, 255, 0],
        [0, 0, 255],
        [255, 255, 0],
        [255, 0, 255],
        [0, 255, 255],
        [255, 128, 0],
        [128, 0, 255],
        [0, 255, 128],
        [255, 255, 255],
        [0, 0, 0],
        [128, 128, 128],
        [128, 64, 0],
        [0, 128, 128],
        [255, 192, 203],
        [64, 128, 0]
    ];
    private palettegray : number[][] = [
        [0, 0, 0],
        [17, 17, 17],
        [34, 34, 34],
        [51, 51, 51],
        [68, 68, 68],
        [85, 85, 85],
        [102, 102, 102],
        [119, 119, 119],
        [136, 136, 136],
        [153, 153, 153],
        [170, 170, 170],
        [187, 187, 187],
        [204, 204, 204],
        [221, 221, 221],
        [238, 238, 238],
        [255, 255, 255]
    ];
    private palette : number[][][] = [];
    private gray : number[][][] = [];
    constructor () {
        const dpr : number = window.devicePixelRatio;
@@ -26,6 +68,18 @@ class Content {
        //document.getElementById('video') as HTMLCanvasElement;
        this.videoCanvas = document.createElement('canvas') as HTMLCanvasElement;
        this.videoCtx = this.videoCanvas.getContext('2d');
        for (let a = 0; a < this.palette16.length; a++) {
            for (let b = 0; b < this.palette16.length; b++) {
                this.palette.push([this.palette16[a], this.palette16[b]]);
            }
        }
        for (let a = 0; a < this.palettegray.length; a++) {
            for (let b = 0; b < this.palettegray.length; b++) {
                this.gray.push([this.palettegray[a], this.palettegray[b]]);
            }
        }
        this.startCamera();
    }
@@ -80,7 +134,7 @@ class Content {
            //
        }
        if (code !== null && code.binaryData.length === 266) {
        if (code !== null && code.binaryData.length === this.size) {
            //(document.querySelector('#data') as HTMLDivElement).innerText = code.binaryData.toString();
            //console.dir(code);
            this.display(code.binaryData);
@@ -90,12 +144,17 @@ class Content {
    }
    display (data : any[]) {
        for (this.i = 0; this.i < data.length; this.i++) {
        let pos : number = 0;
        const palette : number[][][] = data[0] === 0 ? this.gray : this.palette;
        for (this.i = 1; this.i < data.length; this.i++) {
            this.val = data[this.i];
            this.x = this.i % this.width;
            this.y = Math.floor(this.i / this.width);
            this.displayCtx.fillStyle = `rgba(${this.val},${this.val},${this.val},1.0)`;
            this.displayCtx.fillRect(this.x, this.y, 1, 1);
            for (let color of palette[this.val]) {
                this.x = pos % this.width;
                this.y = Math.floor(pos / this.width);
                this.displayCtx.fillStyle = `rgba(${color[0]},${color[1]},${color[2]},1.0)`;
                this.displayCtx.fillRect(this.x, this.y, 1, 1);
                pos += 1;
            }
        }
    }
}

View File

@@ -5,7 +5,7 @@ H=600
TMP=$(mktemp -d)
ffmpeg -ss 203 -i "${1}" -r 8 -t 10 -s 19x14 "${TMP}/frame_%06d.png"
ffmpeg -ss 460 -i "${1}" -r 8 -t 30 -vf "scale=-1:34,crop=19:34:(iw-19)/2:0" "${TMP}/frame_%06d.png"
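# the -vf above: scale=-1:34 resizes to 34 px tall (width keeps the aspect ratio); crop=19:34:(iw-19)/2:0 then takes a 19x34 window centered horizontally at the top of the frame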
QR=$(mktemp -d)