import pickle
import struct
import traceback

import cv2
import numpy as np

from core import imagelib
from core.cv2ex import *
from core.imagelib import SegIEPolys
from core.interact import interact as io
from core.structex import *
from facelib import FaceType


class DFLJPG(object):
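    """
    A JPEG image that carries DeepFaceLab metadata.

    The metadata is a pickled dict stored in a JPEG APP15 segment. Keys used by
    this module include 'face_type', 'landmarks', 'eyebrows_expand_mod',
    'source_filename', 'source_rect', 'source_landmarks', 'image_to_face_mat',
    'seg_ie_polys' and 'xseg_mask'. Use DFLJPG.load() to parse a file and the
    get_*/set_* accessors plus save() to read or update the metadata.
    """
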
    def __init__(self, filename):
        self.filename = filename
        self.data = b""
        self.length = 0
        self.chunks = []
        self.dfl_dict = None
        self.shape = None
        self.img = None

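    # load_raw() below walks the raw JPEG byte stream: every segment starts with
    # 0xFF followed by a marker byte; most segments then carry a 2-byte big-endian
    # length that includes the length field itself, and the SOS segment is
    # followed by entropy-coded data that runs until the EOI marker (0xFF 0xD9).
    # As a minimal, illustrative sketch of that framing (not part of this module,
    # hypothetical path), listing a file's markers could look like:
    #
    #   with open("some_face.jpg", "rb") as f:
    #       buf = f.read()
    #   pos = 0
    #   while pos + 2 <= len(buf) and buf[pos] == 0xFF:
    #       marker = buf[pos + 1]
    #       print(f"marker 0x{marker:02X} at offset {pos}")
    #       if marker == 0xDA:                       # SOS: entropy-coded data follows
    #           break
    #       if marker in (0xD8, 0xD9):               # SOI / EOI carry no payload
    #           pos += 2
    #       else:
    #           size, = struct.unpack(">H", buf[pos + 2:pos + 4])
    #           pos += 2 + size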
    @staticmethod
    def load_raw(filename, loader_func=None):
        try:
            if loader_func is not None:
                data = loader_func(filename)
            else:
                with open(filename, "rb") as f:
                    data = f.read()
        except:
            raise FileNotFoundError(filename)

        try:
            inst = DFLJPG(filename)
            inst.data = data
            inst.length = len(data)
            inst_length = inst.length
            chunks = []
            data_counter = 0
            while data_counter < inst_length:
                # Every JPEG segment starts with 0xFF followed by a marker byte.
                chunk_m_l, chunk_m_h = struct.unpack("BB", data[data_counter:data_counter+2])
                data_counter += 2

                if chunk_m_l != 0xFF:
                    raise ValueError(f"No Valid JPG info in {filename}")

                chunk_name = None
                chunk_size = None
                chunk_data = None
                chunk_ex_data = None
                is_unk_chunk = False

                if chunk_m_h & 0xF0 == 0xD0:
                    n = chunk_m_h & 0x0F

                    if 0 <= n <= 7:
                        chunk_name = "RST%d" % (n)
                        chunk_size = 0
                    elif n == 0x8:
                        chunk_name = "SOI"
                        chunk_size = 0
                        if len(chunks) != 0:
                            raise Exception("SOI marker found, but not at the start of the file")
                    elif n == 0x9:
                        chunk_name = "EOI"
                        chunk_size = 0
                    elif n == 0xA:
                        chunk_name = "SOS"
                    elif n == 0xB:
                        chunk_name = "DQT"
                    elif n == 0xD:
                        chunk_name = "DRI"
                        chunk_size = 2
                    else:
                        is_unk_chunk = True
                elif chunk_m_h & 0xF0 == 0xC0:
                    n = chunk_m_h & 0x0F
                    if n == 0:
                        chunk_name = "SOF0"
                    elif n == 2:
                        chunk_name = "SOF2"
                    elif n == 4:
                        chunk_name = "DHT"
                    else:
                        is_unk_chunk = True
                elif chunk_m_h & 0xF0 == 0xE0:
                    n = chunk_m_h & 0x0F
                    chunk_name = "APP%d" % (n)
                else:
                    is_unk_chunk = True

                #if is_unk_chunk:
                #    #raise ValueError(f"Unknown chunk {chunk_m_h} in {filename}")
                #    io.log_info(f"Unknown chunk {chunk_m_h} in {filename}")

                if chunk_size is None:  # variable size: 2-byte big-endian length that includes itself
                    chunk_size, = struct.unpack(">H", data[data_counter:data_counter+2])
                    chunk_size -= 2
                    data_counter += 2

                if chunk_size > 0:
                    chunk_data = data[data_counter:data_counter+chunk_size]
                    data_counter += chunk_size

                if chunk_name == "SOS":
                    # The entropy-coded scan data runs until the EOI marker (0xFF 0xD9).
                    c = data_counter
                    while c < inst_length and (data[c] != 0xFF or data[c+1] != 0xD9):
                        c += 1

                    chunk_ex_data = data[data_counter:c]
                    data_counter = c

                chunks.append({'name': chunk_name,
                               'm_h': chunk_m_h,
                               'data': chunk_data,
                               'ex_data': chunk_ex_data,
                               })
            inst.chunks = chunks

            return inst
        except Exception as e:
            raise Exception(f"Corrupted JPG file {filename} {e}")

    @staticmethod
    def load(filename, loader_func=None):
        try:
            inst = DFLJPG.load_raw(filename, loader_func=loader_func)
            inst.dfl_dict = {}

            for chunk in inst.chunks:
                if chunk['name'] == 'APP0':
                    d, c = chunk['data'], 0
                    c, id, _ = struct_unpack(d, c, "=4sB")

                    if id == b"JFIF":
                        c, ver_major, ver_minor, units, Xdensity, Ydensity, Xthumbnail, Ythumbnail = struct_unpack(d, c, "=BBBHHBB")
                    else:
                        raise Exception("Unknown jpeg ID: %s" % (id))
                elif chunk['name'] == 'SOF0' or chunk['name'] == 'SOF2':
                    # Frame header: the image dimensions come from the SOF segment.
                    d, c = chunk['data'], 0
                    c, precision, height, width = struct_unpack(d, c, ">BHH")
                    inst.shape = (height, width, 3)

                elif chunk['name'] == 'APP15':
                    # DeepFaceLab metadata: a pickled dict stored in the APP15 segment.
                    if type(chunk['data']) == bytes:
                        inst.dfl_dict = pickle.loads(chunk['data'])

            return inst
        except Exception as e:
            io.log_err(f'Exception occurred while DFLJPG.load : {traceback.format_exc()}')
            return None

    def has_data(self):
        return len(self.dfl_dict.keys()) != 0

    def save(self):
        try:
            with open(self.filename, "wb") as f:
                f.write(self.dump())
        except:
            raise Exception(f'cannot save {self.filename}')

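    # dump() below re-emits every parsed segment and appends the metadata as a
    # standard APP15 segment: 0xFF 0xEF, a 2-byte big-endian length equal to
    # len(payload) + 2, then the pickled dfl_dict. As a minimal, illustrative
    # sketch (not part of this module; hypothetical path, and it assumes the
    # first 0xFF 0xEF found is the DFL segment), the payload written by save()
    # could be re-read without DFLJPG roughly like this:
    #
    #   with open("some_face.jpg", "rb") as f:
    #       buf = f.read()
    #   pos = buf.find(b"\xFF\xEF")
    #   if pos >= 0:
    #       size, = struct.unpack(">H", buf[pos + 2:pos + 4])
    #       meta = pickle.loads(buf[pos + 4:pos + 2 + size])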
    def dump(self):
        data = b""

        dict_data = self.dfl_dict

        # Remove None keys
        for key in list(dict_data.keys()):
            if dict_data[key] is None:
                dict_data.pop(key)

        # Drop any previously stored DFL segment before appending a fresh one.
        for chunk in self.chunks:
            if chunk['name'] == 'APP15':
                self.chunks.remove(chunk)
                break

        # Insert the metadata right after the last APPn segment.
        last_app_chunk = 0
        for i, chunk in enumerate(self.chunks):
            if chunk['m_h'] & 0xF0 == 0xE0:
                last_app_chunk = i

        dflchunk = {'name': 'APP15',
                    'm_h': 0xEF,
                    'data': pickle.dumps(dict_data),
                    'ex_data': None,
                    }
        self.chunks.insert(last_app_chunk+1, dflchunk)

        for chunk in self.chunks:
            data += struct.pack("BB", 0xFF, chunk['m_h'])
            chunk_data = chunk['data']
            if chunk_data is not None:
                data += struct.pack(">H", len(chunk_data)+2)
                data += chunk_data

            chunk_ex_data = chunk['ex_data']
            if chunk_ex_data is not None:
                data += chunk_ex_data

        return data

    def get_img(self):
        if self.img is None:
            self.img = cv2_imread(self.filename)
        return self.img

    def get_shape(self):
        if self.shape is None:
            img = self.get_img()
            if img is not None:
                self.shape = img.shape
        return self.shape

    def get_height(self):
        # Image height taken from the parsed shape (SOF segment or decoded image).
        shape = self.get_shape()
        if shape is not None:
            return shape[0]
        return 0

    def get_dict(self):
        return self.dfl_dict

    def set_dict(self, dict_data=None):
        self.dfl_dict = dict_data

    def get_face_type(self): return self.dfl_dict.get('face_type', FaceType.toString(FaceType.FULL))
    def set_face_type(self, face_type): self.dfl_dict['face_type'] = face_type

    def get_landmarks(self): return np.array(self.dfl_dict['landmarks'])
    def set_landmarks(self, landmarks): self.dfl_dict['landmarks'] = landmarks

    def get_eyebrows_expand_mod(self): return self.dfl_dict.get('eyebrows_expand_mod', 1.0)
    def set_eyebrows_expand_mod(self, eyebrows_expand_mod): self.dfl_dict['eyebrows_expand_mod'] = eyebrows_expand_mod

    def get_source_filename(self): return self.dfl_dict.get('source_filename', None)
    def set_source_filename(self, source_filename): self.dfl_dict['source_filename'] = source_filename

    def get_source_rect(self): return self.dfl_dict.get('source_rect', None)
    def set_source_rect(self, source_rect): self.dfl_dict['source_rect'] = source_rect

    def get_source_landmarks(self): return np.array(self.dfl_dict.get('source_landmarks', None))
    def set_source_landmarks(self, source_landmarks): self.dfl_dict['source_landmarks'] = source_landmarks

    def get_image_to_face_mat(self):
        mat = self.dfl_dict.get('image_to_face_mat', None)
        if mat is not None:
            return np.array(mat)
        return None

    def set_image_to_face_mat(self, image_to_face_mat): self.dfl_dict['image_to_face_mat'] = image_to_face_mat

    def has_seg_ie_polys(self):
        return self.dfl_dict.get('seg_ie_polys', None) is not None

    def get_seg_ie_polys(self):
        d = self.dfl_dict.get('seg_ie_polys', None)
        if d is not None:
            d = SegIEPolys.load(d)
        else:
            d = SegIEPolys()

        return d

    def set_seg_ie_polys(self, seg_ie_polys):
        if seg_ie_polys is not None:
            if not isinstance(seg_ie_polys, SegIEPolys):
                raise ValueError('seg_ie_polys should be an instance of SegIEPolys')

            # Store the serialized polys only if there is anything to store.
            if seg_ie_polys.has_polys():
                seg_ie_polys = seg_ie_polys.dump()
            else:
                seg_ie_polys = None

        self.dfl_dict['seg_ie_polys'] = seg_ie_polys

    def has_xseg_mask(self):
        return self.dfl_dict.get('xseg_mask', None) is not None

    def get_xseg_mask_compressed(self):
        mask_buf = self.dfl_dict.get('xseg_mask', None)
        if mask_buf is None:
            return None

        return mask_buf

    def get_xseg_mask(self):
        mask_buf = self.dfl_dict.get('xseg_mask', None)
        if mask_buf is None:
            return None

        img = cv2.imdecode(mask_buf, cv2.IMREAD_UNCHANGED)
        if len(img.shape) == 2:
            img = img[..., None]

        return img.astype(np.float32) / 255.0

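    # get_xseg_mask() returns a float32 HxWx1 array in [0, 1]. A minimal usage
    # sketch (illustrative only; 'dflimg' is assumed to be a loaded DFLJPG with
    # an XSeg mask stored at the same resolution as the face image):
    #
    #   face = dflimg.get_img().astype(np.float32) / 255.0
    #   mask = dflimg.get_xseg_mask()          # HxWx1, broadcasts over the BGR channels
    #   masked_face = face * mask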
    def set_xseg_mask(self, mask_a):
        if mask_a is None:
            self.dfl_dict['xseg_mask'] = None
            return

        mask_a = imagelib.normalize_channels(mask_a, 1)
        img_data = np.clip(mask_a*255, 0, 255).astype(np.uint8)

        data_max_len = 50000

        # Prefer lossless PNG; if encoding fails or exceeds the size budget,
        # fall back to JPEG, lowering the quality until the buffer fits.
        ret, buf = cv2.imencode('.png', img_data)

        if not ret or len(buf) > data_max_len:
            for jpeg_quality in range(100, -1, -1):
                ret, buf = cv2.imencode('.jpg', img_data, [int(cv2.IMWRITE_JPEG_QUALITY), jpeg_quality])
                if ret and len(buf) <= data_max_len:
                    break

        if not ret:
            raise Exception("set_xseg_mask: unable to generate image data for xseg_mask")

        self.dfl_dict['xseg_mask'] = buf
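

# Minimal usage sketch (illustrative only): the path below is hypothetical and
# the snippet assumes a face image previously written by the DeepFaceLab
# extractor, i.e. one that already carries an APP15 metadata segment with
# landmarks present.
if __name__ == "__main__":
    dflimg = DFLJPG.load("workspace/data_dst/aligned/00001.jpg")
    if dflimg is None or not dflimg.has_data():
        io.log_info("Not a DFL image or no metadata present")
    else:
        io.log_info(f"shape           : {dflimg.get_shape()}")
        io.log_info(f"face type       : {dflimg.get_face_type()}")
        io.log_info(f"source filename : {dflimg.get_source_filename()}")
        io.log_info(f"landmarks       : {dflimg.get_landmarks().shape}")
        io.log_info(f"has XSeg mask   : {dflimg.has_xseg_mask()}")

        # Update a field and write the metadata back into the JPEG.
        dflimg.set_eyebrows_expand_mod(1.0)
        dflimg.save()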