extract_metadata: Improve error reporting

Signed-off-by: Eric Callahan <arksine.code@gmail.com>
This commit is contained in:
Arksine 2020-08-11 14:20:21 -04:00
parent 1d520310ac
commit b3f02a73a4
1 changed files with 42 additions and 34 deletions

View File

@ -10,6 +10,7 @@ import re
import os import os
import sys import sys
import time import time
import traceback
# regex helpers # regex helpers
def _regex_find_floats(pattern, data, strict=False): def _regex_find_floats(pattern, data, strict=False):
@ -113,13 +114,13 @@ class PrusaSlicer(BaseSlicer):
data = "".join(lines[1:-1]) data = "".join(lines[1:-1])
if len(info) != 3: if len(info) != 3:
self.log.append( self.log.append(
{'MetadataError': "Error parsing thumbnail header: %s" f"MetadataError: Error parsing thumbnail"
% (lines[0])}) f" header: {lines[0]}")
continue continue
if len(data) != info[2]: if len(data) != info[2]:
self.log.append( self.log.append(
{'MetadataError': "Thumbnail Size Mismatch: detected %d, " f"MetadataError: Thumbnail Size Mismatch: "
"actual %d" % (info[2], len(data))}) f"detected {info[2]}, actual {len(data)}")
continue continue
parsed_matches.append({ parsed_matches.append({
'width': info[0], 'height': info[1], 'width': info[0], 'height': info[1],
@ -291,43 +292,50 @@ SUPPORTED_DATA = [
'first_layer_height', 'layer_height', 'object_height', 'first_layer_height', 'layer_height', 'object_height',
'filament_total', 'estimated_time', 'thumbnails'] 'filament_total', 'estimated_time', 'thumbnails']
def extract_metadata(file_path, log):
    """Extract slicer metadata from the gcode file at *file_path*.

    Reads the head of the file to identify which supported slicer
    produced it, then hands the head and tail of the file to that
    slicer's parser.

    file_path: path to an existing gcode file.
    log: list collecting parse warnings/errors (mutated in place by
        the slicer's ``set_data``).

    Returns a dict always containing 'size' and 'modified', plus
    'slicer' and any SUPPORTED_DATA fields when a slicer is matched.
    """
    file_size = os.path.getsize(file_path)
    metadata = {
        'size': file_size,
        'modified': time.ctime(os.path.getmtime(file_path)),
    }
    with open(file_path, 'r') as gcode_file:
        # The first READ_SIZE characters are enough to identify the slicer
        head = gcode_file.read(READ_SIZE)
        slicer = None
        for slicer_cls in SUPPORTED_SLICERS:
            candidate = slicer_cls()
            if re.search(candidate.get_id_pattern(), head) is not None:
                slicer = candidate
                break
        if slicer is None:
            # Unknown slicer: only size/modified are reported
            return metadata
        metadata['slicer'] = slicer.get_name()
        # Build the tail: the last READ_SIZE characters of the file,
        # reusing the already-read head where the regions overlap.
        if file_size > READ_SIZE * 2:
            gcode_file.seek(file_size - READ_SIZE)
            tail = gcode_file.read()
        elif file_size > READ_SIZE:
            overlap = file_size - READ_SIZE
            tail = head[overlap - READ_SIZE:] + gcode_file.read()
        else:
            tail = head
        slicer.set_data(head, tail, log)
        for field in SUPPORTED_DATA:
            parse = getattr(slicer, "parse_" + field)
            value = parse()
            if value is not None:
                metadata[field] = value
    return metadata
def main(path, filename): def main(path, filename):
file_path = os.path.join(path, filename) file_path = os.path.join(path, filename)
slicers = [s() for s in SUPPORTED_SLICERS]
log = [] log = []
metadata = {} metadata = {}
if not os.path.isfile(file_path): if not os.path.isfile(file_path):
log.append("File Not Found: %s" % (file_path)) log.append(f"File Not Found: {file_path}")
else: else:
header_data = footer_data = slicer = None try:
size = os.path.getsize(file_path) metadata = extract_metadata(file_path, log)
metadata['size'] = size except Exception:
metadata['modified'] = time.ctime(os.path.getmtime(file_path)) log.append(traceback.format_exc())
with open(file_path, 'r') as f:
# read the default size, which should be enough to
# identify the slicer
header_data = f.read(READ_SIZE)
for s in slicers:
if re.search(s.get_id_pattern(), header_data) is not None:
slicer = s
break
if slicer is not None:
metadata['slicer'] = slicer.get_name()
if size > READ_SIZE * 2:
f.seek(size - READ_SIZE)
footer_data = f.read()
elif size > READ_SIZE:
remaining = size - READ_SIZE
footer_data = header_data[remaining - READ_SIZE:] + f.read()
else:
footer_data = header_data
slicer.set_data(header_data, footer_data, log)
for key in SUPPORTED_DATA:
func = getattr(slicer, "parse_" + key)
result = func()
if result is not None:
metadata[key] = result
fd = sys.stdout.fileno() fd = sys.stdout.fileno()
data = json.dumps( data = json.dumps(
{'file': filename, 'log': log, 'metadata': metadata}).encode() {'file': filename, 'log': log, 'metadata': metadata}).encode()