🎨 PIO scripts cleanup
parent c847ef02a1
commit bc91b1cdcd

@@ -112,10 +112,10 @@ class ComputeBoundingBox:
     if s:
       m = re.search('viewBox="([0-9-.]+) ([0-9-.]+) ([0-9-.]+) ([0-9-.]+)"', svg)
       if m:
-        self.x_min = float(m.group(1))
-        self.y_min = float(m.group(2))
-        self.x_max = float(m.group(3))
-        self.y_max = float(m.group(4))
+        self.x_min = float(m[1])
+        self.y_min = float(m[2])
+        self.x_max = float(m[3])
+        self.y_max = float(m[4])
         return True
     return False

@@ -205,18 +205,18 @@ class Parser:
         pass # Just eat the spaces

       elif self.eat_token('([LMHVZlmhvz])'):
-        cmd = self.m.group(1)
+        cmd = self.m[1]
         # The following commands take no arguments
         if cmd == "Z" or cmd == "z":
           self.process_svg_path_data_cmd(id, cmd, 0, 0)

       elif self.eat_token('([CScsQqTtAa])'):
-        print("Unsupported path data command:", self.m.group(1), "in path", id, "\n", file=sys.stderr)
+        print("Unsupported path data command:", self.m[1], "in path", id, "\n", file=sys.stderr)
         quit()

       elif self.eat_token('([ ,]*[-0-9e.]+)+'):
         # Process list of coordinates following command
-        coords = re.split('[ ,]+', self.m.group(0))
+        coords = re.split('[ ,]+', self.m[0])
         # The following commands take two arguments
         if cmd == "L" or cmd == "l":
           while coords:
@@ -245,7 +245,7 @@ class Parser:
     id = "<none>"
     m = re.search(' id="(.*)"', path)
     if m:
-      id = m.group(1)
+      id = m[1]

     m = re.search(' transform="(.*)"', path)
     if m:
@@ -254,7 +254,7 @@ class Parser:

     m = re.search(' d="(.*)"', path)
     if m:
-      self.process_svg_path_data(id, m.group(1))
+      self.process_svg_path_data(id, m[1])
       self.op.path_finished(id)
       self.reset()

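Note on the m.group(n) → m[n] changes above: re.Match objects support subscripting (Python 3.6+), so the two spellings are equivalent. A minimal sketch, using a made-up SVG header string:

    import re

    svg = '<svg viewBox="0 0 128.5 64.25">'   # hypothetical input; only the viewBox attribute matters
    m = re.search('viewBox="([0-9-.]+) ([0-9-.]+) ([0-9-.]+) ([0-9-.]+)"', svg)
    if m:
        assert m[1] == m.group(1)             # subscripting is shorthand for group()
        x_min, y_min, x_max, y_max = (float(m[i]) for i in range(1, 5))
        print(x_min, y_min, x_max, y_max)     # 0.0 0.0 128.5 64.25
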
@@ -56,7 +56,7 @@ if pioutil.is_pio_build():
     # Split up passed lines on commas or newlines and iterate
     # Add common options to the features config under construction
     # For lib_deps replace a previous instance of the same library
-    atoms = re.sub(r',\\s*', '\n', flines).strip().split('\n')
+    atoms = re.sub(r',\s*', '\n', flines).strip().split('\n')
     for line in atoms:
       parts = line.split('=')
       name = parts.pop(0)
@@ -64,7 +64,7 @@ if pioutil.is_pio_build():
         feat[name] = '='.join(parts)
         blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
       else:
-        for dep in re.split(r",\s*", line):
+        for dep in re.split(r',\s*', line):
           lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
           lib_re = re.compile('(?!^' + lib_name + '\\b)')
           feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
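For context on the unchanged lib_deps lines above: '(?!^' + lib_name + '\b)' is a negative lookahead, so lib_re.match fails only for entries that start with that library name; those entries are filtered out before the new dependency is appended, which is how a previous pin of the same library gets replaced. A standalone sketch with made-up library pins:

    import re

    lib_deps = ['LiquidCrystal@1.5.0', 'Adafruit NeoPixel@1.8.0']
    dep = 'Adafruit NeoPixel@1.10.4'                 # hypothetical newer pin

    # Strip any version suffix (@1.2.3, @~1.2, @>=1.2, ...) to get the bare name
    lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)

    # match() succeeds (zero-width) unless the entry starts with lib_name
    lib_re = re.compile('(?!^' + lib_name + '\\b)')
    lib_deps = list(filter(lib_re.match, lib_deps)) + [dep]
    print(lib_deps)   # ['LiquidCrystal@1.5.0', 'Adafruit NeoPixel@1.10.4']
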
@@ -91,7 +91,7 @@ if pioutil.is_pio_build():
       except:
         val = None
       if val:
-        opt = mat.group(1).upper()
+        opt = mat[1].upper()
         blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
         add_to_feat_cnf(opt, val)

@@ -4,21 +4,21 @@
 import os,subprocess,re,json,hashlib

 #
-# The dumbest preprocessor in the world
-# Extract macro name from an header file and store them in an array
-# No processing is done here, so they are raw values here and it does not match what actually enabled
-# in the file (since you can have #if SOMETHING_UNDEFINED / #define BOB / #endif)
-# But it's useful to filter the useful macro spit out by the preprocessor from noise from the system
-# headers.
+# Return all macro names in a header as an array, so we can take
+# the intersection with the preprocessor output, giving a decent
+# reflection of all enabled options that (probably) came from the
+# configuration files. We end up with the actual configured state,
+# better than what the config files say. You can then use the
+# resulting config.ini to produce more exact configuration files.
 #
 def extract_defines(filepath):
   f = open(filepath, encoding="utf8").read().split("\n")
   a = []
   for line in f:
-    sline = line.strip(" \t\n\r")
+    sline = line.strip()
     if sline[:7] == "#define":
       # Extract the key here (we don't care about the value)
-      kv = sline[8:].strip().split(' ')
+      kv = sline[8:].strip().split()
       a.append(kv[0])
   return a

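The rewritten comment block spells out the intent: extract_defines() returns every raw #define name in a header, and the useful set comes from intersecting those names with what the preprocessor actually reports as enabled. A rough, self-contained sketch of that intersection step with stand-in data (the real script feeds it from extract_defines() and run_preprocessor(env), whose output format is not shown here):

    # Stand-ins: raw names from the config headers vs. what the preprocessor reports
    all_defines = ['TEMP_SENSOR_0', 'PIDTEMP', 'SHOW_BOOTSCREEN']       # from the headers
    cpp_output  = ['PIDTEMP=1', 'SHOW_BOOTSCREEN=1', '__GNUC__=12']     # hypothetical preprocessor dump

    cpp_defines = dict(entry.split('=', 1) for entry in cpp_output)
    # Intersection: options both declared in the config headers and actually enabled
    resolved = {name: cpp_defines[name] for name in all_defines if name in cpp_defines}
    print(resolved)   # {'PIDTEMP': '1', 'SHOW_BOOTSCREEN': '1'}
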
@@ -51,7 +51,7 @@ def compute_build_signature(env):
   # Definitions from these files will be kept
   files_to_keep = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]

-  build_dir=os.path.join(env['PROJECT_BUILD_DIR'], env['PIOENV'])
+  build_dir = os.path.join(env['PROJECT_BUILD_DIR'], env['PIOENV'])

   # Check if we can skip processing
   hashes = ''
@@ -77,14 +77,14 @@ def compute_build_signature(env):
   complete_cfg = run_preprocessor(env)

   # Dumb #define extraction from the configuration files
-  real_defines = {}
+  conf_defines = {}
   all_defines = []
   for header in files_to_keep:
     defines = extract_defines(header)
     # To filter only the define we want
-    all_defines = all_defines + defines
+    all_defines += defines
     # To remember from which file it cames from
-    real_defines[header.split('/')[-1]] = defines
+    conf_defines[header.split('/')[-1]] = defines

   r = re.compile(r"\(+(\s*-*\s*_.*)\)+")

@@ -116,16 +116,16 @@ def compute_build_signature(env):
   resolved_defines = {}
   for key in defines:
     # Remove all boards now
-    if key[0:6] == "BOARD_" and key != "BOARD_INFO_NAME":
+    if key.startswith("BOARD_") and key != "BOARD_INFO_NAME":
       continue
     # Remove all keys ending by "_NAME" as it does not make a difference to the configuration
-    if key[-5:] == "_NAME" and key != "CUSTOM_MACHINE_NAME":
+    if key.endswith("_NAME") and key != "CUSTOM_MACHINE_NAME":
       continue
-    # Remove all keys ending by "_T_DECLARED" as it's a copy of not important system stuff
-    if key[-11:] == "_T_DECLARED":
+    # Remove all keys ending by "_T_DECLARED" as it's a copy of extraneous system stuff
+    if key.endswith("_T_DECLARED"):
       continue
     # Remove keys that are not in the #define list in the Configuration list
-    if not (key in all_defines) and key != "DETAILED_BUILD_VERSION" and key != "STRING_DISTRIBUTION_DATE":
+    if key not in all_defines + [ 'DETAILED_BUILD_VERSION', 'STRING_DISTRIBUTION_DATE' ]:
       continue

     # Don't be that smart guy here
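The slice-to-startswith/endswith rewrites above are behavior-preserving (key[0:6] == "BOARD_" and key.startswith("BOARD_") agree for any string), and the last change folds two != checks into one membership test. Expressed as a single predicate, purely to illustrate the combined filter (not how the script is actually organized):

    def keep_key(key, all_defines):
        """True if a resolved #define should survive the filtering shown above."""
        if key.startswith("BOARD_") and key != "BOARD_INFO_NAME":
            return False          # board aliases
        if key.endswith("_NAME") and key != "CUSTOM_MACHINE_NAME":
            return False          # display-only *_NAME keys
        if key.endswith("_T_DECLARED"):
            return False          # system typedef markers
        return key in all_defines + ['DETAILED_BUILD_VERSION', 'STRING_DISTRIBUTION_DATE']
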
@@ -136,13 +136,13 @@ def compute_build_signature(env):
   data = {}
   data['__INITIAL_HASH'] = hashes
   # First create a key for each header here
-  for header in real_defines:
+  for header in conf_defines:
     data[header] = {}

   # Then populate the object where each key is going to (that's a O(N^2) algorithm here...)
   for key in resolved_defines:
-    for header in real_defines:
-      if key in real_defines[header]:
+    for header in conf_defines:
+      if key in conf_defines[header]:
         data[header][key] = resolved_defines[key]

   # Append the source code version and date
@@ -155,6 +155,9 @@ def compute_build_signature(env):
   except:
     pass

+  #
+  # Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_DUMP > 0
+  #
   with open(marlin_json, 'w') as outfile:
     json.dump(data, outfile, separators=(',', ':'))

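On the json.dump call kept above: separators=(',', ':') drops the default spaces after ',' and ':', keeping the dumped configuration as small as possible before it is zipped and embedded (next hunk). A tiny comparison with made-up data:

    import json
    data = {'Configuration.h': {'X_BED_SIZE': '200'}}
    print(json.dumps(data))                          # {"Configuration.h": {"X_BED_SIZE": "200"}}
    print(json.dumps(data, separators=(',', ':')))   # {"Configuration.h":{"X_BED_SIZE":"200"}}
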
@@ -163,10 +166,12 @@ def compute_build_signature(env):

   # Generate a C source file for storing this array
   with open('Marlin/src/mczip.h','wb') as result_file:
-    result_file.write(b'#ifndef NO_CONFIGURATION_EMBEDDING_WARNING\n')
-    result_file.write(b' #warning "Generated file \'mc.zip\' is embedded (Define NO_CONFIGURATION_EMBEDDING_WARNING to suppress this warning.)"\n')
-    result_file.write(b'#endif\n')
-    result_file.write(b'const unsigned char mc_zip[] PROGMEM = {\n ')
+    result_file.write(
+        b'#ifndef NO_CONFIGURATION_EMBEDDING_WARNING\n'
+      + b' #warning "Generated file \'mc.zip\' is embedded (Define NO_CONFIGURATION_EMBEDDING_WARNING to suppress this warning.)"\n'
+      + b'#endif\n'
+      + b'const unsigned char mc_zip[] PROGMEM = {\n '
+    )
     count = 0
     for b in open(os.path.join(build_dir, 'mc.zip'), 'rb').read():
       result_file.write(b' 0x%02X,' % b)
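The final hunk only merges the header boilerplate into a single write(); the loop after it still emits mc.zip byte by byte as a PROGMEM array. The same bin-to-C-array technique, condensed into a standalone sketch (paths and symbol name are illustrative):

    def bin_to_c_array(src_path, dst_path, symbol):
        """Write a C header embedding the file at src_path as a PROGMEM byte array."""
        data = open(src_path, 'rb').read()
        with open(dst_path, 'w') as out:
            out.write('const unsigned char %s[] PROGMEM = {\n ' % symbol)
            for i, b in enumerate(data):
                out.write(' 0x%02X,' % b)
                if i % 16 == 15:
                    out.write('\n ')     # wrap every 16 bytes to keep lines short
            out.write('\n};\n')

    # e.g. bin_to_c_array('mc.zip', 'mczip.h', 'mc_zip')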