🔨 Fix IntelliSense / PIO conflicts (#23058)
Co-authored-by: Scott Lahteine <github@thinkyhead.com>
This commit is contained in:
parent f97635de36
commit fc2020c6ec
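Each script in this commit gains the same guard: "import pioutil" followed by "if pioutil.is_pio_build():", with the script body indented under it, so the script does nothing when PlatformIO is only generating IDE/IntelliSense project data instead of running a real build. The pioutil module itself is not part of this diff; the sketch below is a hedged guess at what such a check could look like (the COMMAND_LINE_TARGETS test is an assumption, not the verified Marlin implementation):

# pioutil.py -- hypothetical sketch, not taken from this commit
def is_pio_build():
    # Assume IDE metadata dumps show up as 'idedata' targets; skip extra-script work for those.
    from SCons.Script import COMMAND_LINE_TARGETS
    return "idedata" not in COMMAND_LINE_TARGETS and "_idedata.json" not in COMMAND_LINE_TARGETS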
@@ -4,15 +4,16 @@
# Windows: bossac.exe
# Other: leave unchanged
#
import pioutil
if pioutil.is_pio_build():

    import platform
    current_OS = platform.system()

    if current_OS == 'Windows':

        Import("env")

        # Use bossac.exe on Windows
        env.Replace(
            UPLOADCMD="bossac --info --unlock --write --verify --reset --erase -U false --boot $SOURCE"
        )
@@ -1,123 +1,127 @@
#
# upload_extra_script.py
# set the output_port
#  if target_filename is found then that drive is used
#  else if target_drive is found then that drive is used
#
from __future__ import print_function

import pioutil
if pioutil.is_pio_build():

    target_filename = "FIRMWARE.CUR"
    target_drive = "REARM"

    import os,getpass,platform

    current_OS = platform.system()
    Import("env")

    def print_error(e):
        print('\nUnable to find destination disk (%s)\n' \
              'Please select it in platformio.ini using the upload_port keyword ' \
              '(https://docs.platformio.org/en/latest/projectconf/section_env_upload.html) ' \
              'or copy the firmware (.pio/build/%s/firmware.bin) manually to the appropriate disk\n' \
              %(e, env.get('PIOENV')))

    def before_upload(source, target, env):
        try:
            #
            # Find a disk for upload
            #
            upload_disk = 'Disk not found'
            target_file_found = False
            target_drive_found = False
            if current_OS == 'Windows':
                #
                # platformio.ini will accept this for a Windows upload port designation: 'upload_port = L:'
                #   Windows - doesn't care about the disk's name, only cares about the drive letter
                import subprocess,string
                from ctypes import windll

                # getting list of drives
                # https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python
                drives = []
                bitmask = windll.kernel32.GetLogicalDrives()
                for letter in string.ascii_uppercase:
                    if bitmask & 1:
                        drives.append(letter)
                    bitmask >>= 1

                for drive in drives:
                    final_drive_name = drive + ':\\'
                    # print ('disc check: {}'.format(final_drive_name))
                    try:
                        volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT))
                    except Exception as e:
                        print ('error:{}'.format(e))
                        continue
                    else:
                        if target_drive in volume_info and not target_file_found: # set upload if not found target file yet
                            target_drive_found = True
                            upload_disk = final_drive_name
                        if target_filename in volume_info:
                            if not target_file_found:
                                upload_disk = final_drive_name
                            target_file_found = True

            elif current_OS == 'Linux':
                #
                # platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive'
                #
                drives = os.listdir(os.path.join(os.sep, 'media', getpass.getuser()))
                if target_drive in drives: # If target drive is found, use it.
                    target_drive_found = True
                    upload_disk = os.path.join(os.sep, 'media', getpass.getuser(), target_drive) + os.sep
                else:
                    for drive in drives:
                        try:
                            files = os.listdir(os.path.join(os.sep, 'media', getpass.getuser(), drive))
                        except:
                            continue
                        else:
                            if target_filename in files:
                                upload_disk = os.path.join(os.sep, 'media', getpass.getuser(), drive) + os.sep
                                target_file_found = True
                                break
                #
                # set upload_port to drive if found
                #

                if target_file_found or target_drive_found:
                    env.Replace(
                        UPLOAD_FLAGS="-P$UPLOAD_PORT"
                    )

            elif current_OS == 'Darwin': # MAC
                #
                # platformio.ini will accept this for a OSX upload port designation: 'upload_port = /media/media_name/drive'
                #
                drives = os.listdir('/Volumes') # human readable names
                if target_drive in drives and not target_file_found: # set upload if not found target file yet
                    target_drive_found = True
                    upload_disk = '/Volumes/' + target_drive + '/'
                for drive in drives:
                    try:
                        filenames = os.listdir('/Volumes/' + drive + '/') # will get an error if the drive is protected
                    except:
                        continue
                    else:
                        if target_filename in filenames:
                            if not target_file_found:
                                upload_disk = '/Volumes/' + drive + '/'
                            target_file_found = True

            #
            # Set upload_port to drive if found
            #
            if target_file_found or target_drive_found:
                env.Replace(UPLOAD_PORT=upload_disk)
                print('\nUpload disk: ', upload_disk, '\n')
            else:
                print_error('Autodetect Error')

        except Exception as e:
            print_error(str(e))

    env.AddPreAction("upload", before_upload)
@@ -30,25 +30,27 @@ if __name__ == "__main__":

# extra script for linker options
else:
    import pioutil
    if pioutil.is_pio_build():
        from SCons.Script import DefaultEnvironment
        env = DefaultEnvironment()
        env.Append(
            ARFLAGS=["rcs"],

            ASFLAGS=["-x", "assembler-with-cpp"],

            CXXFLAGS=[
                "-fabi-version=0",
                "-fno-use-cxa-atexit",
                "-fno-threadsafe-statics"
            ],
            LINKFLAGS=[
                "-Os",
                "-mcpu=cortex-m3",
                "-ffreestanding",
                "-mthumb",
                "--specs=nano.specs",
                "--specs=nosys.specs",
                "-u_printf_float",
            ],
        )
@@ -2,18 +2,20 @@
# SAMD51_grandcentral_m4.py
# Customizations for env:SAMD51_grandcentral_m4
#
import pioutil
if pioutil.is_pio_build():
    from os.path import join, isfile
    import shutil
    from pprint import pprint

    Import("env")

    mf = env["MARLIN_FEATURES"]
    rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0"
    txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0"

    serialBuf = str(max(int(rxBuf), int(txBuf), 350))

    build_flags = env.get('BUILD_FLAGS')
    build_flags.append("-DSERIAL_BUFFER_SIZE=" + serialBuf)
    env.Replace(BUILD_FLAGS=build_flags)
@@ -1,40 +1,43 @@
#
# STM32F103RC_MEEB_3DP.py
#
import pioutil
if pioutil.is_pio_build():

    try:
        import configparser
    except ImportError:
        import ConfigParser as configparser

    import os
    Import("env", "projenv")

    config = configparser.ConfigParser()
    config.read("platformio.ini")

    #
    # Upload actions
    #
    def before_upload(source, target, env):
        env.Execute("pwd")

    def after_upload(source, target, env):
        env.Execute("pwd")

    env.AddPreAction("upload", before_upload)
    env.AddPostAction("upload", after_upload)

    flash_size = 0
    vect_tab_addr = 0

    for define in env['CPPDEFINES']:
        if define[0] == "VECT_TAB_ADDR":
            vect_tab_addr = define[1]
        if define[0] == "STM32_FLASH_SIZE":
            flash_size = define[1]

    print('Use the {0:s} address as the marlin app entry point.'.format(vect_tab_addr))
    print('Use the {0:d}KB flash version of stm32f103rct6 chip.'.format(flash_size))

    import marlin
    marlin.custom_ld_script("STM32F103RC_MEEB_3DP.ld")
@@ -1,25 +1,28 @@
#
# STM32F103RC_fysetc.py
#
import pioutil
if pioutil.is_pio_build():
    import os
    from os.path import join
    from os.path import expandvars
    Import("env")

    # Custom HEX from ELF
    env.AddPostAction(
        join("$BUILD_DIR", "${PROGNAME}.elf"),
        env.VerboseAction(" ".join([
            "$OBJCOPY", "-O ihex", "$TARGET",
            "\"" + join("$BUILD_DIR", "${PROGNAME}.hex") + "\"", # Note: $BUILD_DIR is a full path
        ]), "Building $TARGET"))

    # In-line command with arguments
    UPLOAD_TOOL="stm32flash"
    platform = env.PioPlatform()
    if platform.get_package_dir("tool-stm32duino") != None:
        UPLOAD_TOOL=expandvars("\"" + join(platform.get_package_dir("tool-stm32duino"),"stm32flash","stm32flash") + "\"")

    env.Replace(
        UPLOADER=UPLOAD_TOOL,
        UPLOADCMD=expandvars(UPLOAD_TOOL + " -v -i rts,-dtr,dtr -R -b 115200 -g 0x8000000 -w \"" + join("$BUILD_DIR","${PROGNAME}.hex")+"\"" + " $UPLOAD_PORT")
    )
@@ -1,30 +1,32 @@
#
# STM32F1_create_variant.py
#
import pioutil
if pioutil.is_pio_build():
    import os,shutil,marlin
    from SCons.Script import DefaultEnvironment
    from platformio import util

    env = DefaultEnvironment()
    platform = env.PioPlatform()
    board = env.BoardConfig()

    FRAMEWORK_DIR = platform.get_package_dir("framework-arduinoststm32-maple")
    assert os.path.isdir(FRAMEWORK_DIR)

    source_root = os.path.join("buildroot", "share", "PlatformIO", "variants")
    assert os.path.isdir(source_root)

    variant = board.get("build.variant")
    variant_dir = os.path.join(FRAMEWORK_DIR, "STM32F1", "variants", variant)

    source_dir = os.path.join(source_root, variant)
    assert os.path.isdir(source_dir)

    if os.path.isdir(variant_dir):
        shutil.rmtree(variant_dir)

    if not os.path.isdir(variant_dir):
        os.mkdir(variant_dir)

    marlin.copytree(source_dir, variant_dir)
@@ -2,4 +2,5 @@
# add_nanolib.py
#
Import("env")

env.Append(LINKFLAGS=["--specs=nano.specs"])
@@ -1,116 +1,117 @@
#
# chitu_crypt.py
# Customizations for Chitu boards
#
import pioutil
if pioutil.is_pio_build():
    import os,random,struct,uuid,marlin
    # Relocate firmware from 0x08000000 to 0x08008800
    marlin.relocate_firmware("0x08008800")

    def calculate_crc(contents, seed):
        accumulating_xor_value = seed;

        for i in range(0, len(contents), 4):
            value = struct.unpack('<I', contents[ i : i + 4])[0]
            accumulating_xor_value = accumulating_xor_value ^ value
        return accumulating_xor_value

    def xor_block(r0, r1, block_number, block_size, file_key):
        # This is the loop counter
        loop_counter = 0x0

        # This is the key length
        key_length = 0x18

        # This is an initial seed
        xor_seed = 0x4BAD

        # This is the block counter
        block_number = xor_seed * block_number

        #load the xor key from the file
        r7 = file_key

        for loop_counter in range(0, block_size):
            # meant to make sure different bits of the key are used.
            xor_seed = int(loop_counter / key_length)

            # IP is a scratch register / R12
            ip = loop_counter - (key_length * xor_seed)

            # xor_seed = (loop_counter * loop_counter) + block_number
            xor_seed = (loop_counter * loop_counter) + block_number

            # shift the xor_seed left by the bits in IP.
            xor_seed = xor_seed >> ip

            # load a byte into IP
            ip = r0[loop_counter]

            # XOR the seed with r7
            xor_seed = xor_seed ^ r7

            # and then with IP
            xor_seed = xor_seed ^ ip

            #Now store the byte back
            r1[loop_counter] = xor_seed & 0xFF

            #increment the loop_counter
            loop_counter = loop_counter + 1

    def encrypt_file(input, output_file, file_length):
        input_file = bytearray(input.read())
        block_size = 0x800
        key_length = 0x18

        uid_value = uuid.uuid4()
        file_key = int(uid_value.hex[0:8], 16)

        xor_crc = 0xEF3D4323;

        # the input file is exepcted to be in chunks of 0x800
        # so round the size
        while len(input_file) % block_size != 0:
            input_file.extend(b'0x0')

        # write the file header
        output_file.write(struct.pack(">I", 0x443D2D3F))
        # encrypt the contents using a known file header key

        # write the file_key
        output_file.write(struct.pack("<I", file_key))

        #TODO - how to enforce that the firmware aligns to block boundaries?
        block_count = int(len(input_file) / block_size)
        print ("Block Count is ", block_count)
        for block_number in range(0, block_count):
            block_offset = (block_number * block_size)
            block_end = block_offset + block_size
            block_array = bytearray(input_file[block_offset: block_end])
            xor_block(block_array, block_array, block_number, block_size, file_key)
            for n in range (0, block_size):
                input_file[block_offset + n] = block_array[n]

            # update the expected CRC value.
            xor_crc = calculate_crc(block_array, xor_crc)

        # write CRC
        output_file.write(struct.pack("<I", xor_crc))

        # finally, append the encrypted results.
        output_file.write(input_file)
        return

    # Encrypt ${PROGNAME}.bin and save it as 'update.cbd'
    def encrypt(source, target, env):
        firmware = open(target[0].path, "rb")
        update = open(target[0].dir.path + '/update.cbd', "wb")
        length = os.path.getsize(target[0].path)

        encrypt_file(firmware, update, length)

        firmware.close()
        update.close()

    marlin.add_post_action(encrypt);
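The encrypt_file()/xor_block() routines above are plain Python, so the same packaging can be exercised by hand; a hedged sketch, assuming the functions have been made importable outside the is_pio_build() block and using hypothetical file names:

import os
with open("firmware.bin", "rb") as firmware, open("update.cbd", "wb") as update:
    # Writes the 0x443D2D3F header, the random file_key, the running CRC, then the XOR-obfuscated blocks
    encrypt_file(firmware, update, os.path.getsize("firmware.bin"))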
@@ -2,36 +2,38 @@
# common-cxxflags.py
# Convenience script to apply customizations to CPP flags
#
import pioutil
if pioutil.is_pio_build():
    Import("env")

    cxxflags = [
        #"-Wno-incompatible-pointer-types",
        #"-Wno-unused-const-variable",
        #"-Wno-maybe-uninitialized",
        #"-Wno-sign-compare"
    ]
    if "teensy" not in env['PIOENV']:
        cxxflags += ["-Wno-register"]
    env.Append(CXXFLAGS=cxxflags)

    #
    # Add CPU frequency as a compile time constant instead of a runtime variable
    #
    def add_cpu_freq():
        if 'BOARD_F_CPU' in env:
            env['BUILD_FLAGS'].append('-DBOARD_F_CPU=' + env['BOARD_F_CPU'])

    # Useful for JTAG debugging
    #
    # It will separate release and debug build folders.
    # It useful to keep two live versions: a debug version for debugging and another for
    # release, for flashing when upload is not done automatically by jlink/stlink.
    # Without this, PIO needs to recompile everything twice for any small change.
    if env.GetBuildType() == "debug" and env.get('UPLOAD_PROTOCOL') not in ['jlink', 'stlink']:
        env['BUILD_DIR'] = '$PROJECT_BUILD_DIR/$PIOENV/debug'

    # On some platform, F_CPU is a runtime variable. Since it's used to convert from ns
    # to CPU cycles, this adds overhead preventing small delay (in the order of less than
    # 30 cycles) to be generated correctly. By using a compile time constant instead
    # the compiler will perform the computation and this overhead will be avoided
    add_cpu_freq()
@@ -1,16 +1,16 @@
#
# post:common-dependencies-post.py
# Convenience script to add build flags for Marlin Enabled Features
#
import pioutil
if pioutil.is_pio_build():
    Import("env", "projenv")

    def apply_board_build_flags():
        if not 'BOARD_CUSTOM_BUILD_FLAGS' in env['MARLIN_FEATURES']:
            return
        projenv.Append(CCFLAGS=env['MARLIN_FEATURES']['BOARD_CUSTOM_BUILD_FLAGS'].split())

    # We need to add the board build flags in a post script
    # so the platform build script doesn't overwrite the custom CCFLAGS
    apply_board_build_flags()
@@ -2,320 +2,317 @@
# common-dependencies.py
# Convenience script to check dependencies and add libs and sources for Marlin Enabled Features
#
import pioutil
if pioutil.is_pio_build():

    import subprocess,os,re
    Import("env")

    from platformio.package.meta import PackageSpec
    from platformio.project.config import ProjectConfig

    verbose = 0
    FEATURE_CONFIG = {}

    def validate_pio():
        PIO_VERSION_MIN = (5, 0, 3)
        try:
            from platformio import VERSION as PIO_VERSION
            weights = (1000, 100, 1)
            version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)])
            version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)])
            if version_cur < version_min:
                print()
                print("**************************************************")
                print("******      An update to PlatformIO is      ******")
                print("******  required to build Marlin Firmware.  ******")
                print("******                                      ******")
                print("******      Minimum version: ", PIO_VERSION_MIN, "    ******")
                print("******      Current Version: ", PIO_VERSION, "    ******")
                print("******                                      ******")
                print("******   Update PlatformIO and try again.   ******")
                print("**************************************************")
                print()
                exit(1)
        except SystemExit:
            exit(1)
        except:
            print("Can't detect PlatformIO Version")

    def blab(str,level=1):
        if verbose >= level:
            print("[deps] %s" % str)

    def add_to_feat_cnf(feature, flines):

        try:
            feat = FEATURE_CONFIG[feature]
        except:
            FEATURE_CONFIG[feature] = {}

        # Get a reference to the FEATURE_CONFIG under construction
        feat = FEATURE_CONFIG[feature]

        # Split up passed lines on commas or newlines and iterate
        # Add common options to the features config under construction
        # For lib_deps replace a previous instance of the same library
        atoms = re.sub(r',\\s*', '\n', flines).strip().split('\n')
        for line in atoms:
            parts = line.split('=')
            name = parts.pop(0)
            if name in ['build_flags', 'extra_scripts', 'src_filter', 'lib_ignore']:
                feat[name] = '='.join(parts)
                blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
            else:
                for dep in re.split(r",\s*", line):
                    lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
                    lib_re = re.compile('(?!^' + lib_name + '\\b)')
                    feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
                    blab("[%s] lib_deps = %s" % (feature, dep), 3)

    def load_config():
        blab("========== Gather [features] entries...")
        items = ProjectConfig().items('features')
        for key in items:
            feature = key[0].upper()
            if not feature in FEATURE_CONFIG:
                FEATURE_CONFIG[feature] = { 'lib_deps': [] }
            add_to_feat_cnf(feature, key[1])

        # Add options matching custom_marlin.MY_OPTION to the pile
        blab("========== Gather custom_marlin entries...")
        all_opts = env.GetProjectOptions()
        for n in all_opts:
            key = n[0]
            mat = re.match(r'custom_marlin\.(.+)', key)
            if mat:
                try:
                    val = env.GetProjectOption(key)
                except:
                    val = None
                if val:
                    opt = mat.group(1).upper()
                    blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
                    add_to_feat_cnf(opt, val)

    def get_all_known_libs():
        known_libs = []
        for feature in FEATURE_CONFIG:
            feat = FEATURE_CONFIG[feature]
            if not 'lib_deps' in feat:
                continue
            for dep in feat['lib_deps']:
                known_libs.append(PackageSpec(dep).name)
        return known_libs

    def get_all_env_libs():
        env_libs = []
        lib_deps = env.GetProjectOption('lib_deps')
        for dep in lib_deps:
            env_libs.append(PackageSpec(dep).name)
        return env_libs

    def set_env_field(field, value):
        proj = env.GetProjectConfig()
        proj.set("env:" + env['PIOENV'], field, value)

    # All unused libs should be ignored so that if a library
    # exists in .pio/lib_deps it will not break compilation.
    def force_ignore_unused_libs():
        env_libs = get_all_env_libs()
        known_libs = get_all_known_libs()
        diff = (list(set(known_libs) - set(env_libs)))
        lib_ignore = env.GetProjectOption('lib_ignore') + diff
        blab("Ignore libraries: %s" % lib_ignore)
        set_env_field('lib_ignore', lib_ignore)

    def apply_features_config():
        load_config()
        blab("========== Apply enabled features...")
        for feature in FEATURE_CONFIG:
            if not env.MarlinFeatureIsEnabled(feature):
                continue

            feat = FEATURE_CONFIG[feature]

            if 'lib_deps' in feat and len(feat['lib_deps']):
                blab("========== Adding lib_deps for %s... " % feature, 2)

                # feat to add
                deps_to_add = {}
                for dep in feat['lib_deps']:
                    deps_to_add[PackageSpec(dep).name] = dep
                    blab("==================== %s... " % dep, 2)

                # Does the env already have the dependency?
                deps = env.GetProjectOption('lib_deps')
                for dep in deps:
                    name = PackageSpec(dep).name
                    if name in deps_to_add:
                        del deps_to_add[name]

                # Are there any libraries that should be ignored?
                lib_ignore = env.GetProjectOption('lib_ignore')
                for dep in deps:
                    name = PackageSpec(dep).name
                    if name in deps_to_add:
                        del deps_to_add[name]

                # Is there anything left?
                if len(deps_to_add) > 0:
                    # Only add the missing dependencies
                    set_env_field('lib_deps', deps + list(deps_to_add.values()))

            if 'build_flags' in feat:
                f = feat['build_flags']
                blab("========== Adding build_flags for %s: %s" % (feature, f), 2)
                new_flags = env.GetProjectOption('build_flags') + [ f ]
                env.Replace(BUILD_FLAGS=new_flags)

            if 'extra_scripts' in feat:
                blab("Running extra_scripts for %s... " % feature, 2)
                env.SConscript(feat['extra_scripts'], exports="env")

            if 'src_filter' in feat:
                blab("========== Adding src_filter for %s... " % feature, 2)
                src_filter = ' '.join(env.GetProjectOption('src_filter'))
                # first we need to remove the references to the same folder
                my_srcs = re.findall(r'[+-](<.*?>)', feat['src_filter'])
                cur_srcs = re.findall(r'[+-](<.*?>)', src_filter)
                for d in my_srcs:
                    if d in cur_srcs:
                        src_filter = re.sub(r'[+-]' + d, '', src_filter)

                src_filter = feat['src_filter'] + ' ' + src_filter
                set_env_field('src_filter', [src_filter])
                env.Replace(SRC_FILTER=src_filter)

            if 'lib_ignore' in feat:
                blab("========== Adding lib_ignore for %s... " % feature, 2)
                lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
                set_env_field('lib_ignore', lib_ignore)

    #
    # Find a compiler, considering the OS
    #
    ENV_BUILD_PATH = os.path.join(env.Dictionary('PROJECT_BUILD_DIR'), env['PIOENV'])
    GCC_PATH_CACHE = os.path.join(ENV_BUILD_PATH, ".gcc_path")
    def search_compiler():
        try:
            filepath = env.GetProjectOption('custom_gcc')
            blab("Getting compiler from env")
            return filepath
        except:
            pass

        if os.path.exists(GCC_PATH_CACHE):
            with open(GCC_PATH_CACHE, 'r') as f:
                return f.read()

        # Find the current platform compiler by searching the $PATH
        # which will be in a platformio toolchain bin folder
        path_regex = re.escape(env['PROJECT_PACKAGES_DIR'])

        # See if the environment provides a default compiler
        try:
            gcc = env.GetProjectOption('custom_deps_gcc')
        except:
            gcc = "g++"

        if env['PLATFORM'] == 'win32':
            path_separator = ';'
            path_regex += r'.*\\bin'
            gcc += ".exe"
        else:
            path_separator = ':'
            path_regex += r'/.+/bin'

        # Search for the compiler
        for pathdir in env['ENV']['PATH'].split(path_separator):
            if not re.search(path_regex, pathdir, re.IGNORECASE):
                continue
            for filepath in os.listdir(pathdir):
                if not filepath.endswith(gcc):
                    continue
                # Use entire path to not rely on env PATH
                filepath = os.path.sep.join([pathdir, filepath])
                # Cache the g++ path to no search always
                if os.path.exists(ENV_BUILD_PATH):
                    with open(GCC_PATH_CACHE, 'w+') as f:
                        f.write(filepath)

                return filepath

        filepath = env.get('CXX')
        if filepath == 'CC':
            filepath = gcc
        blab("Couldn't find a compiler! Fallback to %s" % filepath)
        return filepath

    #
    # Use the compiler to get a list of all enabled features
    #
    def load_marlin_features():
        if 'MARLIN_FEATURES' in env:
            return

        # Process defines
        build_flags = env.get('BUILD_FLAGS')
        build_flags = env.ParseFlagsExtended(build_flags)

        cxx = search_compiler()
        cmd = ['"' + cxx + '"']

        # Build flags from board.json
        #if 'BOARD' in env:
        #   cmd += [env.BoardConfig().get("build.extra_flags")]
        for s in build_flags['CPPDEFINES']:
            if isinstance(s, tuple):
                cmd += ['-D' + s[0] + '=' + str(s[1])]
            else:
                cmd += ['-D' + s]

        cmd += ['-D__MARLIN_DEPS__ -w -dM -E -x c++ buildroot/share/PlatformIO/scripts/common-dependencies.h']
        cmd = ' '.join(cmd)
        blab(cmd, 4)
        define_list = subprocess.check_output(cmd, shell=True).splitlines()
        marlin_features = {}
        for define in define_list:
            feature = define[8:].strip().decode().split(' ')
            feature, definition = feature[0], ' '.join(feature[1:])
            marlin_features[feature] = definition
        env['MARLIN_FEATURES'] = marlin_features

    #
    # Return True if a matching feature is enabled
    #
    def MarlinFeatureIsEnabled(env, feature):
        load_marlin_features()
        r = re.compile('^' + feature + '$')
        found = list(filter(r.match, env['MARLIN_FEATURES']))

        # Defines could still be 'false' or '0', so check
        some_on = False
        if len(found):
            for f in found:
                val = env['MARLIN_FEATURES'][f]
                if val in [ '', '1', 'true' ]:
                    some_on = True
                elif val in env['MARLIN_FEATURES']:
                    some_on = env.MarlinFeatureIsEnabled(val)

        return some_on

    validate_pio()

    try:
        verbose = int(env.GetProjectOption('custom_verbose'))
    except:
        pass

    # Add a method for other PIO scripts to query enabled features
    env.AddMethod(MarlinFeatureIsEnabled)

    # Add dependencies for enabled Marlin features
    apply_features_config()
    force_ignore_unused_libs()
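Because the script attaches MarlinFeatureIsEnabled to the environment with env.AddMethod, any extra script that runs after it can query enabled features; a minimal sketch, assuming such a script is listed later in extra_scripts (the feature and define names are only illustrative, not taken from this commit):

import pioutil
if pioutil.is_pio_build():
    Import("env")
    # env.MarlinFeatureIsEnabled() is available once common-dependencies.py has run
    if env.MarlinFeatureIsEnabled("SDSUPPORT"):
        env.Append(CPPDEFINES=["MY_SD_EXTRA_TWEAK"])  # hypothetical define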
@@ -1,16 +1,18 @@
#
# custom_board.py
#
# - For build.address replace VECT_TAB_ADDR to relocate the firmware
# - For build.ldscript use one of the linker scripts in buildroot/share/PlatformIO/ldscripts
#
import pioutil
if pioutil.is_pio_build():
    import marlin
    board = marlin.env.BoardConfig()

    address = board.get("build.address", "")
    if address:
        marlin.relocate_firmware(address)

    ldscript = board.get("build.ldscript", "")
    if ldscript:
        marlin.custom_ld_script(ldscript)
@@ -1,51 +1,49 @@
 #
-# buildroot/share/PlatformIO/scripts/download_mks_assets.py
+# download_mks_assets.py
 # Added by HAS_TFT_LVGL_UI to download assets from Makerbase repo
 #
-Import("env")
-import os,requests,zipfile,tempfile,shutil,pioutil
-
-# Detect that 'vscode init' is running
-if pioutil.is_vscode_init():
-	env.Exit(0)
+import pioutil
+if pioutil.is_pio_build():
+	Import("env")
+	import os,requests,zipfile,tempfile,shutil

 url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip"
 deps_path = env.Dictionary("PROJECT_LIBDEPS_DIR")
 zip_path = os.path.join(deps_path, "mks-assets.zip")
 assets_path = os.path.join(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")

 def download_mks_assets():
 	print("Downloading MKS Assets")
 	r = requests.get(url, stream=True)
 	# the user may have a very clean workspace,
 	# so create the PROJECT_LIBDEPS_DIR directory if not exits
 	if os.path.exists(deps_path) == False:
 		os.mkdir(deps_path)
 	with open(zip_path, 'wb') as fd:
 		for chunk in r.iter_content(chunk_size=128):
 			fd.write(chunk)

 def copy_mks_assets():
 	print("Copying MKS Assets")
 	output_path = tempfile.mkdtemp()
 	zip_obj = zipfile.ZipFile(zip_path, 'r')
 	zip_obj.extractall(output_path)
 	zip_obj.close()
 	if os.path.exists(assets_path) == True and os.path.isdir(assets_path) == False:
 		os.unlink(assets_path)
 	if os.path.exists(assets_path) == False:
 		os.mkdir(assets_path)
 	base_path = ''
 	for filename in os.listdir(output_path):
 		base_path = filename
 	for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_font')):
 		shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_font', filename), assets_path)
 	for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_pic')):
 		shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_pic', filename), assets_path)
 	shutil.rmtree(output_path, ignore_errors=True)

 if os.path.exists(zip_path) == False:
 	download_mks_assets()

 if os.path.exists(assets_path) == False:
 	copy_mks_assets()
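The download/copy pair above is a plain requests-plus-zipfile workflow. A stripped-down, standalone sketch of the same pattern; the URL handling and directories here are placeholders, not the script's real targets:

# Standalone sketch of the download-then-extract pattern used above.
import os, tempfile, zipfile
import requests

def fetch_zip(url, zip_path):
	# Stream the archive to disk in small chunks, as download_mks_assets() does.
	r = requests.get(url, stream=True)
	os.makedirs(os.path.dirname(zip_path) or ".", exist_ok=True)
	with open(zip_path, 'wb') as fd:
		for chunk in r.iter_content(chunk_size=128):
			fd.write(chunk)

def extract_zip(zip_path):
	# Unpack into a scratch directory, mirroring copy_mks_assets(), and let the
	# caller pick out the font/picture files it actually wants.
	tmp = tempfile.mkdtemp()
	with zipfile.ZipFile(zip_path, 'r') as z:
		z.extractall(tmp)
	return tmp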
@@ -1,32 +1,35 @@
 #
 # fix_framework_weakness.py
 #
-from os.path import join, isfile
-import shutil
-from pprint import pprint
-
-Import("env")
+import pioutil
+if pioutil.is_pio_build():
+
+	import shutil
+	from os.path import join, isfile
+	from pprint import pprint
+
+	Import("env")

 if env.MarlinFeatureIsEnabled("POSTMORTEM_DEBUGGING"):
 	FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-arduinoststm32-maple")
 	patchflag_path = join(FRAMEWORK_DIR, ".exc-patching-done")

 	# patch file only if we didn't do it before
 	if not isfile(patchflag_path):
 		print("Patching libmaple exception handlers")
 		original_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S")
 		backup_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S.bak")
 		src_file = join("buildroot", "share", "PlatformIO", "scripts", "exc.S")

 		assert isfile(original_file) and isfile(src_file)
 		shutil.copyfile(original_file, backup_file)
 		shutil.copyfile(src_file, original_file);

 		def _touch(path):
 			with open(path, "w") as fp:
 				fp.write("")

 		env.Execute(lambda *args, **kwargs: _touch(patchflag_path))
 		print("Done patching exception handler")

 	print("Libmaple modified and ready for post mortem debugging")
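The .exc-patching-done flag file is what keeps the libmaple patch from being applied twice. A small sketch of that "patch once, then leave a marker" idiom, with placeholder paths:

# Sketch of the patch-once idiom used above; all paths are placeholders.
from os.path import isfile
import shutil

def patch_once(original_file, src_file, backup_file, patchflag_path):
	if isfile(patchflag_path):
		return False                                 # already patched on a previous build
	shutil.copyfile(original_file, backup_file)      # keep a backup of the framework file
	shutil.copyfile(src_file, original_file)         # drop in the replacement
	with open(patchflag_path, "w") as fp:            # leave the marker for next time
		fp.write("")
	return True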
@@ -5,50 +5,52 @@
 # the appropriate framework variants folder, so that its contents
 # will be picked up by PlatformIO just like any other variant.
 #
-import os,shutil,marlin
-from SCons.Script import DefaultEnvironment
-from platformio import util
+import pioutil
+if pioutil.is_pio_build():
+	import os,shutil,marlin
+	from SCons.Script import DefaultEnvironment
+	from platformio import util

 env = DefaultEnvironment()

 #
 # Get the platform name from the 'platform_packages' option,
 # or look it up by the platform.class.name.
 #
 platform = env.PioPlatform()

 from platformio.package.meta import PackageSpec
 platform_packages = env.GetProjectOption('platform_packages')
 if len(platform_packages) == 0:
 	framewords = {
 		"Ststm32Platform": "framework-arduinoststm32",
 		"AtmelavrPlatform": "framework-arduino-avr"
 	}
 	platform_name = framewords[platform.__class__.__name__]
 else:
 	platform_name = PackageSpec(platform_packages[0]).name

 if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino" ]:
 	platform_name = "framework-arduinoststm32"

 FRAMEWORK_DIR = platform.get_package_dir(platform_name)
 assert os.path.isdir(FRAMEWORK_DIR)

 board = env.BoardConfig()

 #mcu_type = board.get("build.mcu")[:-2]
 variant = board.get("build.variant")
 #series = mcu_type[:7].upper() + "xx"

 # Prepare a new empty folder at the destination
 variant_dir = os.path.join(FRAMEWORK_DIR, "variants", variant)
 if os.path.isdir(variant_dir):
 	shutil.rmtree(variant_dir)
 if not os.path.isdir(variant_dir):
 	os.mkdir(variant_dir)

 # Source dir is a local variant sub-folder
 source_dir = os.path.join("buildroot/share/PlatformIO/variants", variant)
 assert os.path.isdir(source_dir)

 marlin.copytree(source_dir, variant_dir)
@@ -1,39 +1,40 @@
 #
-# buildroot/share/PlatformIO/scripts/jgaurora_a5s_a1_with_bootloader.py
+# jgaurora_a5s_a1_with_bootloader.py
 # Customizations for env:jgaurora_a5s_a1
 #
-import os,marlin
+import pioutil
+if pioutil.is_pio_build():
+	import os,marlin

 # Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin'
 def addboot(source, target, env):
 	firmware = open(target[0].path, "rb")
 	lengthfirmware = os.path.getsize(target[0].path)
 	bootloader_bin = "buildroot/share/PlatformIO/scripts/" + "jgaurora_bootloader.bin"
 	bootloader = open(bootloader_bin, "rb")
 	lengthbootloader = os.path.getsize(bootloader_bin)

 	firmware_with_boothloader_bin = target[0].dir.path + '/firmware_with_bootloader.bin'
 	if os.path.exists(firmware_with_boothloader_bin):
 		os.remove(firmware_with_boothloader_bin)
 	firmwareimage = open(firmware_with_boothloader_bin, "wb")
 	position = 0
 	while position < lengthbootloader:
 		byte = bootloader.read(1)
 		firmwareimage.write(byte)
 		position += 1
 	position = 0
 	while position < lengthfirmware:
 		byte = firmware.read(1)
 		firmwareimage.write(byte)
 		position += 1
 	bootloader.close()
 	firmware.close()
 	firmwareimage.close()

 	firmware_without_bootloader_bin = target[0].dir.path + '/firmware_for_sd_upload.bin'
 	if os.path.exists(firmware_without_bootloader_bin):
 		os.remove(firmware_without_bootloader_bin)
 	os.rename(target[0].path, firmware_without_bootloader_bin)
 	#os.rename(target[0].dir.path+'/firmware_with_bootloader.bin', target[0].dir.path+'/firmware.bin')

 marlin.add_post_action(addboot);
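The byte-by-byte copy loops above simply concatenate the bootloader image and the freshly built firmware. A more compact sketch of the same concatenation, with placeholder file names:

# Sketch: concatenate a bootloader image and a firmware image, as addboot()
# does one byte at a time. File names are placeholders.
def append_firmware(bootloader_bin, firmware_bin, combined_bin):
	with open(combined_bin, "wb") as out:
		for part in (bootloader_bin, firmware_bin):
			with open(part, "rb") as f:
				out.write(f.read())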
@@ -1,47 +1,49 @@
 #
-# buildroot/share/PlatformIO/scripts/lerdge.py
+# lerdge.py
 # Customizations for Lerdge build environments:
 #   env:LERDGEX  env:LERDGEX_usb_flash_drive
 #   env:LERDGES  env:LERDGES_usb_flash_drive
 #   env:LERDGEK  env:LERDGEK_usb_flash_drive
 #
-import os,marlin
-Import("env")
+import pioutil
+if pioutil.is_pio_build():
+	import os,marlin
+	Import("env")

 from SCons.Script import DefaultEnvironment
 board = DefaultEnvironment().BoardConfig()

 def encryptByte(byte):
 	byte = 0xFF & ((byte << 6) | (byte >> 2))
 	i = 0x58 + byte
 	j = 0x05 + byte + (i >> 8)
 	byte = (0xF8 & i) | (0x07 & j)
 	return byte

 def encrypt_file(input, output_file, file_length):
 	input_file = bytearray(input.read())
 	for i in range(len(input_file)):
 		input_file[i] = encryptByte(input_file[i])
 	output_file.write(input_file)

 # Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt
 def encrypt(source, target, env):
 	fwpath = target[0].path
 	enname = board.get("build.encrypt")
 	print("Encrypting %s to %s" % (fwpath, enname))
 	fwfile = open(fwpath, "rb")
 	enfile = open(target[0].dir.path + "/" + enname, "wb")
 	length = os.path.getsize(fwpath)

 	encrypt_file(fwfile, enfile, length)

 	fwfile.close()
 	enfile.close()
 	os.remove(fwpath)

 if 'encrypt' in board.get("build").keys():
 	if board.get("build.encrypt") != "":
 		marlin.add_post_action(encrypt)
 	else:
 		print("LERDGE builds require output file via board_build.encrypt = 'filename' parameter")
 		exit(1)
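The Lerdge "encryption" above is a fixed per-byte transform, so it can be exercised outside the build. A standalone sketch that applies the same mapping to a small buffer; the sample bytes are arbitrary, only the mapping itself comes from the script:

# Standalone sketch of the per-byte transform used by lerdge.py's encrypt_file().
def encryptByte(byte):
	byte = 0xFF & ((byte << 6) | (byte >> 2))
	i = 0x58 + byte
	j = 0x05 + byte + (i >> 8)
	return (0xF8 & i) | (0x07 & j)

sample = bytearray(b"MARLIN")                      # arbitrary demo input
encrypted = bytearray(encryptByte(b) for b in sample)
print(encrypted.hex())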
@@ -1,5 +1,5 @@
 #
-# buildroot/share/PlatformIO/scripts/marlin.py
+# marlin.py
 # Helper module with some commonly-used functions
 #
 import os,shutil
@@ -10,13 +10,13 @@ env = DefaultEnvironment()
 from os.path import join

 def copytree(src, dst, symlinks=False, ignore=None):
 	for item in os.listdir(src):
 		s = join(src, item)
 		d = join(dst, item)
 		if os.path.isdir(s):
 			shutil.copytree(s, d, symlinks, ignore)
 		else:
 			shutil.copy2(s, d)

 def replace_define(field, value):
 	for define in env['CPPDEFINES']:
@@ -1,5 +1,5 @@
 #
-# buildroot/share/PlatformIO/scripts/mks_robin.py
+# mks_robin.py
 #
 import robin
 robin.prepare("0x08007000", "mks_robin.ld", "Robin.bin")
@@ -1,5 +1,5 @@
 #
-# buildroot/share/PlatformIO/scripts/mks_robin_e3.py
+# mks_robin_e3.py
 #
 import robin
 robin.prepare("0x08005000", "mks_robin_e3.ld", "Robin_e3.bin")
@@ -1,5 +1,5 @@
 #
-# buildroot/share/PlatformIO/scripts/mks_robin_e3p.py
+# mks_robin_e3p.py
 #
 import robin
 robin.prepare("0x08007000", "mks_robin_e3p.ld", "Robin_e3p.bin")
@@ -1,5 +1,5 @@
 #
-# buildroot/share/PlatformIO/scripts/mks_robin_lite.py
+# mks_robin_lite.py
 #
 import robin
 robin.prepare("0x08005000", "mks_robin_lite.ld", "mksLite.bin")
@@ -1,5 +1,5 @@
 #
-# buildroot/share/PlatformIO/scripts/mks_robin_lite3.py
+# mks_robin_lite3.py
 #
 import robin
 robin.prepare("0x08005000", "mks_robin_lite.ld", "mksLite3.bin")
@@ -1,5 +1,5 @@
 #
-# buildroot/share/PlatformIO/scripts/mks_robin_mini.py
+# mks_robin_mini.py
 #
 import robin
 robin.prepare("0x08007000", "mks_robin_mini.ld", "Robin_mini.bin")
@@ -1,5 +1,5 @@
 #
-# buildroot/share/PlatformIO/scripts/mks_robin_nano.py
+# mks_robin_nano.py
 #
 import robin
 robin.prepare("0x08007000", "mks_robin_nano.ld", "Robin_nano.bin")
@@ -1,5 +1,5 @@
 #
-# buildroot/share/PlatformIO/scripts/mks_robin_nano35.py
+# mks_robin_nano35.py
 #
 import robin
 robin.prepare("0x08007000", "mks_robin_nano.ld", "Robin_nano35.bin")
@@ -1,5 +1,5 @@
 #
-# buildroot/share/PlatformIO/scripts/mks_robin_pro.py
+# mks_robin_pro.py
 #
 import robin
 robin.prepare("0x08007000", "mks_robin_pro.ld", "Robin_pro.bin")
@@ -8,54 +8,56 @@
 #
 # - For 'board_build.rename' add a post-action to rename the firmware file.
 #
-import os,sys,marlin
-Import("env")
+import pioutil
+if pioutil.is_pio_build():
+	import os,sys,marlin
+	Import("env")

 from SCons.Script import DefaultEnvironment
 board = DefaultEnvironment().BoardConfig()

 board_keys = board.get("build").keys()

 #
 # For build.offset define LD_FLASH_OFFSET, used by ldscript.ld
 #
 if 'offset' in board_keys:
 	LD_FLASH_OFFSET = board.get("build.offset")
 	marlin.relocate_vtab(LD_FLASH_OFFSET)

 	# Flash size
 	maximum_flash_size = int(board.get("upload.maximum_size") / 1024)
 	marlin.replace_define('STM32_FLASH_SIZE', maximum_flash_size)

 	# Get upload.maximum_ram_size (defined by /buildroot/share/PlatformIO/boards/VARIOUS.json)
 	maximum_ram_size = board.get("upload.maximum_ram_size")

 	for i, flag in enumerate(env["LINKFLAGS"]):
 		if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
 			env["LINKFLAGS"][i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
 		if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag:
 			env["LINKFLAGS"][i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)

 #
 # For build.encrypt rename and encode the firmware file.
 #
 if 'encrypt' in board_keys:

 	# Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt
 	def encrypt(source, target, env):
 		marlin.encrypt_mks(source, target, env, board.get("build.encrypt"))

 	if board.get("build.encrypt") != "":
 		marlin.add_post_action(encrypt)

 #
 # For build.rename simply rename the firmware file.
 #
 if 'rename' in board_keys:

 	def rename_target(source, target, env):
 		firmware = os.path.join(target[0].dir.path, board.get("build.rename"))
 		import shutil
 		shutil.copy(target[0].path, firmware)

 	marlin.add_post_action(rename_target)
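The LINKFLAGS loop above rewrites two --defsym entries in place. A small sketch of that rewrite on a plain list instead of env["LINKFLAGS"]; the starting values and sizes are made up for illustration:

# Sketch of the --defsym rewrite above, with illustrative values.
linkflags = [
	"-Wl,--defsym=LD_FLASH_OFFSET=0x0",
	"-Wl,--defsym=LD_MAX_DATA_SIZE=20480",
]
LD_FLASH_OFFSET = "0x7000"
maximum_ram_size = 65536

for i, flag in enumerate(linkflags):
	if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
		linkflags[i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
	if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag:
		linkflags[i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)

print(linkflags)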
@@ -1,18 +1,20 @@
 #
 # Convert the ELF to an SREC file suitable for some bootloaders
 #
-import os,sys
-from os.path import join
+import pioutil
+if pioutil.is_pio_build():
+	import os,sys
+	from os.path import join

 Import("env")

 board = env.BoardConfig()
 board_keys = board.get("build").keys()
 if 'encrypt' in board_keys:
 	env.AddPostAction(
 		join("$BUILD_DIR", "${PROGNAME}.bin"),
 		env.VerboseAction(" ".join([
 			"$OBJCOPY", "-O", "srec",
 			"\"$BUILD_DIR/${PROGNAME}.elf\"", "\"" + join("$BUILD_DIR", board.get("build.encrypt")) + "\""
 		]), "Building $TARGET")
 	)
@@ -1,8 +1,8 @@
 #
-# buildroot/share/PlatformIO/scripts/pioutil.py
+# pioutil.py
 #

-# Detect that 'vscode init' is running
-def is_vscode_init():
+# Make sure 'vscode init' is not the current command
+def is_pio_build():
 	from SCons.Script import COMMAND_LINE_TARGETS
-	return "idedata" in COMMAND_LINE_TARGETS or "_idedata" in COMMAND_LINE_TARGETS
+	return "idedata" not in COMMAND_LINE_TARGETS and "_idedata" not in COMMAND_LINE_TARGETS
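With is_pio_build() returning False for the IntelliSense "idedata" targets, every extra script can bail out with one guard instead of calling env.Exit(0). The pattern the rest of this commit rolls out, sketched for a hypothetical script; some_build_step is a stand-in for whatever the script actually does:

# The guard pattern this commit applies to every extra script.
import pioutil
if pioutil.is_pio_build():
	Import("env")

	def some_build_step():
		print("Runs only for real PIO builds, not for IntelliSense 'idedata' runs")

	some_build_step()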
@@ -2,100 +2,99 @@
 # preflight-checks.py
 # Check for common issues prior to compiling
 #
-import os,re,sys,pioutil
-Import("env")
-
-# Detect that 'vscode init' is running
-if pioutil.is_vscode_init():
-	env.Exit(0)
+import pioutil
+if pioutil.is_pio_build():
+
+	import os,re,sys
+	Import("env")

 def get_envs_for_board(board):
 	with open(os.path.join("Marlin", "src", "pins", "pins.h"), "r") as file:

 		if sys.platform == 'win32':
 			envregex = r"(?:env|win):"
 		elif sys.platform == 'darwin':
 			envregex = r"(?:env|mac|uni):"
 		elif sys.platform == 'linux':
 			envregex = r"(?:env|lin|uni):"
 		else:
 			envregex = r"(?:env):"

 		r = re.compile(r"if\s+MB\((.+)\)")
 		if board.startswith("BOARD_"):
 			board = board[6:]

 		for line in file:
 			mbs = r.findall(line)
 			if mbs and board in re.split(r",\s*", mbs[0]):
 				line = file.readline()
 				found_envs = re.match(r"\s*#include .+" + envregex, line)
 				if found_envs:
 					envlist = re.findall(envregex + r"(\w+)", line)
 					return [ "env:"+s for s in envlist ]
 	return []

 def check_envs(build_env, board_envs, config):
 	if build_env in board_envs:
 		return True
 	ext = config.get(build_env, 'extends', default=None)
 	if ext:
 		if isinstance(ext, str):
 			return check_envs(ext, board_envs, config)
 		elif isinstance(ext, list):
 			for ext_env in ext:
 				if check_envs(ext_env, board_envs, config):
 					return True
 	return False

 def sanity_check_target():
 	# Sanity checks:
 	if 'PIOENV' not in env:
 		raise SystemExit("Error: PIOENV is not defined. This script is intended to be used with PlatformIO")

 	if 'MARLIN_FEATURES' not in env:
 		raise SystemExit("Error: this script should be used after common Marlin scripts")

 	if 'MOTHERBOARD' not in env['MARLIN_FEATURES']:
 		raise SystemExit("Error: MOTHERBOARD is not defined in Configuration.h")

 	build_env = env['PIOENV']
 	motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
 	board_envs = get_envs_for_board(motherboard)
 	config = env.GetProjectConfig()
 	result = check_envs("env:"+build_env, board_envs, config)

 	if not result:
 		err = "Error: Build environment '%s' is incompatible with %s. Use one of these: %s" % \
 			( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
 		raise SystemExit(err)

 	#
 	# Check for Config files in two common incorrect places
 	#
 	for p in [ env['PROJECT_DIR'], os.path.join(env['PROJECT_DIR'], "config") ]:
 		for f in [ "Configuration.h", "Configuration_adv.h" ]:
 			if os.path.isfile(os.path.join(p, f)):
 				err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
 				raise SystemExit(err)

 	#
 	# Give warnings on every build
 	#
 	warnfile = os.path.join(env['PROJECT_BUILD_DIR'], build_env, "src", "src", "inc", "Warnings.cpp.o")
 	if os.path.exists(warnfile):
 		os.remove(warnfile)

 	#
 	# Check for old files indicating an entangled Marlin (mixing old and new code)
 	#
 	mixedin = []
 	p = os.path.join(env['PROJECT_DIR'], "Marlin", "src", "lcd", "dogm")
 	for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
 		if os.path.isfile(os.path.join(p, f)):
 			mixedin += [ f ]
 	if mixedin:
 		err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
 		raise SystemExit(err)

 sanity_check_target()
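get_envs_for_board() works by pattern-matching pins.h. A standalone sketch of the same two regexes against a hypothetical pair of pins.h-style lines; the board and environment names are made up, only the regexes come from the script:

# Sketch of the pins.h matching done by get_envs_for_board() above.
import re

mb_line      = "#elif MB(EXAMPLE_BOARD)"
include_line = '  #include "example/pins_EXAMPLE.h" // env:EXAMPLE_env1 env:EXAMPLE_env2'

envregex = r"(?:env):"
r = re.compile(r"if\s+MB\((.+)\)")

mbs = r.findall(mb_line)
if mbs and "EXAMPLE_BOARD" in re.split(r",\s*", mbs[0]):
	if re.match(r"\s*#include .+" + envregex, include_line):
		print([ "env:" + s for s in re.findall(envregex + r"(\w+)", include_line) ])
		# -> ['env:EXAMPLE_env1', 'env:EXAMPLE_env2']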
@@ -2,8 +2,8 @@
 # random-bin.py
 # Set a unique firmware name based on current date and time
 #
-Import("env")
-
-from datetime import datetime
-
+import pioutil
+if pioutil.is_pio_build():
+	from datetime import datetime
+	Import("env")
 env['PROGNAME'] = datetime.now().strftime("firmware-%Y%m%d-%H%M%S")
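For reference, the strftime pattern above yields names like the one printed by this one-liner; the exact value naturally depends on the clock:

# Example of the firmware name format produced above.
from datetime import datetime
print(datetime.now().strftime("firmware-%Y%m%d-%H%M%S"))  # e.g. firmware-20211121-093045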
@@ -1,12 +1,14 @@
 #
-# buildroot/share/PlatformIO/scripts/robin.py
+# robin.py
 #
-import marlin

 # Apply customizations for a MKS Robin
 def prepare(address, ldname, fwname):
-	def encrypt(source, target, env):
-		marlin.encrypt_mks(source, target, env, fwname)
-	marlin.relocate_firmware(address)
-	marlin.custom_ld_script(ldname)
-	marlin.add_post_action(encrypt);
+	import pioutil
+	if pioutil.is_pio_build():
+		import marlin
+		def encrypt(source, target, env):
+			marlin.encrypt_mks(source, target, env, fwname)
+		marlin.relocate_firmware(address)
+		marlin.custom_ld_script(ldname)
+		marlin.add_post_action(encrypt);
@@ -1,52 +1,54 @@
 #
+# simulator.py
 # PlatformIO pre: script for simulator builds
 #
+import pioutil
+if pioutil.is_pio_build():
+	# Get the environment thus far for the build
+	Import("env")

-# Get the environment thus far for the build
-Import("env")
-
-#print(env.Dump())
+	#print(env.Dump())

 #
 # Give the binary a distinctive name
 #

 env['PROGNAME'] = "MarlinSimulator"

 #
 # If Xcode is installed add the path to its Frameworks folder,
 # or if Mesa is installed try to use its GL/gl.h.
 #

 import sys
 if sys.platform == 'darwin':

 	#
 	# Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS')
 	#
 	env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ]

 	# Default paths for Xcode and a lucky GL/gl.h dropped by Mesa
 	xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks"
 	mesa_path = "/opt/local/include/GL/gl.h"

 	import os.path

 	if os.path.exists(xcode_path):

 		env['BUILD_FLAGS'] += [ "-F" + xcode_path ]
 		print("Using OpenGL framework headers from Xcode.app")

 	elif os.path.exists(mesa_path):

 		env['BUILD_FLAGS'] += [ '-D__MESA__' ]
 		print("Using OpenGL header from", mesa_path)

 	else:

 		print("\n\nNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h.\n\n")

 		# Break out of the PIO build immediately
 		sys.exit(1)

 env.AddCustomTarget("upload", "$BUILD_DIR/${PROGNAME}", "$BUILD_DIR/${PROGNAME}")
@@ -1,59 +1,61 @@
 #
 # stm32_serialbuffer.py
 #
-Import("env")
-
-# Marlin uses the `RX_BUFFER_SIZE` \ `TX_BUFFER_SIZE` options to
-# configure buffer sizes for receiving \ transmitting serial data.
-# Stm32duino uses another set of defines for the same purpose, so this
-# script gets the values from the configuration and uses them to define
-# `SERIAL_RX_BUFFER_SIZE` and `SERIAL_TX_BUFFER_SIZE` as global build
-# flags so they are available for use by the platform.
-#
-# The script will set the value as the default one (64 bytes)
-# or the user-configured one, whichever is higher.
-#
-# Marlin's default buffer sizes are 128 for RX and 32 for TX.
-# The highest value is taken (128/64).
-#
-# If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
-# defined, the first of these values will be used as the minimum.
-build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
-mf = env["MARLIN_FEATURES"]
+import pioutil
+if pioutil.is_pio_build():
+	Import("env")

 # Get a build flag's value or None
 def getBuildFlagValue(name):
 	for flag in build_flags:
 		if isinstance(flag, list) and flag[0] == name:
 			return flag[1]

 	return None

 # Get an overriding buffer size for RX or TX from the build flags
 def getInternalSize(side):
 	return getBuildFlagValue(f"MF_{side}_BUFFER_SIZE") or \
 		getBuildFlagValue(f"SERIAL_{side}_BUFFER_SIZE") or \
 		getBuildFlagValue(f"USART_{side}_BUF_SIZE")

 # Get the largest defined buffer size for RX or TX
 def getBufferSize(side, default):
 	# Get a build flag value or fall back to the given default
 	internal = int(getInternalSize(side) or default)
 	flag = side + "_BUFFER_SIZE"
 	# Return the largest value
 	return max(int(mf[flag]), internal) if flag in mf else internal

 # Add a build flag if it's not already defined
 def tryAddFlag(name, value):
 	if getBuildFlagValue(name) is None:
 		env.Append(BUILD_FLAGS=[f"-D{name}={value}"])

+	# Marlin uses the `RX_BUFFER_SIZE` \ `TX_BUFFER_SIZE` options to
+	# configure buffer sizes for receiving \ transmitting serial data.
+	# Stm32duino uses another set of defines for the same purpose, so this
+	# script gets the values from the configuration and uses them to define
+	# `SERIAL_RX_BUFFER_SIZE` and `SERIAL_TX_BUFFER_SIZE` as global build
+	# flags so they are available for use by the platform.
+	#
+	# The script will set the value as the default one (64 bytes)
+	# or the user-configured one, whichever is higher.
+	#
+	# Marlin's default buffer sizes are 128 for RX and 32 for TX.
+	# The highest value is taken (128/64).
+	#
+	# If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
+	# defined, the first of these values will be used as the minimum.
+	build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
+	mf = env["MARLIN_FEATURES"]
+
 # Get the largest defined buffer sizes for RX or TX, using defaults for undefined
 rxBuf = getBufferSize("RX", 128)
 txBuf = getBufferSize("TX", 64)

 # Provide serial buffer sizes to the stm32duino platform
 tryAddFlag("SERIAL_RX_BUFFER_SIZE", rxBuf)
 tryAddFlag("SERIAL_TX_BUFFER_SIZE", txBuf)
 tryAddFlag("USART_RX_BUF_SIZE", rxBuf)
 tryAddFlag("USART_TX_BUF_SIZE", txBuf)
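The buffer-size resolution above can be checked outside the build by mocking its two inputs. A sketch with made-up flag and feature values showing which number wins; build_flags stands in for env.ParseFlags(...)["CPPDEFINES"] and marlin_features for env["MARLIN_FEATURES"]:

# Standalone sketch of getBufferSize()'s precedence, with mocked inputs.
build_flags = [ ["SERIAL_RX_BUFFER_SIZE", "256"] ]   # e.g. a -D from platformio.ini
marlin_features = { "RX_BUFFER_SIZE": "128", "TX_BUFFER_SIZE": "32" }

def getBuildFlagValue(name):
	for flag in build_flags:
		if isinstance(flag, list) and flag[0] == name:
			return flag[1]
	return None

def getInternalSize(side):
	return getBuildFlagValue(f"MF_{side}_BUFFER_SIZE") or \
		getBuildFlagValue(f"SERIAL_{side}_BUFFER_SIZE") or \
		getBuildFlagValue(f"USART_{side}_BUF_SIZE")

def getBufferSize(side, default):
	internal = int(getInternalSize(side) or default)
	flag = side + "_BUFFER_SIZE"
	return max(int(marlin_features[flag]), internal) if flag in marlin_features else internal

print(getBufferSize("RX", 128))  # 256: the explicit SERIAL_RX_BUFFER_SIZE flag wins
print(getBufferSize("TX", 64))   # 64: no flag, so max(Marlin's 32, default 64)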
@@ -326,7 +326,6 @@ platform = ${common_stm32f1.platform}
 extends = common_stm32f1
 board = marlin_CHITU_F103
 extra_scripts = ${common_stm32f1.extra_scripts}
-  pre:buildroot/share/PlatformIO/scripts/common-dependencies.py
   pre:buildroot/share/PlatformIO/scripts/STM32F1_create_variant.py
   buildroot/share/PlatformIO/scripts/chitu_crypt.py
 build_flags = ${common_stm32f1.build_flags}