mirror of
https://github.com/PX4/PX4-Autopilot.git
synced 2026-03-23 18:13:49 +08:00
Read companion_targets files from board directories and exclude those targets from CI grouped builds. The parent target builds them via Make prerequisite, avoiding redundant CI jobs. Signed-off-by: Ramon Roche <mrpollo@gmail.com>
340 lines
13 KiB
Python
Executable File
340 lines
13 KiB
Python
Executable File
#!/usr/bin/env python3

""" Script to generate a JSON config with all build targets (for CI) """

import argparse
import os
import sys
import json
import re
from kconfiglib import Kconfig

# Single Kconfig instance reused for every board config parsed below.
kconf = Kconfig()

# Suppress warning output
kconf.warn_assign_undef = False
kconf.warn_assign_override = False
kconf.warn_assign_redun = False

# Repository root relative to this script (script lives one level below it).
source_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')

parser = argparse.ArgumentParser(description='Generate build targets')

parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                    help='Verbose Output')
parser.add_argument('-p', '--pretty', dest='pretty', action='store_true',
                    help='Pretty output instead of a single line')
parser.add_argument('-g', '--groups', dest='group', action='store_true',
                    help='Groups targets')
parser.add_argument('-f', '--filter', dest='filter', help='comma separated list of build target name prefixes to include instead of all e.g. "px4_fmu-v5_"')

args = parser.parse_args()
verbose = args.verbose

# Optional allow-list of target-name prefixes built from --filter.
target_filter = []
if args.filter:
    for target in args.filter.split(','):
        target_filter.append(target)

# Build containers: default for most targets, dedicated image for VOXL2/qurt.
default_container = 'ghcr.io/px4/px4-dev:v1.16.0-rc1-258-g0369abd556'
voxl2_container = 'ghcr.io/px4/px4-dev-voxl2:v1.5'

build_configs = []    # flat list of {'target', 'container'[, 'arch']} dicts
grouped_targets = {}  # arch -> {'container', 'manufacturers': {name: [target names]}}
excluded_boards = ['px4_ros2', 'espressif_esp32'] # TODO: fix and enable
excluded_manufacturers = ['atlflight']
excluded_platforms = []

# Container overrides for platforms/boards that need a non-default container
platform_container_overrides = {
    'qurt': voxl2_container,
}
board_container_overrides = {
    'modalai_voxl2': voxl2_container,
}
# Board-config labels (filename without .px4board) that are skipped entirely.
excluded_labels = [
    'stackcheck',
    'nolockstep', 'replay', 'test',
    'uavcanv1', # TODO: fix and enable
]

# Non-grouped output shape expected by the GitHub Actions matrix 'include' key.
github_action_config = { 'include': build_configs }
extra_args = {}
if args.pretty:
    extra_args['indent'] = 2
|
|
|
|
def chunks(arr, size):
    """Yield consecutive slices of *arr*, each holding at most *size* items."""
    start = 0
    total = len(arr)
    while start < total:
        yield arr[start:start + size]
        start += size
|
|
|
|
def comma_targets(targets):
    """Collapse a list of target names into one comma-separated string."""
    separator = ","
    return separator.join(targets)
|
|
|
|
def process_target(px4board_file, target_name):
    """Parse one .px4board Kconfig file and build its CI matrix entry.

    Returns a dict {'target', 'container'} (plus 'arch' when --groups is
    set) or None when the board's platform is in excluded_platforms.
    Loads into the module-level `kconf` instance, so it is not reentrant.
    """
    # reads through the board file and grabs
    # useful information for building
    ret = None
    platform = None
    toolchain = None
    group = None

    # default/performance-test/bootloader configs stand alone; every other
    # label is merged on top of the board's default.px4board
    if px4board_file.endswith("default.px4board") or \
       px4board_file.endswith("performance-test.px4board") or \
       px4board_file.endswith("bootloader.px4board"):
        kconf.load_config(px4board_file, replace=True)
    else: # Merge config with default.px4board
        # swap the label filename for default.px4board in the same directory
        default_kconfig = re.sub(r'[a-zA-Z\d_-]+\.px4board', 'default.px4board', px4board_file)
        kconf.load_config(default_kconfig, replace=True)
        kconf.load_config(px4board_file, replace=False)

    if "BOARD_TOOLCHAIN" in kconf.syms:
        toolchain = kconf.syms["BOARD_TOOLCHAIN"].str_value

    if "BOARD_PLATFORM" in kconf.syms:
        platform = kconf.syms["BOARD_PLATFORM"].str_value

    # every board config is expected to define BOARD_PLATFORM
    assert platform, f"PLATFORM not found in {px4board_file}"

    if platform not in excluded_platforms:
        container = default_container

        # Extract board name (manufacturer_board) from target name
        # NOTE(review): assumes neither the manufacturer nor the board
        # directory name contains an underscore -- TODO confirm
        board_name = '_'.join(target_name.split('_')[:2])

        # Apply container overrides for specific platforms or boards
        if platform in platform_container_overrides:
            container = platform_container_overrides[platform]
        if board_name in board_container_overrides:
            container = board_container_overrides[board_name]

        # Boards with container overrides get their own group
        if board_name in board_container_overrides or platform in platform_container_overrides:
            group = 'voxl2'
        elif platform == 'posix':
            group = 'base'
            # refine the posix group by cross-compile toolchain
            if toolchain:
                if toolchain.startswith('aarch64'):
                    group = 'aarch64'
                elif toolchain == 'arm-linux-gnueabihf':
                    group = 'armhf'
                else:
                    # unknown toolchain: target stays in 'base'
                    if verbose: print(f'unmatched toolchain: {toolchain}')
        elif platform == 'nuttx':
            group = 'nuttx'
        else:
            # unknown platform: group stays None
            if verbose: print(f'unmatched platform: {platform}')

        ret = {'target': target_name, 'container': container}
        if(args.group):
            ret['arch'] = group

    return ret
|
|
|
|
# Look for board targets in the ./boards directory
if verbose:
    print("=======================")
    print("= scanning for boards =")
    print("=======================")

# Metadata builds always ride along with the 'base' group, filed under the
# 'px4' manufacturer:
# - Airframe
# - Parameters
# - Events
metadata_targets = ['airframe_metadata', 'parameters_metadata', 'extract_events']

grouped_targets['base'] = {
    'container': default_container,
    'manufacturers': {
        'px4': list(metadata_targets),
    },
}
|
|
|
|
# Walk boards/<manufacturer>/<board>/*.px4board, apply the exclusion lists,
# and collect one build config per surviving target (sorted for stable output).
for manufacturer in sorted(os.scandir(os.path.join(source_dir, '../boards')), key=lambda e: e.name):
    if not manufacturer.is_dir():
        continue
    if manufacturer.name in excluded_manufacturers:
        if verbose: print(f'excluding manufacturer {manufacturer.name}')
        continue

    for board in sorted(os.scandir(manufacturer.path), key=lambda e: e.name):
        if not board.is_dir():
            continue

        for files in sorted(os.scandir(board.path), key=lambda e: e.name):
            if files.is_file() and files.name.endswith('.px4board'):

                board_name = manufacturer.name + '_' + board.name
                # label = filename minus the '.px4board' suffix (9 chars)
                label = files.name[:-9]
                target_name = manufacturer.name + '_' + board.name + '_' + label

                # --filter: keep only targets matching one of the given prefixes
                if target_filter and not any(target_name.startswith(f) for f in target_filter):
                    if verbose: print(f'excluding board {board_name} ({target_name})')
                    continue

                if board_name in excluded_boards:
                    if verbose: print(f'excluding board {board_name} ({target_name})')
                    continue

                if label in excluded_labels:
                    if verbose: print(f'excluding label {label} ({target_name})')
                    continue
                target = process_target(files.path, target_name)
                # when grouping, also index the target under arch -> manufacturer;
                # the first target seen for an arch supplies the group's container
                if (args.group and target is not None):
                    if (target['arch'] not in grouped_targets):
                        grouped_targets[target['arch']] = {}
                        grouped_targets[target['arch']]['container'] = target['container']
                        grouped_targets[target['arch']]['manufacturers'] = {}
                    if(manufacturer.name not in grouped_targets[target['arch']]['manufacturers']):
                        grouped_targets[target['arch']]['manufacturers'][manufacturer.name] = []
                    grouped_targets[target['arch']]['manufacturers'][manufacturer.name].append(target_name)
                if target is not None:
                    build_configs.append(target)
|
|
|
|
# Remove companion targets from CI groups (parent target builds them via Make prerequisite)
# Each board directory may ship a plain-text 'companion_targets' file listing one
# target name per line; '#' lines are comments. Matching targets are dropped from
# the grouped CI matrix only -- build_configs (the ungrouped output) is untouched.
companion_targets = set()
for manufacturer in sorted(os.scandir(os.path.join(source_dir, '../boards')), key=lambda e: e.name):
    if not manufacturer.is_dir():
        continue
    for board in sorted(os.scandir(manufacturer.path), key=lambda e: e.name):
        if not board.is_dir():
            continue
        companion_file = os.path.join(board.path, 'companion_targets')
        if os.path.exists(companion_file):
            with open(companion_file) as f:
                # strip() before the '#' test so comment lines with leading
                # whitespace are ignored too (previously they slipped through
                # the startswith check and were added as bogus target names)
                companion_targets |= {
                    line.strip() for line in f
                    if line.strip() and not line.strip().startswith('#')
                }

# Filter every group once against the union of all companion files, instead of
# re-walking all groups per file.
if companion_targets:
    for arch in grouped_targets:
        for man in grouped_targets[arch]['manufacturers']:
            grouped_targets[arch]['manufacturers'][man] = [
                t for t in grouped_targets[arch]['manufacturers'][man]
                if t not in companion_targets
            ]
|
|
|
|
# Debug dump of the grouped-target structure before JSON generation.
if verbose:
    import pprint
    banner = "============================"
    print(banner)
    print("= Boards found in ./boards =")
    print(banner)
    pprint.pp(grouped_targets)

if verbose:
    bar = "==================="
    print(bar)
    print("= Generating JSON =")
    print(bar)
|
|
|
|
if (args.group):
    # if we are using this script for grouping builds
    # we loop through the manufacturers list and split their targets
    # if a manufacturer has more than a LIMIT of boards then we split that
    # into sub groups such as "arch-manufacturer name-index"
    # example:
    #   nuttx-px4-0
    #   nuttx-px4-1
    #   nuttx-px4-2
    #   nuttx-ark-0
    #   nuttx-ark-1
    # if the manufacturer doesn't have more targets than LIMIT then we add
    # them to a generic group with the following structure "arch-index"
    # example:
    #   nuttx-0
    #   nuttx-1
    final_groups = []
    SPLIT_LIMIT = 10  # maximum number of targets carried by one CI group
    LOWER_LIMIT = 5   # a manufacturer needs more than this for its own group

    def _append_group(arch, runner, group_name, target_list):
        # Append one CI matrix entry; container comes from the arch group.
        final_groups.append({
            "container": grouped_targets[arch]['container'],
            "targets": comma_targets(target_list),
            "arch": arch,
            "runner": runner,
            "group": group_name,
            "len": len(target_list),
        })

    if(verbose):
        print(f'=:Architectures: [{grouped_targets.keys()}]')
    for arch in grouped_targets:
        # nuttx and voxl2 builds run on x64 runners, everything else on arm64
        runner = 'x64' if arch in ('nuttx', 'voxl2') else 'arm64'
        if(verbose):
            print(f'=:Processing: [{arch}]')
        temp_group = []  # manufacturers too small for their own group
        for man in grouped_targets[arch]['manufacturers']:
            if(verbose):
                print(f'=:Processing: [{arch}][{man}]')
            man_targets = grouped_targets[arch]['manufacturers'][man]
            man_len = len(man_targets)
            if(man_len > LOWER_LIMIT and man_len < (SPLIT_LIMIT + 1)):
                # Manufacturers can have their own group
                if(verbose):
                    print(f'=:Processing: [{arch}][{man}][{man_len}]==Manufacturers can have their own group')
                _append_group(arch, runner, arch + "-" + man, man_targets)
            elif(man_len >= (SPLIT_LIMIT + 1)):
                # Split big manufacturer groups into subgroups
                # example: Pixhawk
                if(verbose):
                    print(f'=:Processing: [{arch}][{man}][{man_len}]==Manufacturers has multiple own groups')
                for chunk_counter, chunk in enumerate(chunks(man_targets, SPLIT_LIMIT)):
                    _append_group(arch, runner, arch + "-" + man + "-" + str(chunk_counter), chunk)
            else:
                # Too small: pool with the arch's other small manufacturers
                if(verbose):
                    print(f'=:Processing: [{arch}][{man}][{man_len}]==Manufacturers too small group with others')
                temp_group.extend(man_targets)

        # Emit the pooled leftovers for this arch, split if necessary.
        temp_len = len(temp_group)
        if(temp_len > 0 and temp_len < (SPLIT_LIMIT + 1)):
            if(verbose):
                print(f'=:Processing: [{arch}][orphan][{temp_len}]==Leftover arch can have their own group')
            _append_group(arch, runner, arch + "-0", temp_group)
        elif(temp_len >= (SPLIT_LIMIT + 1)):
            # Split big man groups into subgroups
            # example: Pixhawk
            if(verbose):
                print(f'=:Processing: [{arch}][orphan][{temp_len}]==Leftover arch can has multpile group')
            for chunk_counter, chunk in enumerate(chunks(temp_group, SPLIT_LIMIT)):
                _append_group(arch, runner, arch + "-" + str(chunk_counter), chunk)

    if(verbose):
        import pprint
        print("================")
        print("= final_groups =")
        print("================")
        pprint.pp(final_groups)

        print("===============")
        print("= JSON output =")
        print("===============")

    print(json.dumps({ "include": final_groups }, **extra_args))
else:
    print(json.dumps(github_action_config, **extra_args))
|