diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index c046553dc..69dd493c1 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -20,7 +20,6 @@ jobs:
     strategy:
       matrix:
         python-version:
-          - '3.7'
          - '3.10'
          - '3.13'
 
diff --git a/src/lava_callback.py b/src/lava_callback.py
index 792925022..9dd6fdf7c 100755
--- a/src/lava_callback.py
+++ b/src/lava_callback.py
@@ -29,7 +29,6 @@
 from base import validate_url
 
-
 SETTINGS = toml.load(os.getenv('KCI_SETTINGS', 'config/kernelci.toml'))
 CONFIGS = kernelci.config.load(
     SETTINGS.get('DEFAULT', {}).get('yaml_config', 'config')
 )
@@ -40,6 +39,23 @@
 app = FastAPI()
 executor = ThreadPoolExecutor(max_workers=16)
 
+# Most common images used in LAVA deploy actions
+# https://docs.lavasoftware.org/lava/actions-deploy.html#deploy-action-reference
+input_file_types = [
+    'nfsrootfs',
+    'rootfs',
+    'ramdisk',
+    'initrd',
+    'initramfs',
+    'nbdroot',
+    'persistent_nfs',
+    'boot',
+    'boot_part',
+    'recovery_image',
+    'system',
+    'partition',
+]
+
 
 class ManualCheckout(BaseModel):
     commit: str
@@ -211,6 +227,16 @@ def async_job_submit(api_helper, node_id, job_callback):
     job_result = job_callback.get_job_status()
     device_id = job_callback.get_device_id()
     storage_config_name = job_callback.get_meta('storage_config_name')
+
+    if job_actions := job_callback.get_job_definition('actions'):
+        if deploy_images := job_actions[0].get('deploy', {}):
+            # Some LAVA deploy actions have an 'images' key; others put
+            # the input file types directly in the deploy dict
+            if 'images' in deploy_images:
+                deploy_images = deploy_images.get('images')
+            for input_file in input_file_types:
+                if url := deploy_images.get(input_file, {}).get('url'):
+                    job_node['artifacts']['input_' + input_file] = url
     storage = _get_storage(storage_config_name)
     log_txt_url = _upload_log(log_parser, job_node, storage)
     if log_txt_url:
diff --git a/src/send_kcidb.py b/src/send_kcidb.py
index 7662c4ef9..c28686e65 100755
--- a/src/send_kcidb.py
+++ b/src/send_kcidb.py
@@ -190,21 +190,24 @@ def _parse_checkout_node(self, origin, checkout_node):
             }
         }]
 
-    def _get_output_files(self, artifacts: dict, exclude_properties=None):
-        output_files = []
+    def _filter_artifacts(self, artifacts: dict, exclude_properties=None,
+                          include_properties=None):
+        filtered_artifacts = []
         for name, url in artifacts.items():
-            if exclude_properties and name in exclude_properties:
+            if exclude_properties and any(prop in name for prop in exclude_properties):
+                continue
+            if include_properties and not any(prop in name for prop in include_properties):
                 continue
             # Replace "/" with "_" to match with the allowed pattern
-            # for "name" property of "output_files" i.e. '^[^/]+$'
+            # for "name" property of "input_files"/"output_files" i.e. '^[^/]+$'
             name = name.replace("/", "_")
-            output_files.append(
+            filtered_artifacts.append(
                 {
                     'name': name,
                     'url': url
                 }
             )
-        return output_files
+        return filtered_artifacts
 
     def _get_log_excerpt(self, log_url):
         """Parse compressed(gzip) or text log file and return last 16*1024 characters as it's
@@ -311,8 +314,7 @@ def _parse_build_node(self, origin, node):
-        parsed_build_node['output_files'] = self._get_output_files(
+        parsed_build_node['output_files'] = self._filter_artifacts(
             artifacts=artifacts,
             exclude_properties=('build_log', '_config')
         )
-        parsed_build_node['input_files'] = None
         parsed_build_node['config_url'] = artifacts.get('_config')
         parsed_build_node['log_url'] = artifacts.get('build_log')
         log_url = parsed_build_node['log_url']
@@ -537,11 +539,14 @@ def _parse_test_node(self, origin, test_node):
 
         artifacts = self._get_artifacts(test_node)
         if artifacts:
-            parsed_test_node['output_files'] = self._get_output_files(
+            parsed_test_node['input_files'] = self._filter_artifacts(
+                artifacts=artifacts,
+                include_properties=('input_',)
+            )
+            parsed_test_node['output_files'] = self._filter_artifacts(
                 artifacts=artifacts,
-                exclude_properties=('lava_log', 'test_log')
+                exclude_properties=('lava_log', 'test_log', 'input_')
             )
-            parsed_test_node['input_files'] = None
             if artifacts.get('lava_log'):
                 parsed_test_node['log_url'] = artifacts.get('lava_log')
             else:
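
As a sanity check on the lava_callback.py change, here is a minimal standalone sketch of the deploy-image extraction. The job_definition dict, the shortened input_file_types list and the example.com URLs are made up for illustration; in the pipeline the actions list comes from job_callback.get_job_definition('actions').

# Illustrative sketch only: a hand-written LAVA job definition standing in
# for what job_callback.get_job_definition('actions') returns.
input_file_types = ['nfsrootfs', 'ramdisk', 'boot', 'system']

job_definition = {
    'actions': [
        {'deploy': {
            'images': {
                'ramdisk': {'url': 'https://example.com/rootfs.cpio.gz'},
                'boot': {'url': 'https://example.com/boot.img'},
            },
        }},
        {'boot': {'method': 'fastboot'}},
    ],
}

artifacts = {}
if job_actions := job_definition.get('actions'):
    if deploy_images := job_actions[0].get('deploy', {}):
        # Same logic as the PR: unwrap the optional 'images' level, then
        # collect the url of every known input file type
        if 'images' in deploy_images:
            deploy_images = deploy_images.get('images')
        for input_file in input_file_types:
            if url := deploy_images.get(input_file, {}).get('url'):
                artifacts['input_' + input_file] = url

print(artifacts)
# {'input_ramdisk': 'https://example.com/rootfs.cpio.gz',
#  'input_boot': 'https://example.com/boot.img'}

Note that only job_actions[0] is inspected, i.e. the code assumes LAVA lists the deploy action first; a job with a different action order would yield no input files.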
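Similarly, a hedged sketch of the new _filter_artifacts matching rules, reimplemented here as a free function with invented artifact names and URLs. prop in name is a substring test, which is also why include_properties needs the trailing comma in ('input_',): a bare ('input_') is just a parenthesized string, and iterating over it would match individual characters such as 'i' and 'n'.

# Sketch of the _filter_artifacts semantics, outside the KCIDB bridge class.
def filter_artifacts(artifacts, exclude_properties=None, include_properties=None):
    filtered = []
    for name, url in artifacts.items():
        # Substring match: 'input_' matches 'input_ramdisk', 'input_boot', ...
        if exclude_properties and any(p in name for p in exclude_properties):
            continue
        if include_properties and not any(p in name for p in include_properties):
            continue
        # '/' is not allowed by the KCIDB '^[^/]+$' name pattern
        filtered.append({'name': name.replace('/', '_'), 'url': url})
    return filtered

artifacts = {
    'input_ramdisk': 'https://example.com/rootfs.cpio.gz',
    'lava_log': 'https://example.com/lava_log.txt',
    'tast/results': 'https://example.com/results.tgz',
}

print(filter_artifacts(artifacts, include_properties=('input_',)))
# [{'name': 'input_ramdisk', 'url': 'https://example.com/rootfs.cpio.gz'}]
print(filter_artifacts(artifacts, exclude_properties=('lava_log', 'input_')))
# [{'name': 'tast_results', 'url': 'https://example.com/results.tgz'}]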