From b29dd90a4f688731e774e503d6eecb3401c84d43 Mon Sep 17 00:00:00 2001 From: Arnulfo Arroyo Date: Wed, 15 Apr 2020 15:51:54 -0500 Subject: [PATCH 001/173] Add recipe for libgd --- recipes/libgd/all/conandata.yml | 4 + recipes/libgd/all/conanfile.py | 102 ++++++++++++++++++ recipes/libgd/all/test_package/CMakeLists.txt | 14 +++ recipes/libgd/all/test_package/conanfile.py | 20 ++++ recipes/libgd/all/test_package/example.cpp | 8 ++ recipes/libgd/config.yml | 3 + 6 files changed, 151 insertions(+) create mode 100644 recipes/libgd/all/conandata.yml create mode 100644 recipes/libgd/all/conanfile.py create mode 100644 recipes/libgd/all/test_package/CMakeLists.txt create mode 100644 recipes/libgd/all/test_package/conanfile.py create mode 100644 recipes/libgd/all/test_package/example.cpp create mode 100644 recipes/libgd/config.yml diff --git a/recipes/libgd/all/conandata.yml b/recipes/libgd/all/conandata.yml new file mode 100644 index 0000000000000..49dafae0ba34c --- /dev/null +++ b/recipes/libgd/all/conandata.yml @@ -0,0 +1,4 @@ +sources: + "2.2.5": + url: "https://github.com/libgd/libgd/releases/download/gd-2.2.5/libgd-2.2.5.tar.gz" + sha256: "a66111c9b4a04e818e9e2a37d7ae8d4aae0939a100a36b0ffb52c706a09074b5" \ No newline at end of file diff --git a/recipes/libgd/all/conanfile.py b/recipes/libgd/all/conanfile.py new file mode 100644 index 0000000000000..ca8362329736f --- /dev/null +++ b/recipes/libgd/all/conanfile.py @@ -0,0 +1,102 @@ +import os +from conans import ConanFile, tools, CMake + + +class LibgdConan(ConanFile): + name = "libgd" + license = "https://github.com/libgd/libgd/blob/master/COPYING" + url = "https://github.com/conan-io/conan-center-index" + description = "GD is an open source code library for the dynamic creation of images by programmers." 
+ topics = ("images", "graphics") + settings = "os", "compiler", "build_type", "arch" + homepage = "https://libgd.github.io" + options = {"shared": [True, False], "fPIC": [True, False]} + default_options = {"shared": False} + generators = "cmake" + requires = "zlib/1.2.11" + + def config_options(self): + if self.settings.os == "Windows": + self.options.remove("fPIC") + + @property + def _source_subfolder(self): + return "source_subfolder" + + @property + def _build_subfolder(self): + return "build_subfolder" + + def source(self): + tools.get(**self.conan_data["sources"][self.version]) + os.rename('libgd-' + self.version, self._source_subfolder) + tools.replace_in_file( + os.path.join( + self._source_subfolder, + "CMakeLists.txt"), + "CMAKE_MINIMUM_REQUIRED(VERSION 2.6 FATAL_ERROR)", + '''cmake_minimum_required (VERSION 3.6 FATAL_ERROR) +PROJECT(GD C) +include(${CMAKE_BINARY_DIR}/../conanbuildinfo.cmake) +conan_basic_setup()''') + tools.replace_in_file( + os.path.join( + self._source_subfolder, + "CMakeLists.txt"), + 'PROJECT(GD)', + '# moved: PROJECT(GD)') + tools.replace_in_file( + os.path.join( + self._source_subfolder, + "CMakeLists.txt"), + 'if(NOT MINGW AND MSVC_VERSION GREATER 1399)', + 'if (BUILD_STATIC_LIBS AND WIN32 AND NOT MINGW AND NOT MSYS)') + tools.replace_in_file( + os.path.join( + self._source_subfolder, + "src", + "CMakeLists.txt"), + '''if (BUILD_SHARED_LIBS) + target_link_libraries(${GD_LIB} ${LIBGD_DEP_LIBS})''', + '''if (NOT WIN32) + list(APPEND LIBGD_DEP_LIBS m) +endif() +if (BUILD_SHARED_LIBS) + target_link_libraries(${GD_LIB} ${LIBGD_DEP_LIBS}) +''') + + def build(self): + cmake = CMake(self) + cmake.definitions['BUILD_STATIC_LIBS'] = not self.options.shared + cmake.definitions["ZLIB_LIBRARY"] = self.deps_cpp_info["zlib"].libs[0] + cmake.definitions["ZLIB_INCLUDE_DIR"] = self.deps_cpp_info["zlib"].include_paths[0] + cmake.configure( + source_folder=self._source_subfolder, + build_folder=self._build_subfolder) + cmake.build() + cmake.install() + + def package(self): + self.copy("COPYING", src=self._source_subfolder, dst="licenses", ignore_case=True, keep_path=False) + tools.rmdir(os.path.join(self.package_folder, 'share')) + self.copy("*", src="bin", dst="bin") + self.copy("*", src="lib", dst="lib") + self.copy("entities.h", dst="include", src="src") + self.copy("gd.h", dst="include", src="src") + self.copy("gd_color_map.h", dst="include", src="src") + self.copy("gd_errors.h", dst="include", src="src") + self.copy("gd_io.h", dst="include", src="src") + self.copy("gdcache.h", dst="include", src="src") + self.copy("gdfontg.h", dst="include", src="src") + self.copy("gdfontl.h", dst="include", src="src") + self.copy("gdfontmb.h", dst="include", src="src") + self.copy("gdfonts.h", dst="include", src="src") + self.copy("gdfontt.h", dst="include", src="src") + self.copy("gdfx.h", dst="include", src="src") + self.copy("gdpp.h", dst="include", src="src") + + def package_info(self): + self.cpp_info.libs = tools.collect_libs(self) + if not self.options.shared: + self.cpp_info.defines.append('NONDLL') + self.cpp_info.defines.append('BGDWIN32') diff --git a/recipes/libgd/all/test_package/CMakeLists.txt b/recipes/libgd/all/test_package/CMakeLists.txt new file mode 100644 index 0000000000000..ad6dc6ebd8e12 --- /dev/null +++ b/recipes/libgd/all/test_package/CMakeLists.txt @@ -0,0 +1,14 @@ +cmake_minimum_required(VERSION 2.8.12) +project(PackageTest CXX) + +include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) +conan_basic_setup() + +add_executable(example example.cpp) 
+target_link_libraries(example ${CONAN_LIBS}) + +# CTest is a testing tool that can be used to test your project. +# enable_testing() +# add_test(NAME example +# WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/bin +# COMMAND example) diff --git a/recipes/libgd/all/test_package/conanfile.py b/recipes/libgd/all/test_package/conanfile.py new file mode 100644 index 0000000000000..2736bca8a0fcf --- /dev/null +++ b/recipes/libgd/all/test_package/conanfile.py @@ -0,0 +1,20 @@ +import os + +from conans import ConanFile, CMake, tools + + +class LibgdTestConan(ConanFile): + settings = "os", "compiler", "build_type", "arch" + generators = "cmake" + + def build(self): + cmake = CMake(self) + # Current dir is "test_package/build/" and CMakeLists.txt is + # in "test_package" + cmake.configure() + cmake.build() + + def test(self): + if not tools.cross_building(self.settings): + os.chdir("bin") + self.run(".%sexample" % os.sep) diff --git a/recipes/libgd/all/test_package/example.cpp b/recipes/libgd/all/test_package/example.cpp new file mode 100644 index 0000000000000..4fed890149612 --- /dev/null +++ b/recipes/libgd/all/test_package/example.cpp @@ -0,0 +1,8 @@ +#include "gd.h" + +int main() { + gdImagePtr im; + im = gdImageCreate(10, 10); + gdImageDestroy (im); + return 0; +} diff --git a/recipes/libgd/config.yml b/recipes/libgd/config.yml new file mode 100644 index 0000000000000..69b4ae234365f --- /dev/null +++ b/recipes/libgd/config.yml @@ -0,0 +1,3 @@ +versions: + "2.2.5": + folder: all \ No newline at end of file From 8946e249be384a52a1a1b79f3cc177e95d7c909b Mon Sep 17 00:00:00 2001 From: Arnulfo Arroyo Date: Tue, 21 Apr 2020 18:14:03 -0500 Subject: [PATCH 002/173] Add default value for fPIC --- recipes/libgd/all/conanfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/recipes/libgd/all/conanfile.py b/recipes/libgd/all/conanfile.py index ca8362329736f..30a36b660c65f 100644 --- a/recipes/libgd/all/conanfile.py +++ b/recipes/libgd/all/conanfile.py @@ -11,7 +11,7 @@ class LibgdConan(ConanFile): settings = "os", "compiler", "build_type", "arch" homepage = "https://libgd.github.io" options = {"shared": [True, False], "fPIC": [True, False]} - default_options = {"shared": False} + default_options = {"shared": False, "fPIC": True} generators = "cmake" requires = "zlib/1.2.11" From a428699a23621a2d9d9ee0fbf1dabb326fed1504 Mon Sep 17 00:00:00 2001 From: Rob Boehne Date: Tue, 13 Oct 2020 13:44:00 -0500 Subject: [PATCH 003/173] The test package doesn't work with BISON_PKGDATADIR unset, so don't try running without it. --- recipes/bison/all/test_package/conanfile.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/recipes/bison/all/test_package/conanfile.py b/recipes/bison/all/test_package/conanfile.py index 1f515de3bc32c..c1d831ca6b697 100644 --- a/recipes/bison/all/test_package/conanfile.py +++ b/recipes/bison/all/test_package/conanfile.py @@ -38,6 +38,3 @@ def test(self): with tools.environment_append({"M4": None}): self.run("bison -d {}".format(self._mc_parser_source), run_environment=True) - # verify bison works without BISON_PKGDATADIR and M4 environment variables - with tools.environment_append({"BISON_PKGDATADIR": None, "M4": None}): - self.run("bison -d {}".format(self._mc_parser_source), run_environment=True) From ec994b1a7abc3617c85e47a56e7da4752ccfced1 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Tue, 16 Nov 2021 12:54:45 -0600 Subject: [PATCH 004/173] .gitignore: Add standard ignore settings from gitignore.io - Remove duplicates from the upstream .gitignore --- .gitignore | 354 ++++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 335 insertions(+), 19 deletions(-) diff --git a/.gitignore b/.gitignore index 96733c91755c1..bb3329e6a4fcb 100644 --- a/.gitignore +++ b/.gitignore @@ -1,25 +1,226 @@ -# IDEs -.idea -.vscode -.project -.pydevproject -.settings/ -.ropeproject/ -.devcontainer/ -## emacs +# Created by https://www.toptal.com/developers/gitignore/api/vim,git,linux,macos,emacs,python,windows,pycharm,visualstudiocode +# Edit at https://www.toptal.com/developers/gitignore?templates=vim,git,linux,macos,emacs,python,windows,pycharm,visualstudiocode + +### Emacs ### +# -*- mode: gitignore; -*- *~ +\#*\# +/.emacs.desktop +/.emacs.desktop.lock +*.elc +auto-save-list +tramp +.\#* +# Org-mode +.org-id-locations +*_archive -# Byte-compiled / optimized / DLL files / Cache +# flymake-mode +*_flymake.* + +# eshell files +/eshell/history +/eshell/lastdir + +# elpa packages +/elpa/ + +# reftex files +*.rel + +# AUCTeX auto folder +/auto/ + +# cask packages +.cask/ +dist/ + +# Flycheck +flycheck_*.el + +# server auth directory +/server/ + +# projectiles files +.projectile + +# directory configuration +.dir-locals.el + +# network security +/network-security.data + + +### Git ### +# Created by git for backups. To disable backups in Git: +# $ git config --global mergetool.keepBackup false +*.orig + +# Created by git when using merge tools for conflicts +*.BACKUP.* +*.BASE.* +*.LOCAL.* +*.REMOTE.* +*_BACKUP_*.txt +*_BASE_*.txt +*_LOCAL_*.txt +*_REMOTE_*.txt + +### Linux ### + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. 
+# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +# https://plugins.jetbrains.com/plugin/7973-sonarlint +.idea/**/sonarlint/ + +# SonarQube Plugin +# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin +.idea/**/sonarIssues.xml + +# Markdown Navigator plugin +# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced +.idea/**/markdown-navigator.xml +.idea/**/markdown-navigator-enh.xml +.idea/**/markdown-navigator/ + +# Cache file creation bug +# See https://youtrack.jetbrains.com/issue/JBR-2257 +.idea/$CACHE_FILE$ + +# CodeStream plugin +# https://plugins.jetbrains.com/plugin/12206-codestream +.idea/codestream.xml + +### Python ### +# Byte-compiled / optimized / DLL files __pycache__/ -test_package/__pycache__/ -test_package/build/ -build/ -*.pyc *.py[cod] *$py.class -tmp/ -.DS_Store # C extensions *.so @@ -28,7 +229,6 @@ tmp/ .Python build/ develop-eggs/ -dist/ downloads/ eggs/ .eggs/ @@ -38,6 +238,7 @@ parts/ sdist/ var/ wheels/ +share/python-wheels/ *.egg-info/ .installed.cfg *.egg @@ -56,14 +257,17 @@ pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ +.nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover +*.py,cover .hypothesis/ .pytest_cache/ +cover/ # Translations *.mo @@ -73,6 +277,7 @@ coverage.xml *.log local_settings.py db.sqlite3 +db.sqlite3-journal # Flask stuff: instance/ @@ -85,16 +290,34 @@ instance/ docs/_build/ # PyBuilder +.pybuilder/ target/ # Jupyter Notebook .ipynb_checkpoints +# IPython +profile_default/ +ipython_config.py + # pyenv -.python-version +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version -# celery beat schedule file +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff celerybeat-schedule +celerybeat.pid # SageMath parsed files *.sage.py @@ -112,11 +335,104 @@ venv.bak/ .spyderproject .spyproject +# Rope project settings +.ropeproject + # mkdocs documentation /site # mypy .mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +### Vim ### +# Swap +[._]*.s[a-v][a-z] +!*.svg # comment out if you don't need vector files +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +# Support for Project snippet scope +!.vscode/*.code-snippets + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# End of https://www.toptal.com/developers/gitignore/api/vim,git,linux,macos,emacs,python,windows,pycharm,visualstudiocode + +# Byte-compiled / optimized / DLL files / Cache +test_package/__pycache__/ +test_package/build/ +*.pyc +tmp/ + +# pyenv +.python-version + +# Environments +python-env-* # scons build files *.dblite From a3d9c66a4ac34718ad91ad928386af2a0857de50 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 16 Nov 2021 12:56:22 -0600 Subject: [PATCH 005/173] Add Python virtual environment - mkenv.py - requirements.in --- .gitignore | 1 + mkenv.py | 127 ++++++++++++++++++++++++++++++++++++++++++++++++ requirements.in | 6 +++ 3 files changed, 134 insertions(+) create mode 100755 mkenv.py create mode 100644 requirements.in diff --git a/.gitignore b/.gitignore index bb3329e6a4fcb..921e8f057f3a0 100644 --- a/.gitignore +++ b/.gitignore @@ -433,6 +433,7 @@ tmp/ # Environments python-env-* +requirements.txt # scons build files *.dblite diff --git a/mkenv.py b/mkenv.py new file mode 100755 index 0000000000000..95f707340babe --- /dev/null +++ b/mkenv.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import string +import sys +import os + +import venv +import subprocess +import platform +import argparse + +if sys.version_info[:2] < (3, 6): + # Don't allow anything but Python 3.6 or higher + raise SystemError("Only Python 3.6+ is allowed") + +HERE = os.path.dirname(os.path.abspath(__file__)) +join = os.path.join +lower_node = platform.node().split('.')[0].lower().replace(' ', '-') +# See https://stackoverflow.com/a/10839538, but use ASCII letters +allowed = string.digits + string.ascii_letters + '-_' +HOME_DIR = join(HERE, 'python-env-%s' % ''.join(filter(allowed.__contains__, lower_node))) + + +def install_project_requirements(output_route): + """ + Install the project's required modules via pip-tools. 
+ """ + print('Checking required packages are installed...') + + activation_path = HOME_DIR + execut = '' + if windows(): + execut = '.exe' + activation_path = os.path.join(activation_path, 'Scripts') + else: + activation_path = os.path.join(activation_path, 'bin') + + try: + print('Update pip ... ') + # update pip so the other steps won't fail with a warning to update pip + # Also, install pip-tools for better dependency management + subprocess.check_call([os.path.join(activation_path, 'python' + execut), + '-m', 'pip', 'install', '--upgrade', 'pip', 'pip-tools', + 'wheel'], + stdout=output_route, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError: + print('ERROR: Could not install/upgrade pip, pip-tools, and wheel') + raise + except PermissionError: + print('ERROR: Could not install pip due to permission error', activation_path) + raise + + try: + pip_compile_cmd = os.path.join(activation_path, 'pip-compile' + execut) + pip_sync_cmd = os.path.join(activation_path, 'pip-sync' + execut) + print('Installing / Refreshing required packages... ') + artifactory_url = 'http://artifactory.dlogics.com:8081/artifactory' + index_url = artifactory_url + '/api/pypi/pypi/simple' + artifactory = 'artifactory.dlogics.com' + print('Dependency resolution...') + # Avoid PEP 517. This gets around a problem with system_site_packages, + # pip >= 19.0.0, and older setuptools. + # See: https://github.com/pypa/pip/issues/6264#issuecomment-470498695 + # in pip-tools, this is the --no-build-isolation option + subprocess.check_call([pip_compile_cmd, '--no-build-isolation', '--upgrade', '-i', + index_url, '--trusted-host', + artifactory], + stdout=output_route, stderr=subprocess.STDOUT) + print('Installing/upgrading packages...') + subprocess.check_call([pip_sync_cmd, '-i', + index_url, '--trusted-host', + artifactory], + stdout=output_route, stderr=subprocess.STDOUT) + + except subprocess.CalledProcessError: + print('ERROR: Could not install required packages using ', pip_compile_cmd, ' and ', pip_sync_cmd) + raise + except PermissionError: + print('ERROR: Could not run pip-tools due to permission error', activation_path) + raise + + print('Packages up to date...') + activate_cmd = (f' . .{HOME_DIR}/bin/activate\n' if not windows() else + f' {HOME_DIR}\\Scripts\\activate.bat\n') + print('\n Now activate the virtual environment with:\n ' + activate_cmd) + + +def main(): + parser = argparse.ArgumentParser(description='Virtual environment setup script') + parser.add_argument('-v', '--verbose', action='store_true', + help='Show package installation output') + parser.add_argument('--env-name', action='store_true', + help='Print the environment name and exit') + parser.add_argument('--env-path', action='store_true', + help='Print the path to the programs in the environment and exit') + opts = parser.parse_args() + + output_route = None if opts.verbose else subprocess.DEVNULL + + if opts.env_name: + print(HOME_DIR) + return + + if opts.env_path: + scripts_or_bin = 'Scripts' if windows() else 'bin' + print(os.path.join(HERE, HOME_DIR, scripts_or_bin)) + return + + print('Creating virtualenv ', HOME_DIR) + # venv.main() does this, and it makes it possible to create a virtual environment + # more than once on Windows. 
+ if os.name == 'nt': + use_symlinks = False + else: + use_symlinks = True + venv.create(HOME_DIR, system_site_packages=False, symlinks=use_symlinks, with_pip=True) + + install_project_requirements(output_route) + + +def windows(): + 'returns True on Windows platforms' + return platform.system() == 'Windows' + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/requirements.in b/requirements.in new file mode 100644 index 0000000000000..085a1977a34aa --- /dev/null +++ b/requirements.in @@ -0,0 +1,6 @@ +tox +dl-conan-build-tools~=3.4 +coverage +flake8 +pipdeptree +certifi From bd72f999d39713ffb43436226eb7f178d4e27d0d Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 2 Dec 2021 18:20:17 -0600 Subject: [PATCH 006/173] tox.ini: Add rules for flake8 --- tox.ini | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 tox.ini diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000000000..36f7a84158374 --- /dev/null +++ b/tox.ini @@ -0,0 +1,11 @@ +[flake8] +# Note: Excluding .git, build directories, directories with unfixed Python +# files, and directories with NO Python files, to reduce time flake8 spends +# scanning. +# +# Also, not enforcing flake8 on the recipes; they come from upstream with flake8 +# errors +# +exclude = .git,.tox,python-env-*,.idea,.conan,recipes +select = E,W,F +max-line-length = 120 From dfbb5b12cb8b1b7728fbd3c7b96d1e7e67415314 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 16 Nov 2021 15:31:30 -0600 Subject: [PATCH 007/173] upload-recipes task: upload recipes to an Artifactory remote - Can upload all recipes, or recipes by package name, or packages based on the changes since a Git commit. --- tasks/__init__.py | 63 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 tasks/__init__.py diff --git a/tasks/__init__.py b/tasks/__init__.py new file mode 100644 index 0000000000000..832a1d6bec3e2 --- /dev/null +++ b/tasks/__init__.py @@ -0,0 +1,63 @@ +import io +import os +import yaml +from dl_conan_build_tools.tasks import conan +from invoke import Collection +from invoke.tasks import Task, task + + +@task(help={'remote': 'remote to upload to, default conan-center-dl-staging', + 'package': 'name of package to upload, can be specified more than once', + 'all': 'upload all packages in recipes folder', + 'since-commit': 'upload all packages in recipes folder changed since COMMIT' + }, + iterable=['package']) +def upload_recipes(ctx, remote='conan-center-dl-staging', package=None, all=False, since_commit=None): + """Export and upload the named recipes to the given remote. 
+ + Exports and uploads all the versions of the selected recipes to the remote.""" + packages = set() + packages.update(package or []) + if all: + packages.update(os.listdir('recipes')) + if since_commit: + stm = io.StringIO() + ctx.run(f'git diff --name-only {since_commit} -- recipes', out_stream=stm, pty=False, dry=False) + lines = stm.getvalue().strip('\n').split('\n') + packages.update(path.split('/')[1] for path in lines if path) + sorted_packages = sorted(packages) + print('*** Uploading:') + for pkg in sorted_packages: + print(f' {pkg}') + for one_package in sorted_packages: + ctx.run(f'conan remove {one_package} --force') + recipe_folder = os.path.join('recipes', one_package) + config_yml_file = os.path.join(recipe_folder, 'config.yml') + if os.path.exists(config_yml_file): + with open(config_yml_file, 'r') as config_yml: + config_data = yaml.safe_load(config_yml) + for version, config in config_data['versions'].items(): + folder = os.path.join(recipe_folder, config['folder']) + ctx.run(f'conan export {folder} {one_package}/{version}@') + else: + with os.scandir(recipe_folder) as dirs: + for entry in dirs: + if not entry.name.startswith('.') and entry.is_dir(): + version = entry.name + folder = os.path.join(recipe_folder, version) + ctx.run(f'conan export {folder} {one_package}/{version}@') + ctx.run(f'conan upload -r {remote} {one_package} --confirm') + + +tasks = [] +tasks.extend([v for v in locals().values() if isinstance(v, Task)]) + +conan_tasks = Collection() +conan_tasks.add_task(conan.install_config) +conan_tasks.add_task(conan.login) +conan_tasks.add_task(conan.purge) + +ns = Collection(*tasks) +ns.add_collection(conan_tasks, 'conan') + +ns.configure({'run': {'echo': 'true'}}) From 886ef9cc88c6e148fb48539eb4d20f8377f8e501 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 17 Nov 2021 08:06:53 -0600 Subject: [PATCH 008/173] .editorconfig: Merge in standard DL values from ocrbox --- .editorconfig | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.editorconfig b/.editorconfig index a0c12e281a351..e9dcd7b627cc6 100644 --- a/.editorconfig +++ b/.editorconfig @@ -7,15 +7,18 @@ insert_final_newline = true indent_style = space tab_width = 4 trim_trailing_whitespace = true +indent_size = 4 [*.py] max_line_length = 200 [*.yml] tab_width = 2 +indent_size = 2 -[{Makefile,Makefile.am,Makefile.in}] +[{Makefile,Makefile.am,Makefile.in,*.mak}] indent_style = tab +indent_size = 8 -[*.{diff,patch}] +[*.{diff,patch,md}] trim_trailing_whitespace = false From a0afd4c8778d00ce85dd1cee72ee01f50a4e30c9 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Thu, 18 Nov 2021 15:29:04 -0600 Subject: [PATCH 009/173] upload-recipes task: Factor recipe upload --- tasks/__init__.py | 39 ++++++++++++++++++++++----------------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/tasks/__init__.py b/tasks/__init__.py index 832a1d6bec3e2..c253944323779 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -30,23 +30,28 @@ def upload_recipes(ctx, remote='conan-center-dl-staging', package=None, all=Fals for pkg in sorted_packages: print(f' {pkg}') for one_package in sorted_packages: - ctx.run(f'conan remove {one_package} --force') - recipe_folder = os.path.join('recipes', one_package) - config_yml_file = os.path.join(recipe_folder, 'config.yml') - if os.path.exists(config_yml_file): - with open(config_yml_file, 'r') as config_yml: - config_data = yaml.safe_load(config_yml) - for version, config in config_data['versions'].items(): - folder = os.path.join(recipe_folder, config['folder']) - ctx.run(f'conan export {folder} {one_package}/{version}@') - else: - with os.scandir(recipe_folder) as dirs: - for entry in dirs: - if not entry.name.startswith('.') and entry.is_dir(): - version = entry.name - folder = os.path.join(recipe_folder, version) - ctx.run(f'conan export {folder} {one_package}/{version}@') - ctx.run(f'conan upload -r {remote} {one_package} --confirm') + upload_one_package_name(ctx, one_package, remote) + + +def upload_one_package_name(ctx, package_name, remote): + """Upload one recipe to the given remote""" + ctx.run(f'conan remove {package_name} --force') + recipe_folder = os.path.join('recipes', package_name) + config_yml_file = os.path.join(recipe_folder, 'config.yml') + if os.path.exists(config_yml_file): + with open(config_yml_file, 'r') as config_yml: + config_data = yaml.safe_load(config_yml) + for version, config in config_data['versions'].items(): + folder = os.path.join(recipe_folder, config['folder']) + ctx.run(f'conan export {folder} {package_name}/{version}@') + else: + with os.scandir(recipe_folder) as dirs: + for entry in dirs: + if not entry.name.startswith('.') and entry.is_dir(): + version = entry.name + folder = os.path.join(recipe_folder, version) + ctx.run(f'conan export {folder} {package_name}/{version}@') + ctx.run(f'conan upload -r {remote} {package_name} --confirm') tasks = [] From 3050c3e8fdafd7722b729478d219a4547d6a94d3 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Thu, 18 Nov 2021 17:17:34 -0600 Subject: [PATCH 010/173] upload-recipes: Run in parallel via ThreadPool --- tasks/__init__.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/tasks/__init__.py b/tasks/__init__.py index c253944323779..19ce86c4b06aa 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -4,15 +4,19 @@ from dl_conan_build_tools.tasks import conan from invoke import Collection from invoke.tasks import Task, task +from multiprocessing.pool import ThreadPool @task(help={'remote': 'remote to upload to, default conan-center-dl-staging', 'package': 'name of package to upload, can be specified more than once', 'all': 'upload all packages in recipes folder', - 'since-commit': 'upload all packages in recipes folder changed since COMMIT' + 'since-commit': 'upload all packages in recipes folder changed since COMMIT', + 'parallel': 'run uploads in parallel (default)', + 'upload': 'upload the recipe (default) (otherwise, just does the exports)' }, iterable=['package']) -def upload_recipes(ctx, remote='conan-center-dl-staging', package=None, all=False, since_commit=None): +def upload_recipes(ctx, remote='conan-center-dl-staging', package=None, all=False, since_commit=None, parallel=True, + upload=True): """Export and upload the named recipes to the given remote. Exports and uploads all the versions of the selected recipes to the remote.""" @@ -29,11 +33,19 @@ def upload_recipes(ctx, remote='conan-center-dl-staging', package=None, all=Fals print('*** Uploading:') for pkg in sorted_packages: print(f' {pkg}') - for one_package in sorted_packages: - upload_one_package_name(ctx, one_package, remote) + def do_upload(one_package): + upload_one_package_name(ctx, one_package, remote, upload=upload) -def upload_one_package_name(ctx, package_name, remote): + if parallel: + with ThreadPool() as pool: + pool.map(do_upload, sorted_packages) + else: + for one_package in sorted_packages: + do_upload(one_package) + + +def upload_one_package_name(ctx, package_name, remote, upload=True): """Upload one recipe to the given remote""" ctx.run(f'conan remove {package_name} --force') recipe_folder = os.path.join('recipes', package_name) @@ -51,7 +63,8 @@ def upload_one_package_name(ctx, package_name, remote): version = entry.name folder = os.path.join(recipe_folder, version) ctx.run(f'conan export {folder} {package_name}/{version}@') - ctx.run(f'conan upload -r {remote} {package_name} --confirm') + if upload: + ctx.run(f'conan upload -r {remote} {package_name} --confirm') tasks = [] From ae53f1d37d2662f38f10d230cdd20c5a2f61bd5d Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 18 Nov 2021 18:52:34 -0600 Subject: [PATCH 011/173] upload-recipes: Use concurrent.futures for parallelism - Replace undocumented/unfinished multiprocessing.pool.ThreadPool. - concurrent.futures is the more modern way to do parallel processing. - Fail fast: On the first upload that fails, cancel all pending uploads, and report the problem via Exit. 
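
For reference, the shape of the fail-fast pattern, as a standalone sketch
(names like run_all/work/items are illustrative, and cancel_futures needs
Python 3.9 or newer; the real implementation is the tasks/__init__.py
change below):

    from concurrent import futures

    def run_all(work, items):
        with futures.ThreadPoolExecutor() as executor:
            future_to_item = {executor.submit(work, item): item for item in items}
            for future in futures.as_completed(future_to_item):
                exc = future.exception()
                if exc:
                    # First failure: stop scheduling, drop pending work, re-raise
                    executor.shutdown(wait=True, cancel_futures=True)
                    raise RuntimeError(f'failed on {future_to_item[future]}') from exc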
--- tasks/__init__.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/tasks/__init__.py b/tasks/__init__.py index 19ce86c4b06aa..ea6f6228859a7 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -1,10 +1,10 @@ import io import os import yaml +from concurrent import futures from dl_conan_build_tools.tasks import conan -from invoke import Collection +from invoke import Collection, Exit from invoke.tasks import Task, task -from multiprocessing.pool import ThreadPool @task(help={'remote': 'remote to upload to, default conan-center-dl-staging', @@ -38,13 +38,25 @@ def do_upload(one_package): upload_one_package_name(ctx, one_package, remote, upload=upload) if parallel: - with ThreadPool() as pool: - pool.map(do_upload, sorted_packages) + upload_in_parallel(do_upload, sorted_packages) else: for one_package in sorted_packages: do_upload(one_package) +def upload_in_parallel(do_upload, sorted_packages, thread_name_prefix=None): + """Upload recipes in parallel""" + with futures.ThreadPoolExecutor(thread_name_prefix='upload_recipes') as executor: + future_to_package = {executor.submit(do_upload, one_package): one_package for one_package in sorted_packages} + for future in futures.as_completed(future_to_package): + exception = future.exception() + if exception: + # Fail on first problem + executor.shutdown(wait=True, cancel_futures=True) + one_package = future_to_package[future] + raise Exit(f'error exporting/uploading {one_package}: {exception}') from exception + + def upload_one_package_name(ctx, package_name, remote, upload=True): """Upload one recipe to the given remote""" ctx.run(f'conan remove {package_name} --force') From 3231c3e37d0e37f089152a4ec073e22ff66d1e3c Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 30 Nov 2021 11:07:23 -0600 Subject: [PATCH 012/173] Jenkinsfile: Set up initial build structure. - Run common stuff on a 'noarch' platform. This is for non-platform-specific work like exporting and uploading recipes. - Then run per-machine stuff on many platforms. This is where testing of recipes and prebuilding of tools will happen. - Print out environment variables to find what's available. 
--- Jenkinsfile | 222 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 222 insertions(+) create mode 100644 Jenkinsfile diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000000000..870a5f7923b5b --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,222 @@ +def ENV_LOC = [:] +def ARCH = [ + 'aix-32-conan-center-index': 'ppc32', + 'aix-64-conan-center-index': 'ppc64', + 'linux-x86-conan-center-index': 'x86', + 'linux-x64-conan-center-index': 'x64', + 'linux-arm-conan-center-index': 'armv8', + 'mac-x64-conan-center-index': 'x64', + 'mac-arm-conan-center-index': 'armv8', + 'sparcsolaris-32-conan-center-index': 'sparc', + 'sparcsolaris-64-conan-center-index': 'sparcv9', + 'windows-x86-conan-center-index': 'x86', + 'windows-x64-conan-center-index': 'x64'] +pipeline { + parameters { + choice(name: 'PLATFORM_FILTER', + choices: ['all', + 'aix-32-conan-center-index', + 'aix-64-conan-center-index', + 'linux-x86-conan-center-index', + 'linux-x64-conan-center-index', + 'linux-arm-conan-center-index', + 'mac-x64-conan-center-index', + 'mac-arm-conan-center-index', + 'sparcsolaris-32-conan-center-index', + 'sparcsolaris-64-conan-center-index', + 'windows-x86-conan-center-index', + 'windows-x64-conan-center-index'], + description: 'Run on specific platform') + booleanParam defaultValue: false, description: 'Completely clean the workspace before building, including the Conan cache', name: 'CLEAN_WORKSPACE' + } + options{ + buildDiscarder logRotator(artifactDaysToKeepStr: '4', artifactNumToKeepStr: '10', daysToKeepStr: '7', numToKeepStr: '10') + disableConcurrentBuilds() + } + agent { + node { + label 'noarch-conan-center-index' + customWorkspace "workspace/${JOB_NAME}_noarch/" + } + } + environment { + CONAN_USER_HOME = "${WORKSPACE}" + CONAN_NON_INTERACTIVE = '1' + CONAN_PRINT_RUN_COMMANDS = '1' + // AIX workaround. Avoids an issue caused by the jenkins java process which sets + // LIBPATH and causes errors downstream + LIBPATH = "randomval" + } + stages { + stage('Clean/reset Git checkout for release') { + when { + anyOf { + expression { params.CLEAN_WORKSPACE == 'true' } + } + } + steps { + echo "Clean noarch" + script { + // Ensure that the checkout is clean and any changes + // to .gitattributes and .gitignore have been taken + // into effect + if (isUnix()) { + sh """ + git rm -q -r . + git reset --hard HEAD + git clean -fdx + """ + } else { + // On Windows, 'git clean' can't handle long paths in .conan, + // so remove that first. + bat """ + if exist ${WORKSPACE}\\.conan\\ rmdir/s/q ${WORKSPACE}\\.conan + git rm -q -r . + git reset --hard HEAD + git clean -fdx + """ + } + } + } + } + stage('Set-Up Environment') { + steps { + printPlatformNameInStep('noarch') + echo "Set-Up Environment noarch" + script { + if (isUnix()) { + sh './mkenv.py --verbose' + ENV_LOC['noarch'] = sh ( + script: './mkenv.py --env-name', + returnStdout: true + ).trim() + } else { + // Using the mkenv.py script like this assumes the Python Launcher is + // installed on the Windows host. + // https://docs.python.org/3/using/windows.html#launcher + bat '.\\mkenv.py --verbose' + ENV_LOC['noarch'] = bat ( + // The @ prevents Windows from echoing the command itself into the stdout, + // which would corrupt the value of the returned data. 
+ script: '@.\\mkenv.py --env-name', + returnStdout: true + ).trim() + } + } + } + } + stage('Common recipe upload') { + steps { + echo 'Would upload recipes here' + } + } + stage('Per-platform') { + matrix { + agent { + node { + label "${NODE}" + customWorkspace "workspace/${JOB_NAME}_${ARCH[NODE]}/" + } + } + when { anyOf { + expression { params.PLATFORM_FILTER == 'all' } + expression { params.PLATFORM_FILTER == env.NODE } + } } + axes { + axis { + name 'NODE' + values 'aix-32-conan-center-index', + 'aix-64-conan-center-index', + 'linux-x86-conan-center-index', + 'linux-x64-conan-center-index', + 'linux-arm-conan-center-index', + 'mac-x64-conan-center-index', + 'mac-arm-conan-center-index', + 'sparcsolaris-32-conan-center-index', + 'sparcsolaris-64-conan-center-index', + 'windows-x86-conan-center-index', + 'windows-x64-conan-center-index' + } + } + stages { + stage('Clean/reset Git checkout for release') { + when { + anyOf { + expression { params.CLEAN_WORKSPACE == 'true' } + } + } + steps { + echo "Clean ${NODE}" + script { + // Ensure that the checkout is clean and any changes + // to .gitattributes and .gitignore have been taken + // into effect + if (isUnix()) { + sh """ + git rm -q -r . + git reset --hard HEAD + git clean -fdx + """ + } else { + // On Windows, 'git clean' can't handle long paths in .conan, + // so remove that first. + bat """ + if exist ${WORKSPACE}\\.conan\\ rmdir/s/q ${WORKSPACE}\\.conan + git rm -q -r . + git reset --hard HEAD + git clean -fdx + """ + } + } + } + } + stage('Set-Up Environment') { + steps { + printPlatformNameInStep(NODE) + echo "Set-Up Environment ${NODE}" + script { + if (isUnix()) { + sh './mkenv.py --verbose' + ENV_LOC[NODE] = sh ( + script: './mkenv.py --env-name', + returnStdout: true + ).trim() + } else { + // Using the mkenv.py script like this assumes the Python Launcher is + // installed on the Windows host. + // https://docs.python.org/3/using/windows.html#launcher + bat '.\\mkenv.py --verbose' + ENV_LOC[NODE] = bat ( + // The @ prevents Windows from echoing the command itself into the stdout, + // which would corrupt the value of the returned data. + script: '@.\\mkenv.py --env-name', + returnStdout: true + ).trim() + } + } + } + } + stage('Print environment') { + steps { + script { + if (isUnix()) { + sh "env" + } else { + bat "set" + } + } + } + } + } + } + } + } +} + +void printPlatformNameInStep(String node) { + script { + stage("Building on ${node}") { + echo "Building on node: ${node}" + } + } +} From bdafdba2b83b8112c0338fac684e0200049219e3 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 30 Nov 2021 13:40:27 -0600 Subject: [PATCH 013/173] upload-recipes: Add --since-before-last-merge This option selects recipes that have been changed since just before the most recent merge as seen from the current HEAD. Jobs that run due to a branch may find this useful, as it does an incremental update of the new or changed recipes. 
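
In plain git terms the selection comes down to two commands; a rough
standalone sketch (the function name is made up, and the invoke task below
runs the same commands through ctx.run rather than subprocess):

    import subprocess

    def recipes_changed_since_before_last_merge():
        # Most recent merge reachable from HEAD: the first commit with two or more parents
        merge = subprocess.check_output(
            ['git', 'rev-list', '--min-parents=2', '--max-count=1', 'HEAD'],
            text=True).strip()
        # Paths under recipes/ changed since the first parent of that merge
        paths = subprocess.check_output(
            ['git', 'diff', '--name-only', f'{merge}~1', '--', 'recipes'],
            text=True).splitlines()
        # recipes/<package>/<folder>/... -> package name
        return sorted({p.split('/')[1] for p in paths if p})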
--- tasks/__init__.py | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/tasks/__init__.py b/tasks/__init__.py index ea6f6228859a7..b127f74108946 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -11,24 +11,39 @@ 'package': 'name of package to upload, can be specified more than once', 'all': 'upload all packages in recipes folder', 'since-commit': 'upload all packages in recipes folder changed since COMMIT', + 'since-before-last-merge': 'upload all packages in recipes folder changed since just before the most ' + 'recent merge (this is useful for automated tools)', 'parallel': 'run uploads in parallel (default)', 'upload': 'upload the recipe (default) (otherwise, just does the exports)' }, iterable=['package']) -def upload_recipes(ctx, remote='conan-center-dl-staging', package=None, all=False, since_commit=None, parallel=True, - upload=True): +def upload_recipes(ctx, remote='conan-center-dl-staging', package=None, all=False, since_commit=None, + since_before_last_merge=False, parallel=True, upload=True): """Export and upload the named recipes to the given remote. Exports and uploads all the versions of the selected recipes to the remote.""" packages = set() + + def update_since_commit(since_commit): + stm = io.StringIO() + ctx.run(f'git diff --name-only {since_commit} -- recipes', out_stream=stm, pty=False, dry=False) + lines = stm.getvalue().strip('\n').split('\n') + packages.update(path.split('/')[1] for path in lines if path) + packages.update(package or []) if all: packages.update(os.listdir('recipes')) if since_commit: + update_since_commit(since_commit) + if since_before_last_merge: stm = io.StringIO() - ctx.run(f'git diff --name-only {since_commit} -- recipes', out_stream=stm, pty=False, dry=False) - lines = stm.getvalue().strip('\n').split('\n') - packages.update(path.split('/')[1] for path in lines if path) + # Find most recent merge commit from current HEAD, basically the first rev that has more than one parent + # https://stackoverflow.com/a/41464631/11996393 + ctx.run('git rev-list --min-parents=2 --max-count=1 HEAD', out_stream=stm, pty=False, dry=False) + commit = stm.getvalue().strip('\n') + # {commit}~1 is the first parent of {commit}; see https://git-scm.com/docs/git-rev-parse#_specifying_revisions + update_since_commit(f'{commit}~1') + sorted_packages = sorted(packages) print('*** Uploading:') for pkg in sorted_packages: From 5e602e9c4c385fe5d2ff6e6a0de64a8dfe1b8efc Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 30 Nov 2021 14:34:03 -0600 Subject: [PATCH 014/173] Jenkinsfile: Upload recipes on branch builds Upload recipes changed since just before the last merge. --- Jenkinsfile | 34 ++++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 870a5f7923b5b..53d349f2139ef 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -28,6 +28,8 @@ pipeline { 'windows-x64-conan-center-index'], description: 'Run on specific platform') booleanParam defaultValue: false, description: 'Completely clean the workspace before building, including the Conan cache', name: 'CLEAN_WORKSPACE' + booleanParam name: 'UPLOAD_ALL_RECIPES', defaultValue: false, + description: 'Upload all recipes, instead of only recipes that changed since the last merge' } options{ buildDiscarder logRotator(artifactDaysToKeepStr: '4', artifactNumToKeepStr: '10', daysToKeepStr: '7', numToKeepStr: '10') @@ -46,6 +48,7 @@ pipeline { // AIX workaround. 
Avoids an issue caused by the jenkins java process which sets // LIBPATH and causes errors downstream LIBPATH = "randomval" + DL_CONAN_CENTER_INDEX = 'all' } stages { stage('Clean/reset Git checkout for release') { @@ -105,9 +108,36 @@ pipeline { } } } - stage('Common recipe upload') { + stage('Set up Conan') { steps { - echo 'Would upload recipes here' + sh """. ${ENV_LOC['noarch']}/bin/activate + invoke conan.login""" + } + } + stage('Upload new or changed recipes') { + when { + not { + changeRequest() + } + } + steps { + script { + if (env.BRANCH_NAME =~ 'master*') { + remote = 'conan-center-dl' + } else { + remote = 'conan-center-dl-staging' + } + if (params.UPLOAD_ALL_RECIPES) { + range = '--all' + } else { + // assuming this is due to a merge, upload recipes + // modified since just before the last merge. This is an + // incremental update to recipes, and will be much faster + // than uploading all 1100+ recipes. + range = "--since-before-last-merge" + } + sh ". ${ENV_LOC['noarch']}/bin/activate; invoke upload-recipes --remote ${remote} ${range}" + } } } stage('Per-platform') { From 428fac42db88592e6ac4d05989c3b56c5b8ae856 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 30 Nov 2021 15:43:11 -0600 Subject: [PATCH 015/173] dlproject.yaml: Template from dl-conan-build-tools --- dlproject.yaml | 173 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 173 insertions(+) create mode 100644 dlproject.yaml diff --git a/dlproject.yaml b/dlproject.yaml new file mode 100644 index 0000000000000..bcd72f04b1880 --- /dev/null +++ b/dlproject.yaml @@ -0,0 +1,173 @@ +# Config file for the project. Used by the Invoke tasks, primarily. +config: + # Basic configuration variables + global: + # Base configurations, may be overridden by platform + + # Conan configuration. `conan config install` installs configuration file from this URL. + # See: https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install + # This is usually a pointer to a Git repo, from which it clones the repo. + # The config_args are passed to the 'git clone' command and can select + # a branch or tag + config_url: git@octocat.dlogics.com:datalogics/conan-config.git + config_args: --branch artifactory-virtual-repos + + # Conan profile that's used to configure the compiler + # default is the profile Conan makes in the user's home directory + profile: + - default + + # It's possible to use a list of profiles. + # For instance, this can be used to add a profile that has build + # tools in the [build_requires] section, making those tools available + # to the locally-built project via the conan.install task. + # + # Of course, the profile can be placed in a platform dictionary as well. + # profile: + # - default + # - build_tools + + # Multiple build configurations in the IDE + multi: false + + # Regex that indicates a release branch + stable_branch_pattern: release-.* + + # Which dependencies are built. Takes a number of keywords, also the names + # of packages, which can also contain a wildcard. + # See https://docs.conan.io/en/latest/reference/commands/consumer/install.html#build-options + # for the full list of options. 
+ build: + - missing + + # CMake project generator + # Set this based on the build system the project uses + # (on Mac, this may be overridden to Xcode if bootstrap-xcode is run) + # Note: if unset, the Conan CMake build helper will choose the generator + # automatically, including using the correct Visual Studio generator on + # Windows + # cmake_generator: Unix Makefiles + + # options for the ConanMultiPackager + # These state which compilers and architectures are built when doing conan.package + packager: + # Build tools. This profile is installed to a temporary directory to + # provide tools for building packages via the + # conan.install-all-configurations or conan.package tasks. + # + # Optional. Add this to your project if you want to automatically + # include build tools. May also be customized per platform. + # build_tools: build_tools + + archs: + - x86_64 + apple_clang_versions: + # quirk: must be string + - "9.1" + visual_versions: + # Visual Studio + # quirk: must be integer + - 12 + # Specify the toolsets for which this package should be built. + # There is a key for each visual_version, which is a list of + # toolset designators. + # Quirk: The compiler version keys must be strings. + # visual_toolsets: + # "16": + # - v141 + visual_runtimes: + # Permitted runtime versions for Visual Studio. + - MT + - MD + - MTd + - MDd + # Set this to False to make the package set up the vcvars before the build. + # But beware, this can mess up some recipes, especially if toolsets are around. + exclude_vcvars_precommand: True + clang_versions: + # quirk: must be string + - "5.0" + # gcc_versions: + # # quirk: must be string + # - "7" + # cppstds: + # # quirk: must be string + # - "11" + # libcxx: + # # Permitted libcxx settings + # - stdlibc++ + shared: + # Permitted states for the shared option. + - True + - False + + # Build types. If missing, defaults to Debug and Release. + # build_types: + # - Debug + # - Release + + # You can also add additional options with values; all of these + # will be passed through to each package build. + # options: + # some_option: "some_value" + + # You can also add additional settings with values; all of these + # will be passed through to each package build. + # settings: + # some_setting: "some_value" + + # Allow making DLLs with static runtimes. + # Normally, that's not done, but set this to True to allow + # those special cases into the build matrix. + dll_with_static_runtime: False + + # Set to True if this is a pure C project + pure_c: False + + # Set to True if this project supports a header_only option + header_only: False + + # Configs based on platform information + # + # Keys can be (and are merged in this order) + # + # system + # system-machine + # system-version + # system-version-machine + # + # Where: + # system is macos, windows, or the name of the linux distribution (redhat includes centos) + # version is the major version number (major.minor on macos) + # machine is the processor architecture, i.e. x86_64 + # + # items from global and modify them here. + # When merging: + # Dictionary keys override the base + # Lists are appended + + macos: + # Per-platform profiles can be set here. Profiles can be a string or a + # list of strings as seen below. By using a list, the build_tools + # profile can be added, and adds the build tools for development builds. 
+ # + # profile: + # - apple-clang-10.0-macos-10.9 + # - build_tools + + redhat: + + windows: + # Can set the generator differently on a per-machine basis (but automatic is + # better) + # cmake_generator: Visual Studio 12 2013 Win64 + multi: true + + # Can override packager build_tools on a per-platform basis: + # packager: + # build_tools: build_tools_windows + + byhost: + # Configs based on hostname. + # These are applied last. Consider this for only the most extreme cases + kamcentos6: From fa5cb6471cc82c19c034d8c06f3671ee503aa7fa Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 30 Nov 2021 15:44:09 -0600 Subject: [PATCH 016/173] dlproject.yaml: Use the curated-conan-center-index config --- dlproject.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dlproject.yaml b/dlproject.yaml index bcd72f04b1880..1740abbdcaac2 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -10,7 +10,7 @@ config: # The config_args are passed to the 'git clone' command and can select # a branch or tag config_url: git@octocat.dlogics.com:datalogics/conan-config.git - config_args: --branch artifactory-virtual-repos + config_args: --branch curated-conan-center-index # Conan profile that's used to configure the compiler # default is the profile Conan makes in the user's home directory From e3617424e5215a99ffb3fad8d937831b54a01b5a Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 30 Nov 2021 17:30:46 -0600 Subject: [PATCH 017/173] Change the pull request template for DL --- .gitattributes | 2 +- .github/PULL_REQUEST_TEMPLATE.md | 19 +++++++++++-------- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/.gitattributes b/.gitattributes index f0e8d5041ca21..2dfa5430db4d3 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,7 +1,7 @@ *.cmake text eol=lf *.conf text eol=lf *.diff text eol=lf -*.md text eol=lf +*.md text eol=lf -whitespace *.patch text eol=lf *.py text eol=lf *.txt text eol=lf diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index edb988e0d2576..2ef2e17494a6c 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,11 +1,14 @@ -Specify library name and version: **lib/1.0** +- _List changes here_ +- -This is also a good place to share with all of us **why you are submitting this PR** (specially if it is a new addition to ConanCenter): is it a dependency of other libraries you want to package? Are you the author of the library? Thanks! +#### Fulfills JIRA issue [EXAMPLE-1](https://jira.datalogics.com/browse/EXAMPLE-1) ---- +#### Checklist for approving this pull request + +(**PR Author:** amend this with more conditions if necessary) +(**PR Reviewer:** ensure all following items are fulfilled before merging) + +- [ ] The **Pull Request Title** has JIRA issue number, a space, and then a short but descriptive summary. +- [ ] **Commit messages** are well formed: [A note about Git commit messages](http://www.tpope.net/node/106) +- [ ] **Automated tests pass**. -- [ ] I've read the [guidelines](https://github.com/conan-io/conan-center-index/blob/master/docs/how_to_add_packages.md) for contributing. -- [ ] I've followed the [PEP8](https://www.python.org/dev/peps/pep-0008/) style guides for Python code in the recipes. -- [ ] I've used the [latest](https://github.com/conan-io/conan/releases/latest) Conan client version. -- [ ] I've tried at least one configuration locally with the - [conan-center hook](https://github.com/conan-io/hooks.git) activated. 
From dec77cdaec77f372e3d4b8af9ae90cb5695de891 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 1 Dec 2021 09:47:33 -0600 Subject: [PATCH 018/173] Jenkinsfile: Turn off the per-platform matrix for now - Already verified that this kind of structure works, that there can be a common part and a per-platform part. - Save time for now by not queuing on all the machines. - Will turn back on when we put something into it. --- Jenkinsfile | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/Jenkinsfile b/Jenkinsfile index 53d349f2139ef..9ac98eed23bb7 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -141,6 +141,11 @@ pipeline { } } stage('Per-platform') { + when { + // Turn off for now; testing verified that the structure works (common part then matrix part), + // and the per-platform tasks aren't there yet. + expression { false } + } matrix { agent { node { From 238ee82790b856547ab1ca250b529f4446e02e46 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 3 Dec 2021 11:14:42 -0600 Subject: [PATCH 019/173] Jenkinsfile: Report unsuccessful and fixed jobs to #conan --- Jenkinsfile | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/Jenkinsfile b/Jenkinsfile index 9ac98eed23bb7..c5aba2f0e7b83 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -246,6 +246,26 @@ pipeline { } } } + post { + unsuccessful { + script { + if (env.CHANGE_ID == null) { // i.e. not a pull request; those notify in GitHub + slackSend(channel: "#conan", + message: "Unsuccessful build: ${env.JOB_NAME} ${env.BUILD_NUMBER} (<${env.BUILD_URL}|Open>)", + color: "danger") + } + } + } + fixed { + script { + if (env.CHANGE_ID == null) { // i.e. not a pull request; those notify in GitHub + slackSend(channel: "#conan", + message: "Build is now working: ${env.JOB_NAME} ${env.BUILD_NUMBER} (<${env.BUILD_URL}|Open>)", + color: "good") + } + } + } + } } void printPlatformNameInStep(String node) { From 22ed3cb7faead88e6fed870661e35850749adfe9 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 3 Dec 2021 16:42:56 -0600 Subject: [PATCH 020/173] Jenkinsfile: Add flake8 testing --- Jenkinsfile | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/Jenkinsfile b/Jenkinsfile index c5aba2f0e7b83..32eb810391dbe 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -114,6 +114,24 @@ pipeline { invoke conan.login""" } } + stage('flake8') { + steps { + catchError(message: 'flake8 had errors', stageResult: 'FAILURE') { + script { + sh """. ${ENV_LOC['noarch']}/bin/activate + rm -f flake8.log + flake8 --format=pylint --output=flake8.log --tee""" + } + } + } + post { + always { + recordIssues(enabledForFailure: true, + tool: flake8(pattern: 'flake8.log'), + qualityGates: [[threshold: 1, type: 'TOTAL', unstable: false]]) + } + } + } stage('Upload new or changed recipes') { when { not { From 249fe57ee60e144adaeb1f32c2b4aacc96430316 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Fri, 3 Dec 2021 16:43:48 -0600 Subject: [PATCH 021/173] flake8: Add enforcement of no printf-style formatting --- mkenv.py | 3 ++- requirements.in | 1 + tox.ini | 1 - 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/mkenv.py b/mkenv.py index 95f707340babe..043845ea3f320 100755 --- a/mkenv.py +++ b/mkenv.py @@ -18,7 +18,8 @@ lower_node = platform.node().split('.')[0].lower().replace(' ', '-') # See https://stackoverflow.com/a/10839538, but use ASCII letters allowed = string.digits + string.ascii_letters + '-_' -HOME_DIR = join(HERE, 'python-env-%s' % ''.join(filter(allowed.__contains__, lower_node))) +filtered_node = ''.join(filter(allowed.__contains__, lower_node)) +HOME_DIR = join(HERE, f'python-env-{filtered_node}') def install_project_requirements(output_route): diff --git a/requirements.in b/requirements.in index 085a1977a34aa..8fb2b21d3c8ea 100644 --- a/requirements.in +++ b/requirements.in @@ -2,5 +2,6 @@ tox dl-conan-build-tools~=3.4 coverage flake8 +flake8-printf-formatting pipdeptree certifi diff --git a/tox.ini b/tox.ini index 36f7a84158374..bd0046adf2805 100644 --- a/tox.ini +++ b/tox.ini @@ -7,5 +7,4 @@ # errors # exclude = .git,.tox,python-env-*,.idea,.conan,recipes -select = E,W,F max-line-length = 120 From 3245ad62bcd4a1fe0e5ec1e3b4f601456517f227 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 4 Mar 2022 16:42:59 -0600 Subject: [PATCH 022/173] flake8: Ignore Python files in .github These help run the CI for conan-io, and we don't care how they're formatted. --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index bd0046adf2805..647726feef4e7 100644 --- a/tox.ini +++ b/tox.ini @@ -6,5 +6,5 @@ # Also, not enforcing flake8 on the recipes; they come from upstream with flake8 # errors # -exclude = .git,.tox,python-env-*,.idea,.conan,recipes +exclude = .git,.tox,python-env-*,.idea,.conan,recipes,.github max-line-length = 120 From 2a2d38baa086f0f7ac449b75bbf1f8d799af9790 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 9 Mar 2022 10:08:31 -0600 Subject: [PATCH 023/173] swig: Make a DL-specific copy of the SWIG recipe - Use the feature of this Conan Center Index setup that allows separate recipes for different version numbers, so that we have a DL recipe for 1.3.40+dl.1 that won't conflict with the 4.x recipes from Conan Center. 
--- .../cmake/conan-official-swig-targets.cmake | 7 + recipes/swig/dl/conandata.yml | 18 ++ recipes/swig/dl/conanfile.py | 156 ++++++++++++++++++ .../0001-swig-linux-library-path.patch | 50 ++++++ ....0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch | 11 ++ ....0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch | 11 ++ recipes/swig/dl/test_package/CMakeLists.txt | 51 ++++++ recipes/swig/dl/test_package/conanfile.py | 27 +++ recipes/swig/dl/test_package/test.i | 6 + recipes/swig/dl/test_package/test_package.c | 31 ++++ 10 files changed, 368 insertions(+) create mode 100644 recipes/swig/dl/cmake/conan-official-swig-targets.cmake create mode 100644 recipes/swig/dl/conandata.yml create mode 100644 recipes/swig/dl/conanfile.py create mode 100644 recipes/swig/dl/patches/0001-swig-linux-library-path.patch create mode 100644 recipes/swig/dl/patches/0002-4.0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch create mode 100644 recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch create mode 100644 recipes/swig/dl/test_package/CMakeLists.txt create mode 100644 recipes/swig/dl/test_package/conanfile.py create mode 100644 recipes/swig/dl/test_package/test.i create mode 100644 recipes/swig/dl/test_package/test_package.c diff --git a/recipes/swig/dl/cmake/conan-official-swig-targets.cmake b/recipes/swig/dl/cmake/conan-official-swig-targets.cmake new file mode 100644 index 0000000000000..8740b23580975 --- /dev/null +++ b/recipes/swig/dl/cmake/conan-official-swig-targets.cmake @@ -0,0 +1,7 @@ +find_program(SWIG_EXECUTABLE swig) +if(NOT SWIG_DIR) + execute_process(COMMAND ${SWIG_EXECUTABLE} -swiglib + OUTPUT_VARIABLE SWIG_lib_output OUTPUT_STRIP_TRAILING_WHITESPACE) + set(SWIG_DIR ${SWIG_lib_output} CACHE STRING "Location of SWIG library" FORCE) +endif() +mark_as_advanced(SWIG_DIR SWIG_EXECUTABLE) diff --git a/recipes/swig/dl/conandata.yml b/recipes/swig/dl/conandata.yml new file mode 100644 index 0000000000000..639cc9e72797d --- /dev/null +++ b/recipes/swig/dl/conandata.yml @@ -0,0 +1,18 @@ +sources: + "4.0.2": + url: "https://github.com/swig/swig/archive/rel-4.0.2.tar.gz" + sha256: "81d7ce78371f378a3299ddc5aea1da9a6178f325dcabb695d1b742f9e24a0fa6" + "4.0.1": + url: "https://github.com/swig/swig/archive/rel-4.0.1.tar.gz" + sha256: "2eaf6fb89d071d1be280bf995c63360b3729860c0da64948123b5d7e4cfb6cb7" +patches: + "4.0.2": + - base_path: "source_subfolder" + patch_file: "patches/0001-swig-linux-library-path.patch" + - base_path: "source_subfolder" + patch_file: "patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch" + "4.0.1": + - base_path: "source_subfolder" + patch_file: "patches/0001-swig-linux-library-path.patch" + - base_path: "source_subfolder" + patch_file: "patches/0002-4.0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch" diff --git a/recipes/swig/dl/conanfile.py b/recipes/swig/dl/conanfile.py new file mode 100644 index 0000000000000..50a789da329dd --- /dev/null +++ b/recipes/swig/dl/conanfile.py @@ -0,0 +1,156 @@ +from conans import ConanFile, tools, AutoToolsBuildEnvironment +import contextlib +import functools +import os + +required_conan_version = ">=1.33.0" + + +class SwigConan(ConanFile): + name = "swig" + description = "SWIG is a software development tool that connects programs written in C and C++ with a variety of high-level programming languages." 
+ url = "https://github.com/conan-io/conan-center-index" + homepage = "http://www.swig.org" + license = "GPL-3.0-or-later" + topics = ("swig", "python", "java", "wrapper") + exports_sources = "patches/**", "cmake/*" + settings = "os", "arch", "compiler", "build_type" + + @property + def _source_subfolder(self): + return "source_subfolder" + + @property + def _settings_build(self): + return getattr(self, "settings_build", self.settings) + + def requirements(self): + self.requires("pcre/8.45") + + def build_requirements(self): + if self._settings_build.os == "Windows" and not tools.get_env("CONAN_BASH_PATH"): + self.build_requires("msys2/cci.latest") + if self.settings.compiler == "Visual Studio": + self.build_requires("winflexbison/2.5.24") + else: + self.build_requires("bison/3.7.6") + self.build_requires("automake/1.16.4") + + def package_id(self): + del self.info.settings.compiler + + def source(self): + tools.get(**self.conan_data["sources"][self.version], + destination=self._source_subfolder, strip_root=True) + + @property + def _user_info_build(self): + # If using the experimental feature with different context for host and + # build, the 'user_info' attributes of the 'build_requires' packages + # will be located into the 'user_info_build' object. In other cases they + # will be located into the 'deps_user_info' object. + return getattr(self, "user_info_build", self.deps_user_info) + + @contextlib.contextmanager + def _build_context(self): + env = {} + if self.settings.compiler != "Visual Studio": + env["YACC"] = self._user_info_build["bison"].YACC + if self.settings.compiler == "Visual Studio": + with tools.vcvars(self): + env.update({ + "CC": "{} cl -nologo".format(tools.unix_path(self._user_info_build["automake"].compile)), + "CXX": "{} cl -nologo".format(tools.unix_path(self._user_info_build["automake"].compile)), + "AR": "{} link".format(self._user_info_build["automake"].ar_lib), + "LD": "link", + }) + with tools.environment_append(env): + yield + else: + with tools.environment_append(env): + yield + + @functools.lru_cache(1) + def _configure_autotools(self): + autotools = AutoToolsBuildEnvironment(self, win_bash=tools.os_info.is_windows) + deps_libpaths = autotools.library_paths + deps_libs = autotools.libs + deps_defines = autotools.defines + if self.settings.os == "Windows" and self.settings.compiler != "Visual Studio": + autotools.link_flags.append("-static") + + libargs = list("-L\"{}\"".format(p) for p in deps_libpaths) + list("-l\"{}\"".format(l) for l in deps_libs) + args = [ + "PCRE_LIBS={}".format(" ".join(libargs)), + "PCRE_CPPFLAGS={}".format(" ".join("-D{}".format(define) for define in deps_defines)), + "--host={}".format(self.settings.arch), + "--with-swiglibdir={}".format(self._swiglibdir), + ] + if self.settings.compiler == 'gcc': + args.append("LIBS=-ldl") + + host, build = None, None + + if self.settings.compiler == "Visual Studio": + self.output.warn("Visual Studio compiler cannot create ccache-swig. 
Disabling ccache-swig.") + args.append("--disable-ccache") + autotools.flags.append("-FS") + # MSVC canonical names aren't understood + host, build = False, False + + if self.settings.os == "Macos" and self.settings.arch == "armv8": + # FIXME: Apple ARM should be handled by build helpers + autotools.flags.append("-arch arm64") + autotools.link_flags.append("-arch arm64") + + autotools.libs = [] + autotools.library_paths = [] + + if self.settings.os == "Windows" and self.settings.compiler != "Visual Studio": + autotools.libs.extend(["mingwex", "ssp"]) + + autotools.configure(args=args, configure_dir=self._source_subfolder, + host=host, build=build) + return autotools + + def _patch_sources(self): + for patch in self.conan_data.get("patches", {}).get(self.version, []): + tools.patch(**patch) + + def build(self): + self._patch_sources() + with tools.chdir(os.path.join(self._source_subfolder)): + self.run("./autogen.sh", win_bash=tools.os_info.is_windows) + with self._build_context(): + autotools = self._configure_autotools() + autotools.make() + + def package(self): + self.copy(pattern="LICENSE*", dst="licenses", src=self._source_subfolder) + self.copy(pattern="COPYRIGHT", dst="licenses", src=self._source_subfolder) + self.copy("*", src="cmake", dst=self._module_subfolder) + with self._build_context(): + autotools = self._configure_autotools() + autotools.install() + + @property + def _swiglibdir(self): + return os.path.join(self.package_folder, "bin", "swiglib").replace("\\", "/") + + @property + def _module_subfolder(self): + return os.path.join("lib", "cmake") + + @property + def _module_file(self): + return "conan-official-{}-targets.cmake".format(self.name) + + def package_info(self): + self.cpp_info.names["cmake_find_package"] = "SWIG" + self.cpp_info.names["cmake_find_package_multi"] = "SWIG" + self.cpp_info.builddirs = [self._module_subfolder] + self.cpp_info.build_modules = [os.path.join(self._module_subfolder, self._module_file)] + + bindir = os.path.join(self.package_folder, "bin") + self.output.info("Appending PATH environment variable: {}".format(bindir)) + self.env_info.PATH.append(bindir) diff --git a/recipes/swig/dl/patches/0001-swig-linux-library-path.patch b/recipes/swig/dl/patches/0001-swig-linux-library-path.patch new file mode 100644 index 0000000000000..2c15694cb202e --- /dev/null +++ b/recipes/swig/dl/patches/0001-swig-linux-library-path.patch @@ -0,0 +1,50 @@ +--- Source/Modules/main.cxx ++++ Source/Modules/main.cxx +@@ -879,6 +881,30 @@ static void getoptions(int argc, char *argv[]) { + } + } + ++#if defined(HAVE_UNISTD_H) && !defined(_WIN32) ++#include ++#include ++#include ++ ++static String *get_exe_path(void) { ++ Dl_info info; ++ if (dladdr("main", &info)) { ++ char buffer[PATH_MAX]; ++ char* res = NULL; ++ ++ res = realpath(info.dli_fname, buffer); ++ if (!res) { ++ return NewString(SWIG_LIB); ++ } ++ ++ dirname(buffer); ++ strcat(buffer, "/swiglib"); ++ return NewStringWithSize(buffer, strlen(buffer)); ++ } ++ return NewString(SWIG_LIB); ++} ++#endif ++ + int SWIG_main(int argc, char *argv[], const TargetLanguageModule *tlm) { + char *c; + +@@ -938,13 +953,15 @@ + char buf[MAX_PATH]; + char *p; + if (!(GetModuleFileName(0, buf, MAX_PATH) == 0 || (p = strrchr(buf, '\\')) == 0)) { + *(p + 1) = '\0'; +- SwigLib = NewStringf("%sLib", buf); // Native windows installation path ++ SwigLib = NewStringf("%sswiglib", buf); // Native windows installation path + } else { + SwigLib = NewStringf(""); // Unexpected error + } + if (Len(SWIG_LIB_WIN_UNIX) > 0) + SwigLibWinUnix 
= NewString(SWIG_LIB_WIN_UNIX); // Unix installation path using a drive letter (for msys/mingw) ++#elif defined(HAVE_UNISTD_H) && !defined(_WIN32) ++ SwigLib = get_exe_path(); + #else + SwigLib = NewString(SWIG_LIB); + #endif diff --git a/recipes/swig/dl/patches/0002-4.0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch b/recipes/swig/dl/patches/0002-4.0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch new file mode 100644 index 0000000000000..53d129b5c5349 --- /dev/null +++ b/recipes/swig/dl/patches/0002-4.0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch @@ -0,0 +1,11 @@ +--- configure.ac ++++ configure.ac +@@ -2728,7 +2728,7 @@ + *-*-cygwin*) SWIG_LIB_WIN_UNIX=`cygpath --mixed "$SWIG_LIB"`;; + *) SWIG_LIB_WIN_UNIX="";; + esac +-AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, ["$SWIG_LIB_WIN_UNIX"], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) ++AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, [""], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) + + SWIG_LIB_PREINST=$ABS_SRCDIR/Lib + AC_SUBST(SWIG_LIB_PREINST) diff --git a/recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch b/recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch new file mode 100644 index 0000000000000..29aee19fe33d0 --- /dev/null +++ b/recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch @@ -0,0 +1,11 @@ +--- configure.ac ++++ configure.ac +@@ -2770,7 +2770,7 @@ + *-*-cygwin*) SWIG_LIB_WIN_UNIX=`cygpath --mixed "$SWIG_LIB"`;; + *) SWIG_LIB_WIN_UNIX="";; + esac +-AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, ["$SWIG_LIB_WIN_UNIX"], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) ++AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, [""], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) + + SWIG_LIB_PREINST=$ABS_SRCDIR/Lib + AC_SUBST(SWIG_LIB_PREINST) diff --git a/recipes/swig/dl/test_package/CMakeLists.txt b/recipes/swig/dl/test_package/CMakeLists.txt new file mode 100644 index 0000000000000..3f3a1aa9d1821 --- /dev/null +++ b/recipes/swig/dl/test_package/CMakeLists.txt @@ -0,0 +1,51 @@ +cmake_minimum_required(VERSION 3.1) +project(PackageTest C) + +include(${CMAKE_BINARY_DIR}/conanbuildinfo.cmake) +conan_basic_setup(TARGETS NO_OUTPUT_DIRS) + +set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY $<1:${CMAKE_BINARY_DIR}>) +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY $<1:${CMAKE_BINARY_DIR}>) +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY $<1:${CMAKE_BINARY_DIR}>) + +find_package(SWIG REQUIRED) +include(UseSWIG) + +set(Python_ADDITIONAL_VERSIONS 3) +find_package(PythonInterp) +find_package(PythonLibs 3) + +enable_testing() + +if(PYTHONINTERP_FOUND AND PYTHONLIBS_FOUND) + swig_add_library(${PROJECT_NAME} + LANGUAGE python + SOURCES + test.i + test_package.c + ) + + get_filename_component(PYTHON_LIBRARY_DIR "${PYTHON_LIBRARIES}" DIRECTORY) + + message(STATUS "PYTHON_INCLUDE_DIRS: ${PYTHON_INCLUDE_DIRS}") + message(STATUS "PYTHON_LIBRARIES: ${PYTHON_LIBRARIES}") + message(STATUS "PYTHON_LIBRARY_DIR: ${PYTHON_LIBRARY_DIR}") + + #target_compile_definitions(_${PROJECT_NAME} PRIVATE MS_NO_COREDLL) + target_include_directories(_${PROJECT_NAME} PRIVATE ${PYTHON_INCLUDE_DIRS}) + target_link_directories(_${PROJECT_NAME} PRIVATE ${PYTHON_LIBRARY_DIR}) + target_link_libraries(_${PROJECT_NAME} PRIVATE ${PYTHON_LIBRARIES}) + + add_test( + NAME gcd_test + COMMAND ${PYTHON_EXECUTABLE} -c "import PackageTest; assert PackageTest.gcd(12, 16) == 4" + WORKING_DIRECTORY "${CMAKE_BINARY_DIR}" + ) + add_test( + NAME foo_test + COMMAND 
${PYTHON_EXECUTABLE} -c "import PackageTest; assert PackageTest.cvar.foo == 3.14159265359" + WORKING_DIRECTORY "${CMAKE_BINARY_DIR}" + ) +else() + message(STATUS "Not building swig python module") +endif() diff --git a/recipes/swig/dl/test_package/conanfile.py b/recipes/swig/dl/test_package/conanfile.py new file mode 100644 index 0000000000000..5059612e70e16 --- /dev/null +++ b/recipes/swig/dl/test_package/conanfile.py @@ -0,0 +1,27 @@ +from conans import CMake, ConanFile, tools + + +class TestPackageConan(ConanFile): + settings = "os", "arch", "compiler", "build_type" + generators = "cmake", "cmake_find_package" + + @property + def _can_build(self): + # FIXME: Python does not distribute debug libraries (use cci CPython recipe) + return not (self.settings.compiler == "Visual Studio" and self.settings.build_type == "Debug") + + def build(self): + if not tools.cross_building(self, skip_x64_x86=True): + self.run("swig -swiglib", run_environment=True) + if self._can_build: + cmake = CMake(self) + cmake.verbose = True + cmake.configure() + cmake.build() + + def test(self): + if not tools.cross_building(self): + if self._can_build: + cmake = CMake(self) + cmake.test(output_on_failure=True) + self.run("swig -version", run_environment=True) diff --git a/recipes/swig/dl/test_package/test.i b/recipes/swig/dl/test_package/test.i new file mode 100644 index 0000000000000..98d0531a88cae --- /dev/null +++ b/recipes/swig/dl/test_package/test.i @@ -0,0 +1,6 @@ +%module PackageTest + +%inline %{ +extern int gcd(int u, int v); +extern double foo; +%} diff --git a/recipes/swig/dl/test_package/test_package.c b/recipes/swig/dl/test_package/test_package.c new file mode 100644 index 0000000000000..b02a4a855a18c --- /dev/null +++ b/recipes/swig/dl/test_package/test_package.c @@ -0,0 +1,31 @@ +// Source: https://en.wikipedia.org/wiki/Binary_GCD_algorithm#Recursive_version_in_C +int gcd(int u, int v) { + // simple cases (termination) + if (u == v) + return u; + + if (u == 0) + return v; + + if (v == 0) + return u; + + // look for factors of 2 + if (~u & 1) { // u is even + if (v & 1) // v is odd + return gcd(u >> 1, v); + else // both u and v are even + return gcd(u >> 1, v >> 1) << 1; + } + + if (~v & 1) // u is odd, v is even + return gcd(u, v >> 1); + + // reduce larger argument + if (u > v) + return gcd((u - v) >> 1, v); + + return gcd((v - u) >> 1, u); +} + +double foo = 3.14159265359; From e10da2274e521c3089e010f4d391c3b118c28fe5 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 24 May 2021 11:09:21 -0500 Subject: [PATCH 024/173] swig: Add DL version 1.3.40+dl.1; get sources from Git - Get the sources via Git. We can't use a tarball because Octocat requires authentication to download those. - Add a patch to not define SWIG_LIB_WIN_UNIX on Windows, as was done in the 4.x versions. 
- Can specify Git sources in conandata.yml - Custom fix only for use at DL --- recipes/swig/config.yml | 2 + recipes/swig/dl/conandata.yml | 22 +++----- recipes/swig/dl/conanfile.py | 9 +++- .../0001-swig-linux-library-path.patch | 50 ------------------- ....0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch | 11 ---- ....0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch | 11 ---- ...3.40-do-not-define-SWIG_LIB_WIN_UNIX.patch | 13 +++++ 7 files changed, 29 insertions(+), 89 deletions(-) delete mode 100644 recipes/swig/dl/patches/0001-swig-linux-library-path.patch delete mode 100644 recipes/swig/dl/patches/0002-4.0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch delete mode 100644 recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch create mode 100644 recipes/swig/dl/patches/0003-1.3.40-do-not-define-SWIG_LIB_WIN_UNIX.patch diff --git a/recipes/swig/config.yml b/recipes/swig/config.yml index 1e30a6b6a6a43..09e45d079455b 100644 --- a/recipes/swig/config.yml +++ b/recipes/swig/config.yml @@ -3,3 +3,5 @@ versions: folder: "all" "4.0.1": folder: "all" + "1.3.40+dl.1": + folder: "dl" diff --git a/recipes/swig/dl/conandata.yml b/recipes/swig/dl/conandata.yml index 639cc9e72797d..4349dbfeae3c5 100644 --- a/recipes/swig/dl/conandata.yml +++ b/recipes/swig/dl/conandata.yml @@ -1,18 +1,10 @@ sources: - "4.0.2": - url: "https://github.com/swig/swig/archive/rel-4.0.2.tar.gz" - sha256: "81d7ce78371f378a3299ddc5aea1da9a6178f325dcabb695d1b742f9e24a0fa6" - "4.0.1": - url: "https://github.com/swig/swig/archive/rel-4.0.1.tar.gz" - sha256: "2eaf6fb89d071d1be280bf995c63360b3729860c0da64948123b5d7e4cfb6cb7" + "1.3.40+dl.1": + git: + url: "git@octocat.dlogics.com:datalogics/swig.git" + branch: "1.3.40+dl.1" + shallow: True patches: - "4.0.2": + "1.3.40+dl.1": - base_path: "source_subfolder" - patch_file: "patches/0001-swig-linux-library-path.patch" - - base_path: "source_subfolder" - patch_file: "patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch" - "4.0.1": - - base_path: "source_subfolder" - patch_file: "patches/0001-swig-linux-library-path.patch" - - base_path: "source_subfolder" - patch_file: "patches/0002-4.0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch" + patch_file: "patches/0003-1.3.40-do-not-define-SWIG_LIB_WIN_UNIX.patch" diff --git a/recipes/swig/dl/conanfile.py b/recipes/swig/dl/conanfile.py index 50a789da329dd..67db7bdc1151d 100644 --- a/recipes/swig/dl/conanfile.py +++ b/recipes/swig/dl/conanfile.py @@ -40,8 +40,13 @@ def package_id(self): del self.info.settings.compiler def source(self): - tools.get(**self.conan_data["sources"][self.version], - destination=self._source_subfolder, strip_root=True) + source = self.conan_data['sources'][self.version] + if 'git' in source: + git = tools.Git(folder=self._source_subfolder) + git.clone(**source['git']) + else: + tools.get(**source, + destination=self._source_subfolder, strip_root=True) @property def _user_info_build(self): diff --git a/recipes/swig/dl/patches/0001-swig-linux-library-path.patch b/recipes/swig/dl/patches/0001-swig-linux-library-path.patch deleted file mode 100644 index 2c15694cb202e..0000000000000 --- a/recipes/swig/dl/patches/0001-swig-linux-library-path.patch +++ /dev/null @@ -1,50 +0,0 @@ ---- Source/Modules/main.cxx -+++ Source/Modules/main.cxx -@@ -879,6 +881,30 @@ static void getoptions(int argc, char *argv[]) { - } - } - -+#if defined(HAVE_UNISTD_H) && !defined(_WIN32) -+#include -+#include -+#include -+ -+static String *get_exe_path(void) { -+ Dl_info info; -+ if (dladdr("main", &info)) { -+ char buffer[PATH_MAX]; -+ char* res = NULL; 
-+ -+ res = realpath(info.dli_fname, buffer); -+ if (!res) { -+ return NewString(SWIG_LIB); -+ } -+ -+ dirname(buffer); -+ strcat(buffer, "/swiglib"); -+ return NewStringWithSize(buffer, strlen(buffer)); -+ } -+ return NewString(SWIG_LIB); -+} -+#endif -+ - int SWIG_main(int argc, char *argv[], const TargetLanguageModule *tlm) { - char *c; - -@@ -938,13 +953,15 @@ - char buf[MAX_PATH]; - char *p; - if (!(GetModuleFileName(0, buf, MAX_PATH) == 0 || (p = strrchr(buf, '\\')) == 0)) { - *(p + 1) = '\0'; -- SwigLib = NewStringf("%sLib", buf); // Native windows installation path -+ SwigLib = NewStringf("%sswiglib", buf); // Native windows installation path - } else { - SwigLib = NewStringf(""); // Unexpected error - } - if (Len(SWIG_LIB_WIN_UNIX) > 0) - SwigLibWinUnix = NewString(SWIG_LIB_WIN_UNIX); // Unix installation path using a drive letter (for msys/mingw) -+#elif defined(HAVE_UNISTD_H) && !defined(_WIN32) -+ SwigLib = get_exe_path(); - #else - SwigLib = NewString(SWIG_LIB); - #endif diff --git a/recipes/swig/dl/patches/0002-4.0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch b/recipes/swig/dl/patches/0002-4.0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch deleted file mode 100644 index 53d129b5c5349..0000000000000 --- a/recipes/swig/dl/patches/0002-4.0.1-do-not-define-SWIG_LIB_WIN_UNIX.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- configure.ac -+++ configure.ac -@@ -2728,7 +2728,7 @@ - *-*-cygwin*) SWIG_LIB_WIN_UNIX=`cygpath --mixed "$SWIG_LIB"`;; - *) SWIG_LIB_WIN_UNIX="";; - esac --AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, ["$SWIG_LIB_WIN_UNIX"], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) -+AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, [""], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) - - SWIG_LIB_PREINST=$ABS_SRCDIR/Lib - AC_SUBST(SWIG_LIB_PREINST) diff --git a/recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch b/recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch deleted file mode 100644 index 29aee19fe33d0..0000000000000 --- a/recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- configure.ac -+++ configure.ac -@@ -2770,7 +2770,7 @@ - *-*-cygwin*) SWIG_LIB_WIN_UNIX=`cygpath --mixed "$SWIG_LIB"`;; - *) SWIG_LIB_WIN_UNIX="";; - esac --AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, ["$SWIG_LIB_WIN_UNIX"], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) -+AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, [""], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) - - SWIG_LIB_PREINST=$ABS_SRCDIR/Lib - AC_SUBST(SWIG_LIB_PREINST) diff --git a/recipes/swig/dl/patches/0003-1.3.40-do-not-define-SWIG_LIB_WIN_UNIX.patch b/recipes/swig/dl/patches/0003-1.3.40-do-not-define-SWIG_LIB_WIN_UNIX.patch new file mode 100644 index 0000000000000..4ff22b093f12f --- /dev/null +++ b/recipes/swig/dl/patches/0003-1.3.40-do-not-define-SWIG_LIB_WIN_UNIX.patch @@ -0,0 +1,13 @@ +diff --git a/configure.in b/configure.in +index 4edc8e1ae..f5b664fca 100644 +--- a/configure.in ++++ b/configure.in +@@ -2153,7 +2153,7 @@ case $host in + *-*-cygwin*) SWIG_LIB_WIN_UNIX=`cygpath --mixed "$SWIG_LIB"`;; + *) SWIG_LIB_WIN_UNIX="";; + esac +-AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, ["$SWIG_LIB_WIN_UNIX"], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) ++AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, [""], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) + + AC_CONFIG_FILES([ \ + 
Makefile \ From 7a29f2a6f4e7500505aea7078890cb5e9de6df59 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 24 May 2021 12:36:51 -0500 Subject: [PATCH 025/173] SWIG: Define package compatability - Compiler doesn't matter; this is a tool, not a library. - It's ok to use a release package in a debug build. - It's ok to use a tool package that matches the build arch and os --- recipes/swig/dl/conanfile.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/recipes/swig/dl/conanfile.py b/recipes/swig/dl/conanfile.py index 67db7bdc1151d..3e0d2f6cec90b 100644 --- a/recipes/swig/dl/conanfile.py +++ b/recipes/swig/dl/conanfile.py @@ -14,7 +14,7 @@ class SwigConan(ConanFile): license = "GPL-3.0-or-later" topics = ("swig", "python", "java", "wrapper") exports_sources = "patches/**", "cmake/*" - settings = "os", "arch", "compiler", "build_type" + settings = "os", "arch", "compiler", "build_type", "os_build", "arch_build" @property def _source_subfolder(self): @@ -159,3 +159,17 @@ def package_info(self): bindir = os.path.join(self.package_folder, "bin") self.output.info("Appending PATH environment variable: {}".format(bindir)) self.env_info.PATH.append(bindir) + + def package_id(self): + del self.info.settings.compiler + del self.info.settings.os_build + del self.info.settings.arch_build + + # Doxygen doesn't make executable code. Any package that will run is ok to use. + # It's ok in general to use a release version of the tool that matches the + # build os and architecture. + compatible_pkg = self.info.clone() + compatible_pkg.settings.build_type = 'Release' + compatible_pkg.settings.arch = self.settings.arch_build + compatible_pkg.settings.os = self.settings.os_build + self.compatible_packages.append(compatible_pkg) From 935dfbb414f0d8dd8eaf750fcfcbf9e5260bc2e7 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 26 May 2021 08:46:57 -0500 Subject: [PATCH 026/173] SWIG: make test_package work with very old SWIG SWIG 1.3.40 can't make code that will work with a modern Python (like Python 3). For SWIG < 4.0.0, just test that SWIG runs. --- recipes/swig/dl/test_package/conanfile.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/recipes/swig/dl/test_package/conanfile.py b/recipes/swig/dl/test_package/conanfile.py index 5059612e70e16..82e6ce6171cdc 100644 --- a/recipes/swig/dl/test_package/conanfile.py +++ b/recipes/swig/dl/test_package/conanfile.py @@ -7,6 +7,10 @@ class TestPackageConan(ConanFile): @property def _can_build(self): + if self.deps_cpp_info['swig'].version < tools.Version('4.0.0'): + # SWIG is old and might not be able to build code for a modern Python (say, Python 3) + return False + # FIXME: Python does not distribute debug libraries (use cci CPython recipe) return not (self.settings.compiler == "Visual Studio" and self.settings.build_type == "Debug") From b433b1345a0bc9e295289802de69ed9e6a073c05 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 13 Dec 2021 15:09:55 -0600 Subject: [PATCH 027/173] swig: Disable ccache-swig only for old versions of SWIG --- recipes/swig/dl/conanfile.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/recipes/swig/dl/conanfile.py b/recipes/swig/dl/conanfile.py index 3e0d2f6cec90b..eb09e0030d538 100644 --- a/recipes/swig/dl/conanfile.py +++ b/recipes/swig/dl/conanfile.py @@ -103,6 +103,12 @@ def _configure_autotools(self): # MSVC canonical names aren't understood host, build = False, False + # DL: Old versions of swig-ccache needed yodl2man to build, which isn't + # available. 
We don't need ccache anyway. + if str(self.version) < tools.Version('4.0.0'): + self.output.warn("Old versions of SWIG need yodl2man to create ccache-swig. Disabling ccache-swig.") + args.append("--disable-ccache") + if self.settings.os == "Macos" and self.settings.arch == "armv8": # FIXME: Apple ARM should be handled by build helpers autotools.flags.append("-arch arm64") From a00301850fe272318c38b8f8f9cf45d4a8bbd6f5 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 14 Dec 2021 12:40:55 -0600 Subject: [PATCH 028/173] swig: Handle the library path in 1.3.40+dl.1 the same way as in 4.x - Create a swig-linux-library-path.patch that does the same thing as in 4.x, but using string handling conventions that were in effect in 1.3.40. --- recipes/swig/dl/conandata.yml | 2 + .../0004-1.3.40-swig-linux-library-path.patch | 41 +++++++++++++++++++ 2 files changed, 43 insertions(+) create mode 100644 recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch diff --git a/recipes/swig/dl/conandata.yml b/recipes/swig/dl/conandata.yml index 4349dbfeae3c5..d4c8b5ff60b82 100644 --- a/recipes/swig/dl/conandata.yml +++ b/recipes/swig/dl/conandata.yml @@ -8,3 +8,5 @@ patches: "1.3.40+dl.1": - base_path: "source_subfolder" patch_file: "patches/0003-1.3.40-do-not-define-SWIG_LIB_WIN_UNIX.patch" + - base_path: "source_subfolder" + patch_file: "patches/0004-1.3.40-swig-linux-library-path.patch" diff --git a/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch b/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch new file mode 100644 index 0000000000000..a5cb48be9a66d --- /dev/null +++ b/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch @@ -0,0 +1,41 @@ +diff --git a/Source/Modules/main.cxx b/Source/Modules/main.cxx +index c824db6f9..ecd039cb6 100644 +--- a/Source/Modules/main.cxx ++++ b/Source/Modules/main.cxx +@@ -809,6 +809,23 @@ void SWIG_getoptions(int argc, char *argv[]) { + + + ++#if defined(HAVE_UNISTD_H) && !defined(_WIN32) ++#include ++#include ++ ++static char *get_exe_path(void) { ++ char buffer[PATH_MAX]; ++ ssize_t nb = readlink("/proc/self/exe", buffer, PATH_MAX); ++ if (nb != -1) { ++ buffer[nb] = '\0'; ++ dirname(buffer); ++ strcat(buffer, "/swiglib"); ++ return Swig_copy_string(buffer); ++ } ++ return Swig_copy_string(SWIG_LIB); ++} ++#endif ++ + int SWIG_main(int argc, char *argv[], Language *l) { + char *c; + extern void Swig_print_xml(Node *obj, String *filename); +@@ -869,9 +886,11 @@ int SWIG_main(int argc, char *argv[], Language *l) { + char *p; + if (!(GetModuleFileName(0, buf, MAX_PATH) == 0 || (p = strrchr(buf, '\\')) == 0)) { + *(p + 1) = '\0'; +- SwigLibWin = NewStringf("%sLib", buf); // Native windows installation path ++ SwigLibWin = NewStringf("%sswiglib", buf); // Native windows installation path + } + SwigLib = Swig_copy_string(SWIG_LIB_WIN_UNIX); // Unix installation path using a drive letter (for msys/mingw) ++#elif defined(HAVE_UNISTD_H) && !defined(_WIN32) ++ SwigLib = get_exe_path(); + #else + SwigLib = Swig_copy_string(SWIG_LIB); + #endif From bad7e2f47df27f63944b2454f2d1ee75f6054694 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 18 Jan 2022 16:10:28 -0600 Subject: [PATCH 029/173] SWIG 1.3.40: Support macOS in get_exe_path() Alter patch for SWIG 1.3.40 to match the changes in commit 1fc6c023eff77f552b69cb52f9e705a75d292213: * Replace get_exe_path by something that works on both Ubuntu and macOS Also, correct the use of dirname, which does not alter its input string on macOS. 
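- Roughly what the patched get_exe_path() computes, sketched here in Python for
  readability (the real change is the C patch below; this is only an analogue
  and assumes the process can resolve its own executable path):

      import os
      import sys

      def get_exe_path():
          # Resolve the real location of the running swig binary (the C patch
          # uses dladdr() + realpath() for this), then point at the 'swiglib'
          # directory that ships next to it in the package layout; the C code
          # falls back to the compiled-in SWIG_LIB if the path cannot be resolved.
          exe = os.path.realpath(sys.argv[0])
          return os.path.join(os.path.dirname(exe), 'swiglib')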
--- .../0004-1.3.40-swig-linux-library-path.patch | 32 +++++++++++-------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch b/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch index a5cb48be9a66d..0b7650061c889 100644 --- a/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch +++ b/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch @@ -1,32 +1,38 @@ diff --git a/Source/Modules/main.cxx b/Source/Modules/main.cxx -index c824db6f9..ecd039cb6 100644 +index c824db6f9..0d8993695 100644 --- a/Source/Modules/main.cxx +++ b/Source/Modules/main.cxx -@@ -809,6 +809,23 @@ void SWIG_getoptions(int argc, char *argv[]) { - - +@@ -805,9 +805,29 @@ void SWIG_getoptions(int argc, char *argv[]) { + } + } +#if defined(HAVE_UNISTD_H) && !defined(_WIN32) +#include +#include -+ ++#include + +static char *get_exe_path(void) { -+ char buffer[PATH_MAX]; -+ ssize_t nb = readlink("/proc/self/exe", buffer, PATH_MAX); -+ if (nb != -1) { -+ buffer[nb] = '\0'; -+ dirname(buffer); ++ Dl_info info; ++ if (dladdr("main", &info)) { ++ char buffer[PATH_MAX]; ++ char* res = NULL; + ++ res = realpath(info.dli_fname, buffer); ++ if (!res) { ++ return Swig_copy_string(SWIG_LIB); ++ } + ++ strcpy(buffer, dirname(buffer)); + strcat(buffer, "/swiglib"); + return Swig_copy_string(buffer); + } + return Swig_copy_string(SWIG_LIB); +} +#endif -+ + int SWIG_main(int argc, char *argv[], Language *l) { char *c; - extern void Swig_print_xml(Node *obj, String *filename); -@@ -869,9 +886,11 @@ int SWIG_main(int argc, char *argv[], Language *l) { +@@ -869,9 +889,11 @@ int SWIG_main(int argc, char *argv[], Language *l) { char *p; if (!(GetModuleFileName(0, buf, MAX_PATH) == 0 || (p = strrchr(buf, '\\')) == 0)) { *(p + 1) = '\0'; From a1ad404978927d89af8b82ba2917e660457f2a5d Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Sun, 23 Jan 2022 17:35:09 -0600 Subject: [PATCH 030/173] SWIG 1.3.40: Don't try to build get_exe_path() on AIX or Solaris - Incompatible version of dladdr. - Won't impact the builds we do at DL. 
--- .../dl/patches/0004-1.3.40-swig-linux-library-path.patch | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch b/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch index 0b7650061c889..5eb70ea1c6332 100644 --- a/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch +++ b/recipes/swig/dl/patches/0004-1.3.40-swig-linux-library-path.patch @@ -1,12 +1,12 @@ diff --git a/Source/Modules/main.cxx b/Source/Modules/main.cxx -index c824db6f9..0d8993695 100644 +index c824db6f9..007eef1ac 100644 --- a/Source/Modules/main.cxx +++ b/Source/Modules/main.cxx @@ -805,9 +805,29 @@ void SWIG_getoptions(int argc, char *argv[]) { } } -+#if defined(HAVE_UNISTD_H) && !defined(_WIN32) ++#if defined(HAVE_UNISTD_H) && !defined(_WIN32) && !defined(_AIX) && !defined(__sun__) +#include +#include +#include @@ -40,7 +40,7 @@ index c824db6f9..0d8993695 100644 + SwigLibWin = NewStringf("%sswiglib", buf); // Native windows installation path } SwigLib = Swig_copy_string(SWIG_LIB_WIN_UNIX); // Unix installation path using a drive letter (for msys/mingw) -+#elif defined(HAVE_UNISTD_H) && !defined(_WIN32) ++#elif defined(HAVE_UNISTD_H) && !defined(_WIN32) && !defined(_AIX) && !defined(__sun__) + SwigLib = get_exe_path(); #else SwigLib = Swig_copy_string(SWIG_LIB); From 4a84200beb48dbe834bf9e1eb10925fda62fe663 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 24 Jan 2022 15:45:33 -0600 Subject: [PATCH 031/173] SWIG 1.3.40: Put swigp4.ml in the correct place. - Very old SWIG doesn't really handle separate build/source locations. --- recipes/swig/dl/conanfile.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/recipes/swig/dl/conanfile.py b/recipes/swig/dl/conanfile.py index eb09e0030d538..e8f0c3cc94774 100644 --- a/recipes/swig/dl/conanfile.py +++ b/recipes/swig/dl/conanfile.py @@ -2,6 +2,7 @@ import contextlib import functools import os +import shutil required_conan_version = ">=1.33.0" @@ -122,6 +123,11 @@ def _configure_autotools(self): autotools.configure(args=args, configure_dir=self._source_subfolder, host=host, build=build) + # DL: Old versions of SWIG deposited the swigp4.ml file in the build directory, but installed them from + # source + if str(self.version) < tools.Version('4.0.0'): + shutil.copy('Lib/ocaml/swigp4.ml', os.path.join(self._source_subfolder, 'Lib/ocaml/swigp4.ml')) + return autotools def _patch_sources(self): From 0b2e105494772cb060d3c789097edd5c6d8ea168 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 6 Jun 2022 00:40:20 -0500 Subject: [PATCH 032/173] tox.ini: Ignore docs and linter directories for flake8 - These have Python code from conan-io/conan-center-index, and it doesn't pass flake8 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 647726feef4e7..88e79d8c8e6b0 100644 --- a/tox.ini +++ b/tox.ini @@ -6,5 +6,5 @@ # Also, not enforcing flake8 on the recipes; they come from upstream with flake8 # errors # -exclude = .git,.tox,python-env-*,.idea,.conan,recipes,.github +exclude = .git,.tox,python-env-*,.idea,.conan,recipes,.github,docs,linter max-line-length = 120 From ef07d6e7818d1666ee70e5fe6b054df7cb3bdc92 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Mon, 6 Dec 2021 16:55:17 -0600 Subject: [PATCH 033/173] Add basic structure for having pytest-based tests - requirements - ignored files - test package --- .gitignore | 4 ++++ requirements.in | 2 ++ tests/__init__.py | 0 tox.ini | 5 +++++ 4 files changed, 11 insertions(+) create mode 100644 tests/__init__.py diff --git a/.gitignore b/.gitignore index a330a2bb21cfc..197e0bb8db008 100644 --- a/.gitignore +++ b/.gitignore @@ -447,3 +447,7 @@ requirements.txt # scons build files *.dblite + +# Test outputs +/test-report.xml +/test-report.html diff --git a/requirements.in b/requirements.in index 8fb2b21d3c8ea..5fdd5fe13af99 100644 --- a/requirements.in +++ b/requirements.in @@ -5,3 +5,5 @@ flake8 flake8-printf-formatting pipdeptree certifi +pytest +pytest-html diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/tox.ini b/tox.ini index 88e79d8c8e6b0..20cf4ed52f37f 100644 --- a/tox.ini +++ b/tox.ini @@ -8,3 +8,8 @@ # exclude = .git,.tox,python-env-*,.idea,.conan,recipes,.github,docs,linter max-line-length = 120 +[pytest] +testpaths = tests +junit_family = xunit2 +junit_suite_name = conan_center_index +addopts = -v --junitxml=test-report.xml --html=test-report.html --self-contained-html From 721d7578167abd7f9232919786a158db6d32b628 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 7 Dec 2021 15:42:56 -0600 Subject: [PATCH 034/173] Build tools via pytest, with initial macOS configuation. --- dlproject.yaml | 37 +++++++++++++++++++++++++++++++++ tests/conftest.py | 10 +++++++++ tests/test_tools.py | 45 ++++++++++++++++++++++++++++++++++++++++ util/recipes/__init__.py | 19 +++++++++++++++++ 4 files changed, 111 insertions(+) create mode 100644 tests/conftest.py create mode 100644 tests/test_tools.py create mode 100644 util/recipes/__init__.py diff --git a/dlproject.yaml b/dlproject.yaml index 1740abbdcaac2..a972ccc1bc7b1 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -154,6 +154,43 @@ config: # profile: # - apple-clang-10.0-macos-10.9 # - build_tools + common: &macOSCommon + # Note: split build/host profiles aren't supported by recipes in Conan Center Index + # profile_build: + # - apple-clang-12.0-macos-10.9-cppstd-14 + build: + - missing + config: + Release: &macOSRelease + <<: *macOSCommon + build_folder: build-release + description: macOS Release + profile_host: + - apple-clang-12.0-macos-10.9-cppstd-14 + Debug: &macOSDebug + <<: *macOSRelease + build_folder: build + description: macOS Debug + settings: + - build_type=Debug + ReleaseTool: &macOSReleaseTool + <<: *macOSCommon + build_folder: build-release + description: macOS Release + profile_host: + - apple-clang-12.0-macos-10.9 + DebugTool: &macOSDebugTool + <<: *macOSReleaseTool + build_folder: build + description: macOS Debug + settings: + - build_type=Debug + prebuilt_tools: + - cmake/3.21.4 + - doxygen/1.9.1 + prebuilt_tools_configs: + - DebugTool + - ReleaseTool redhat: diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000000000..143f43819c6c3 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,10 @@ +import pytest + + +def pytest_addoption(parser): + parser.addoption('--upload-to', help='upload built packages to the given remote') + + +@pytest.fixture +def upload_to(request): + return request.config.getoption('--upload-to') diff --git a/tests/test_tools.py b/tests/test_tools.py new file mode 100644 index 0000000000000..23552952da0bd --- /dev/null +++ b/tests/test_tools.py @@ -0,0 +1,45 @@ +import subprocess + 
+import dl_conan_build_tools.config +import pytest +from dl_conan_build_tools.tasks.conan import Config + +from util import recipes + +_config = dl_conan_build_tools.config.get_config() + + +@pytest.fixture(scope='package', + params=_config.get('prebuilt_tools', [])) +def prebuilt_tool(request): + return request.param + + +@pytest.fixture(scope='package', + params=_config.get('prebuilt_tools_configs', [])) +def prebuilt_tool_config(request): + config = Config.from_name(request.param) + config.validate() + config = config.normalize() + + config.infer_additional_configuration() + + return config + + +@pytest.fixture(scope='package') +def tool_recipe_folder(prebuilt_tool): + package, version = prebuilt_tool.split('/') + return recipes.versions_to_folders(package)[version] + + +class TestBuildTools(object): + def test_build_tool(self, prebuilt_tool, prebuilt_tool_config, tool_recipe_folder, upload_to): + args = ['conan', 'create', tool_recipe_folder, f'{prebuilt_tool}@', + '--update'] + prebuilt_tool_config.install_options() + print(f'Creating package {prebuilt_tool}: {" ".join(args)}') + subprocess.run(args, check=True) + if upload_to: + args = ['conan', 'upload', '-r', upload_to, f'{prebuilt_tool}@', '--all', '--check'] + print(f'Uploading {prebuilt_tool}: {" ".join(args)}') + subprocess.run(args, check=True) diff --git a/util/recipes/__init__.py b/util/recipes/__init__.py new file mode 100644 index 0000000000000..afc36c800e73f --- /dev/null +++ b/util/recipes/__init__.py @@ -0,0 +1,19 @@ +import os +import yaml + + +def versions_to_folders(package): + """Given a package, return a mapping of version number to recipe folder""" + recipe_folder = os.path.join('recipes', package) + config_yml_file = os.path.join(recipe_folder, 'config.yml') + if os.path.exists(config_yml_file): + with open(config_yml_file, 'r') as config_yml: + config_data = yaml.safe_load(config_yml) + return {version: os.path.join(recipe_folder, data['folder']) + for version, data in config_data['versions'].items()} + else: + with os.scandir(recipe_folder) as dirs: + def valid(entry): + return not entry.name.startswith('.') and entry.is_dir() + + return {entry.name: os.path.join(recipe_folder, entry.name) for entry in dirs if valid(entry)} From 47a893a02c14b5ac69901e4b03cb2b19744b0e55 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 7 Dec 2021 16:29:44 -0600 Subject: [PATCH 035/173] Jenkinsfile: Build tools on branch builds - Remove arch from most node names; still needed to distinguish Intel from ARM for Linux and Mac. - Add tools build node. - Run just the build_tool tests in Pytest to build tools. - Report XML results and HTML output. --- Jenkinsfile | 129 +++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 97 insertions(+), 32 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 32eb810391dbe..6197c792cd83e 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,31 +1,28 @@ def ENV_LOC = [:] -def ARCH = [ - 'aix-32-conan-center-index': 'ppc32', - 'aix-64-conan-center-index': 'ppc64', - 'linux-x86-conan-center-index': 'x86', - 'linux-x64-conan-center-index': 'x64', - 'linux-arm-conan-center-index': 'armv8', - 'mac-x64-conan-center-index': 'x64', - 'mac-arm-conan-center-index': 'armv8', - 'sparcsolaris-32-conan-center-index': 'sparc', - 'sparcsolaris-64-conan-center-index': 'sparcv9', - 'windows-x86-conan-center-index': 'x86', - 'windows-x64-conan-center-index': 'x64'] +// Which nodes build tools. 
The linux-x64-tools-conan-center-index is an older machine +// that uses an earlier glibc, so the tools will run on every machine. +def BUILD_TOOLS=[ + 'aix-conan-center-index': true, + 'linux-x64-conan-center-index': false, + 'linux-x64-tools-conan-center-index': true, + 'linux-arm-conan-center-index': true, + 'mac-x64-conan-center-index': true, + 'mac-arm-conan-center-index': true, + 'sparcsolaris-conan-center-index': true, + 'windows-conan-center-index': true, +] pipeline { parameters { choice(name: 'PLATFORM_FILTER', choices: ['all', - 'aix-32-conan-center-index', - 'aix-64-conan-center-index', - 'linux-x86-conan-center-index', + 'aix-conan-center-index', 'linux-x64-conan-center-index', + 'linux-x64-tools-conan-center-index', 'linux-arm-conan-center-index', 'mac-x64-conan-center-index', 'mac-arm-conan-center-index', - 'sparcsolaris-32-conan-center-index', - 'sparcsolaris-64-conan-center-index', - 'windows-x86-conan-center-index', - 'windows-x64-conan-center-index'], + 'sparcsolaris-conan-center-index', + 'windows-conan-center-index'], description: 'Run on specific platform') booleanParam defaultValue: false, description: 'Completely clean the workspace before building, including the Conan cache', name: 'CLEAN_WORKSPACE' booleanParam name: 'UPLOAD_ALL_RECIPES', defaultValue: false, @@ -45,10 +42,15 @@ pipeline { CONAN_USER_HOME = "${WORKSPACE}" CONAN_NON_INTERACTIVE = '1' CONAN_PRINT_RUN_COMMANDS = '1' + // Disable FileTracker on Windows, which can give FTK1011 on long path names + TRACKFILEACCESS = 'false' + // Disable node reuse, which gives intermittent build errors on Windows + MSBUILDDISABLENODEREUSE = '1' // AIX workaround. Avoids an issue caused by the jenkins java process which sets // LIBPATH and causes errors downstream LIBPATH = "randomval" DL_CONAN_CENTER_INDEX = 'all' + TOX_TESTENV_PASSENV = 'CONAN_USER_HOME CONAN_NON_INTERACTIVE CONAN_PRINT_RUN_COMMANDS CONAN_LOGIN_USERNAME CONAN_PASSWORD TRACKFILEACCESS MSBUILDDISABLENODEREUSE' } stages { stage('Clean/reset Git checkout for release') { @@ -159,16 +161,11 @@ pipeline { } } stage('Per-platform') { - when { - // Turn off for now; testing verified that the structure works (common part then matrix part), - // and the per-platform tasks aren't there yet. - expression { false } - } matrix { agent { node { label "${NODE}" - customWorkspace "workspace/${JOB_NAME}_${ARCH[NODE]}/" + customWorkspace "workspace/${JOB_NAME}/" } } when { anyOf { @@ -178,19 +175,19 @@ pipeline { axes { axis { name 'NODE' - values 'aix-32-conan-center-index', - 'aix-64-conan-center-index', - 'linux-x86-conan-center-index', + values 'aix-conan-center-index', 'linux-x64-conan-center-index', + 'linux-x64-tools-conan-center-index', 'linux-arm-conan-center-index', 'mac-x64-conan-center-index', 'mac-arm-conan-center-index', - 'sparcsolaris-32-conan-center-index', - 'sparcsolaris-64-conan-center-index', - 'windows-x86-conan-center-index', - 'windows-x64-conan-center-index' + 'sparcsolaris-conan-center-index', + 'windows-conan-center-index' } } + environment { + CONAN_USER_HOME = "${WORKSPACE}" + } stages { stage('Clean/reset Git checkout for release') { when { @@ -260,6 +257,74 @@ pipeline { } } } + stage('Set up Conan') { + steps { + script { + if (isUnix()) { + sh """. 
${ENV_LOC[NODE]}/bin/activate + invoke conan.login""" + } else { + bat """CALL ${ENV_LOC[NODE]}\\Scripts\\activate + invoke conan.login""" + } + } + } + } + stage('build tools') { + when { + allOf { + not { + changeRequest() + } + expression { BUILD_TOOLS[NODE] } + } + } + steps { + script { + if (env.BRANCH_NAME =~ 'master*') { + remote = 'conan-center-dl' + } else { + remote = 'conan-center-dl-staging' + } + short_node = NODE.replace('-conan-center-index', '') + pytest_command = "pytest -k build_tool --upload-to ${remote} --junitxml=build-tools.xml --html=${short_node}-build-tools.html" + if (isUnix()) { + catchError(message: 'pytest had errors', stageResult: 'FAILURE') { + script { + // on macOS, /usr/local/bin is not in the path by default, and the + // Python binaries tox is looking for may be in there + sh """. ${ENV_LOC[NODE]}/bin/activate + (env PATH=\$PATH:/usr/local/bin ${pytest_command})""" + } + } + } + else { + catchError(message: 'pytest had errors', stageResult: 'FAILURE') { + script { + bat """CALL ${ENV_LOC[NODE]}\\Scripts\\activate + ${pytest_command}""" + } + } + } + } + } + post { + always { + catchError(message: 'testing had errors', stageResult: 'FAILURE') { + xunit ( + reduceLog: false, + tools: [ + JUnit(deleteOutputFiles: true, + failIfNotNew: true, + pattern: 'build-tools.xml', + skipNoTestFiles: true, + stopProcessingIfError: true) + ]) + archiveArtifacts allowEmptyArchive: true, artifacts: '*-build-tools.html', followSymlinks: false + } + } + } + } } } } From b8ec990fd13ce7a2a0eb0be9c5c6bdd721dc6814 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 8 Dec 2021 13:55:44 -0600 Subject: [PATCH 036/173] Support adding options to prebuilt_tools. - Can build multiple tools with different options. - The list of prebuilt_tools can contain a string, which is the package name, or a dictionary with keys package and options. - The options are a list of package:option=value strings. 
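- Quick illustration of the two accepted entry forms, using the Package helper
  introduced in the diff below (results shown as comments):

      Package.from_str_or_dict('cmake/3.21.4')
      # -> Package(package='cmake/3.21.4', options=[])

      Package.from_str_or_dict({'package': 'doxygen/1.9.1',
                                'options': ['doxygen:enable_search=False']})
      # -> Package(package='doxygen/1.9.1', options=['doxygen:enable_search=False'])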
--- dlproject.yaml | 5 +++++ tests/test_tools.py | 39 +++++++++++++++++++++++++++++++-------- 2 files changed, 36 insertions(+), 8 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index a972ccc1bc7b1..9f8657b4adc84 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -185,9 +185,14 @@ config: description: macOS Debug settings: - build_type=Debug + # If the entry is a string, it's taken to be the package name, + # else the entry can be a dictionary of package name and options prebuilt_tools: - cmake/3.21.4 - doxygen/1.9.1 + - package: doxygen/1.9.1 + options: + - doxygen:enable_search=False prebuilt_tools_configs: - DebugTool - ReleaseTool diff --git a/tests/test_tools.py b/tests/test_tools.py index 23552952da0bd..8a587c246775a 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -1,4 +1,5 @@ import subprocess +from typing import NamedTuple, List import dl_conan_build_tools.config import pytest @@ -9,8 +10,26 @@ _config = dl_conan_build_tools.config.get_config() +class Package(NamedTuple): + package: str + options: List[str] + + def __str__(self): + result = self.package + if self.options: + result = f'{self.package}_{"_".join(self.options)}' + return result + + @classmethod + def from_str_or_dict(cls, str_or_dict): + if isinstance(str_or_dict, str): + return cls(str_or_dict, []) + return cls(**str_or_dict) + + @pytest.fixture(scope='package', - params=_config.get('prebuilt_tools', [])) + params=[Package.from_str_or_dict(entry) for entry in _config.get('prebuilt_tools', [])], + ids=lambda param: str(param)) def prebuilt_tool(request): return request.param @@ -29,17 +48,21 @@ def prebuilt_tool_config(request): @pytest.fixture(scope='package') def tool_recipe_folder(prebuilt_tool): - package, version = prebuilt_tool.split('/') + package, version = prebuilt_tool.package.split('/') return recipes.versions_to_folders(package)[version] class TestBuildTools(object): def test_build_tool(self, prebuilt_tool, prebuilt_tool_config, tool_recipe_folder, upload_to): - args = ['conan', 'create', tool_recipe_folder, f'{prebuilt_tool}@', - '--update'] + prebuilt_tool_config.install_options() - print(f'Creating package {prebuilt_tool}: {" ".join(args)}') + tool_options = [] + for opt in prebuilt_tool.options: + tool_options.append('--options:host') + tool_options.append(opt) + args = ['conan', 'create', tool_recipe_folder, f'{prebuilt_tool.package}@', + '--update'] + prebuilt_tool_config.install_options() + tool_options + print(f'Creating package {prebuilt_tool.package}: {" ".join(args)}') subprocess.run(args, check=True) if upload_to: - args = ['conan', 'upload', '-r', upload_to, f'{prebuilt_tool}@', '--all', '--check'] - print(f'Uploading {prebuilt_tool}: {" ".join(args)}') - subprocess.run(args, check=True) + args = ['conan', 'upload', '-r', upload_to, f'{prebuilt_tool.package}@', '--all', '--check'] + print(f'Uploading {prebuilt_tool.package}: {" ".join(args)}') + subprocess.run(args, check=True) From 3b9b25c5f78cbfe283c21c5e81acb45101b5c4ea Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 9 Dec 2021 10:48:14 -0600 Subject: [PATCH 037/173] pytest: Add --force-build option - --force_build or --force-build=package will force the building of all packages. - --force-build=with_requirements will force the building of all packages and their requirements as well. 
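- Usage sketch (the option wiring is in the conftest.py and test_tools.py
  changes below):

      pytest -k build_tool --force-build
      #   passes '--build <tool name>' to 'conan create' (rebuild just the tool)

      pytest -k build_tool --force-build=with_requirements
      #   passes '--build all' to 'conan create' so requirements are rebuilt too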
--- tests/conftest.py | 7 +++++++ tests/test_tools.py | 9 +++++++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 143f43819c6c3..6b77a88c8122c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,8 +3,15 @@ def pytest_addoption(parser): parser.addoption('--upload-to', help='upload built packages to the given remote') + parser.addoption('--force-build', choices=['package', 'with_requirements'], const='package', nargs='?', + help='Force a build of the package or the package with its requirements') @pytest.fixture def upload_to(request): return request.config.getoption('--upload-to') + + +@pytest.fixture +def force_build(request): + return request.config.getoption('--force-build') diff --git a/tests/test_tools.py b/tests/test_tools.py index 8a587c246775a..88544db421517 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -53,13 +53,18 @@ def tool_recipe_folder(prebuilt_tool): class TestBuildTools(object): - def test_build_tool(self, prebuilt_tool, prebuilt_tool_config, tool_recipe_folder, upload_to): + def test_build_tool(self, prebuilt_tool, prebuilt_tool_config, tool_recipe_folder, upload_to, force_build): tool_options = [] for opt in prebuilt_tool.options: tool_options.append('--options:host') tool_options.append(opt) + force_build_options = [] + if force_build == 'package': + force_build_options = ['--build', prebuilt_tool.package.split('/', maxsplit=1)[0]] + elif force_build == 'with-requirements': + force_build_options = ['--build', 'all'] args = ['conan', 'create', tool_recipe_folder, f'{prebuilt_tool.package}@', - '--update'] + prebuilt_tool_config.install_options() + tool_options + '--update'] + prebuilt_tool_config.install_options() + tool_options + force_build_options print(f'Creating package {prebuilt_tool.package}: {" ".join(args)}') subprocess.run(args, check=True) if upload_to: From 12e7894498e6f5a174d80f4b9363e2df374935e4 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 9 Dec 2021 10:49:26 -0600 Subject: [PATCH 038/173] Add Ninja tool - Can be used as an alternative to Makefile for CMake's underlying build system. - A lot faster than Make. - CLion is starting to prefer it, so maybe we can investigate against our projects. --- dlproject.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/dlproject.yaml b/dlproject.yaml index 9f8657b4adc84..e773a4d7c34ff 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -193,6 +193,7 @@ config: - package: doxygen/1.9.1 options: - doxygen:enable_search=False + - ninja/1.10.2 prebuilt_tools_configs: - DebugTool - ReleaseTool From 448e7c84d4540460b69f2b119123be98b8d94654 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Thu, 9 Dec 2021 10:50:57 -0600 Subject: [PATCH 039/173] dlproject.yaml: Add configs and prebuilt_tools lists for Linux and Windows --- dlproject.yaml | 106 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index e773a4d7c34ff..123765d5b360f 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -199,6 +199,73 @@ config: - ReleaseTool redhat: + common: &redhatCommon + build: + - missing + config: + Release: &redhatRelease + <<: *redhatCommon + build_folder: build-release + description: RedHat Release + profile_host: + - llvm-toolset-7-cppstd-14 + Debug: &redhatDebug + <<: *redhatRelease + build_folder: build-debug + description: RedHat Debug + settings: + - build_type=Debug + Release32: &redhatRelease32 + <<: *redhatCommon + build_folder: build-release-32 + description: RedHat Release 32 + profile_host: + - llvm-toolset-7-cppstd-14 + settings: + - arch=x86 + Debug32: &redhatDebug32 + <<: *redhatRelease32 + build_folder: build-debug-32 + description: RedHat Debug 32 + settings: + - build_type=Debug + ReleaseTool: &redhatReleaseTool + <<: *redhatCommon + build_folder: build-release-tool + description: RedHat Release Tool + profile_host: + - devtoolset-7 + DebugTool: &redhatDebugTool + <<: *redhatReleaseTool + build_folder: build-debug-tool + description: RedHat Debug Tool + settings: + - build_type=Debug + ReleaseTool32: &redhatReleaseTool32 + <<: *redhatReleaseTool + build_folder: build-release-tool-32 + description: RedHat Release Tool 32 + settings: + - arch=x86 + DebugTool32: &redhatDebugTool32 + <<: *redhatReleaseTool32 + build_folder: build-debug-tool-32 + description: RedHat Debug Tool 32 + settings: + - build_type=Debug + prebuilt_tools: + - cmake/3.21.4 + - doxygen/1.9.1 + - package: doxygen/1.9.1 + options: + - doxygen:enable_search=False + - ninja/1.10.2 + prebuilt_tools_configs: + - DebugTool + - ReleaseTool + - DebugTool32 + - ReleaseTool32 + windows: # Can set the generator differently on a per-machine basis (but automatic is @@ -209,6 +276,45 @@ config: # Can override packager build_tools on a per-platform basis: # packager: # build_tools: build_tools_windows + common: &windowsCommon + profile_host: visual-studio-15 + build: + - missing + config: + Release: &windowsRelease + <<: *windowsCommon + build_folder: build-release + description: Windows Release + Debug: &windowsDebug + <<: *windowsRelease + build_folder: build-debug + description: Windows Debug + settings: + - build_type=Debug + Release32: &windowsRelease32 + <<: *windowsRelease + build_folder: build-release-32 + description: Windows Release 32 + settings: + - arch=x86 + Debug32: &windowsDebug32 + <<: *windowsRelease32 + build_folder: build-debug-32 + description: Windows Debug 32 + settings: + - build_type=Debug + prebuilt_tools: + - cmake/3.21.4 + - doxygen/1.9.1 + - package: doxygen/1.9.1 + options: + - doxygen:enable_search=False + - ninja/1.10.2 + prebuilt_tools_configs: + - Debug + - Release + - Debug32 + - Release32 byhost: # Configs based on hostname. From 1419ca246028497b0eae0afdc2fb1882891d3fb7 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Thu, 9 Dec 2021 14:19:37 -0600 Subject: [PATCH 040/173] Distinguish tool profiles between RedHat Intel and ARM --- dlproject.yaml | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/dlproject.yaml b/dlproject.yaml index 123765d5b360f..ac0c6577dc4fa 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -198,7 +198,8 @@ config: - DebugTool - ReleaseTool - redhat: + + redhat-x86_64: common: &redhatCommon build: - missing @@ -266,6 +267,33 @@ config: - DebugTool32 - ReleaseTool32 + redhat-aarch64: + common: &redhatARMCommon + build: + - missing + config: + Release: &redhatARMRelease + <<: *redhatARMCommon + build_folder: build-release + description: RedHat Release + profile_host: + - llvm-toolset-7.0-armv8 + Debug: &redhatARMDebug + <<: *redhatARMRelease + build_folder: build-debug + description: RedHat Debug + settings: + - build_type=Debug + prebuilt_tools: + - cmake/3.21.4 + - doxygen/1.9.1 + - package: doxygen/1.9.1 + options: + - doxygen:enable_search=False + - ninja/1.10.2 + prebuilt_tools_configs: + - Debug + - Release windows: # Can set the generator differently on a per-machine basis (but automatic is From 56be32a5800a3bc5efd9cddd3db1d469c31f4e85 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 9 Dec 2021 14:20:14 -0600 Subject: [PATCH 041/173] Jenkinsfile: Use correct DL Conan Center Index repo for staging/production --- Jenkinsfile | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/Jenkinsfile b/Jenkinsfile index 6197c792cd83e..a5bf9e7752711 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -187,6 +187,7 @@ pipeline { } environment { CONAN_USER_HOME = "${WORKSPACE}" + DL_CONAN_CENTER_INDEX = productionOrStaging() } stages { stage('Clean/reset Git checkout for release') { @@ -351,6 +352,22 @@ pipeline { } } +void productionOrStaging() { + if (env.CHANGE_ID == null) { + if (env.BRANCH_NAME =~ 'master*') { + return 'production' + } else { + return 'staging' + } + } else { + if (env.CHANGE_BRANCH =~ 'master*') { + return 'production' + } else { + return 'staging' + } + } +} + void printPlatformNameInStep(String node) { script { stage("Building on ${node}") { From dd34ba597dcd2ce0255adafffba2c0da9f7fe41e Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 9 Dec 2021 16:16:12 -0600 Subject: [PATCH 042/173] test_build_tool: Upload any requirements used to build a package - Makes it easier to build the package next time. - The requirements are only there if the package had to be built. 
--- tests/test_tools.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/tests/test_tools.py b/tests/test_tools.py index 88544db421517..86a2ded484b33 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -1,3 +1,4 @@ +import json import subprocess from typing import NamedTuple, List @@ -53,7 +54,8 @@ def tool_recipe_folder(prebuilt_tool): class TestBuildTools(object): - def test_build_tool(self, prebuilt_tool, prebuilt_tool_config, tool_recipe_folder, upload_to, force_build): + def test_build_tool(self, prebuilt_tool, prebuilt_tool_config, tool_recipe_folder, upload_to, force_build, + tmp_path): tool_options = [] for opt in prebuilt_tool.options: tool_options.append('--options:host') @@ -63,11 +65,19 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config, tool_recipe_folde force_build_options = ['--build', prebuilt_tool.package.split('/', maxsplit=1)[0]] elif force_build == 'with-requirements': force_build_options = ['--build', 'all'] - args = ['conan', 'create', tool_recipe_folder, f'{prebuilt_tool.package}@', - '--update'] + prebuilt_tool_config.install_options() + tool_options + force_build_options + create_json = tmp_path / 'create.json' + args = ['conan', 'create', tool_recipe_folder, f'{prebuilt_tool.package}@', '--update', '--json', + str(create_json)] + prebuilt_tool_config.install_options() + tool_options + force_build_options print(f'Creating package {prebuilt_tool.package}: {" ".join(args)}') subprocess.run(args, check=True) if upload_to: - args = ['conan', 'upload', '-r', upload_to, f'{prebuilt_tool.package}@', '--all', '--check'] - print(f'Uploading {prebuilt_tool.package}: {" ".join(args)}') - subprocess.run(args, check=True) + # upload packages mentioned in the create.json, which includes requirements used to build + # this package, if in fact it had to be built. + with open(create_json) as json_file: + create_data = json.load(json_file) + for install in create_data['installed']: + recipe_id = install['recipe']['id'] + ref = recipe_id.split('#')[0] + args = ['conan', 'upload', '-r', upload_to, f'{ref}@', '--all', '--check'] + print(f'Uploading {ref}: {" ".join(args)}') + subprocess.run(args, check=True) From dd446c856ec491e83f7a100623e2f89c193ddfc6 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 9 Dec 2021 16:32:15 -0600 Subject: [PATCH 043/173] Jenkinsfile: Add a option to force tool builds. --- Jenkinsfile | 13 ++++++++++++- tests/conftest.py | 2 +- tests/test_tools.py | 16 +++++++++++++--- 3 files changed, 26 insertions(+), 5 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index a5bf9e7752711..ce63b2b4f6202 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -27,6 +27,10 @@ pipeline { booleanParam defaultValue: false, description: 'Completely clean the workspace before building, including the Conan cache', name: 'CLEAN_WORKSPACE' booleanParam name: 'UPLOAD_ALL_RECIPES', defaultValue: false, description: 'Upload all recipes, instead of only recipes that changed since the last merge' + booleanParam name: 'FORCE_TOOL_BUILD', defaultValue: false, + description: 'Force build of all tools. By default, Conan will download the tool and test it if it\'s already built' + booleanParam name: 'FORCE_TOOL_BUILD_WITH_REQUIREMENTS', defaultValue: false, + description: 'Force build of all tools, and their requirements. 
By default, Conan will download the tool and test it if it\'s already built' } options{ buildDiscarder logRotator(artifactDaysToKeepStr: '4', artifactNumToKeepStr: '10', daysToKeepStr: '7', numToKeepStr: '10') @@ -288,7 +292,14 @@ pipeline { remote = 'conan-center-dl-staging' } short_node = NODE.replace('-conan-center-index', '') - pytest_command = "pytest -k build_tool --upload-to ${remote} --junitxml=build-tools.xml --html=${short_node}-build-tools.html" + if (params.FORCE_TOOL_BUILD_WITH_REQUIREMENTS) { + force_build = '--force-build with-requirements' + } else if (params.FORCE_TOOL_BUILD) { + force_build = '--force-build' + } else { + force_build = '' + } + pytest_command = "pytest -k build_tool ${force_build} --upload-to ${remote} --junitxml=build-tools.xml --html=${short_node}-build-tools.html" if (isUnix()) { catchError(message: 'pytest had errors', stageResult: 'FAILURE') { script { diff --git a/tests/conftest.py b/tests/conftest.py index 6b77a88c8122c..a708db4adcba4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,7 @@ def pytest_addoption(parser): parser.addoption('--upload-to', help='upload built packages to the given remote') - parser.addoption('--force-build', choices=['package', 'with_requirements'], const='package', nargs='?', + parser.addoption('--force-build', choices=['package', 'with-requirements'], const='package', nargs='?', help='Force a build of the package or the package with its requirements') diff --git a/tests/test_tools.py b/tests/test_tools.py index 86a2ded484b33..81a6e2695202f 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -61,13 +61,23 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config, tool_recipe_folde tool_options.append('--options:host') tool_options.append(opt) force_build_options = [] + if force_build == 'package': - force_build_options = ['--build', prebuilt_tool.package.split('/', maxsplit=1)[0]] + force_build_options = ['--build', prebuilt_tool.package.split('/', maxsplit=1)[0], + '--build', 'missing'] elif force_build == 'with-requirements': - force_build_options = ['--build', 'all'] + force_build_options = ['--build'] + else: + force_build_options = ['--build', 'missing'] + + # Remove "missing" from the build list in the config, because it sets policy; the policy is determined by the + # force_build_options + config_build_without_missing = [build for build in prebuilt_tool_config.build if build != 'missing'] + config = prebuilt_tool_config._replace(build=config_build_without_missing) + create_json = tmp_path / 'create.json' args = ['conan', 'create', tool_recipe_folder, f'{prebuilt_tool.package}@', '--update', '--json', - str(create_json)] + prebuilt_tool_config.install_options() + tool_options + force_build_options + str(create_json)] + config.install_options() + tool_options + force_build_options print(f'Creating package {prebuilt_tool.package}: {" ".join(args)}') subprocess.run(args, check=True) if upload_to: From 52b58e7e540e55e7f637b41ecc5377e3aedec86c Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 10 Dec 2021 10:55:42 -0600 Subject: [PATCH 044/173] test_tools.py: Allow per-tool configs list - Configs in the per-package list must also be in the prebuilt_tools_configs. - If the list of configs isn't there, it uses the default ones in prebuilt_tools_configs. - Pytest attempts all the combinations, and the test itself compares the tool config to the list in the package, and skips if it's not there. 
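A short sketch of the per-tool config check described above (the `Package` tuple and fixtures are the ones defined in tests/test_tools.py; see the diff below for the actual version):

    import pytest

    def check_tool_config(tool, config_name):
        # An empty or absent configs list means the tool builds with every
        # config in prebuilt_tools_configs; otherwise skip mismatches.
        if tool.configs and config_name not in tool.configs:
            pytest.skip(f'config {config_name} is not in the configs list for '
                        f'{tool.package}: {", ".join(tool.configs)}')
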
--- tests/test_tools.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/tests/test_tools.py b/tests/test_tools.py index 81a6e2695202f..42ee67beb46d5 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -13,7 +13,8 @@ class Package(NamedTuple): package: str - options: List[str] + options: List[str] = list() + configs: List[str] = list() def __str__(self): result = self.package @@ -37,8 +38,13 @@ def prebuilt_tool(request): @pytest.fixture(scope='package', params=_config.get('prebuilt_tools_configs', [])) -def prebuilt_tool_config(request): - config = Config.from_name(request.param) +def prebuilt_tool_config_name(request): + return request.param + + +@pytest.fixture(scope='package') +def prebuilt_tool_config(prebuilt_tool_config_name): + config = Config.from_name(prebuilt_tool_config_name) config.validate() config = config.normalize() @@ -54,8 +60,11 @@ def tool_recipe_folder(prebuilt_tool): class TestBuildTools(object): - def test_build_tool(self, prebuilt_tool, prebuilt_tool_config, tool_recipe_folder, upload_to, force_build, - tmp_path): + def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_tool_config, tool_recipe_folder, + upload_to, force_build, tmp_path): + if prebuilt_tool.configs and prebuilt_tool_config_name not in prebuilt_tool.configs: + pytest.skip(f'Skipping build because config named {prebuilt_tool_config_name} is not in the list of ' + f'configs for this package: {", ".join(prebuilt_tool.configs)}') tool_options = [] for opt in prebuilt_tool.options: tool_options.append('--options:host') From a3fe80c4183f5fa3f5a74527a90f5ecb4373a335 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 10 Dec 2021 10:59:30 -0600 Subject: [PATCH 045/173] Windows: Don't build 32-bit Doxygen with search - Search requires xapian-core, which has to be built with msys, which complains about being in a 32-bit configuration. --- dlproject.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/dlproject.yaml b/dlproject.yaml index ac0c6577dc4fa..f2b82de725fc1 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -333,7 +333,12 @@ config: - build_type=Debug prebuilt_tools: - cmake/3.21.4 - - doxygen/1.9.1 + - package: doxygen/1.9.1 + configs: + # Search doesn't build for 32-bits, because xapian-core requires msys, + # and msys complains about running on 32-bit + - Debug + - Release - package: doxygen/1.9.1 options: - doxygen:enable_search=False From bffba949a5a21920d055e96fb51a7e298b5823e0 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 10 Dec 2021 14:22:54 -0600 Subject: [PATCH 046/173] RedHat-Intel: Don't build 32-bit tools that have problems building. - Doxygen with search doesn't build due to m4 not building (needed via xapian-core). - devtoolset-7 doesn't fully function for 32-bit code. - The native gcc on rhel6 is too old to do C++ 11. 
--- dlproject.yaml | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index f2b82de725fc1..b30faed455711 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -255,12 +255,21 @@ config: settings: - build_type=Debug prebuilt_tools: - - cmake/3.21.4 - - doxygen/1.9.1 + - package: cmake/3.21.4 + configs: + - DebugTool + - ReleaseTool + - package: doxygen/1.9.1 + configs: + - DebugTool + - ReleaseTool - package: doxygen/1.9.1 options: - doxygen:enable_search=False - - ninja/1.10.2 + - package: ninja/1.10.2 + configs: + - DebugTool + - ReleaseTool prebuilt_tools_configs: - DebugTool - ReleaseTool From f078c358556165ed7cb19dd000fe4563f80ad5db Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 10 Dec 2021 18:20:43 -0600 Subject: [PATCH 047/173] dlproject.yaml: For 32-bit Linux, try setting arch of build tools --- dlproject.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index b30faed455711..516a91d6aa795 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -248,6 +248,9 @@ config: description: RedHat Release Tool 32 settings: - arch=x86 + - bison:arch=x86_64 + - flex:arch=x86_64 + - m4:arch=x86_64 DebugTool32: &redhatDebugTool32 <<: *redhatReleaseTool32 build_folder: build-debug-tool-32 From 23cc4c1cb191429a6bc23c8e62524869aecfacac Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 10 Dec 2021 19:20:54 -0600 Subject: [PATCH 048/173] dlproject.yaml: Add sections for AIX and Solaris --- dlproject.yaml | 80 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 80 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index 516a91d6aa795..6e37fc34f4042 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -361,6 +361,86 @@ config: - Debug32 - Release32 + aix: + common: &aixCommon + build: + - missing + config: + Release: &aixRelease + <<: *aixCommon + build_folder: build-release + description: AIX Release + profile_host: gcc-8-aix71-64 + Debug: &aixDebug + <<: *aixRelease + build_folder: build-debug + description: AIX Debug + settings: + - build_type=Debug + Release32: &aixRelease32 + <<: *aixRelease + build_folder: build-release-32 + description: AIX Release 32 + profile_host: gcc-8-aix71-32 + Debug32: &aixDebug32 + <<: *aixRelease32 + build_folder: build-debug-32 + description: AIX Debug 32 + settings: + - build_type=Debug + prebuilt_tools: + - cmake/3.21.4 + - doxygen/1.9.1 + - package: doxygen/1.9.1 + options: + - doxygen:enable_search=False + - ninja/1.10.2 + prebuilt_tools_configs: + - Debug + - Release + - Debug32 + - Release32 + + sunos: + common: &sparcCommon + build: + - missing + config: + Release: &sparcRelease + <<: *sparcCommon + build_folder: build-release + description: Sparc Release + profile_host: gcc-9-sparc-solaris + Debug: &sparcDebug + <<: *sparcRelease + build_folder: build-debug + description: Sparc Debug + settings: + - build_type=Debug + Release32: &sparcRelease32 + <<: *sparcRelease + build_folder: build-release-32 + description: Sparc Release 32 + profile_host: gcc-9-sparc32-solaris + Debug32: &sparcDebug32 + <<: *sparcRelease32 + build_folder: build-debug-32 + description: Sparc Debug 32 + settings: + - build_type=Debug + prebuilt_tools: + - cmake/3.21.4 + - doxygen/1.9.1 + - package: doxygen/1.9.1 + options: + - doxygen:enable_search=False + - ninja/1.10.2 + prebuilt_tools_configs: + - Debug + - Release + - Debug32 + - Release32 + byhost: # Configs based on hostname. # These are applied last. 
Consider this for only the most extreme cases From b79cd4190991df6c6f5f5e9664c3323ca39594c1 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 10 Jan 2022 14:37:18 -0600 Subject: [PATCH 049/173] Don't build Doxygen for 32-bit AIX - We don't build DLE there, so we don't need it --- .editorconfig | 4 ++++ dlproject.yaml | 10 +++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index e9dcd7b627cc6..650a5a16b0ac1 100644 --- a/.editorconfig +++ b/.editorconfig @@ -16,6 +16,10 @@ max_line_length = 200 tab_width = 2 indent_size = 2 +[dlproject.yaml] +tab_width = 4 +indent_size = 4 + [{Makefile,Makefile.am,Makefile.in,*.mak}] indent_style = tab indent_size = 8 diff --git a/dlproject.yaml b/dlproject.yaml index 6e37fc34f4042..eaa3a85c432dd 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -390,10 +390,18 @@ config: - build_type=Debug prebuilt_tools: - cmake/3.21.4 - - doxygen/1.9.1 + - package: doxygen/1.9.1 + # We don't build DLE on 32-bit AIX, so we don't need Doxygen + configs: + - Debug + - Release - package: doxygen/1.9.1 options: - doxygen:enable_search=False + # We don't build DLE on 32-bit AIX, so we don't need Doxygen + configs: + - Debug + - Release - ninja/1.10.2 prebuilt_tools_configs: - Debug From 9c0f5c9706a02ac1dd1406e69aaf63eecd1719d0 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 11 Jan 2022 15:37:11 -0600 Subject: [PATCH 050/173] dlproject.yaml: set build_type and arch for tools' build_requirements When building tools, the build_requirements of the tools don't have to be the same architecture, for instance, it's possible to use a 64-bit Release bison to build a 32-bit Debug Doxygen. Do this with per-package settings, because the recipes in conan-center-index don't really support split host/build profiles yet. This fixes problems like not being able to make a 32-bit Doxygen on Windows. --- dlproject.yaml | 201 ++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 165 insertions(+), 36 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index eaa3a85c432dd..b84b8b64dbef0 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -161,6 +161,13 @@ config: build: - missing config: + tools_settings: + # When building tools, the build_requirements of the tools can all + # be the release version for the main host architecture, provided + # they don't have a library component. + settings: + - m4:build_type=Release + - automake:build_type=Release Release: &macOSRelease <<: *macOSCommon build_folder: build-release @@ -175,12 +182,15 @@ config: - build_type=Debug ReleaseTool: &macOSReleaseTool <<: *macOSCommon + include: + - tools_settings build_folder: build-release description: macOS Release profile_host: - apple-clang-12.0-macos-10.9 DebugTool: &macOSDebugTool - <<: *macOSReleaseTool + include: + - ReleaseTool build_folder: build description: macOS Debug settings: @@ -204,6 +214,15 @@ config: build: - missing config: + tools_settings: + # When building tools, the build_requirements of the tools can all + # be the release version for the main host architecture, provided + # they don't have a library component. 
+ settings: + - m4:build_type=Release + - m4:arch=x86_64 + - automake:build_type=Release + - automake:arch=x86_64 Release: &redhatRelease <<: *redhatCommon build_folder: build-release @@ -232,27 +251,25 @@ config: - build_type=Debug ReleaseTool: &redhatReleaseTool <<: *redhatCommon + include: tools_settings build_folder: build-release-tool description: RedHat Release Tool profile_host: - devtoolset-7 DebugTool: &redhatDebugTool - <<: *redhatReleaseTool + include: ReleaseTool build_folder: build-debug-tool description: RedHat Debug Tool settings: - build_type=Debug ReleaseTool32: &redhatReleaseTool32 - <<: *redhatReleaseTool + include: ReleaseTool build_folder: build-release-tool-32 description: RedHat Release Tool 32 settings: - arch=x86 - - bison:arch=x86_64 - - flex:arch=x86_64 - - m4:arch=x86_64 DebugTool32: &redhatDebugTool32 - <<: *redhatReleaseTool32 + include: ReleaseTool32 build_folder: build-debug-tool-32 description: RedHat Debug Tool 32 settings: @@ -284,6 +301,13 @@ config: build: - missing config: + tools_settings: + # When building tools, the build_requirements of the tools can all + # be the release version for the main host architecture, provided + # they don't have a library component. + settings: + - m4:build_type=Release + - automake:build_type=Release Release: &redhatARMRelease <<: *redhatARMCommon build_folder: build-release @@ -296,6 +320,18 @@ config: description: RedHat Debug settings: - build_type=Debug + ReleaseTool: + build_folder: build-release-tool + description: RedHat Release + include: + - tools_settings + - Release + DebugTool: + build_folder: build-debug-tool + description: RedHat Debug + include: + - tools_settings + - Debug prebuilt_tools: - cmake/3.21.4 - doxygen/1.9.1 @@ -304,8 +340,8 @@ config: - doxygen:enable_search=False - ninja/1.10.2 prebuilt_tools_configs: - - Debug - - Release + - DebugTool + - ReleaseTool windows: # Can set the generator differently on a per-machine basis (but automatic is @@ -316,56 +352,92 @@ config: # Can override packager build_tools on a per-platform basis: # packager: # build_tools: build_tools_windows - common: &windowsCommon - profile_host: visual-studio-15 - build: - - missing config: - Release: &windowsRelease - <<: *windowsCommon + tools_settings: + settings: + # Use a 64-bit msys2 for all building + - msys2:build_type=Release + - msys2:arch=x86_64 + Release: build_folder: build-release description: Windows Release + profile_host: visual-studio-15 + build: + - missing Debug: &windowsDebug - <<: *windowsRelease + include: Release build_folder: build-debug description: Windows Debug settings: - build_type=Debug - Release32: &windowsRelease32 - <<: *windowsRelease + Release32: + include: Release build_folder: build-release-32 description: Windows Release 32 settings: - arch=x86 - Debug32: &windowsDebug32 - <<: *windowsRelease32 + Debug32: + include: Release32 build_folder: build-debug-32 description: Windows Debug 32 settings: - build_type=Debug + ReleaseTool: + build_folder: build-release-tool + description: Windows Release + include: + - tools_settings + - Release + DebugTool: + build_folder: build-debug-tool + description: Windows Debug Tool + include: + - tools_settings + - Debug + ReleaseTool32: + build_folder: build-release-32-tool + description: Windows Release 32 Tool + include: + - tools_settings + - Release32 + DebugTool32: + build_folder: build-debug-32-tool + description: Windows Debug 32 Tool + include: + - tools_settings + - Debug32 + default: *windowsDebug prebuilt_tools: - cmake/3.21.4 - package: 
doxygen/1.9.1 configs: - # Search doesn't build for 32-bits, because xapian-core requires msys, - # and msys complains about running on 32-bit - - Debug - - Release + # xapian-core doesn't work for cross-building x86_64 to x86 + - DebugTool + - ReleaseTool - package: doxygen/1.9.1 options: - doxygen:enable_search=False - ninja/1.10.2 prebuilt_tools_configs: - - Debug - - Release - - Debug32 - - Release32 + - DebugTool + - ReleaseTool + - DebugTool32 + - ReleaseTool32 aix: common: &aixCommon build: - missing config: + tools_settings: + # When building tools, the build_requirements of the tools can all + # be the release version for the main host architecture, provided + # they don't have a library component. + settings: + - m4:build_type=Release + - m4:arch=ppc64 + - automake:build_type=Release + - automake:arch=ppc64 Release: &aixRelease <<: *aixCommon build_folder: build-release @@ -388,6 +460,30 @@ config: description: AIX Debug 32 settings: - build_type=Debug + ReleaseTool: + build_folder: build-release-tool + description: AIX Release Tool + include: + - tools_settings + - Release + DebugTool: + build_folder: build-debug-tool + description: AIX Debug Tool + include: + - tools_settings + - Debug + ReleaseTool32: + build_folder: build-release-tool-32 + description: AIX Release 32 Tool 32 + include: + - tools_settings + - Release32 + DebugTool32: + build_folder: build-debug-tool-32 + description: AIX Debug 32 Tool + include: + - tools_settings + - Debug32 prebuilt_tools: - cmake/3.21.4 - package: doxygen/1.9.1 @@ -404,16 +500,25 @@ config: - Release - ninja/1.10.2 prebuilt_tools_configs: - - Debug - - Release - - Debug32 - - Release32 + - DebugTool + - ReleaseTool + - DebugTool32 + - ReleaseTool32 sunos: common: &sparcCommon build: - missing config: + tools_settings: + # When building tools, the build_requirements of the tools can all + # be the release version for the main host architecture, provided + # they don't have a library component. + settings: + - m4:build_type=Release + - m4:arch=sparcv9 + - automake:build_type=Release + - automake:arch=sparcv9 Release: &sparcRelease <<: *sparcCommon build_folder: build-release @@ -436,6 +541,30 @@ config: description: Sparc Debug 32 settings: - build_type=Debug + ReleaseTool: + build_folder: build-release-tool + description: Sparc Release Tool + include: + - tools_settings + - Release + DebugTool: + build_folder: build-debug-tool + description: Sparc Debug Tool + include: + - tools_settings + - Debug + ReleaseTool32: + build_folder: build-release-tool-32 + description: Sparc Release 32 Tool 32 + include: + - tools_settings + - Release32 + DebugTool32: + build_folder: build-debug-tool-32 + description: Sparc Debug 32 Tool + include: + - tools_settings + - Debug32 prebuilt_tools: - cmake/3.21.4 - doxygen/1.9.1 @@ -444,10 +573,10 @@ config: - doxygen:enable_search=False - ninja/1.10.2 prebuilt_tools_configs: - - Debug - - Release - - Debug32 - - Release32 + - DebugTool + - ReleaseTool + - DebugTool32 + - ReleaseTool32 byhost: # Configs based on hostname. From 7a9f0c17f1b3a6e71aeb47c488fdeb43fdbceed3 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 14 Jan 2022 12:49:18 -0600 Subject: [PATCH 051/173] Jenkinsfile: Avoid putting the customWorkspace in subdirectories - Replace slashes in the JOB_NAME with underscores. - Plays more nicely with the workspace cleaner, because the workspace directory is flat. 
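The Jenkinsfile change below is Groovy; as a rough Python equivalent (with a hypothetical job name) of what the `replaceAll('/', '_')` buys:

    def flat_workspace(job_name, suffix=''):
        # Groovy: customWorkspace "workspace/${JOB_NAME.replaceAll('/','_')}..."
        return f"workspace/{job_name.replace('/', '_')}{suffix}/"

    # e.g. flat_workspace('conan-center-index/PR-42', '_noarch')
    #   -> 'workspace/conan-center-index_PR-42_noarch/'
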
--- Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index ce63b2b4f6202..ef9506e76f349 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -39,7 +39,7 @@ pipeline { agent { node { label 'noarch-conan-center-index' - customWorkspace "workspace/${JOB_NAME}_noarch/" + customWorkspace "workspace/${JOB_NAME.replaceAll('/','_')}_noarch/" } } environment { @@ -169,7 +169,7 @@ pipeline { agent { node { label "${NODE}" - customWorkspace "workspace/${JOB_NAME}/" + customWorkspace "workspace/${JOB_NAME.replaceAll('/','_')}/" } } when { anyOf { From fbca8fa746ba90246f76078bd57f4039a67fba49 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 18 Jan 2022 17:22:35 -0600 Subject: [PATCH 052/173] Add the DL patch of SWIG to the prebuilt tools --- dlproject.yaml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index b84b8b64dbef0..aa669591c7684 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -167,6 +167,7 @@ config: # they don't have a library component. settings: - m4:build_type=Release + - autoconf:build_type=Release - automake:build_type=Release Release: &macOSRelease <<: *macOSCommon @@ -204,6 +205,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - swig/1.3.40+dl.1 prebuilt_tools_configs: - DebugTool - ReleaseTool @@ -221,6 +223,8 @@ config: settings: - m4:build_type=Release - m4:arch=x86_64 + - autoconf:build_type=Release + - autoconf:arch=x86_64 - automake:build_type=Release - automake:arch=x86_64 Release: &redhatRelease @@ -290,6 +294,7 @@ config: configs: - DebugTool - ReleaseTool + - swig/1.3.40+dl.1 prebuilt_tools_configs: - DebugTool - ReleaseTool @@ -307,6 +312,7 @@ config: # they don't have a library component. settings: - m4:build_type=Release + - autoconf:build_type=Release - automake:build_type=Release Release: &redhatARMRelease <<: *redhatARMCommon @@ -339,6 +345,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - swig/1.3.40+dl.1 prebuilt_tools_configs: - DebugTool - ReleaseTool @@ -358,6 +365,12 @@ config: # Use a 64-bit msys2 for all building - msys2:build_type=Release - msys2:arch=x86_64 + - autoconf:build_type=Release + - autoconf:arch=x86_64 + - automake:build_type=Release + - automake:arch=x86_64 + - m4:build_type=Release + - m4:arch=x86_64 Release: build_folder: build-release description: Windows Release @@ -418,6 +431,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - swig/1.3.40+dl.1 prebuilt_tools_configs: - DebugTool - ReleaseTool @@ -436,6 +450,8 @@ config: settings: - m4:build_type=Release - m4:arch=ppc64 + - autoconf:build_type=Release + - autoconf:arch=ppc64 - automake:build_type=Release - automake:arch=ppc64 Release: &aixRelease @@ -499,6 +515,11 @@ config: - Debug - Release - ninja/1.10.2 + - package: swig/1.3.40+dl.1 + # We don't build DLE on 32-bit AIX, so we don't need SWIG + configs: + - Debug + - Release prebuilt_tools_configs: - DebugTool - ReleaseTool @@ -517,6 +538,8 @@ config: settings: - m4:build_type=Release - m4:arch=sparcv9 + - autoconf:build_type=Release + - autoconf:arch=sparcv9 - automake:build_type=Release - automake:arch=sparcv9 Release: &sparcRelease @@ -572,6 +595,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - swig/1.3.40+dl.1 prebuilt_tools_configs: - DebugTool - ReleaseTool From 665fd2aa01b6e849d60b4b6c75c3f9f44bfb76f1 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Tue, 18 Jan 2022 19:41:09 -0600 Subject: [PATCH 053/173] dlproject.yaml: Use Visual Studio 2019 to support long paths - Turn the compiler.toolset back to v141, which is Visual Studio 2017 --- dlproject.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dlproject.yaml b/dlproject.yaml index aa669591c7684..28edae310afa0 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -374,7 +374,9 @@ config: Release: build_folder: build-release description: Windows Release - profile_host: visual-studio-15 + profile_host: visual-studio-16 + settings: + compiler.toolset=v141 build: - missing Debug: &windowsDebug From 19dd24d4fa2a996dcfd3528e4bff292494fe43de Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 19 Jan 2022 14:07:14 -0600 Subject: [PATCH 054/173] Don't try to upload msys2; it's always corrupt msys2 modifies its contents when it's used, which means the package manifest won't match, and attempting an upload will result in failure. --- tests/test_tools.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/test_tools.py b/tests/test_tools.py index 42ee67beb46d5..153ffe031c5e2 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -97,6 +97,10 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_too for install in create_data['installed']: recipe_id = install['recipe']['id'] ref = recipe_id.split('#')[0] + package = ref.split('/')[0] + if package == 'msys2': + print(f'Not uploading {ref}, because it tends to modify itself during use.') + continue args = ['conan', 'upload', '-r', upload_to, f'{ref}@', '--all', '--check'] print(f'Uploading {ref}: {" ".join(args)}') subprocess.run(args, check=True) From 22a18588721a8f4b46d0cd57ff4bc68441a52d4f Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 19 Jan 2022 14:53:06 -0600 Subject: [PATCH 055/173] Solaris: Manually add CMake 3.19 to the path --- dlproject.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index 28edae310afa0..7a539dbd6d169 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -549,6 +549,8 @@ config: build_folder: build-release description: Sparc Release profile_host: gcc-9-sparc-solaris + env_host: + - PATH=[/opt/cmake-3.19/bin] Debug: &sparcDebug <<: *sparcRelease build_folder: build-debug From c5464d562e4be82d651950bc6a25d43e5457852d Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 20 Jan 2022 15:19:01 -0600 Subject: [PATCH 056/173] Build 64-bit tools on RHEL 6 and 32-bit tools on RHEL 7 - For some reason, building the 32-bit tools doesn't work on RHEL 6 - But, we only use Conan tools for 64-bit DLE on RHEL 6, for APDFL 15 - So, build the 32-bit tools on RHEL 7 - Make use of YAML aliases to avoid having to repeat the whole set of RedHat Intel configs. --- Jenkinsfile | 12 ++++++------ dlproject.yaml | 22 ++++++++++------------ 2 files changed, 16 insertions(+), 18 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index ef9506e76f349..d9d21f88bf3e0 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -3,8 +3,8 @@ def ENV_LOC = [:] // that uses an earlier glibc, so the tools will run on every machine. 
def BUILD_TOOLS=[ 'aix-conan-center-index': true, - 'linux-x64-conan-center-index': false, - 'linux-x64-tools-conan-center-index': true, + 'linux-x64-rhws6-conan-center-index': true, + 'linux-x64-rhel7-conan-center-index': true, 'linux-arm-conan-center-index': true, 'mac-x64-conan-center-index': true, 'mac-arm-conan-center-index': true, @@ -16,8 +16,8 @@ pipeline { choice(name: 'PLATFORM_FILTER', choices: ['all', 'aix-conan-center-index', - 'linux-x64-conan-center-index', - 'linux-x64-tools-conan-center-index', + 'linux-x64-rhws6-conan-center-index', + 'linux-x64-rhel7-conan-center-index', 'linux-arm-conan-center-index', 'mac-x64-conan-center-index', 'mac-arm-conan-center-index', @@ -180,8 +180,8 @@ pipeline { axis { name 'NODE' values 'aix-conan-center-index', - 'linux-x64-conan-center-index', - 'linux-x64-tools-conan-center-index', + 'linux-x64-rhws6-conan-center-index', + 'linux-x64-rhel7-conan-center-index', 'linux-arm-conan-center-index', 'mac-x64-conan-center-index', 'mac-arm-conan-center-index', diff --git a/dlproject.yaml b/dlproject.yaml index 7a539dbd6d169..2610e0bdfef20 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -211,11 +211,11 @@ config: - ReleaseTool - redhat-x86_64: + redhat-6-x86_64: common: &redhatCommon build: - missing - config: + config: &redhat6Config tools_settings: # When building tools, the build_requirements of the tools can all # be the release version for the main host architecture, provided @@ -278,26 +278,24 @@ config: description: RedHat Debug Tool 32 settings: - build_type=Debug - prebuilt_tools: + prebuilt_tools: &redhat6Tools - package: cmake/3.21.4 - configs: - - DebugTool - - ReleaseTool - package: doxygen/1.9.1 - configs: - - DebugTool - - ReleaseTool - package: doxygen/1.9.1 options: - doxygen:enable_search=False - package: ninja/1.10.2 - configs: - - DebugTool - - ReleaseTool - swig/1.3.40+dl.1 prebuilt_tools_configs: + # Build 64-bit tools on RHEL 6, because they're used for 64-bit DLE for APDFL 15 - DebugTool - ReleaseTool + + redhat-7-x86_64: + config: *redhat6Config + prebuilt_tools: *redhat6Tools + prebuilt_tools_configs: + # Build 32-bit tools on RHEL 7; they don't build on RHEL 6 - DebugTool32 - ReleaseTool32 From 775b61f042d4ec77dcd537f12d92d454a4d7cf3d Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 21 Jan 2022 13:37:10 -0600 Subject: [PATCH 057/173] dlproject.yaml: Remove tools_settings in favor of updated global profiles --- dlproject.yaml | 77 -------------------------------------------------- 1 file changed, 77 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 2610e0bdfef20..b5e2a1e6c4cc3 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -161,14 +161,6 @@ config: build: - missing config: - tools_settings: - # When building tools, the build_requirements of the tools can all - # be the release version for the main host architecture, provided - # they don't have a library component. - settings: - - m4:build_type=Release - - autoconf:build_type=Release - - automake:build_type=Release Release: &macOSRelease <<: *macOSCommon build_folder: build-release @@ -183,8 +175,6 @@ config: - build_type=Debug ReleaseTool: &macOSReleaseTool <<: *macOSCommon - include: - - tools_settings build_folder: build-release description: macOS Release profile_host: @@ -216,17 +206,6 @@ config: build: - missing config: &redhat6Config - tools_settings: - # When building tools, the build_requirements of the tools can all - # be the release version for the main host architecture, provided - # they don't have a library component. 
- settings: - - m4:build_type=Release - - m4:arch=x86_64 - - autoconf:build_type=Release - - autoconf:arch=x86_64 - - automake:build_type=Release - - automake:arch=x86_64 Release: &redhatRelease <<: *redhatCommon build_folder: build-release @@ -255,7 +234,6 @@ config: - build_type=Debug ReleaseTool: &redhatReleaseTool <<: *redhatCommon - include: tools_settings build_folder: build-release-tool description: RedHat Release Tool profile_host: @@ -304,14 +282,6 @@ config: build: - missing config: - tools_settings: - # When building tools, the build_requirements of the tools can all - # be the release version for the main host architecture, provided - # they don't have a library component. - settings: - - m4:build_type=Release - - autoconf:build_type=Release - - automake:build_type=Release Release: &redhatARMRelease <<: *redhatARMCommon build_folder: build-release @@ -328,13 +298,11 @@ config: build_folder: build-release-tool description: RedHat Release include: - - tools_settings - Release DebugTool: build_folder: build-debug-tool description: RedHat Debug include: - - tools_settings - Debug prebuilt_tools: - cmake/3.21.4 @@ -358,17 +326,6 @@ config: # packager: # build_tools: build_tools_windows config: - tools_settings: - settings: - # Use a 64-bit msys2 for all building - - msys2:build_type=Release - - msys2:arch=x86_64 - - autoconf:build_type=Release - - autoconf:arch=x86_64 - - automake:build_type=Release - - automake:arch=x86_64 - - m4:build_type=Release - - m4:arch=x86_64 Release: build_folder: build-release description: Windows Release @@ -399,25 +356,21 @@ config: build_folder: build-release-tool description: Windows Release include: - - tools_settings - Release DebugTool: build_folder: build-debug-tool description: Windows Debug Tool include: - - tools_settings - Debug ReleaseTool32: build_folder: build-release-32-tool description: Windows Release 32 Tool include: - - tools_settings - Release32 DebugTool32: build_folder: build-debug-32-tool description: Windows Debug 32 Tool include: - - tools_settings - Debug32 default: *windowsDebug prebuilt_tools: @@ -443,17 +396,6 @@ config: build: - missing config: - tools_settings: - # When building tools, the build_requirements of the tools can all - # be the release version for the main host architecture, provided - # they don't have a library component. - settings: - - m4:build_type=Release - - m4:arch=ppc64 - - autoconf:build_type=Release - - autoconf:arch=ppc64 - - automake:build_type=Release - - automake:arch=ppc64 Release: &aixRelease <<: *aixCommon build_folder: build-release @@ -480,25 +422,21 @@ config: build_folder: build-release-tool description: AIX Release Tool include: - - tools_settings - Release DebugTool: build_folder: build-debug-tool description: AIX Debug Tool include: - - tools_settings - Debug ReleaseTool32: build_folder: build-release-tool-32 description: AIX Release 32 Tool 32 include: - - tools_settings - Release32 DebugTool32: build_folder: build-debug-tool-32 description: AIX Debug 32 Tool include: - - tools_settings - Debug32 prebuilt_tools: - cmake/3.21.4 @@ -531,17 +469,6 @@ config: build: - missing config: - tools_settings: - # When building tools, the build_requirements of the tools can all - # be the release version for the main host architecture, provided - # they don't have a library component. 
- settings: - - m4:build_type=Release - - m4:arch=sparcv9 - - autoconf:build_type=Release - - autoconf:arch=sparcv9 - - automake:build_type=Release - - automake:arch=sparcv9 Release: &sparcRelease <<: *sparcCommon build_folder: build-release @@ -570,25 +497,21 @@ config: build_folder: build-release-tool description: Sparc Release Tool include: - - tools_settings - Release DebugTool: build_folder: build-debug-tool description: Sparc Debug Tool include: - - tools_settings - Debug ReleaseTool32: build_folder: build-release-tool-32 description: Sparc Release 32 Tool 32 include: - - tools_settings - Release32 DebugTool32: build_folder: build-debug-tool-32 description: Sparc Debug 32 Tool include: - - tools_settings - Debug32 prebuilt_tools: - cmake/3.21.4 From 007329a048d3f8211a4a44f7a34dccc7320261a7 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 21 Jan 2022 13:39:41 -0600 Subject: [PATCH 058/173] dlproject.yaml: Only build 64-bit Release tools - Profiles in conan-config will use only 64-bit Release tools in project builds. --- dlproject.yaml | 30 ------------------------------ 1 file changed, 30 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index b5e2a1e6c4cc3..1b4271ae1b96f 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -197,7 +197,6 @@ config: - ninja/1.10.2 - swig/1.3.40+dl.1 prebuilt_tools_configs: - - DebugTool - ReleaseTool @@ -266,16 +265,10 @@ config: - swig/1.3.40+dl.1 prebuilt_tools_configs: # Build 64-bit tools on RHEL 6, because they're used for 64-bit DLE for APDFL 15 - - DebugTool - ReleaseTool redhat-7-x86_64: config: *redhat6Config - prebuilt_tools: *redhat6Tools - prebuilt_tools_configs: - # Build 32-bit tools on RHEL 7; they don't build on RHEL 6 - - DebugTool32 - - ReleaseTool32 redhat-aarch64: common: &redhatARMCommon @@ -313,7 +306,6 @@ config: - ninja/1.10.2 - swig/1.3.40+dl.1 prebuilt_tools_configs: - - DebugTool - ReleaseTool windows: @@ -378,7 +370,6 @@ config: - package: doxygen/1.9.1 configs: # xapian-core doesn't work for cross-building x86_64 to x86 - - DebugTool - ReleaseTool - package: doxygen/1.9.1 options: @@ -386,10 +377,7 @@ config: - ninja/1.10.2 - swig/1.3.40+dl.1 prebuilt_tools_configs: - - DebugTool - ReleaseTool - - DebugTool32 - - ReleaseTool32 aix: common: &aixCommon @@ -441,28 +429,13 @@ config: prebuilt_tools: - cmake/3.21.4 - package: doxygen/1.9.1 - # We don't build DLE on 32-bit AIX, so we don't need Doxygen - configs: - - Debug - - Release - package: doxygen/1.9.1 options: - doxygen:enable_search=False - # We don't build DLE on 32-bit AIX, so we don't need Doxygen - configs: - - Debug - - Release - ninja/1.10.2 - package: swig/1.3.40+dl.1 - # We don't build DLE on 32-bit AIX, so we don't need SWIG - configs: - - Debug - - Release prebuilt_tools_configs: - - DebugTool - ReleaseTool - - DebugTool32 - - ReleaseTool32 sunos: common: &sparcCommon @@ -522,10 +495,7 @@ config: - ninja/1.10.2 - swig/1.3.40+dl.1 prebuilt_tools_configs: - - DebugTool - ReleaseTool - - DebugTool32 - - ReleaseTool32 byhost: # Configs based on hostname. From 386bfc51e2e3162c4829634d9a140815a0f8f8ad Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Sun, 23 Jan 2022 17:28:11 -0600 Subject: [PATCH 059/173] dlproject.yaml: Don't build CMake with OpenSSL for Solaris or AIX - openssl doesn't build on some of those platforms. - We probably don't use features of CMake that need OpenSSL. 
--- dlproject.yaml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 1b4271ae1b96f..f2d36e03f7275 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -427,7 +427,9 @@ config: include: - Debug32 prebuilt_tools: - - cmake/3.21.4 + - package: cmake/3.21.4 + options: + - cmake:with_openssl=False - package: doxygen/1.9.1 - package: doxygen/1.9.1 options: @@ -487,7 +489,9 @@ config: include: - Debug32 prebuilt_tools: - - cmake/3.21.4 + - package: cmake/3.21.4 + options: + - cmake:with_openssl=False - doxygen/1.9.1 - package: doxygen/1.9.1 options: From b2633a109f2953ede46deca7b3ee54f887d5c146 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 24 Jan 2022 00:24:09 -0600 Subject: [PATCH 060/173] Combine stderr with stdout when building tools - Makes it easier to line up what's going on in the HTML report for pytest. --- tests/test_tools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_tools.py b/tests/test_tools.py index 153ffe031c5e2..6b1c0fc0e4e92 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -88,7 +88,7 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_too args = ['conan', 'create', tool_recipe_folder, f'{prebuilt_tool.package}@', '--update', '--json', str(create_json)] + config.install_options() + tool_options + force_build_options print(f'Creating package {prebuilt_tool.package}: {" ".join(args)}') - subprocess.run(args, check=True) + subprocess.run(args, check=True, stderr=subprocess.STDOUT) if upload_to: # upload packages mentioned in the create.json, which includes requirements used to build # this package, if in fact it had to be built. @@ -103,4 +103,4 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_too continue args = ['conan', 'upload', '-r', upload_to, f'{ref}@', '--all', '--check'] print(f'Uploading {ref}: {" ".join(args)}') - subprocess.run(args, check=True) + subprocess.run(args, check=True, stderr=subprocess.STDOUT) From 85a27402cd9d19a87265619d8f64b2dd1f900268 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 24 Jan 2022 09:24:01 -0600 Subject: [PATCH 061/173] dlproject.yaml: Don't build Doxygen with search on AIX - DLE doesn't use it. - xapian-core doesn't build on AIX --- dlproject.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/dlproject.yaml b/dlproject.yaml index f2d36e03f7275..44e79dc1206ab 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -430,7 +430,6 @@ config: - package: cmake/3.21.4 options: - cmake:with_openssl=False - - package: doxygen/1.9.1 - package: doxygen/1.9.1 options: - doxygen:enable_search=False From 85a179f24c9686d514ab2719c310915aaacbc684 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Mon, 24 Jan 2022 13:10:15 -0600 Subject: [PATCH 062/173] dlproject.yaml: Don't force -std=c99 on tools on Solaris --- dlproject.yaml | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 44e79dc1206ab..1c0415be306a9 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -472,21 +472,31 @@ config: description: Sparc Release Tool include: - Release + env: + # Override to not contain -std=c99, which breaks m4 by turning off the 'asm' keyword + - CFLAGS=-pthread -m64 DebugTool: build_folder: build-debug-tool description: Sparc Debug Tool include: - - Debug + - ReleaseTool + settings: + - build_type=Debug ReleaseTool32: build_folder: build-release-tool-32 description: Sparc Release 32 Tool 32 include: - Release32 + env: + # Override to not contain -std=c99, which breaks m4 by turning off the 'asm' keyword + - CFLAGS="-pthread -m32" DebugTool32: build_folder: build-debug-tool-32 description: Sparc Debug 32 Tool include: - - Debug32 + - DebugTool32 + settings: + - build_type=Debug prebuilt_tools: - package: cmake/3.21.4 options: From e9537bc71d6c44810e9602c012db18112fd6b468 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 27 Jan 2022 16:50:04 -0600 Subject: [PATCH 063/173] cmake: Make requirements private Having the requirements private avoids building openssl when the cmake package is cached or downloaded. --- recipes/cmake/3.x.x/conanfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/recipes/cmake/3.x.x/conanfile.py b/recipes/cmake/3.x.x/conanfile.py index 09216acf50522..f58061bed89f9 100644 --- a/recipes/cmake/3.x.x/conanfile.py +++ b/recipes/cmake/3.x.x/conanfile.py @@ -33,7 +33,7 @@ def config_options(self): def requirements(self): if self.options.with_openssl: - self.requires("openssl/1.1.1o") + self.requires("openssl/1.1.1o", private=True) def validate(self): if self.settings.os == "Macos" and self.settings.arch == "x86": From 548fcb64bb375076704db29457e373f6e14588ca Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 27 Jan 2022 16:50:56 -0600 Subject: [PATCH 064/173] doxygen: Make requirements private Having the requirements private avoids building xapian-core or zlib when the doxygen package is cached or downloaded. --- recipes/doxygen/all/conanfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/recipes/doxygen/all/conanfile.py b/recipes/doxygen/all/conanfile.py index 6b9735d4d82bb..f9f604cfaf36a 100644 --- a/recipes/doxygen/all/conanfile.py +++ b/recipes/doxygen/all/conanfile.py @@ -54,8 +54,8 @@ def configure(self): del self.settings.compiler.cppstd def requirements(self): if self.options.enable_search: - self.requires("xapian-core/1.4.18") - self.requires("zlib/1.2.12") + self.requires("xapian-core/1.4.18", private=True) + self.requires("zlib/1.2.12", private=True) def build_requirements(self): if self._settings_build.os == "Windows": From a8f2920e04c3ab0f72589fc6a8e89540c80c17a7 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 27 Jan 2022 16:52:06 -0600 Subject: [PATCH 065/173] swig: Make requirements private Having the requirements private avoids building pcre when the swig package is cached or downloaded. 
--- recipes/swig/all/conanfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/recipes/swig/all/conanfile.py b/recipes/swig/all/conanfile.py index 50a789da329dd..91e511ed15761 100644 --- a/recipes/swig/all/conanfile.py +++ b/recipes/swig/all/conanfile.py @@ -25,7 +25,7 @@ def _settings_build(self): return getattr(self, "settings_build", self.settings) def requirements(self): - self.requires("pcre/8.45") + self.requires("pcre/8.45", private=True) def build_requirements(self): if self._settings_build.os == "Windows" and not tools.get_env("CONAN_BASH_PATH"): From d525a76961953bbb4416c5065b4ba6ea6c7222b0 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 27 Jan 2022 17:12:40 -0600 Subject: [PATCH 066/173] dlproject.yaml: macOS: Use Xcode13, separate config for arm64 --- dlproject.yaml | 54 +++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 51 insertions(+), 3 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 1c0415be306a9..9153a4d32e07c 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -146,7 +146,7 @@ config: # Dictionary keys override the base # Lists are appended - macos: + macos-x86_64: # Per-platform profiles can be set here. Profiles can be a string or a # list of strings as seen below. By using a list, the build_tools # profile can be added, and adds the build tools for development builds. @@ -166,7 +166,7 @@ config: build_folder: build-release description: macOS Release profile_host: - - apple-clang-12.0-macos-10.9-cppstd-14 + - apple-clang-13.0-intel-cppstd-14 Debug: &macOSDebug <<: *macOSRelease build_folder: build @@ -178,7 +178,7 @@ config: build_folder: build-release description: macOS Release profile_host: - - apple-clang-12.0-macos-10.9 + - apple-clang-13.0-intel DebugTool: &macOSDebugTool include: - ReleaseTool @@ -200,6 +200,54 @@ config: - ReleaseTool + macos-arm64: + # Per-platform profiles can be set here. Profiles can be a string or a + # list of strings as seen below. By using a list, the build_tools + # profile can be added, and adds the build tools for development builds. + # + # profile: + # - apple-clang-10.0-macos-10.9 + # - build_tools + config: + Release: + <<: *macOSCommon + build_folder: build-release + description: macOS Release + profile_host: + - apple-clang-13.0-arm-cppstd-14 + Debug: + <<: *macOSRelease + build_folder: build + description: macOS Debug + settings: + - build_type=Debug + ReleaseTool: + <<: *macOSCommon + build_folder: build-release + description: macOS Release + profile_host: + - apple-clang-13.0-arm + DebugTool: + include: + - ReleaseTool + build_folder: build + description: macOS Debug + settings: + - build_type=Debug + # If the entry is a string, it's taken to be the package name, + # else the entry can be a dictionary of package name and options + prebuilt_tools: + - cmake/3.21.4 + - doxygen/1.9.1 + - package: doxygen/1.9.1 + options: + - doxygen:enable_search=False + - ninja/1.10.2 + - swig/1.3.40+dl.1 + prebuilt_tools_configs: + - ReleaseTool + + redhat-6-x86_64: common: &redhatCommon build: From cac2a98db3d3c74a5b535434cab354dc115e4a8a Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Thu, 27 Jan 2022 19:42:54 -0600 Subject: [PATCH 067/173] Add b2 to the list of tools being built --- dlproject.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index 9153a4d32e07c..58b058799cab5 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -195,6 +195,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - b2/4.5.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -243,6 +244,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - b2/4.5.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -310,6 +312,7 @@ config: options: - doxygen:enable_search=False - package: ninja/1.10.2 + - b2/4.5.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: # Build 64-bit tools on RHEL 6, because they're used for 64-bit DLE for APDFL 15 @@ -352,6 +355,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - b2/4.5.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -423,6 +427,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - b2/4.5.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -482,6 +487,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - b2/4.5.0 - package: swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -554,6 +560,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - b2/4.5.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool From 385fcf52fe699d52d4616fdfea76f42f4bf7168e Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 7 Feb 2022 15:58:42 -0600 Subject: [PATCH 068/173] Add the innoextract tool on Windows - Used by pdfl18_installer manifest creation --- dlproject.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/dlproject.yaml b/dlproject.yaml index 58b058799cab5..74c94eacf44b9 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -429,6 +429,7 @@ config: - ninja/1.10.2 - b2/4.5.0 - swig/1.3.40+dl.1 + - innoextract/1.9.0 prebuilt_tools_configs: - ReleaseTool From fae1b25ca68d34607a42ddfbd4b99e1a7524c159 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 10 Feb 2022 13:00:47 -0600 Subject: [PATCH 069/173] Use bash from msys2 on Windows to avoid WSL If WSL is installed, Conan will find the bash.exe in C:\Windows\System32, and use that to decide what subsystem it's in, and pick WSL. We want to build Conan Center Index recipes for msys2 (they're usually set up for that on Windows), so install msys2, find its binary directory, and tell Conan to use that bash.exe. 
--- tests/test_tools.py | 51 +++++++++++++++++++++++++++++++++++++++------ 1 file changed, 45 insertions(+), 6 deletions(-) diff --git a/tests/test_tools.py b/tests/test_tools.py index 6b1c0fc0e4e92..1f4386e18d2aa 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -1,4 +1,6 @@ import json +import os +import platform import subprocess from typing import NamedTuple, List @@ -42,9 +44,8 @@ def prebuilt_tool_config_name(request): return request.param -@pytest.fixture(scope='package') -def prebuilt_tool_config(prebuilt_tool_config_name): - config = Config.from_name(prebuilt_tool_config_name) +def config_from_name(config_name): + config = Config.from_name(config_name) config.validate() config = config.normalize() @@ -53,15 +54,53 @@ def prebuilt_tool_config(prebuilt_tool_config_name): return config +@pytest.fixture(scope='package') +def prebuilt_tool_config(prebuilt_tool_config_name): + return config_from_name(prebuilt_tool_config_name) + + +@pytest.fixture(scope='package') +def release_tool_config(): + return config_from_name('ReleaseTool') + + @pytest.fixture(scope='package') def tool_recipe_folder(prebuilt_tool): package, version = prebuilt_tool.package.split('/') return recipes.versions_to_folders(package)[version] +@pytest.fixture(scope='package') +def msys_env(release_tool_config, tmpdir_factory): + if platform.system() == 'Windows': + msys2_dir = tmpdir_factory.mktemp('msys2_install') + args = ['conan', 'install', 'msys2/cci.latest@', '-if', msys2_dir, '-g', 'json'] + args.extend(release_tool_config.install_options()) + subprocess.run(args, check=True) + with open(msys2_dir / 'conanbuildinfo.json', 'r') as json_file: + conanbuildinfo = json.load(json_file) + return conanbuildinfo['deps_env_info'] + + +@pytest.fixture(scope='package') +def msys_bin(msys_env): + """Return the value of MSYS_BIN from the msys2 package, or None if not on Windows""" + return (msys_env or {}).get('MSYS_BIN') + + +@pytest.fixture(scope='package') +def conan_env(msys_bin): + """Create an environment with extra variables for running Conan. This may include + setting a path to MSYS2 bash so that Conan doesn't try hooking into WSL (if installed).""" + env = os.environ.copy() + if msys_bin: + env['CONAN_BASH_PATH'] = os.path.join(msys_bin, 'bash.exe') + return env + + class TestBuildTools(object): def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_tool_config, tool_recipe_folder, - upload_to, force_build, tmp_path): + upload_to, force_build, tmp_path, conan_env): if prebuilt_tool.configs and prebuilt_tool_config_name not in prebuilt_tool.configs: pytest.skip(f'Skipping build because config named {prebuilt_tool_config_name} is not in the list of ' f'configs for this package: {", ".join(prebuilt_tool.configs)}') @@ -88,7 +127,7 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_too args = ['conan', 'create', tool_recipe_folder, f'{prebuilt_tool.package}@', '--update', '--json', str(create_json)] + config.install_options() + tool_options + force_build_options print(f'Creating package {prebuilt_tool.package}: {" ".join(args)}') - subprocess.run(args, check=True, stderr=subprocess.STDOUT) + subprocess.run(args, check=True, stderr=subprocess.STDOUT, env=conan_env) if upload_to: # upload packages mentioned in the create.json, which includes requirements used to build # this package, if in fact it had to be built. 
@@ -103,4 +142,4 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_too continue args = ['conan', 'upload', '-r', upload_to, f'{ref}@', '--all', '--check'] print(f'Uploading {ref}: {" ".join(args)}') - subprocess.run(args, check=True, stderr=subprocess.STDOUT) + subprocess.run(args, check=True, stderr=subprocess.STDOUT, env=conan_env) From a015141de8933f3609167c10db242fcb2e818a35 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 21 Feb 2022 10:34:43 -0600 Subject: [PATCH 070/173] Update to b2/4.7.1 to fix problems with build.sh on some systems - It's also the one used by the latest Conan recipes from upstream. --- dlproject.yaml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 74c94eacf44b9..1adcbbb99e718 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -195,7 +195,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.5.0 + - b2/4.7.1 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -244,7 +244,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.5.0 + - b2/4.7.1 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -312,7 +312,7 @@ config: options: - doxygen:enable_search=False - package: ninja/1.10.2 - - b2/4.5.0 + - b2/4.7.1 - swig/1.3.40+dl.1 prebuilt_tools_configs: # Build 64-bit tools on RHEL 6, because they're used for 64-bit DLE for APDFL 15 @@ -355,7 +355,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.5.0 + - b2/4.7.1 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -427,7 +427,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.5.0 + - b2/4.7.1 - swig/1.3.40+dl.1 - innoextract/1.9.0 prebuilt_tools_configs: @@ -488,7 +488,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.5.0 + - b2/4.7.1 - package: swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -561,7 +561,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.5.0 + - b2/4.7.1 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool From e567896f6edd0b4ef3cb83a10de6b93e30523758 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 22 Feb 2022 00:23:32 -0600 Subject: [PATCH 071/173] Windows: Don't use compiler toolset when building tools --- dlproject.yaml | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 1adcbbb99e718..f440fb0211cae 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -399,23 +399,28 @@ config: ReleaseTool: build_folder: build-release-tool description: Windows Release - include: - - Release + profile_host: visual-studio-16 DebugTool: build_folder: build-debug-tool description: Windows Debug Tool include: - - Debug + - ReleaseTool + settings: + - build-type=Debug ReleaseTool32: build_folder: build-release-32-tool description: Windows Release 32 Tool include: - - Release32 + - ReleaseTool + settings: + - arch=x86 DebugTool32: build_folder: build-debug-32-tool description: Windows Debug 32 Tool include: - - Debug32 + - ReleaseTool32 + settings: + - build-type=Debug default: *windowsDebug prebuilt_tools: - cmake/3.21.4 From f4750c1f147cd5c9a4bc7417e77d730cccad8517 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 8 Mar 2022 17:26:00 -0600 Subject: [PATCH 072/173] Build msys2 if necessary, and upload it if built - If it's just built, then it hasn't been polluted yet, and it can be uploaded. 
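The hunk below implements this by asking 'conan install --build missing' for a JSON report and checking whether the msys2 binary package was actually built in this run. A rough sketch of that check, assuming the Conan 1.x install-report layout used in the diff ('installed' entries carrying a recipe id and per-package 'built' flags):

    import json
    import subprocess

    def upload_msys2_if_built(install_json_path, upload_to):
        # Only upload msys2 when this run built it, i.e. while it is still unpolluted.
        with open(install_json_path) as json_file:
            installed = json.load(json_file)['installed']
        for entry in installed:
            ref = entry['recipe']['id'].split('#')[0]  # drop the recipe revision
            if ref.split('/')[0] != 'msys2':
                continue
            if upload_to and any(pkg['built'] for pkg in entry['packages']):
                args = ['conan', 'upload', '-r', upload_to, f'{ref}@', '--all', '--check']
                subprocess.run(args, check=True, stderr=subprocess.STDOUT)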
--- tests/conftest.py | 2 +- tests/test_tools.py | 23 +++++++++++++++++++++-- 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index a708db4adcba4..aabc390b86016 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,7 @@ def pytest_addoption(parser): help='Force a build of the package or the package with its requirements') -@pytest.fixture +@pytest.fixture(scope='package') def upload_to(request): return request.config.getoption('--upload-to') diff --git a/tests/test_tools.py b/tests/test_tools.py index 1f4386e18d2aa..70c3d8c902a36 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -71,12 +71,31 @@ def tool_recipe_folder(prebuilt_tool): @pytest.fixture(scope='package') -def msys_env(release_tool_config, tmpdir_factory): +def msys_env(release_tool_config, tmpdir_factory, upload_to): if platform.system() == 'Windows': msys2_dir = tmpdir_factory.mktemp('msys2_install') - args = ['conan', 'install', 'msys2/cci.latest@', '-if', msys2_dir, '-g', 'json'] + install_json = msys2_dir / 'install.json' + args = ['conan', 'install', 'msys2/cci.latest@', '-if', msys2_dir, '-g', 'json', '--build', 'missing', + '-j', install_json] args.extend(release_tool_config.install_options()) subprocess.run(args, check=True) + + # Upload msys2 if it was built, since it should still be clean + with open(install_json, 'r') as json_file: + install_data = json.load(json_file) + for install in install_data['installed']: + recipe_id = install['recipe']['id'] + ref = recipe_id.split('#')[0] + package = ref.split('/')[0] + if package == 'msys2': + built = False + for package in install['packages']: + built = built or package['built'] + if built and upload_to: + args = ['conan', 'upload', '-r', upload_to, f'{ref}@', '--all', '--check'] + print(f'Uploading {ref}: {" ".join(args)}') + subprocess.run(args, check=True, stderr=subprocess.STDOUT) + with open(msys2_dir / 'conanbuildinfo.json', 'r') as json_file: conanbuildinfo = json.load(json_file) return conanbuildinfo['deps_env_info'] From 6ada093beaeec0b577bea545b9be0bc6b4542be8 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Mon, 4 Apr 2022 10:04:17 -0500 Subject: [PATCH 073/173] Update b2 to 4.8.0 - The Boost recipe was updated --- dlproject.yaml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index f440fb0211cae..cd1f5cf9e8e08 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -195,7 +195,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.7.1 + - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -244,7 +244,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.7.1 + - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -312,7 +312,7 @@ config: options: - doxygen:enable_search=False - package: ninja/1.10.2 - - b2/4.7.1 + - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: # Build 64-bit tools on RHEL 6, because they're used for 64-bit DLE for APDFL 15 @@ -355,7 +355,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.7.1 + - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -432,7 +432,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.7.1 + - b2/4.8.0 - swig/1.3.40+dl.1 - innoextract/1.9.0 prebuilt_tools_configs: @@ -493,7 +493,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.7.1 + - b2/4.8.0 - package: swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool @@ -566,7 +566,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 - - b2/4.7.1 + - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: - ReleaseTool From cded9b8c5151dd235889d1d9e23f36d26e3e0531 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 7 Jun 2022 20:17:55 -0500 Subject: [PATCH 074/173] mkenv.py: This project requires Python 3.9 - For features of concurrent.Executor --- mkenv.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mkenv.py b/mkenv.py index 043845ea3f320..a7359e040955c 100755 --- a/mkenv.py +++ b/mkenv.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python3.9 # -*- coding: utf-8 -*- import string import sys @@ -9,9 +9,9 @@ import platform import argparse -if sys.version_info[:2] < (3, 6): - # Don't allow anything but Python 3.6 or higher - raise SystemError("Only Python 3.6+ is allowed") +if sys.version_info[:2] < (3, 9): + # Don't allow anything but Python 3.9 or higher + raise SystemError("Only Python 3.9+ is allowed") HERE = os.path.dirname(os.path.abspath(__file__)) join = os.path.join From 3162404b1b40e9bbf3dcfef7555d01ae9c57bb3a Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Wed, 15 Jun 2022 14:22:21 -0500 Subject: [PATCH 075/173] Add Ninja 1.11.0 --- dlproject.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index cd1f5cf9e8e08..36c329c3839bc 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -195,6 +195,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - ninja/1.11.0 - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -244,6 +245,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - ninja/1.11.0 - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -312,6 +314,7 @@ config: options: - doxygen:enable_search=False - package: ninja/1.10.2 + - ninja/1.11.0 - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -355,6 +358,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - ninja/1.11.0 - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -432,6 +436,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - ninja/1.11.0 - b2/4.8.0 - swig/1.3.40+dl.1 - innoextract/1.9.0 @@ -493,6 +498,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - ninja/1.11.0 - b2/4.8.0 - package: swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -566,6 +572,7 @@ config: options: - doxygen:enable_search=False - ninja/1.10.2 + - ninja/1.11.0 - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: From 06ecf32b7b4f3e53eb3793dc848670e41767156b Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 23 Jun 2022 13:51:27 -0500 Subject: [PATCH 076/173] test_tools: Allow version ranges in prebuilt_tools - Resolve the version range against the list of versions found in the recipe folders. - Version ranges can be used to build the latest patch of a recipe, because CCI upstream is not keeping, for instance, CMake 3.21.6 when 3.21.7 comes out. - Change fixtures to allow unresolved version ranges through, so that the test_build_tool function can assert that the version range was resolved, moving the exception to an individual test. --- requirements.in | 1 + tests/test_tools.py | 38 +++++++++++++++++++++++++++++++++++--- 2 files changed, 36 insertions(+), 3 deletions(-) diff --git a/requirements.in b/requirements.in index 5fdd5fe13af99..65e939b320442 100644 --- a/requirements.in +++ b/requirements.in @@ -1,5 +1,6 @@ tox dl-conan-build-tools~=3.4 +node-semver coverage flake8 flake8-printf-formatting diff --git a/tests/test_tools.py b/tests/test_tools.py index 70c3d8c902a36..88e4f2773435c 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -1,6 +1,7 @@ import json import os import platform +import semver import subprocess from typing import NamedTuple, List @@ -26,13 +27,38 @@ def __str__(self): @classmethod def from_str_or_dict(cls, str_or_dict): + """Create a package from a string or dict.""" if isinstance(str_or_dict, str): return cls(str_or_dict, []) return cls(**str_or_dict) + @classmethod + def create_resolved(cls, str_or_dict): + """Create a package from a string or dict, and resolve ranges.""" + return cls.from_str_or_dict(str_or_dict).resolve_ranges() + + def resolve_ranges(self): + """Check if the package has a range expression, and resolve it if necessary.""" + package, range = self.package.split('/', maxsplit=1) + if range.startswith('[') and range.endswith(']'): + versions = recipes.versions_to_folders(package).keys() + range = range[1:-1] # strip off brackets + # Could call conans.client.graph.range_resolver.satisfying() here, but + # avoiding using Conan internals. 
That means not supporting loose or include_prerelease + # for now. In Conan, loose is the default. + resolved_version = semver.max_satisfying(versions, range, loose=True, include_prerelease=False) + if resolved_version is None: + print(f'*** No range resolution for {self.package}') + return self + resolved_package = f'{package}/{resolved_version}' + print(f'Resolved {self.package} to {resolved_package}') + return Package(resolved_package, self.options, self.configs) + else: + return self + @pytest.fixture(scope='package', - params=[Package.from_str_or_dict(entry) for entry in _config.get('prebuilt_tools', [])], + params=[Package.create_resolved(entry) for entry in _config.get('prebuilt_tools', [])], ids=lambda param: str(param)) def prebuilt_tool(request): return request.param @@ -67,7 +93,7 @@ def release_tool_config(): @pytest.fixture(scope='package') def tool_recipe_folder(prebuilt_tool): package, version = prebuilt_tool.package.split('/') - return recipes.versions_to_folders(package)[version] + return recipes.versions_to_folders(package).get(version) @pytest.fixture(scope='package') @@ -123,6 +149,12 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_too if prebuilt_tool.configs and prebuilt_tool_config_name not in prebuilt_tool.configs: pytest.skip(f'Skipping build because config named {prebuilt_tool_config_name} is not in the list of ' f'configs for this package: {", ".join(prebuilt_tool.configs)}') + + package_name, package_version = prebuilt_tool.package.split('/', maxsplit=1) + assert not package_version.startswith('[') and not package_version.endswith(']'), 'version range must have ' \ + 'been resolved' + assert tool_recipe_folder is not None, 'the recipe folder must be found' + tool_options = [] for opt in prebuilt_tool.options: tool_options.append('--options:host') @@ -130,7 +162,7 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_too force_build_options = [] if force_build == 'package': - force_build_options = ['--build', prebuilt_tool.package.split('/', maxsplit=1)[0], + force_build_options = ['--build', package_name, '--build', 'missing'] elif force_build == 'with-requirements': force_build_options = ['--build'] From acbb944c09e2e6b109b3ee4efa838f4d26347fda Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 23 Jun 2022 14:08:10 -0500 Subject: [PATCH 077/173] dlproject.yaml: Use version ranges for CMake and Ninja - Accounts for CCI not keeping patch releases of CMake in the recipes. - Assuming the same for Ninja. - Also using >=1.0.0 to also build latest Ninja. 
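Patch 076 above resolves those bracket expressions with node-semver (added to requirements.in), and the dlproject.yaml entries in the next hunk, such as cmake/[~3.21.0], rely on it. A condensed sketch of the resolution step, assuming the version list comes from the recipe folders as in the fixture:

    import semver  # the node-semver package added to requirements.in, not python-semver

    def resolve_range(package_ref, available_versions):
        # Turn 'cmake/[~3.21.0]' into 'cmake/<highest matching version>'; leave plain
        # versions and unresolvable ranges untouched.
        name, version = package_ref.split('/', maxsplit=1)
        if not (version.startswith('[') and version.endswith(']')):
            return package_ref
        best = semver.max_satisfying(available_versions, version[1:-1],
                                     loose=True, include_prerelease=False)
        return f'{name}/{best}' if best else package_ref

    # resolve_range('cmake/[~3.21.0]', ['3.21.4', '3.21.7', '3.23.1']) -> 'cmake/3.21.7'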
--- dlproject.yaml | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 36c329c3839bc..23a925ed11230 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -189,13 +189,13 @@ config: # If the entry is a string, it's taken to be the package name, # else the entry can be a dictionary of package name and options prebuilt_tools: - - cmake/3.21.4 + - cmake/[~3.21.0] - doxygen/1.9.1 - package: doxygen/1.9.1 options: - doxygen:enable_search=False - - ninja/1.10.2 - - ninja/1.11.0 + - ninja/[~1.10.0] + - ninja/[>=1.0.0] - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -239,13 +239,13 @@ config: # If the entry is a string, it's taken to be the package name, # else the entry can be a dictionary of package name and options prebuilt_tools: - - cmake/3.21.4 + - cmake/[~3.21.0] - doxygen/1.9.1 - package: doxygen/1.9.1 options: - doxygen:enable_search=False - - ninja/1.10.2 - - ninja/1.11.0 + - ninja/[~1.10.0] + - ninja/[>=1.0.0] - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -308,13 +308,13 @@ config: settings: - build_type=Debug prebuilt_tools: &redhat6Tools - - package: cmake/3.21.4 + - package: cmake/[~3.21.0] - package: doxygen/1.9.1 - package: doxygen/1.9.1 options: - doxygen:enable_search=False - - package: ninja/1.10.2 - - ninja/1.11.0 + - package: ninja/[~1.10.0] + - ninja/[>=1.0.0] - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -352,13 +352,13 @@ config: include: - Debug prebuilt_tools: - - cmake/3.21.4 + - cmake/[~3.21.0] - doxygen/1.9.1 - package: doxygen/1.9.1 options: - doxygen:enable_search=False - - ninja/1.10.2 - - ninja/1.11.0 + - ninja/[~1.10.0] + - ninja/[>=1.0.0] - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -427,7 +427,7 @@ config: - build-type=Debug default: *windowsDebug prebuilt_tools: - - cmake/3.21.4 + - cmake/[~3.21.0] - package: doxygen/1.9.1 configs: # xapian-core doesn't work for cross-building x86_64 to x86 @@ -435,8 +435,8 @@ config: - package: doxygen/1.9.1 options: - doxygen:enable_search=False - - ninja/1.10.2 - - ninja/1.11.0 + - ninja/[~1.10.0] + - ninja/[>=1.0.0] - b2/4.8.0 - swig/1.3.40+dl.1 - innoextract/1.9.0 @@ -491,14 +491,14 @@ config: include: - Debug32 prebuilt_tools: - - package: cmake/3.21.4 + - package: cmake/[~3.21.0] options: - cmake:with_openssl=False - package: doxygen/1.9.1 options: - doxygen:enable_search=False - - ninja/1.10.2 - - ninja/1.11.0 + - ninja/[~1.10.0] + - ninja/[>=1.0.0] - b2/4.8.0 - package: swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -564,15 +564,15 @@ config: settings: - build_type=Debug prebuilt_tools: - - package: cmake/3.21.4 + - package: cmake/[~3.21.0] options: - cmake:with_openssl=False - doxygen/1.9.1 - package: doxygen/1.9.1 options: - doxygen:enable_search=False - - ninja/1.10.2 - - ninja/1.11.0 + - ninja/[~1.10.0] + - ninja/[>=1.0.0] - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: From b350344a53c34d851f0fb374e843dfb20c0306cf Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 27 Jun 2022 12:04:23 -0500 Subject: [PATCH 078/173] Use RedHat 7 to build latest Ninja - It gets a floating point exception on RedHat 6. 
- Projects that build on RedHat 6 and use Ninja can use 1.10.x --- dlproject.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dlproject.yaml b/dlproject.yaml index 23a925ed11230..a0e81a7f59aef 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -314,7 +314,6 @@ config: options: - doxygen:enable_search=False - package: ninja/[~1.10.0] - - ninja/[>=1.0.0] - b2/4.8.0 - swig/1.3.40+dl.1 prebuilt_tools_configs: @@ -323,6 +322,10 @@ config: redhat-7-x86_64: config: *redhat6Config + prebuilt_tools: + - ninja/[>=1.0.0] + prebuilt_tools_configs: + - ReleaseTool redhat-aarch64: common: &redhatARMCommon From d865f8aaecbdd3f452e0d5befbb5bdb86d550d13 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 27 Jun 2022 13:35:42 -0500 Subject: [PATCH 079/173] upload-recipes: Add --since-merge-from-branch option - Upload recipes based on when merges from a given branch happened. This branch is often conan-io/master. This option can be used to get a tranche of changes from conan-io, regardless of whether there were DL-local merges as well. - Option --merges tells how many merges back to go. It defaults to 2, presuming the current commit is a merge, and going back to get at least one set of changes from the specified branch. --- tasks/__init__.py | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/tasks/__init__.py b/tasks/__init__.py index b127f74108946..548b536c29d58 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -13,12 +13,15 @@ 'since-commit': 'upload all packages in recipes folder changed since COMMIT', 'since-before-last-merge': 'upload all packages in recipes folder changed since just before the most ' 'recent merge (this is useful for automated tools)', + 'since-merge-from-branch': 'upload packages changed since a merge from the given branch', + 'merges': 'number of merges to look back for since-merge-from-branch, default 2', 'parallel': 'run uploads in parallel (default)', 'upload': 'upload the recipe (default) (otherwise, just does the exports)' }, iterable=['package']) def upload_recipes(ctx, remote='conan-center-dl-staging', package=None, all=False, since_commit=None, - since_before_last_merge=False, parallel=True, upload=True): + since_before_last_merge=False, since_merge_from_branch=None, merges=2, + parallel=True, upload=True): """Export and upload the named recipes to the given remote. 
Exports and uploads all the versions of the selected recipes to the remote.""" @@ -30,6 +33,25 @@ def update_since_commit(since_commit): lines = stm.getvalue().strip('\n').split('\n') packages.update(path.split('/')[1] for path in lines if path) + def search_branch_merge(): + # Get all revs from branch + stm = io.StringIO() + ctx.run(f'git rev-list {since_merge_from_branch}', out_stream=stm, pty=False, dry=False) + branch_revs = set(stm.getvalue().strip('\n').split('\n')) + + # Get all merges, and all their parents + stm = io.StringIO() + ctx.run("git log --min-parents=2 --pretty='%H %P'", out_stream=stm, pty=False, dry=False) + merges_seen = 0 + for line in stm.getvalue().strip('\n').split('\n'): + refs = line.split() + merge_commit = refs[0] + parents = refs[1:] + if set(parents).intersection(branch_revs): + merges_seen += 1 + if merges_seen == merges: + return merge_commit + packages.update(package or []) if all: packages.update(os.listdir('recipes')) @@ -43,6 +65,9 @@ def update_since_commit(since_commit): commit = stm.getvalue().strip('\n') # {commit}~1 is the first parent of {commit}; see https://git-scm.com/docs/git-rev-parse#_specifying_revisions update_since_commit(f'{commit}~1') + if since_merge_from_branch: + commit = search_branch_merge() + update_since_commit(commit) sorted_packages = sorted(packages) print('*** Uploading:') From 311e310bbf9d39fa6029711c1a754f53e9649fd5 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 27 Jun 2022 13:47:09 -0500 Subject: [PATCH 080/173] Jenkinsfile: Also upload recipes since merge from conan-io/master - Make sure that the conan-io remote is present and fetched. --- Jenkinsfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index d9d21f88bf3e0..adf74b96997d1 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -154,11 +154,14 @@ pipeline { if (params.UPLOAD_ALL_RECIPES) { range = '--all' } else { + // make sure conan-io is available and up-to-date + sh "git remote | grep conan-io || git remote add conan-io https://github.com/conan-io/conan-center-index.git" + sh "git fetch conan-io" // assuming this is due to a merge, upload recipes // modified since just before the last merge. This is an // incremental update to recipes, and will be much faster // than uploading all 1100+ recipes. - range = "--since-before-last-merge" + range = "--since-before-last-merge --since-merge-from-branch=conan-io/master" } sh ". ${ENV_LOC['noarch']}/bin/activate; invoke upload-recipes --remote ${remote} ${range}" } From 524de57e40db880394c130e8d5fc42b46812bb95 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 28 Jun 2022 10:59:53 -0500 Subject: [PATCH 081/173] aix: Build everything with xlC except b2 - xlC can build Doxygen, while gcc cannot. - xlC is used for DLE, while gcc is used for APDFL. - It was previously determined that b2 builds better with gcc. 
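The dlproject.yaml hunk below expresses this split with per-package configs entries (b2 restricted to ReleaseToolGCC, everything else to ReleaseTool), and the build-tools test in tests/test_tools.py skips any config a tool does not list. A small sketch of that selection, using a simplified stand-in for the Package tuple defined in the test module:

    from typing import List, NamedTuple

    class Package(NamedTuple):
        package: str
        options: List[str]
        configs: List[str]

    def should_build(tool: Package, config_name: str) -> bool:
        # The real test calls pytest.skip(); this is just the predicate it applies.
        return not tool.configs or config_name in tool.configs

    # should_build(Package('b2/4.8.0', [], ['ReleaseToolGCC']), 'ReleaseTool')    -> False
    # should_build(Package('b2/4.8.0', [], ['ReleaseToolGCC']), 'ReleaseToolGCC') -> True
    # should_build(Package('swig/1.3.40+dl.1', [], []), 'ReleaseTool')            -> True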
--- dlproject.yaml | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index a0e81a7f59aef..7ec2eac187cee 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -474,8 +474,13 @@ config: settings: - build_type=Debug ReleaseTool: + <<: *aixCommon + build_folder: build-release-tool + description: AIX Release + profile_host: aix-xlc16-ppc + ReleaseToolGCC: build_folder: build-release-tool - description: AIX Release Tool + description: AIX Release Tool with GCC include: - Release DebugTool: @@ -497,15 +502,26 @@ config: - package: cmake/[~3.21.0] options: - cmake:with_openssl=False + configs: + - ReleaseTool - package: doxygen/1.9.1 options: - doxygen:enable_search=False - - ninja/[~1.10.0] - - ninja/[>=1.0.0] - - b2/4.8.0 + - package: ninja/[~1.10.0] + configs: + - ReleaseTool + - package: ninja/[>=1.0.0] + configs: + - ReleaseTool + - package: b2/4.8.0 + configs: + - ReleaseToolGCC - package: swig/1.3.40+dl.1 + configs: + - ReleaseTool prebuilt_tools_configs: - ReleaseTool + - ReleaseToolGCC sunos: common: &sparcCommon From 2f72d1493845aa6a22901a0cc98b8690f2bac42d Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 1 Jul 2022 09:52:50 -0500 Subject: [PATCH 082/173] Jenkinsfile: Remove AIX from the list of build machines - Kept AIX entries that are information (like BUILD_TOOLS) --- Jenkinsfile | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index adf74b96997d1..a530c1352cb97 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -15,7 +15,6 @@ pipeline { parameters { choice(name: 'PLATFORM_FILTER', choices: ['all', - 'aix-conan-center-index', 'linux-x64-rhws6-conan-center-index', 'linux-x64-rhel7-conan-center-index', 'linux-arm-conan-center-index', @@ -182,8 +181,7 @@ pipeline { axes { axis { name 'NODE' - values 'aix-conan-center-index', - 'linux-x64-rhws6-conan-center-index', + values 'linux-x64-rhws6-conan-center-index', 'linux-x64-rhel7-conan-center-index', 'linux-arm-conan-center-index', 'mac-x64-conan-center-index', From d7f8026e428875e87da9d84079acf462310eccbe Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Fri, 1 Jul 2022 09:58:34 -0500 Subject: [PATCH 083/173] dlproject.yaml: Build CMake 3.23 or newer - When Conan makes CMake presets, it currently requires CMake 3.23 or newer, see https://docs.conan.io/en/latest/reference/conanfile/tools/cmake/cmaketoolchain.html --- dlproject.yaml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 7ec2eac187cee..334b4e2d9c569 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -189,7 +189,7 @@ config: # If the entry is a string, it's taken to be the package name, # else the entry can be a dictionary of package name and options prebuilt_tools: - - cmake/[~3.21.0] + - cmake/[>=3.23.0] - doxygen/1.9.1 - package: doxygen/1.9.1 options: @@ -239,7 +239,7 @@ config: # If the entry is a string, it's taken to be the package name, # else the entry can be a dictionary of package name and options prebuilt_tools: - - cmake/[~3.21.0] + - cmake/[>=3.23.0] - doxygen/1.9.1 - package: doxygen/1.9.1 options: @@ -308,7 +308,7 @@ config: settings: - build_type=Debug prebuilt_tools: &redhat6Tools - - package: cmake/[~3.21.0] + - package: cmake/[>=3.23.0] - package: doxygen/1.9.1 - package: doxygen/1.9.1 options: @@ -355,7 +355,7 @@ config: include: - Debug prebuilt_tools: - - cmake/[~3.21.0] + - cmake/[>=3.23.0] - doxygen/1.9.1 - package: doxygen/1.9.1 options: @@ -430,7 +430,7 @@ config: - build-type=Debug default: *windowsDebug prebuilt_tools: - - cmake/[~3.21.0] + - cmake/[>=3.23.0] - package: doxygen/1.9.1 configs: # xapian-core doesn't work for cross-building x86_64 to x86 @@ -499,7 +499,7 @@ config: include: - Debug32 prebuilt_tools: - - package: cmake/[~3.21.0] + - package: cmake/[>=3.23.0] options: - cmake:with_openssl=False configs: @@ -583,7 +583,7 @@ config: settings: - build_type=Debug prebuilt_tools: - - package: cmake/[~3.21.0] + - package: cmake/[>=3.23.0] options: - cmake:with_openssl=False - doxygen/1.9.1 From 1552e28f4119372a0967cb8e39b213e6b4b1efda Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 14 Jul 2022 11:33:35 -0500 Subject: [PATCH 084/173] innoextract: Make requirements private --- recipes/innoextract/all/conanfile.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/recipes/innoextract/all/conanfile.py b/recipes/innoextract/all/conanfile.py index 2d0098f3d9b93..6fcd4d2d984be 100644 --- a/recipes/innoextract/all/conanfile.py +++ b/recipes/innoextract/all/conanfile.py @@ -14,9 +14,9 @@ class InnoextractConan(ConanFile): url = "https://github.com/conan-io/conan-center-index" exports_sources = ["CMakeLists.txt", "patches/*"] requires = ( - "boost/1.78.0", - "xz_utils/5.2.5", - "libiconv/1.16" + ("boost/1.78.0", "private"), + ("xz_utils/5.2.5", "private"), + ("libiconv/1.16", "private"), ) generators = "cmake", "cmake_find_package" settings = "os", "arch", "compiler", "build_type" From 35488dcffb9d28abc5f7e048b71947219d0f136f Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Sun, 17 Jul 2022 11:43:49 -0500 Subject: [PATCH 085/173] Solaris: Only build Doxygen with enable_search=False - When doing full builds with requirements, bison doesn't build, perhaps due to misconfiguration. Are the autotools from xapian-core -> libuuid -> libtool leaking into the bison build? - In any event, Doxygen is only used for DLE, and DLE has enable_search=False. 
--- dlproject.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/dlproject.yaml b/dlproject.yaml index 334b4e2d9c569..1a8f3499cc8de 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -586,7 +586,6 @@ config: - package: cmake/[>=3.23.0] options: - cmake:with_openssl=False - - doxygen/1.9.1 - package: doxygen/1.9.1 options: - doxygen:enable_search=False From a109969227fd83d4b5bd64a09470855863be2b4b Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 19 Jul 2022 17:02:07 -0500 Subject: [PATCH 086/173] Fail tool builds on Windows if WSL2 installed. - The workaround involving setting CONAN_BASH_PATH no longer works. - Either autoconf doesn't build, or its test package doesn't build, depending on whether CONAN_BASH_PATH is set. - For now, fail if WSL2 installed. See discussion at: https://github.com/conan-io/conan-center-index/issues/7944 --- tests/test_tools.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/test_tools.py b/tests/test_tools.py index 88e4f2773435c..bdb542a1efd7b 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -2,6 +2,7 @@ import os import platform import semver +import shutil import subprocess from typing import NamedTuple, List @@ -139,7 +140,14 @@ def conan_env(msys_bin): setting a path to MSYS2 bash so that Conan doesn't try hooking into WSL (if installed).""" env = os.environ.copy() if msys_bin: - env['CONAN_BASH_PATH'] = os.path.join(msys_bin, 'bash.exe') + # It turns out that there's really no workaround that works with WSL2 installed; + # see the discussion at https://github.com/conan-io/conan-center-index/issues/7944 + # Either autoconf doesn't build (without CONAN_BASH_PATH) or its test_package doesn't + # build (with CONAN_BASH_PATH), so maybe the best solution is to inform the user that + # WSL2 should not be enabled. + # env['CONAN_BASH_PATH'] = os.path.join(msys_bin, 'bash.exe') + bash = (shutil.which('bash') or '').lower() + assert bash != r'c:\windows\system32\bash.exe', "Building on Windows doesn't work with WSL2 installed" return env From ba40d9525b566199d6042e5077350f508622e31a Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 20 Jul 2022 17:35:44 -0500 Subject: [PATCH 087/173] Jenkinsfile: Use 'def' to make local variables By default, in a script, referencing a variable will cause it to become part of the script's "binding", which essentially makes it a global. Some of the variables in the scripting stages were mixing across parallel runs, causing mixups in the run, crossing the names of test output files and the like. 
See: https://stackoverflow.com/a/60734868/11996393 See: https://stackoverflow.com/a/185879/11996393 --- Jenkinsfile | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index a530c1352cb97..888303792cb74 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -145,11 +145,13 @@ pipeline { } steps { script { + def remote if (env.BRANCH_NAME =~ 'master*') { remote = 'conan-center-dl' } else { remote = 'conan-center-dl-staging' } + def range if (params.UPLOAD_ALL_RECIPES) { range = '--all' } else { @@ -287,12 +289,14 @@ pipeline { } steps { script { + def remote if (env.BRANCH_NAME =~ 'master*') { remote = 'conan-center-dl' } else { remote = 'conan-center-dl-staging' } - short_node = NODE.replace('-conan-center-index', '') + def short_node = NODE.replace('-conan-center-index', '') + def force_build if (params.FORCE_TOOL_BUILD_WITH_REQUIREMENTS) { force_build = '--force-build with-requirements' } else if (params.FORCE_TOOL_BUILD) { @@ -300,7 +304,7 @@ pipeline { } else { force_build = '' } - pytest_command = "pytest -k build_tool ${force_build} --upload-to ${remote} --junitxml=build-tools.xml --html=${short_node}-build-tools.html" + def pytest_command = "pytest -k build_tool ${force_build} --upload-to ${remote} --junitxml=build-tools.xml --html=${short_node}-build-tools.html" if (isUnix()) { catchError(message: 'pytest had errors', stageResult: 'FAILURE') { script { From e097e2653b89965378ae40560430713ef507453c Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 20 Jul 2022 17:49:39 -0500 Subject: [PATCH 088/173] Jenkinsfile: Correctly use CLEAN_WORKSPACE - It's a boolean, so just use the variable rather than comparing to 'true'. --- Jenkinsfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 888303792cb74..861a00b0adc6b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -59,7 +59,7 @@ pipeline { stage('Clean/reset Git checkout for release') { when { anyOf { - expression { params.CLEAN_WORKSPACE == 'true' } + expression { params.CLEAN_WORKSPACE } } } steps { @@ -200,7 +200,7 @@ pipeline { stage('Clean/reset Git checkout for release') { when { anyOf { - expression { params.CLEAN_WORKSPACE == 'true' } + expression { params.CLEAN_WORKSPACE } } } steps { From 12acb12abbb43bfe9b465565947d4a829e750970 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Thu, 21 Jul 2022 10:48:16 -0500 Subject: [PATCH 089/173] Use mkenv.py that gets the main part from a repo --- .gitignore | 2 + mkenv.py | 176 +++++++++++++++++++++-------------------------------- tox.ini | 2 +- 3 files changed, 72 insertions(+), 108 deletions(-) diff --git a/.gitignore b/.gitignore index 197e0bb8db008..a0a1ea43e1c9c 100644 --- a/.gitignore +++ b/.gitignore @@ -451,3 +451,5 @@ requirements.txt # Test outputs /test-report.xml /test-report.html + +/.mkenv diff --git a/mkenv.py b/mkenv.py index a7359e040955c..6793d3558162a 100755 --- a/mkenv.py +++ b/mkenv.py @@ -1,127 +1,89 @@ -#!/usr/bin/env python3.9 +#!/usr/bin/env python3 # -*- coding: utf-8 -*- -import string -import sys +import importlib import os - -import venv import subprocess -import platform -import argparse +import sys if sys.version_info[:2] < (3, 9): # Don't allow anything but Python 3.9 or higher raise SystemError("Only Python 3.9+ is allowed") +MKENV_IMPL = 'mkenv_impl' HERE = os.path.dirname(os.path.abspath(__file__)) -join = os.path.join -lower_node = platform.node().split('.')[0].lower().replace(' ', '-') -# See https://stackoverflow.com/a/10839538, but use ASCII letters -allowed = string.digits + string.ascii_letters + '-_' -filtered_node = ''.join(filter(allowed.__contains__, lower_node)) -HOME_DIR = join(HERE, f'python-env-{filtered_node}') +MKENV_SUBDIR = '.mkenv' +MKENV_REPO = os.environ.get('DL_MKENV_REPO', 'git@octocat.dlogics.com:datalogics/mkenv.git') +MKENV_BRANCH = os.environ.get('DL_MKENV_BRANCH', 'main') +DL_MKENV_ENVIRONMENT_OVERRIDE = 'DL_MKENV_REPO' in os.environ or 'DL_MKENV_BRANCH' in os.environ +# The oldest Git v2 we have on our machines is 2.3, and it seems to work for mkenv. +GIT_REQUIRED = (2, 3) -def install_project_requirements(output_route): - """ - Install the project's required modules via pip-tools. - """ - print('Checking required packages are installed...') +def run(command_args, verbose=False, check=True, capture_output=False, *args, **kwargs): + if verbose: + print(' '.join(command_args), file=sys.stderr) + redirect_stderr = subprocess.PIPE if capture_output else None + redirect_stdout = subprocess.PIPE if capture_output else sys.stderr + return subprocess.run(command_args, check=check, stdout=redirect_stdout, stderr=redirect_stderr, + *args, **kwargs) - activation_path = HOME_DIR - execut = '' - if windows(): - execut = '.exe' - activation_path = os.path.join(activation_path, 'Scripts') - else: - activation_path = os.path.join(activation_path, 'bin') +def get_mkenv_impl_from_git(): + old_sys_path = sys.path.copy() try: - print('Update pip ... ') - # update pip so the other steps won't fail with a warning to update pip - # Also, install pip-tools for better dependency management - subprocess.check_call([os.path.join(activation_path, 'python' + execut), - '-m', 'pip', 'install', '--upgrade', 'pip', 'pip-tools', - 'wheel'], - stdout=output_route, stderr=subprocess.STDOUT) - except subprocess.CalledProcessError: - print('ERROR: Could not install/upgrade pip, pip-tools, and wheel') - raise - except PermissionError: - print('ERROR: Could not install pip due to permission error', activation_path) - raise - - try: - pip_compile_cmd = os.path.join(activation_path, 'pip-compile' + execut) - pip_sync_cmd = os.path.join(activation_path, 'pip-sync' + execut) - print('Installing / Refreshing required packages... 
') - artifactory_url = 'http://artifactory.dlogics.com:8081/artifactory' - index_url = artifactory_url + '/api/pypi/pypi/simple' - artifactory = 'artifactory.dlogics.com' - print('Dependency resolution...') - # Avoid PEP 517. This gets around a problem with system_site_packages, - # pip >= 19.0.0, and older setuptools. - # See: https://github.com/pypa/pip/issues/6264#issuecomment-470498695 - # in pip-tools, this is the --no-build-isolation option - subprocess.check_call([pip_compile_cmd, '--no-build-isolation', '--upgrade', '-i', - index_url, '--trusted-host', - artifactory], - stdout=output_route, stderr=subprocess.STDOUT) - print('Installing/upgrading packages...') - subprocess.check_call([pip_sync_cmd, '-i', - index_url, '--trusted-host', - artifactory], - stdout=output_route, stderr=subprocess.STDOUT) - - except subprocess.CalledProcessError: - print('ERROR: Could not install required packages using ', pip_compile_cmd, ' and ', pip_sync_cmd) - raise - except PermissionError: - print('ERROR: Could not run pip-tools due to permission error', activation_path) - raise - - print('Packages up to date...') - activate_cmd = (f' . .{HOME_DIR}/bin/activate\n' if not windows() else - f' {HOME_DIR}\\Scripts\\activate.bat\n') - print('\n Now activate the virtual environment with:\n ' + activate_cmd) + mkenv_dir = os.path.join(HERE, MKENV_SUBDIR) + # snipe the verbose flag from the argv to decide how chatty to be about the + # git commands + verbose = '-v' in sys.argv or '--verbose' in sys.argv + + # Check git version + completed = run(['git', 'version'], capture_output=True, check=True) + git_version = completed.stdout.decode().strip().split()[2] + # Windows has string stuff after the first three numbers, hence the [:3] + git_version_split = [int(x) for x in git_version.split('.')[:3]] + if tuple(git_version_split[:len(GIT_REQUIRED)]) < GIT_REQUIRED: + ver_string = '.'.join(str(x) for x in GIT_REQUIRED) + sys.exit(f'*** Git version {ver_string} or newer required, found {git_version}; ' + f' older versions are no longer supported') + + if os.path.isdir(mkenv_dir) and not os.path.islink(mkenv_dir): + # In case the repo wasn't initialized...initializing it again actually doesn't hurt anything + run(['git', '-C', mkenv_dir, 'init']) + completion = run(['git', '-C', mkenv_dir, 'remote', 'set-url', 'origin', MKENV_REPO], verbose=verbose, + check=False) + if completion.returncode != 0: + completion = run(['git', '-C', mkenv_dir, 'remote', 'add', 'origin', MKENV_REPO], verbose=verbose) + run(['git', '-C', mkenv_dir, 'fetch', 'origin'], verbose=verbose) + run(['git', '-C', mkenv_dir, 'checkout', MKENV_BRANCH], verbose=verbose) + run(['git', '-C', mkenv_dir, 'reset', '--hard', f'origin/{MKENV_BRANCH}'], verbose=verbose) + elif os.path.exists(mkenv_dir): + sys.exit('*** .mkenv is not a directory; remove it and try again.') + else: + run(['git', 'clone', MKENV_REPO, MKENV_SUBDIR, '--branch', MKENV_BRANCH], verbose=verbose) + sys.path.insert(0, mkenv_dir) + mkenv_impl = importlib.import_module(MKENV_IMPL) + return mkenv_impl + finally: + sys.path[:] = old_sys_path def main(): - parser = argparse.ArgumentParser(description='Virtual environment setup script') - parser.add_argument('-v', '--verbose', action='store_true', - help='Show package installation output') - parser.add_argument('--env-name', action='store_true', - help='Print the environment name and exit') - parser.add_argument('--env-path', action='store_true', - help='Print the path to the programs in the environment and exit') - opts = 
parser.parse_args() - - output_route = None if opts.verbose else subprocess.DEVNULL - - if opts.env_name: - print(HOME_DIR) - return - - if opts.env_path: - scripts_or_bin = 'Scripts' if windows() else 'bin' - print(os.path.join(HERE, HOME_DIR, scripts_or_bin)) - return - - print('Creating virtualenv ', HOME_DIR) - # venv.main() does this, and it makes it possible to create a virtual environment - # more than once on Windows. - if os.name == 'nt': - use_symlinks = False - else: - use_symlinks = True - venv.create(HOME_DIR, system_site_packages=False, symlinks=use_symlinks, with_pip=True) - - install_project_requirements(output_route) - - -def windows(): - 'returns True on Windows platforms' - return platform.system() == 'Windows' + mkenv_impl = None + + if not DL_MKENV_ENVIRONMENT_OVERRIDE: + # Use local module only if exists and not overridden by environment + old_sys_path = sys.path.copy() + sys.path.insert(0, HERE) + try: + mkenv_impl = importlib.import_module(MKENV_IMPL) + except ModuleNotFoundError: + pass + finally: + sys.path[:] = old_sys_path + + if mkenv_impl is None: + mkenv_impl = get_mkenv_impl_from_git() + return mkenv_impl.main(HERE) if __name__ == '__main__': diff --git a/tox.ini b/tox.ini index 20cf4ed52f37f..4796de0ee3a63 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,7 @@ # Also, not enforcing flake8 on the recipes; they come from upstream with flake8 # errors # -exclude = .git,.tox,python-env-*,.idea,.conan,recipes,.github,docs,linter +exclude = .git,.tox,python-env-*,.idea,.conan,recipes,.github,docs,linter,.mkenv max-line-length = 120 [pytest] testpaths = tests From 6a40d02c83cd5acfcd6a1d5eb27895178445d8a8 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 21 Jul 2022 10:54:26 -0500 Subject: [PATCH 090/173] Also clean Git repositories that are in the Conan cache Add an extra -f to 'git clean', so it is now 'git clean -f -fdx'. The extra -f causes Git to delete even embedded Git repositories, which can happen if the Conan cache is in ./.conan, and a recipe (like SWIG) checks out its code with Git. See: https://git-scm.com/docs/git-clean#Documentation/git-clean.txt--f --- Jenkinsfile | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 861a00b0adc6b..e63e679b81cde 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -68,11 +68,18 @@ pipeline { // Ensure that the checkout is clean and any changes // to .gitattributes and .gitignore have been taken // into effect + // + // The extra -f causes Git to delete even embedded Git + // repositories, which can happen if the Conan cache + // is in ./.conan, and a recipe (like SWIG) checks out + // its code with Git. + // + // See: https://git-scm.com/docs/git-clean#Documentation/git-clean.txt--f if (isUnix()) { sh """ git rm -q -r . git reset --hard HEAD - git clean -fdx + git clean -f -fdx """ } else { // On Windows, 'git clean' can't handle long paths in .conan, @@ -81,7 +88,7 @@ pipeline { if exist ${WORKSPACE}\\.conan\\ rmdir/s/q ${WORKSPACE}\\.conan git rm -q -r . git reset --hard HEAD - git clean -fdx + git clean -f -fdx """ } } @@ -213,7 +220,7 @@ pipeline { sh """ git rm -q -r . git reset --hard HEAD - git clean -fdx + git clean -f -fdx """ } else { // On Windows, 'git clean' can't handle long paths in .conan, @@ -222,7 +229,7 @@ pipeline { if exist ${WORKSPACE}\\.conan\\ rmdir/s/q ${WORKSPACE}\\.conan git rm -q -r . 
git reset --hard HEAD - git clean -fdx + git clean -f -fdx """ } } From 96642f08197b64a5df437d0301b612fabf5c5cc3 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 21 Jul 2022 15:21:28 -0500 Subject: [PATCH 091/173] upload-recipe: Upload the exported recipe as the latest Don't just upload the recipe, upload it with --force, which will upload it even if it's already there. That covers the case where branch-switching might have left the exported recipe as an older revision than some other recipe, which would mean building with the incorrect packages. --- tasks/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tasks/__init__.py b/tasks/__init__.py index 548b536c29d58..303221e3f1dc4 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -116,7 +116,10 @@ def upload_one_package_name(ctx, package_name, remote, upload=True): folder = os.path.join(recipe_folder, version) ctx.run(f'conan export {folder} {package_name}/{version}@') if upload: - ctx.run(f'conan upload -r {remote} {package_name} --confirm') + # Force upload to make sure that if there has been back-and-forth changes + # to the branch, the current recipe rises to the top of the revisions + # sorted by date. + ctx.run(f'conan upload -r {remote} {package_name} --force --confirm') tasks = [] From 6fa2af5a6eed424238a67659ad3d88b9d450fa97 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 1 Aug 2022 14:38:59 -0500 Subject: [PATCH 092/173] swig: Add version 4.0.2+dl.1 - relevant patches copied from recipes/swig/all/patches/ --- recipes/swig/dl/conandata.yml | 10 ++++ .../0001-swig-linux-library-path.patch | 50 +++++++++++++++++++ ....0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch | 11 ++++ 3 files changed, 71 insertions(+) create mode 100644 recipes/swig/dl/patches/0001-swig-linux-library-path.patch create mode 100644 recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch diff --git a/recipes/swig/dl/conandata.yml b/recipes/swig/dl/conandata.yml index d4c8b5ff60b82..71fd054bf777d 100644 --- a/recipes/swig/dl/conandata.yml +++ b/recipes/swig/dl/conandata.yml @@ -4,9 +4,19 @@ sources: url: "git@octocat.dlogics.com:datalogics/swig.git" branch: "1.3.40+dl.1" shallow: True + "4.0.2+dl.1": + git: + url: "git@octocat.dlogics.com:datalogics/swig.git" + branch: "4.0.2+dl.1" + shallow: True patches: "1.3.40+dl.1": - base_path: "source_subfolder" patch_file: "patches/0003-1.3.40-do-not-define-SWIG_LIB_WIN_UNIX.patch" - base_path: "source_subfolder" patch_file: "patches/0004-1.3.40-swig-linux-library-path.patch" + "4.0.2+dl.1": + - base_path: "source_subfolder" + patch_file: "patches/0001-swig-linux-library-path.patch" + - base_path: "source_subfolder" + patch_file: "patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch" diff --git a/recipes/swig/dl/patches/0001-swig-linux-library-path.patch b/recipes/swig/dl/patches/0001-swig-linux-library-path.patch new file mode 100644 index 0000000000000..2c15694cb202e --- /dev/null +++ b/recipes/swig/dl/patches/0001-swig-linux-library-path.patch @@ -0,0 +1,50 @@ +--- Source/Modules/main.cxx ++++ Source/Modules/main.cxx +@@ -879,6 +881,30 @@ static void getoptions(int argc, char *argv[]) { + } + } + ++#if defined(HAVE_UNISTD_H) && !defined(_WIN32) ++#include ++#include ++#include ++ ++static String *get_exe_path(void) { ++ Dl_info info; ++ if (dladdr("main", &info)) { ++ char buffer[PATH_MAX]; ++ char* res = NULL; ++ ++ res = realpath(info.dli_fname, buffer); ++ if (!res) { ++ return NewString(SWIG_LIB); ++ } ++ ++ dirname(buffer); ++ 
strcat(buffer, "/swiglib"); ++ return NewStringWithSize(buffer, strlen(buffer)); ++ } ++ return NewString(SWIG_LIB); ++} ++#endif ++ + int SWIG_main(int argc, char *argv[], const TargetLanguageModule *tlm) { + char *c; + +@@ -938,13 +953,15 @@ + char buf[MAX_PATH]; + char *p; + if (!(GetModuleFileName(0, buf, MAX_PATH) == 0 || (p = strrchr(buf, '\\')) == 0)) { + *(p + 1) = '\0'; +- SwigLib = NewStringf("%sLib", buf); // Native windows installation path ++ SwigLib = NewStringf("%sswiglib", buf); // Native windows installation path + } else { + SwigLib = NewStringf(""); // Unexpected error + } + if (Len(SWIG_LIB_WIN_UNIX) > 0) + SwigLibWinUnix = NewString(SWIG_LIB_WIN_UNIX); // Unix installation path using a drive letter (for msys/mingw) ++#elif defined(HAVE_UNISTD_H) && !defined(_WIN32) ++ SwigLib = get_exe_path(); + #else + SwigLib = NewString(SWIG_LIB); + #endif diff --git a/recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch b/recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch new file mode 100644 index 0000000000000..29aee19fe33d0 --- /dev/null +++ b/recipes/swig/dl/patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch @@ -0,0 +1,11 @@ +--- configure.ac ++++ configure.ac +@@ -2770,7 +2770,7 @@ + *-*-cygwin*) SWIG_LIB_WIN_UNIX=`cygpath --mixed "$SWIG_LIB"`;; + *) SWIG_LIB_WIN_UNIX="";; + esac +-AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, ["$SWIG_LIB_WIN_UNIX"], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) ++AC_DEFINE_UNQUOTED(SWIG_LIB_WIN_UNIX, [""], [Directory for SWIG system-independent libraries (Unix install on native Windows)]) + + SWIG_LIB_PREINST=$ABS_SRCDIR/Lib + AC_SUBST(SWIG_LIB_PREINST) From 1e7eb642ac6604534aa883025bb81d77a4841b98 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 1 Aug 2022 19:09:05 -0500 Subject: [PATCH 093/173] swig: DL: Don't use get_exe_path() on Solaris or AIX - Can't get dladdr call to compile. - Conan defines SWIG_LIB anyway. - ...and so does DLE, the prime consumer of SWIG. --- recipes/swig/dl/patches/0001-swig-linux-library-path.patch | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/recipes/swig/dl/patches/0001-swig-linux-library-path.patch b/recipes/swig/dl/patches/0001-swig-linux-library-path.patch index 2c15694cb202e..58afde944586b 100644 --- a/recipes/swig/dl/patches/0001-swig-linux-library-path.patch +++ b/recipes/swig/dl/patches/0001-swig-linux-library-path.patch @@ -4,7 +4,7 @@ } } -+#if defined(HAVE_UNISTD_H) && !defined(_WIN32) ++#if defined(HAVE_UNISTD_H) && !defined(_WIN32) && !defined(_AIX) && !defined(__sun__) +#include +#include +#include @@ -43,7 +43,7 @@ } if (Len(SWIG_LIB_WIN_UNIX) > 0) SwigLibWinUnix = NewString(SWIG_LIB_WIN_UNIX); // Unix installation path using a drive letter (for msys/mingw) -+#elif defined(HAVE_UNISTD_H) && !defined(_WIN32) ++#elif defined(HAVE_UNISTD_H) && !defined(_WIN32) && !defined(_AIX) && !defined(__sun__) + SwigLib = get_exe_path(); #else SwigLib = NewString(SWIG_LIB); From 4c392ba657622bf64118b0a91f0ac94685109c47 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Fri, 12 Aug 2022 11:30:02 -0500 Subject: [PATCH 094/173] swig: Add version 4.0.2+dl.2 --- recipes/swig/dl/conandata.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/recipes/swig/dl/conandata.yml b/recipes/swig/dl/conandata.yml index 71fd054bf777d..b90acf9e8d89c 100644 --- a/recipes/swig/dl/conandata.yml +++ b/recipes/swig/dl/conandata.yml @@ -9,6 +9,11 @@ sources: url: "git@octocat.dlogics.com:datalogics/swig.git" branch: "4.0.2+dl.1" shallow: True + "4.0.2+dl.2": + git: + url: "git@octocat.dlogics.com:datalogics/swig.git" + branch: "4.0.2+dl.2" + shallow: True patches: "1.3.40+dl.1": - base_path: "source_subfolder" @@ -20,3 +25,8 @@ patches: patch_file: "patches/0001-swig-linux-library-path.patch" - base_path: "source_subfolder" patch_file: "patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch" + "4.0.2+dl.2": + - base_path: "source_subfolder" + patch_file: "patches/0001-swig-linux-library-path.patch" + - base_path: "source_subfolder" + patch_file: "patches/0002-4.0.2-do-not-define-SWIG_LIB_WIN_UNIX.patch" From 5caf7d036e20c1f642e925a32266de31436ccad8 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 14 Sep 2022 20:19:29 -0500 Subject: [PATCH 095/173] Add new b2 and SWIG packages - b2/4.9.2, needed by newer Boost packages - swig/4.0.2+dl.2, the current SWIG update for DLE --- dlproject.yaml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index 1a8f3499cc8de..998b28cc464c1 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -197,7 +197,9 @@ config: - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 + - b2/4.9.2 - swig/1.3.40+dl.1 + - swig/4.0.2+dl.2 prebuilt_tools_configs: - ReleaseTool @@ -247,7 +249,9 @@ config: - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 + - b2/4.9.2 - swig/1.3.40+dl.1 + - swig/4.0.2+dl.2 prebuilt_tools_configs: - ReleaseTool @@ -315,7 +319,9 @@ config: - doxygen:enable_search=False - package: ninja/[~1.10.0] - b2/4.8.0 + - b2/4.9.2 - swig/1.3.40+dl.1 + - swig/4.0.2+dl.2 prebuilt_tools_configs: # Build 64-bit tools on RHEL 6, because they're used for 64-bit DLE for APDFL 15 - ReleaseTool @@ -363,7 +369,9 @@ config: - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 + - b2/4.9.2 - swig/1.3.40+dl.1 + - swig/4.0.2+dl.2 prebuilt_tools_configs: - ReleaseTool @@ -441,7 +449,9 @@ config: - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 + - b2/4.9.2 - swig/1.3.40+dl.1 + - swig/4.0.2+dl.2 - innoextract/1.9.0 prebuilt_tools_configs: - ReleaseTool @@ -516,9 +526,15 @@ config: - package: b2/4.8.0 configs: - ReleaseToolGCC + - package: b2/4.9.2 + configs: + - ReleaseToolGCC - package: swig/1.3.40+dl.1 configs: - ReleaseTool + - package: swig/4.0.2+dl.2 + configs: + - ReleaseTool prebuilt_tools_configs: - ReleaseTool - ReleaseToolGCC @@ -592,7 +608,9 @@ config: - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 + - b2/4.9.2 - swig/1.3.40+dl.1 + - swig/4.0.2+dl.2 prebuilt_tools_configs: - ReleaseTool From 10176e43d011ad6d6970f65285207be342357bab Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Wed, 14 Sep 2022 20:26:20 -0500 Subject: [PATCH 096/173] swig: Add missing DL versions in config.yml --- recipes/swig/config.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/recipes/swig/config.yml b/recipes/swig/config.yml index 09e45d079455b..6c5552d329566 100644 --- a/recipes/swig/config.yml +++ b/recipes/swig/config.yml @@ -5,3 +5,7 @@ versions: folder: "all" "1.3.40+dl.1": folder: "dl" + "4.0.2+dl.1": + folder: "dl" + "4.0.2+dl.2": + folder: "dl" From 0223478692540b9ab7afc390fb58991f0d9f8810 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 15 Sep 2022 13:01:12 -0500 Subject: [PATCH 097/173] Add pre-commit hooks - Mainly copied from dl-conanfile. - Ignore files that come from conan-center-index; we don't wan't to enforce our coding conventions on them. - This means we're checking the code we wrote, including the tests and our Invoke tasks. --- .ecrc | 19 +++++++++++++ .pre-commit-config.yaml | 59 +++++++++++++++++++++++++++++++++++++++++ requirements.in | 1 + tox.ini | 2 +- 4 files changed, 80 insertions(+), 1 deletion(-) create mode 100644 .ecrc create mode 100644 .pre-commit-config.yaml diff --git a/.ecrc b/.ecrc new file mode 100644 index 0000000000000..2292cd04e6e70 --- /dev/null +++ b/.ecrc @@ -0,0 +1,19 @@ +{ + "Version": "2.4.0", + "Verbose": false, + "Debug": false, + "IgnoreDefaults": false, + "SpacesAftertabs": false, + "NoColor": false, + "Exclude": ["^.idea/","^.github/","^assets/","^docs/","^linter/","^recipes/","^CONTRIBUTING.md$","^README.md"], + "AllowedContentTypes": [], + "PassedFiles": [], + "Disable": { + "EndOfLine": false, + "Indentation": false, + "InsertFinalNewline": false, + "TrimTrailingWhitespace": false, + "IndentSize": true, + "MaxLineLength": false + } +} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000000..23678dcd68c18 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,59 @@ +exclude: | + (?x)^( + .idea/.*| + .github/.*| + assets/.*| + docs/.*| + linter/.*| + recipes/.*| + CONTRIBUTING.md| + README.md + )$ +default_stages: [commit, manual] +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: trailing-whitespace + args: [ --markdown-linebreak-ext=md ] + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + - id: double-quote-string-fixer + - repo: https://gitlab.com/pycqa/flake8 + rev: 3.9.2 + hooks: + - id: flake8 + - repo: https://github.com/pycqa/isort + rev: 5.10.1 + hooks: + - id: isort + name: isort (python) + - repo: https://github.com/editorconfig-checker/editorconfig-checker.python + rev: 2.4.0 + hooks: + - id: editorconfig-checker + - repo: https://github.com/executablebooks/mdformat + rev: 0.7.14 # Use the ref you want to point at + hooks: + - id: mdformat + name: mdformat on non-.github files + exclude: ^.github/ + args: [ '--wrap', '80', '--number' ] + # Optionally add plugins + additional_dependencies: + - mdformat-gfm + - mdformat-frontmatter + - mdformat-footnote + - mdformat-toc + - id: mdformat + name: mdformat on .github files + files: ^.github/.*$ + # Don't wrap the templates in .github; they don't look good in the description + args: [ '--wrap', 'no', '--number' ] + # Optionally add plugins + additional_dependencies: + - mdformat-gfm + - mdformat-frontmatter + - mdformat-footnote + - mdformat-toc diff --git a/requirements.in b/requirements.in index 65e939b320442..454d628504cb9 100644 --- a/requirements.in +++ b/requirements.in @@ -8,3 +8,4 @@ pipdeptree certifi pytest 
pytest-html +pre-commit; sys_platform != 'aix' and sys_platform != 'sunos5' diff --git a/tox.ini b/tox.ini index 4796de0ee3a63..e3bb5ad83553a 100644 --- a/tox.ini +++ b/tox.ini @@ -6,7 +6,7 @@ # Also, not enforcing flake8 on the recipes; they come from upstream with flake8 # errors # -exclude = .git,.tox,python-env-*,.idea,.conan,recipes,.github,docs,linter,.mkenv +exclude = .git,.tox,python-env-*,.idea,.conan,recipes,.github,docs,linter,.mkenv,.cache max-line-length = 120 [pytest] testpaths = tests From 3a74f022bea6d047368360dd8bc4b7a757d824b8 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 15 Sep 2022 13:18:55 -0500 Subject: [PATCH 098/173] Fixes from running pre-commit run --all --- mkenv.py | 2 +- tasks/__init__.py | 3 ++- tests/test_tools.py | 4 ++-- util/recipes/__init__.py | 1 + 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/mkenv.py b/mkenv.py index 6793d3558162a..9c6b49ba4f102 100755 --- a/mkenv.py +++ b/mkenv.py @@ -7,7 +7,7 @@ if sys.version_info[:2] < (3, 9): # Don't allow anything but Python 3.9 or higher - raise SystemError("Only Python 3.9+ is allowed") + raise SystemError('Only Python 3.9+ is allowed') MKENV_IMPL = 'mkenv_impl' HERE = os.path.dirname(os.path.abspath(__file__)) diff --git a/tasks/__init__.py b/tasks/__init__.py index 303221e3f1dc4..2d9a88e54a6b1 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -1,7 +1,8 @@ import io import os -import yaml from concurrent import futures + +import yaml from dl_conan_build_tools.tasks import conan from invoke import Collection, Exit from invoke.tasks import Task, task diff --git a/tests/test_tools.py b/tests/test_tools.py index bdb542a1efd7b..1b29ea59682aa 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -1,13 +1,13 @@ import json import os import platform -import semver import shutil import subprocess -from typing import NamedTuple, List +from typing import List, NamedTuple import dl_conan_build_tools.config import pytest +import semver from dl_conan_build_tools.tasks.conan import Config from util import recipes diff --git a/util/recipes/__init__.py b/util/recipes/__init__.py index afc36c800e73f..152ed285150e8 100644 --- a/util/recipes/__init__.py +++ b/util/recipes/__init__.py @@ -1,4 +1,5 @@ import os + import yaml From 7a8529b30ba07a8827c31ad96c74964cb1dbd262 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 15 Sep 2022 14:11:30 -0500 Subject: [PATCH 099/173] Run pre-commit checks in CI. --- Jenkinsfile | 22 ++++++++++++++++++++++ requirements.in | 1 + tasks/__init__.py | 9 +++++++++ 3 files changed, 32 insertions(+) diff --git a/Jenkinsfile b/Jenkinsfile index e63e679b81cde..bac38f4e13757 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -144,6 +144,28 @@ pipeline { } } } + stage('Pre-commit checks') { + when { + changeRequest() + } + steps { + catchError(message: 'pre-commit had errors', stageResult: 'FAILURE') { + script { + if (isUnix()) { + sh """ + . 
${ENV_LOC['noarch']}/bin/activate + invoke jenkins.pre-commit + """ + } else { + bat """ + CALL ${ENV_LOC['noarch']}\\Scripts\\activate + invoke jenkins.pre-commit + """ + } + } + } + } + } stage('Upload new or changed recipes') { when { not { diff --git a/requirements.in b/requirements.in index 454d628504cb9..71c8f4d7ba2a2 100644 --- a/requirements.in +++ b/requirements.in @@ -9,3 +9,4 @@ certifi pytest pytest-html pre-commit; sys_platform != 'aix' and sys_platform != 'sunos5' +dl-pre-commit-hooks; sys_platform != 'aix' and sys_platform != 'sunos5' diff --git a/tasks/__init__.py b/tasks/__init__.py index 2d9a88e54a6b1..d281f292a181f 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -1,3 +1,4 @@ +import importlib import io import os from concurrent import futures @@ -131,6 +132,14 @@ def upload_one_package_name(ctx, package_name, remote, upload=True): conan_tasks.add_task(conan.login) conan_tasks.add_task(conan.purge) +# Load the Jenkins tasks form dl_pre_commit_hooks, if it is available +# (which it is not on AIX or Solaris) +try: + jenkins_tasks = importlib.import_module('dl_pre_commit_hooks.tasks.jenkins') + tasks.append(jenkins_tasks) +except ModuleNotFoundError: + pass + ns = Collection(*tasks) ns.add_collection(conan_tasks, 'conan') From ca061398eab730d3381b137ef085a17bdbb9b0fa Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 15 Sep 2022 16:13:59 -0500 Subject: [PATCH 100/173] Jenkinsfile: Build tools for a PR, without uploading - If the PR introduced a new tool, it wasn't testing if the tool actually built. - Building tools is fast if there are no changes to the tools: Conan will just download the tool and run the test_package. - To be more rigorous, build the tools on all CI runs, but only upload them if the run was for a branch (env.CHANGE_ID == null). --- Jenkinsfile | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index bac38f4e13757..b18819605e221 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -309,20 +309,17 @@ pipeline { } stage('build tools') { when { - allOf { - not { - changeRequest() - } - expression { BUILD_TOOLS[NODE] } - } + expression { BUILD_TOOLS[NODE] } } steps { script { - def remote - if (env.BRANCH_NAME =~ 'master*') { - remote = 'conan-center-dl' - } else { - remote = 'conan-center-dl-staging' + def upload = "" + if (env.CHANGE_ID == null) { // i.e. not a pull request + if (env.BRANCH_NAME =~ 'master*') { + upload = '--upload-to conan-center-dl' + } else { + upload = '--upload-to conan-center-dl-staging' + } } def short_node = NODE.replace('-conan-center-index', '') def force_build @@ -333,7 +330,7 @@ pipeline { } else { force_build = '' } - def pytest_command = "pytest -k build_tool ${force_build} --upload-to ${remote} --junitxml=build-tools.xml --html=${short_node}-build-tools.html" + def pytest_command = "pytest -k build_tool ${force_build} ${upload} --junitxml=build-tools.xml --html=${short_node}-build-tools.html" if (isUnix()) { catchError(message: 'pytest had errors', stageResult: 'FAILURE') { script { From 616689dde5b5495389a419844c08f9d045db2f5f Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 15 Sep 2022 16:56:00 -0500 Subject: [PATCH 101/173] test_tools: Don't upload non-OS packages on Windows - Windows can't preserve things like X bits in packages like autoconf. This means if a Windows run happened to create the package, it won't run correctly on POSIX platforms. - So, only upload packages on Windows if they have an "os" setting. 
For packages like autoconf, one of the POSIX platforms will upload the package. --- tests/test_tools.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/test_tools.py b/tests/test_tools.py index 1b29ea59682aa..d89c185f3d344 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -152,6 +152,15 @@ def conan_env(msys_bin): class TestBuildTools(object): + def search_local_package(self, ref, conan_env, tmp_path): + search_json = tmp_path / 'search.json' + args = ['conan', 'search', f'{ref}@', '-j', str(search_json)] + print(f'Getting package information for {ref}: {" ".join(args)}') + subprocess.run(args, check=True, stderr=subprocess.STDOUT, env=conan_env) + with open(search_json) as json_file: + search_data = json.load(json_file) + return search_data + def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_tool_config, tool_recipe_folder, upload_to, force_build, tmp_path, conan_env): if prebuilt_tool.configs and prebuilt_tool_config_name not in prebuilt_tool.configs: @@ -199,6 +208,13 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_too if package == 'msys2': print(f'Not uploading {ref}, because it tends to modify itself during use.') continue + search_data = self.search_local_package(ref, conan_env, tmp_path) + settings = search_data['results'][0]['items'][0]['packages'][0]['settings'] + if platform.system() == 'Windows' and 'os' not in settings: + # Don't upload OS-universal packages from Windows; this avoids packaging + # script-based packages like autoconf without the proper mode bits + print(f'Not uploading {ref} on Windows, because it is not os-specific.') + continue args = ['conan', 'upload', '-r', upload_to, f'{ref}@', '--all', '--check'] print(f'Uploading {ref}: {" ".join(args)}') subprocess.run(args, check=True, stderr=subprocess.STDOUT, env=conan_env) From b8647b3a30db74ad92a084ce6d743339084dc8b9 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 15 Sep 2022 18:28:28 -0500 Subject: [PATCH 102/173] DL swig recipe: Bring over package_info changes from CCI recipe --- recipes/swig/dl/conanfile.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/recipes/swig/dl/conanfile.py b/recipes/swig/dl/conanfile.py index e8f0c3cc94774..e41e0dd592c39 100644 --- a/recipes/swig/dl/conanfile.py +++ b/recipes/swig/dl/conanfile.py @@ -163,10 +163,14 @@ def _module_file(self): return "conan-official-{}-targets.cmake".format(self.name) def package_info(self): + self.cpp_info.includedirs=[] self.cpp_info.names["cmake_find_package"] = "SWIG" self.cpp_info.names["cmake_find_package_multi"] = "SWIG" self.cpp_info.builddirs = [self._module_subfolder] - self.cpp_info.build_modules = [os.path.join(self._module_subfolder, self._module_file)] + self.cpp_info.build_modules["cmake_find_package"] = \ + [os.path.join(self._module_subfolder, self._module_file)] + self.cpp_info.build_modules["cmake_find_package_multi"] = \ + [os.path.join(self._module_subfolder, self._module_file)] bindir = os.path.join(self.package_folder, "bin") self.output.info("Appending PATH environment variable: {}".format(bindir)) From 92b3f489bf95e2cd1d3b08cb09490a17b7fcdbbe Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Thu, 15 Sep 2022 18:57:04 -0500 Subject: [PATCH 103/173] swig: Force linking to libdl on Linux ARM - Fixes a problem where it's not automatically figuring that out --- dlproject.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index 998b28cc464c1..87d89e3b859ab 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -350,16 +350,22 @@ config: description: RedHat Debug settings: - build_type=Debug + ToolCommon: + env: + # This is necessary to get SWIG to link on ARM; for some reason, it's not automatic + - swig:LDFLAGS=-ldl ReleaseTool: build_folder: build-release-tool description: RedHat Release include: - Release + - ToolCommon DebugTool: build_folder: build-debug-tool description: RedHat Debug include: - Debug + - ToolCommon prebuilt_tools: - cmake/[>=3.23.0] - doxygen/1.9.1 From 5f534988b205b01c2bedc07a1eae4ce67e7a9b01 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 21 Sep 2022 18:27:55 -0500 Subject: [PATCH 104/173] .editorconfig: DL python files have 120 character lines - Keep 200 character limit for files maintained by CCI. --- .editorconfig | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index 650a5a16b0ac1..1c59c73ec3a94 100644 --- a/.editorconfig +++ b/.editorconfig @@ -9,9 +9,12 @@ tab_width = 4 trim_trailing_whitespace = true indent_size = 4 -[*.py] +[{docs,linter,recipes}/**.py] max_line_length = 200 +[*.py] +max_line_length = 120 + [*.yml] tab_width = 2 indent_size = 2 From 5b0aa2361a903a891d502a84f202d2cc56b9a04c Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 21 Sep 2022 13:31:15 -0500 Subject: [PATCH 105/173] pre-commit: Check for breakpoint statements and docstrings first --- .pre-commit-config.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 23678dcd68c18..670582590b7e3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,6 +20,8 @@ repos: - id: check-yaml - id: check-added-large-files - id: double-quote-string-fixer + - id: check-docstring-first + - id: debug-statements - repo: https://gitlab.com/pycqa/flake8 rev: 3.9.2 hooks: From 24cded46be7b733728148779555a86260767aa5f Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 20 Sep 2022 15:16:21 -0500 Subject: [PATCH 106/173] merge-upstream: Task that merges in changes from upstream - Add a config object that can be updated from dlproject.yaml. Avoids repating strings all over the code, and makes it easy to change where things get merged. - Initial cut of merge-upstream task that works when there are no merge conflicts. --- tasks/__init__.py | 3 ++ tasks/merging.py | 109 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 112 insertions(+) create mode 100644 tasks/merging.py diff --git a/tasks/__init__.py b/tasks/__init__.py index d281f292a181f..71199f07e44e0 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -8,6 +8,8 @@ from invoke import Collection, Exit from invoke.tasks import Task, task +from . 
import merging + @task(help={'remote': 'remote to upload to, default conan-center-dl-staging', 'package': 'name of package to upload, can be specified more than once', @@ -126,6 +128,7 @@ def upload_one_package_name(ctx, package_name, remote, upload=True): tasks = [] tasks.extend([v for v in locals().values() if isinstance(v, Task)]) +tasks.append(merging.merge_upstream) conan_tasks = Collection() conan_tasks.add_task(conan.install_config) diff --git a/tasks/merging.py b/tasks/merging.py new file mode 100644 index 0000000000000..01febbc54b836 --- /dev/null +++ b/tasks/merging.py @@ -0,0 +1,109 @@ +import dataclasses +import platform +import shutil +from typing import Optional + +import yaml +from invoke import Exit, Task, UnexpectedExit + + +class MergeHadConflicts(Exception): + pass + + +@dataclasses.dataclass +class MergeUpstreamConfig: + """Configuration for the merge-upstream task.""" + cci_url: str = 'git@github.com:conan-io/conan-center-index.git' + cci_branch: str = 'master' + local_host: str = 'octocat.dlogics.com' + local_organization: str = 'kam' # TODO: datalogics + local_branch: str = 'develop' + local_remote_name: str = 'merge-local-remote' + pr_reviewers: list[str] = dataclasses.field(default_factory=list) + pr_assignee: Optional[str] = None + + @property + def local_url(self) -> str: + return f'git@{self.local_host}:{self.local_organization}/conan-center-index.git' + + @classmethod + def create_from_dlproject(cls): + """Create a MergeUpstreamConfig with defaults updated from dlproject.yaml""" + with open('dlproject.yaml') as dlproject_file: + dlproject = yaml.safe_load(dlproject_file) + config_data = dlproject.get('merge_upstream', dict()) + return dataclasses.replace(cls(), **config_data) + + +@Task +def merge_upstream(ctx): + '''Merge updated recipes and other files from conan-io/conan-center-index. + + If the merge does not succeed, it will open a pull request against the destination + repository, assigning the PR, and requesting reviewers. + ''' + config = MergeUpstreamConfig.create_from_dlproject() + _check_preconditions(ctx, config) + print(f'Configuration: {config}') + + _update_remote(ctx, config) + _update_branch(ctx, config) + + # Try to merge from CCI + try: + _merge_and_push(ctx, config) + except MergeHadConflicts: + ctx.run('git merge --abort') + raise Exit('There were merge conflicts!') + + +def _check_preconditions(ctx, config): + """Check the preconditions for the merge-upstream task.""" + if platform.system() not in ['Darwin', 'Linux']: + raise Exit('Run this task on macOS or Linux') + # https://stackoverflow.com/a/2659808/11996393 + result = ctx.run('git diff-index --quiet HEAD --', warn=True, hide='stdout') + if not result.ok: + raise Exit('The local worktree has uncommitted changes') + if not shutil.which('gh'): + raise Exit('This task requires the GitHub CLI. 
See installation instructions at https://cli.github.com/') + result = ctx.run(f'gh auth status --hostname {config.local_host}', warn=True) + if not result.ok: + raise Exit(f'GitHub CLI must be logged in to {config.local_host}, or a token supplied in GH_TOKEN; ' + f'see https://cli.github.com/manual/gh_auth_login') + + +def _update_remote(ctx, config): + '''Make merge-local-remote point to the repo we're going to merge into + This also makes it work in CI, where there might not be an "upstream"''' + result = ctx.run(f'git remote get-url {config.local_remote_name}', hide='both', warn=True, pty=False) + if result.ok and result.stdout.strip() != '': + ctx.run(f'git remote set-url {config.local_remote_name} {config.local_url}') + else: + ctx.run(f'git remote add {config.local_remote_name} {config.local_url}') + ctx.run(f'git remote update {config.local_remote_name}') + + +def _update_branch(ctx, config): + """Check out and update branch""" + result = ctx.run(f'git rev-parse --quiet --verify {config.local_branch}', warn=True, hide='stdout') + if result.ok: + ctx.run(f'git checkout {config.local_branch}') + ctx.run(f'git reset --hard {config.local_remote_name}/{config.local_branch}') + else: + ctx.run(f'git checkout --track {config.local_remote_name}/{config.local_branch}') + + +def _merge_and_push(ctx, config): + """Attempt to merge upstream branch and push it to the local repo.""" + merge_result = ctx.run(f'git pull --no-ff --no-edit {config.cci_url} {config.cci_branch}', warn=True) + if merge_result.ok: + ctx.run(f'git push {config.local_remote_name} {config.local_branch}') + else: + # Check for merge conflicts: https://stackoverflow.com/a/27991004/11996393 + result = ctx.run('git ls-files -u', hide='stdout', warn=True, pty=False) + if result.ok and result.stdout.strip(): + raise MergeHadConflicts + # Something else went wrong with the merge + raise UnexpectedExit(merge_result) From a03aaa1b95525ebf7b62abbaf3450cc75ad86e2f Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 21 Sep 2022 16:05:29 -0500 Subject: [PATCH 107/173] merge-upstream: If there's a merge conflict, create a pull request - Create a branch and set it to the tip of conan-io/master - Form a PR description with information about the conflicting files, and which commits are involved in the conflict. - Use GitHub CLI to create a pull request, setting the assignee, reviewers, and label. 
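The conflict summary in the PR body relies on two pieces of git plumbing: "git diff --diff-filter=U" to list the paths that are still unmerged, and "git log --merge" limited to one side of the merge to show which commits touched them. As a minimal standalone sketch of that data gathering, assuming it runs in a worktree that is currently in a conflicted merge state (the helper names here are illustrative, not the task's API):

import subprocess


def run_git(args):
    # No PTY and no color, so the output is safe to paste into a PR body.
    return subprocess.run(['git', '--no-pager', *args], check=True,
                          capture_output=True, text=True).stdout


def conflict_summary():
    files = run_git(['diff', '--no-color', '--name-only', '--diff-filter=U'])
    upstream = run_git(['log', '--no-color', '--merge', 'HEAD..MERGE_HEAD',
                        '--pretty=format:%h - %s (%cr) <%an>'])
    local = run_git(['log', '--no-color', '--merge', 'MERGE_HEAD..HEAD',
                     '--pretty=format:%h - %s (%cr) <%an>'])
    return (f'Conflicting files:\n{files}\n'
            f'Upstream commits:\n{upstream}\n\n'
            f'Local commits:\n{local}\n')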
--- tasks/merging.py | 91 +++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 87 insertions(+), 4 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 01febbc54b836..96e146ad2ce24 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -1,6 +1,10 @@ import dataclasses +import getpass import platform +import shlex import shutil +import tempfile +import textwrap from typing import Optional import yaml @@ -20,13 +24,20 @@ class MergeUpstreamConfig: local_organization: str = 'kam' # TODO: datalogics local_branch: str = 'develop' local_remote_name: str = 'merge-local-remote' + local_fork: str = getpass.getuser() + merge_branch_name: str = 'merge-from-conan-io' pr_reviewers: list[str] = dataclasses.field(default_factory=list) pr_assignee: Optional[str] = None + pr_labels: list[str] = dataclasses.field(default_factory=lambda: ['from-conan-io']) @property def local_url(self) -> str: return f'git@{self.local_host}:{self.local_organization}/conan-center-index.git' + @property + def fork_url(self) -> str: + return f'git@{self.local_host}:{self.local_fork}/conan-center-index.git' + @classmethod def create_from_dlproject(cls): """Create a MergeUpstreamConfig with defaults updated from dlproject.yaml""" @@ -54,8 +65,11 @@ def merge_upstream(ctx): try: _merge_and_push(ctx, config) except MergeHadConflicts: - ctx.run('git merge --abort') - raise Exit('There were merge conflicts!') + try: + pr_body = _form_pr_body(ctx, config) + finally: + ctx.run('git merge --abort') + _create_pull_request(ctx, config, pr_body) def _check_preconditions(ctx, config): @@ -85,10 +99,15 @@ def _update_remote(ctx, config): ctx.run(f'git remote update {config.local_remote_name}') +def _branch_exists(ctx, branch): + """Return true if the given branch exists locally""" + result = ctx.run(f'git rev-parse --quiet --verify {branch}', warn=True, hide='stdout') + return result.ok + + def _update_branch(ctx, config): """Check out and update branch""" - result = ctx.run(f'git rev-parse --quiet --verify {config.local_branch}', warn=True, hide='stdout') - if result.ok: + if _branch_exists(ctx, config.local_branch): ctx.run(f'git checkout {config.local_branch}') ctx.run(f'git reset --hard {config.local_remote_name}/{config.local_branch}') else: @@ -107,3 +126,67 @@ def _merge_and_push(ctx, config): raise MergeHadConflicts # Something else went wrong with the merge raise UnexpectedExit(merge_result) + + +def _form_pr_body(ctx, config): + """Create a body for the pull request summarizing information about the merge conflicts.""" + # Note: pty=False to enforce not using a PTY; that makes sure that Git doesn't + # see a terminal and put escapes into the output we want to format. + conflict_files_result = ctx.run('git diff --no-color --name-only --diff-filter=U', hide='stdout', pty=False) + commits_on_upstream_result = ctx.run( + 'git log --no-color --merge HEAD..MERGE_HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', hide='stdout', pty=False) + commits_local_result = ctx.run( + 'git log --no-color --merge MERGE_HEAD..HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', hide='stdout', pty=False) + body = textwrap.dedent(''' + Merge changes from conan-io/conan-center-index into {local_branch}. + + This PR was automatically created due to merge conflicts in the automated merge. 
+ + ## Conflict information + + ### List of conflict files + + {conflict_files} + + ### Commits for conflict files on `conan-io` + + {commits_on_upstream} + + ### Commits for conflict files, local + + {commits_local} + ''').format(local_branch=config.local_branch, + conflict_files=conflict_files_result.stdout, + commits_on_upstream=commits_on_upstream_result.stdout, + commits_local=commits_local_result.stdout) + + return body + + +def _create_pull_request(ctx, config, pr_body): + """Create a pull request to merge in the data from upstream.""" + # Get on a merge branch + ctx.run(f'git fetch {config.cci_url} {config.cci_branch}') + if _branch_exists(ctx, config.merge_branch_name): + ctx.run(f'git checkout {config.merge_branch_name}') + ctx.run('git reset --hard FETCH_HEAD') + else: + ctx.run(f'git checkout -b {config.merge_branch_name} FETCH_HEAD') + + # TODO: handle PR already exists + + ctx.run(f'git push --force {config.fork_url} {config.merge_branch_name}') + with tempfile.NamedTemporaryFile(prefix='pr-body', mode='w+', encoding='utf-8') as pr_body_file: + pr_body_file.write(pr_body) + # Before passing the filename to gh pr create, flush it so all the data is on the disk + pr_body_file.flush() + + title = shlex.quote('Merge in changes from conan-io/master') + labels = f' --label {",".join(config.pr_labels)}' if config.pr_labels else '' + assignee = f' --assignee {config.pr_assignee}' if config.pr_assignee else '' + reviewer = f' --reviewer {",".join(config.pr_reviewers)}' if config.pr_reviewers else '' + ctx.run(f'gh pr create --repo {config.local_host}/{config.local_organization}/conan-center-index ' + f'--base {config.local_branch} ' + f'--title {title} --body-file {pr_body_file.name} ' + f'--head {config.local_fork}:{config.merge_branch_name}' + f'{labels}{assignee}{reviewer}') From 16a82c77e6a3b22a7b0c9b2f8997d915637e972a Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 21 Sep 2022 16:23:06 -0500 Subject: [PATCH 108/173] merge-upstream: preserve the branch and commit across the operation - If the operation succeeds or fails, put the current branch and current commit back where they were. - This could be important, for instance, when running in CI, if any more work is to be done after this task is done. 
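The underlying pattern is to record the symbolic branch name and the exact commit before doing anything, then restore whichever one applies in a finally block. A stripped-down sketch of that context manager using plain subprocess instead of Invoke, assuming the working tree is otherwise clean (as the task's preconditions already require):

import contextlib
import subprocess


def git(*args):
    return subprocess.run(['git', *args], check=True,
                          capture_output=True, text=True).stdout.strip()


@contextlib.contextmanager
def preserving_branch_and_commit():
    branch = git('rev-parse', '--abbrev-ref', 'HEAD')   # reports 'HEAD' when detached
    commit = git('rev-parse', 'HEAD')
    try:
        yield
    finally:
        if branch == 'HEAD':
            git('checkout', '--quiet', '--detach', commit)
        else:
            git('checkout', '--quiet', '--force', branch)
            git('reset', '--hard', commit)

Anything the body does, whether checking out other commits, merging, or failing partway through, is unwound on exit, which is what makes the task safe to run unattended in CI.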
--- tasks/merging.py | 40 ++++++++++++++++++++++++++++++---------- 1 file changed, 30 insertions(+), 10 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 96e146ad2ce24..90f16574de444 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -1,3 +1,4 @@ +import contextlib import dataclasses import getpass import platform @@ -58,18 +59,37 @@ def merge_upstream(ctx): _check_preconditions(ctx, config) print(f'Configuration: {config}') - _update_remote(ctx, config) - _update_branch(ctx, config) + with _preserving_branch_and_commit(ctx): + _update_remote(ctx, config) + _update_branch(ctx, config) - # Try to merge from CCI - try: - _merge_and_push(ctx, config) - except MergeHadConflicts: + # Try to merge from CCI try: - pr_body = _form_pr_body(ctx, config) - finally: - ctx.run('git merge --abort') - _create_pull_request(ctx, config, pr_body) + _merge_and_push(ctx, config) + except MergeHadConflicts: + try: + pr_body = _form_pr_body(ctx, config) + finally: + ctx.run('git merge --abort') + _create_pull_request(ctx, config, pr_body) + + +@contextlib.contextmanager +def _preserving_branch_and_commit(ctx): + """Context manager to run complicated sets of Git commands, while returning + to the original branch and placing that branch back onto the original commit.""" + result = ctx.run('git rev-parse --abbrev-ref HEAD', hide='stdout') + branch = result.stdout.strip() + result = ctx.run('git rev-parse HEAD', hide='stdout') + commit = result.stdout.strip() + try: + yield + finally: + if branch == 'HEAD': + ctx.run(f'git checkout --detach {commit}') + else: + ctx.run(f'git checkout --force {branch}') + ctx.run(f'git reset --hard {commit}') def _check_preconditions(ctx, config): From 6d834939ed3303824168aa32951ee1d786e60e87 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 21 Sep 2022 18:26:45 -0500 Subject: [PATCH 109/173] merge-upstream task: Update existing pull request If there is an existing pull request for merge conflicts, just update the body. In any case, the branch is force-pushed. 
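Making the task idempotent comes down to asking the GitHub CLI for open pull requests whose head branch matches the merge branch, then editing that PR rather than opening a duplicate. A rough sketch of the decision, with the repository path and branch names as placeholder assumptions and an already authenticated gh on PATH:

import json
import subprocess

REPO = 'github.example.com/myorg/conan-center-index'   # placeholder repository
HEAD_BRANCH = 'merge-from-conan-io'                     # placeholder branch name


def existing_merge_prs():
    out = subprocess.run(
        ['gh', 'pr', 'list', '--repo', REPO, '--json', 'number,url,headRefName'],
        check=True, capture_output=True, text=True).stdout.strip()
    prs = json.loads(out) if out else []
    return [pr for pr in prs if pr['headRefName'] == HEAD_BRANCH]


def create_or_update_pr(body_file):
    prs = existing_merge_prs()
    if prs:
        # Reuse the open PR; the branch itself was already force-pushed.
        subprocess.run(['gh', 'pr', 'edit', prs[0]['url'], '--repo', REPO,
                        '--body-file', body_file], check=True)
    else:
        subprocess.run(['gh', 'pr', 'create', '--repo', REPO, '--base', 'develop',
                        '--title', 'Merge in changes from conan-io/master',
                        '--body-file', body_file], check=True)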
--- tasks/merging.py | 39 ++++++++++++++++++++++++++++----------- 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 90f16574de444..a2a5d8e8f79af 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -1,6 +1,7 @@ import contextlib import dataclasses import getpass +import json import platform import shlex import shutil @@ -193,20 +194,36 @@ def _create_pull_request(ctx, config, pr_body): else: ctx.run(f'git checkout -b {config.merge_branch_name} FETCH_HEAD') - # TODO: handle PR already exists - ctx.run(f'git push --force {config.fork_url} {config.merge_branch_name}') with tempfile.NamedTemporaryFile(prefix='pr-body', mode='w+', encoding='utf-8') as pr_body_file: pr_body_file.write(pr_body) # Before passing the filename to gh pr create, flush it so all the data is on the disk pr_body_file.flush() - title = shlex.quote('Merge in changes from conan-io/master') - labels = f' --label {",".join(config.pr_labels)}' if config.pr_labels else '' - assignee = f' --assignee {config.pr_assignee}' if config.pr_assignee else '' - reviewer = f' --reviewer {",".join(config.pr_reviewers)}' if config.pr_reviewers else '' - ctx.run(f'gh pr create --repo {config.local_host}/{config.local_organization}/conan-center-index ' - f'--base {config.local_branch} ' - f'--title {title} --body-file {pr_body_file.name} ' - f'--head {config.local_fork}:{config.merge_branch_name}' - f'{labels}{assignee}{reviewer}') + existing_prs = _list_merge_pull_requests(ctx, config) + if existing_prs: + assert len(existing_prs) == 1 + url = existing_prs[0]['url'] + ctx.run(f'gh pr edit --repo {config.local_host}/{config.local_organization}/conan-center-index ' + f'{url} --body-file {pr_body_file.name}') + else: + title = shlex.quote('Merge in changes from conan-io/master') + labels = f' --label {",".join(config.pr_labels)}' if config.pr_labels else '' + assignee = f' --assignee {config.pr_assignee}' if config.pr_assignee else '' + reviewer = f' --reviewer {",".join(config.pr_reviewers)}' if config.pr_reviewers else '' + ctx.run(f'gh pr create --repo {config.local_host}/{config.local_organization}/conan-center-index ' + f'--base {config.local_branch} ' + f'--title {title} --body-file {pr_body_file.name} ' + f'--head {config.local_fork}:{config.merge_branch_name}' + f'{labels}{assignee}{reviewer}') + + +def _list_merge_pull_requests(ctx, config): + result = ctx.run(f'gh pr list --repo {config.local_host}/{config.local_organization}/conan-center-index ' + '--json number,url,author,headRefName,headRepositoryOwner ', + hide='stdout', + pty=False) + out = result.stdout.strip() + requests = json.loads(out) if out else [] + return [r for r in requests if + r['headRefName'] == config.merge_branch_name and r['headRepositoryOwner']['login'] == config.local_fork] From a8379b68535108ae61f9a22dab14b1aeb1cecbe8 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 22 Sep 2022 12:00:22 -0500 Subject: [PATCH 110/173] merge-upstream task: Write the status to .merge_upstream_status - The Jenkinsfile can check this file to decide what to do based on the status. For instance, if the branch was merged, it can successfully end the job, because the updated branch will trigger with new commits. 
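The status file is a one-word contract between the task and the pipeline: the task writes the name of a MergeStatus member, and the Jenkinsfile reads it back to decide what to do next. A small sketch of the writer side, assuming the same file name:

from enum import Enum, auto
from pathlib import Path

STATUS_FILE = Path('.merge-upstream-status')


class MergeStatus(Enum):
    UP_TO_DATE = auto()     # nothing to merge, branch already current
    MERGED = auto()         # merge pushed; a new build will pick it up
    PULL_REQUEST = auto()   # conflicts; a PR was opened or updated instead


def clear_status():
    # Remove the file up front so a crash cannot leave a stale status behind.
    STATUS_FILE.unlink(missing_ok=True)


def write_status(status: MergeStatus):
    STATUS_FILE.write_text(status.name)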
--- .gitignore | 3 +++ tasks/merging.py | 46 ++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 47 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index a0a1ea43e1c9c..55864f394a9f7 100644 --- a/.gitignore +++ b/.gitignore @@ -453,3 +453,6 @@ requirements.txt /test-report.html /.mkenv + +# outputs from invoke tasks +/.merge-upstream-status diff --git a/tasks/merging.py b/tasks/merging.py index a2a5d8e8f79af..88ba671b4c572 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -2,21 +2,37 @@ import dataclasses import getpass import json +import os import platform import shlex import shutil import tempfile import textwrap +from enum import Enum, auto from typing import Optional import yaml from invoke import Exit, Task, UnexpectedExit +# Name of a status file +MERGE_UPSTREAM_STATUS = '.merge-upstream-status' + class MergeHadConflicts(Exception): pass +class MergeStatus(Enum): + """The status of the attempted merge. The name of this status will be placed into the + file .merge-upstream-status.""" + UP_TO_DATE = auto() + """The branch was already up to date.""" + MERGED = auto() + """The branch was merged (and pushed).""" + PULL_REQUEST = auto() + """A pull request was necessary.""" + + @dataclasses.dataclass class MergeUpstreamConfig: """Configuration for the merge-upstream task.""" @@ -61,18 +77,34 @@ def merge_upstream(ctx): print(f'Configuration: {config}') with _preserving_branch_and_commit(ctx): + _remove_status_file() + _update_remote(ctx, config) _update_branch(ctx, config) # Try to merge from CCI try: - _merge_and_push(ctx, config) + _write_status_file(_merge_and_push(ctx, config)) except MergeHadConflicts: try: pr_body = _form_pr_body(ctx, config) finally: ctx.run('git merge --abort') _create_pull_request(ctx, config, pr_body) + _write_status_file(MergeStatus.PULL_REQUEST) + + +def _remove_status_file(): + try: + os.remove(MERGE_UPSTREAM_STATUS) + except FileNotFoundError: + pass + + +def _write_status_file(merge_status): + """Write the merge status to the status file.""" + with open(MERGE_UPSTREAM_STATUS, 'w') as merge_upstream_status: + merge_upstream_status.write(merge_status.name) @contextlib.contextmanager @@ -139,7 +171,17 @@ def _merge_and_push(ctx, config): """Attempt to merge upstream branch and push it to the local repo.""" merge_result = ctx.run(f'git pull --no-ff --no-edit {config.cci_url} {config.cci_branch}', warn=True) if merge_result.ok: - ctx.run(f'git push {config.local_remote_name} {config.local_branch}') + # Check to see if a push is necessary by counting the number of revisions + # that differ between current head and the push destination. + count_revs_result = ctx.run( + f'git rev-list {config.local_remote_name}/{config.local_branch}..HEAD --count', + hide='stdout', pty=False) + needs_push = int(count_revs_result.stdout) != 0 + if needs_push: + ctx.run(f'git push {config.local_remote_name} {config.local_branch}') + return MergeStatus.MERGED + else: + return MergeStatus.UP_TO_DATE else: # Check for merge conflicts: https://stackoverflow.com/a/27991004/11996393 result = ctx.run('git ls-files -u', hide='stdout', warn=True, pty=False) From 26e03b45fdda1e435dcc01fdc1438cb706260da1 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 22 Sep 2022 13:57:16 -0500 Subject: [PATCH 111/173] Jenkinsfile: Merge upstream commits from conan-io/conan-center-index - If the merge was successful, skip the rest of the job, because the updated develop branch will trigger another build. 
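The pipeline stage in the diff below is Groovy, but the control flow it adds is small: activate the environment, run the task, read the status file it leaves behind, and short-circuit the remaining stages when the status is MERGED. The same flow, sketched in Python purely for clarity rather than as pipeline code:

import subprocess
from pathlib import Path


def merge_upstream_and_decide():
    # Run the Invoke task, which writes .merge-upstream-status on success.
    subprocess.run(['invoke', 'merge-upstream'], check=True)
    status = Path('.merge-upstream-status').read_text().strip()
    print(f'merge-upstream status is {status}')
    # A MERGED result means the pushed branch will trigger a fresh build,
    # so the rest of this run can be skipped.
    return status == 'MERGED'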
--- Jenkinsfile | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index b18819605e221..efd72b110e118 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -11,6 +11,7 @@ def BUILD_TOOLS=[ 'sparcsolaris-conan-center-index': true, 'windows-conan-center-index': true, ] +def skipBuilding = false pipeline { parameters { choice(name: 'PLATFORM_FILTER', @@ -166,10 +167,29 @@ pipeline { } } } + stage('Merge from upstream') { + when { + expression { env.BRANCH_NAME =~ 'develop*' } + } + steps { + script { + sh """ + . ${ENV_LOC['noarch']}/bin/activate + invoke merge-upstream + """ + def merge_upstream_status = readFile(file: '.merge-upstream-status') + echo "merge-upstream status is ${merge_upstream_status}" + // If the status of the upstream merge is MERGED, then don't do anything + // else; Jenkins will notice the branch changed and re-run. + skipBuilding = merge_upstream_status == 'MERGED' + } + } + } stage('Upload new or changed recipes') { when { - not { - changeRequest() + allOf { + expression { !skipBuilding } + not { changeRequest() } } } steps { @@ -206,8 +226,8 @@ pipeline { } } when { anyOf { - expression { params.PLATFORM_FILTER == 'all' } - expression { params.PLATFORM_FILTER == env.NODE } + expression { params.PLATFORM_FILTER == 'all' && !skipBuilding } + expression { params.PLATFORM_FILTER == env.NODE && !skipBuilding } } } axes { axis { From 908d81f56eec06ee3c2ef4169ec8df573255a4cd Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 22 Sep 2022 18:12:06 -0500 Subject: [PATCH 112/173] Jenkinsfile: Add credentials for the 'gh' command --- Jenkinsfile | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/Jenkinsfile b/Jenkinsfile index efd72b110e118..29f5d6298380a 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -55,6 +55,13 @@ pipeline { LIBPATH = "randomval" DL_CONAN_CENTER_INDEX = 'all' TOX_TESTENV_PASSENV = 'CONAN_USER_HOME CONAN_NON_INTERACTIVE CONAN_PRINT_RUN_COMMANDS CONAN_LOGIN_USERNAME CONAN_PASSWORD TRACKFILEACCESS MSBUILDDISABLENODEREUSE' + // Create a personal access token on the devauto account on Octocat with repo and read:org access, and use it to + // create a secret text credential with the name github-cli-devauto-octocat-access-token + // See: https://cli.github.com/manual/gh_auth_login + GH_ENTERPRISE_TOKEN = credentials('github-cli-devauto-octocat-access-token') + // When using the token above 'gh help environment' says to also set GH_HOST + // https://cli.github.com/manual/gh_help_environment + GH_HOST = 'octocat.dlogics.com' } stages { stage('Clean/reset Git checkout for release') { From 0780050c94ef4cbe7c5551e31ecaa5c783abeff5 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 22 Sep 2022 18:57:28 -0500 Subject: [PATCH 113/173] Jenkinsfile: Merge the upstream on timer-triggered builds - Merge from upstream when triggered by a timer or the MERGE_UPSTREAM parameter. - Trigger branch builds on a nightly basis. --- Jenkinsfile | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 29f5d6298380a..13b0d8f4bb745 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -31,6 +31,8 @@ pipeline { description: 'Force build of all tools. By default, Conan will download the tool and test it if it\'s already built' booleanParam name: 'FORCE_TOOL_BUILD_WITH_REQUIREMENTS', defaultValue: false, description: 'Force build of all tools, and their requirements. 
By default, Conan will download the tool and test it if it\'s already built' + booleanParam name: 'MERGE_UPSTREAM', defaultValue: false, + description: 'If building develop branch, merge changes from upstream, i.e., conan-io/conan-center-index' } options{ buildDiscarder logRotator(artifactDaysToKeepStr: '4', artifactNumToKeepStr: '10', daysToKeepStr: '7', numToKeepStr: '10') @@ -42,6 +44,11 @@ pipeline { customWorkspace "workspace/${JOB_NAME.replaceAll('/','_')}_noarch/" } } + triggers { + // From the doc: @midnight actually means some time between 12:00 AM and 2:59 AM. + // This gives us automatic spreading out of jobs, so they don't cause load spikes. + cron('@midnight') + } environment { CONAN_USER_HOME = "${WORKSPACE}" CONAN_NON_INTERACTIVE = '1' @@ -176,7 +183,10 @@ pipeline { } stage('Merge from upstream') { when { - expression { env.BRANCH_NAME =~ 'develop*' } + expression { + // Merge upstream on develop-prefixed branches if triggered by timer, or forced by parameter + env.BRANCH_NAME =~ 'develop*' && (currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause') || params.MERGE_UPSTREAM) + } } steps { script { From 507b545c723840f47e95e3ef5cdf60c515660490 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 23 Sep 2022 14:24:01 -0500 Subject: [PATCH 114/173] merge-upstream: Clean up the remote the task creates - Use _merge_remote as a context manager, cleaning up the remote it creates if anything fails. - This cleans up the Git repository in Jenkins jobs. With this and _preserving_branch_and_commit together, the merge-upstream task leaves the local Git repository the way it found it. - Side note: It pulls directly from conan-io/conan-center-index without using a remote, so there isn't anything left from that either. --- tasks/merging.py | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 88ba671b4c572..0115064f9fd26 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -76,10 +76,10 @@ def merge_upstream(ctx): _check_preconditions(ctx, config) print(f'Configuration: {config}') - with _preserving_branch_and_commit(ctx): - _remove_status_file() - - _update_remote(ctx, config) + # if anything fails past this point, the missing status file will also abort the Jenkins run. + _remove_status_file() + # Nested context handlers; see https://docs.python.org/3.10/reference/compound_stmts.html#the-with-statement + with _preserving_branch_and_commit(ctx), _merge_remote(ctx, config): _update_branch(ctx, config) # Try to merge from CCI @@ -141,15 +141,22 @@ def _check_preconditions(ctx, config): f'see https://cli.github.com/manual/gh_auth_login') -def _update_remote(ctx, config): +@contextlib.contextmanager +def _merge_remote(ctx, config): '''Make merge-local-remote point to the repo we're going to merge into - This also makes it work in CI, where there might not be an "upstream"''' - result = ctx.run(f'git remote get-url {config.local_remote_name}', hide='both', warn=True, pty=False) - if result.ok and result.stdout.strip() != '': - ctx.run(f'git remote set-url {config.local_remote_name} {config.local_url}') - else: - ctx.run(f'git remote add {config.local_remote_name} {config.local_url}') - ctx.run(f'git remote update {config.local_remote_name}') + This also makes it work in CI, where there might not be an "upstream". 
+ + Used as a context manager, cleans up the remote when done.''' + try: + result = ctx.run(f'git remote get-url {config.local_remote_name}', hide='both', warn=True, pty=False) + if result.ok and result.stdout.strip() != '': + ctx.run(f'git remote set-url {config.local_remote_name} {config.local_url}') + else: + ctx.run(f'git remote add {config.local_remote_name} {config.local_url}') + ctx.run(f'git remote update {config.local_remote_name}') + yield + finally: + ctx.run(f'git remote remove {config.local_remote_name}', warn=True, hide='both') def _branch_exists(ctx, branch): From 11de7133048214dc61719188d9c88b663c46435f Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 23 Sep 2022 15:30:23 -0500 Subject: [PATCH 115/173] merge-upstream: Make the PR branch without a local copy In the service of leaving the local repo as unchanged as possible, directly push the fetched branch from conan-io/conan-center-index to the local repository, without creating a local branch. --- tasks/merging.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 0115064f9fd26..d5f560cdf8674 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -235,15 +235,11 @@ def _form_pr_body(ctx, config): def _create_pull_request(ctx, config, pr_body): """Create a pull request to merge in the data from upstream.""" - # Get on a merge branch + # Get the upstream ref ctx.run(f'git fetch {config.cci_url} {config.cci_branch}') - if _branch_exists(ctx, config.merge_branch_name): - ctx.run(f'git checkout {config.merge_branch_name}') - ctx.run('git reset --hard FETCH_HEAD') - else: - ctx.run(f'git checkout -b {config.merge_branch_name} FETCH_HEAD') - - ctx.run(f'git push --force {config.fork_url} {config.merge_branch_name}') + # Push it to the fork the PR will be on. Have to include refs/heads in case the branch didn't + # already exist + ctx.run(f'git push --force {config.fork_url} FETCH_HEAD:refs/heads/{config.merge_branch_name}') with tempfile.NamedTemporaryFile(prefix='pr-body', mode='w+', encoding='utf-8') as pr_body_file: pr_body_file.write(pr_body) # Before passing the filename to gh pr create, flush it so all the data is on the disk From fb5c25268b449732be55d55ef0c4ad86d3883093 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 23 Sep 2022 15:50:14 -0500 Subject: [PATCH 116/173] merge-upstream: Do the merging without using a local branch - Instead of making a local branch that has to be cleaned up, just do the merging on a detached head and push that up to the repo. - Fewer modifications to the local editor state. - Do some of the 'git checkout' commands with '--quiet' so that it doesn't complain about leaving commits behind. 
--- tasks/merging.py | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index d5f560cdf8674..e76e45e0becf9 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -80,8 +80,6 @@ def merge_upstream(ctx): _remove_status_file() # Nested context handlers; see https://docs.python.org/3.10/reference/compound_stmts.html#the-with-statement with _preserving_branch_and_commit(ctx), _merge_remote(ctx, config): - _update_branch(ctx, config) - # Try to merge from CCI try: _write_status_file(_merge_and_push(ctx, config)) @@ -119,9 +117,10 @@ def _preserving_branch_and_commit(ctx): yield finally: if branch == 'HEAD': - ctx.run(f'git checkout --detach {commit}') + ctx.run(f'git checkout --quiet --detach {commit}') + ctx.run('git reset --hard HEAD') else: - ctx.run(f'git checkout --force {branch}') + ctx.run(f'git checkout --quiet --force {branch}') ctx.run(f'git reset --hard {commit}') @@ -165,17 +164,9 @@ def _branch_exists(ctx, branch): return result.ok -def _update_branch(ctx, config): - """Check out and update branch""" - if _branch_exists(ctx, config.local_branch): - ctx.run(f'git checkout {config.local_branch}') - ctx.run(f'git reset --hard {config.local_remote_name}/{config.local_branch}') - else: - ctx.run(f'git checkout --track {config.local_remote_name}/{config.local_branch}') - - def _merge_and_push(ctx, config): """Attempt to merge upstream branch and push it to the local repo.""" + ctx.run(f'git checkout --quiet --detach {config.local_remote_name}/{config.local_branch}') merge_result = ctx.run(f'git pull --no-ff --no-edit {config.cci_url} {config.cci_branch}', warn=True) if merge_result.ok: # Check to see if a push is necessary by counting the number of revisions @@ -185,7 +176,7 @@ def _merge_and_push(ctx, config): hide='stdout', pty=False) needs_push = int(count_revs_result.stdout) != 0 if needs_push: - ctx.run(f'git push {config.local_remote_name} {config.local_branch}') + ctx.run(f'git push {config.local_remote_name} HEAD:refs/heads/{config.local_branch}') return MergeStatus.MERGED else: return MergeStatus.UP_TO_DATE From 49dc7ecc798818aace2236077ea553f0fa6b2e4f Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 23 Sep 2022 17:51:13 -0500 Subject: [PATCH 117/173] merge-upstream: Add logging to explain the actions it's taking --- requirements.in | 2 ++ tasks/__init__.py | 6 ++++++ tasks/merging.py | 23 ++++++++++++++++++++++- 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/requirements.in b/requirements.in index 71c8f4d7ba2a2..12d7d1aecead0 100644 --- a/requirements.in +++ b/requirements.in @@ -10,3 +10,5 @@ pytest pytest-html pre-commit; sys_platform != 'aix' and sys_platform != 'sunos5' dl-pre-commit-hooks; sys_platform != 'aix' and sys_platform != 'sunos5' +coloredlogs +colorama; sys_platform == 'win32' diff --git a/tasks/__init__.py b/tasks/__init__.py index 71199f07e44e0..6c3285699f1e8 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -3,6 +3,7 @@ import os from concurrent import futures +import coloredlogs import yaml from dl_conan_build_tools.tasks import conan from invoke import Collection, Exit @@ -10,6 +11,11 @@ from . 
import merging +TASKS_FIELD_STYLES = coloredlogs.DEFAULT_FIELD_STYLES.copy() +TASKS_FIELD_STYLES.update(levelname=dict(color='magenta', bold=True)) +coloredlogs.install(field_styles=TASKS_FIELD_STYLES, + fmt='%(asctime)s %(funcName)s %(name)s[%(process)d] %(levelname)s %(message)s') + @task(help={'remote': 'remote to upload to, default conan-center-dl-staging', 'package': 'name of package to upload, can be specified more than once', diff --git a/tasks/merging.py b/tasks/merging.py index e76e45e0becf9..da937766791c1 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -2,6 +2,7 @@ import dataclasses import getpass import json +import logging import os import platform import shlex @@ -17,6 +18,9 @@ # Name of a status file MERGE_UPSTREAM_STATUS = '.merge-upstream-status' +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + class MergeHadConflicts(Exception): pass @@ -74,7 +78,7 @@ def merge_upstream(ctx): ''' config = MergeUpstreamConfig.create_from_dlproject() _check_preconditions(ctx, config) - print(f'Configuration: {config}') + logger.info(f'merge-upstream configuration: {config}') # if anything fails past this point, the missing status file will also abort the Jenkins run. _remove_status_file() @@ -101,6 +105,7 @@ def _remove_status_file(): def _write_status_file(merge_status): """Write the merge status to the status file.""" + logger.info(f'Write status {merge_status.name} to file {MERGE_UPSTREAM_STATUS}') with open(MERGE_UPSTREAM_STATUS, 'w') as merge_upstream_status: merge_upstream_status.write(merge_status.name) @@ -109,6 +114,7 @@ def _write_status_file(merge_status): def _preserving_branch_and_commit(ctx): """Context manager to run complicated sets of Git commands, while returning to the original branch and placing that branch back onto the original commit.""" + logger.info('Save current checkout state...') result = ctx.run('git rev-parse --abbrev-ref HEAD', hide='stdout') branch = result.stdout.strip() result = ctx.run('git rev-parse HEAD', hide='stdout') @@ -116,6 +122,7 @@ def _preserving_branch_and_commit(ctx): try: yield finally: + logger.info('Restore checkout state...') if branch == 'HEAD': ctx.run(f'git checkout --quiet --detach {commit}') ctx.run('git reset --hard HEAD') @@ -126,6 +133,7 @@ def _preserving_branch_and_commit(ctx): def _check_preconditions(ctx, config): """Check the preconditions for the merge-upstream task.""" + logger.info('Check preconditions...') if platform.system() not in ['Darwin', 'Linux']: raise Exit('Run this task on macOS or Linux') # https://stackoverflow.com/a/2659808/11996393 @@ -147,6 +155,7 @@ def _merge_remote(ctx, config): Used as a context manager, cleans up the remote when done.''' try: + logger.info('Create remote to refer to destination fork...') result = ctx.run(f'git remote get-url {config.local_remote_name}', hide='both', warn=True, pty=False) if result.ok and result.stdout.strip() != '': ctx.run(f'git remote set-url {config.local_remote_name} {config.local_url}') @@ -155,18 +164,22 @@ def _merge_remote(ctx, config): ctx.run(f'git remote update {config.local_remote_name}') yield finally: + logger.info('Remove remote...') ctx.run(f'git remote remove {config.local_remote_name}', warn=True, hide='both') def _branch_exists(ctx, branch): """Return true if the given branch exists locally""" + logger.info(f'Check if {branch} exists...') result = ctx.run(f'git rev-parse --quiet --verify {branch}', warn=True, hide='stdout') return result.ok def _merge_and_push(ctx, config): """Attempt to merge upstream branch and push it 
to the local repo.""" + logger.info(f'Check out local {config.local_branch} branch...') ctx.run(f'git checkout --quiet --detach {config.local_remote_name}/{config.local_branch}') + logger.info('Merge upstream branch...') merge_result = ctx.run(f'git pull --no-ff --no-edit {config.cci_url} {config.cci_branch}', warn=True) if merge_result.ok: # Check to see if a push is necessary by counting the number of revisions @@ -176,11 +189,14 @@ def _merge_and_push(ctx, config): hide='stdout', pty=False) needs_push = int(count_revs_result.stdout) != 0 if needs_push: + logger.info('Push to local repo...') ctx.run(f'git push {config.local_remote_name} HEAD:refs/heads/{config.local_branch}') return MergeStatus.MERGED else: + logger.info('Repo is already up to date') return MergeStatus.UP_TO_DATE else: + logger.info('Check for merge conflicts...') # Check for merge conflicts: https://stackoverflow.com/a/27991004/11996393 result = ctx.run('git ls-files -u', hide='stdout', warn=True, pty=False) if result.ok and result.stdout.strip(): @@ -193,6 +209,7 @@ def _form_pr_body(ctx, config): """Create a body for the pull request summarizing information about the merge conflicts.""" # Note: pty=False to enforce not using a PTY; that makes sure that Git doesn't # see a terminal and put escapes into the output we want to format. + logger.info('Create body of pull request message...') conflict_files_result = ctx.run('git diff --no-color --name-only --diff-filter=U', hide='stdout', pty=False) commits_on_upstream_result = ctx.run( 'git log --no-color --merge HEAD..MERGE_HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', hide='stdout', pty=False) @@ -226,6 +243,7 @@ def _form_pr_body(ctx, config): def _create_pull_request(ctx, config, pr_body): """Create a pull request to merge in the data from upstream.""" + logger.info('Create pull request from upstream branch...') # Get the upstream ref ctx.run(f'git fetch {config.cci_url} {config.cci_branch}') # Push it to the fork the PR will be on. Have to include refs/heads in case the branch didn't @@ -240,9 +258,11 @@ def _create_pull_request(ctx, config, pr_body): if existing_prs: assert len(existing_prs) == 1 url = existing_prs[0]['url'] + logger.info('Edit existing pull request...') ctx.run(f'gh pr edit --repo {config.local_host}/{config.local_organization}/conan-center-index ' f'{url} --body-file {pr_body_file.name}') else: + logger.info('Create new pull request...') title = shlex.quote('Merge in changes from conan-io/master') labels = f' --label {",".join(config.pr_labels)}' if config.pr_labels else '' assignee = f' --assignee {config.pr_assignee}' if config.pr_assignee else '' @@ -255,6 +275,7 @@ def _create_pull_request(ctx, config, pr_body): def _list_merge_pull_requests(ctx, config): + logger.info('Check for existing pull requests...') result = ctx.run(f'gh pr list --repo {config.local_host}/{config.local_organization}/conan-center-index ' '--json number,url,author,headRefName,headRepositoryOwner ', hide='stdout', From 4796336e8085f09bc2ce31fe9c76ced761dbeab5 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Sat, 24 Sep 2022 13:09:06 -0500 Subject: [PATCH 118/173] Jenkinsfile: use parameterizedCron to set MERGE_UPSTREAM - Use parameterizedCron to run the job and set MERGE_UPSTREAM for the develop branch. - As a safety backup, also check the BRANCH_NAME for the 'Merge from upstream' stage. 
--- Jenkinsfile | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 13b0d8f4bb745..c52f64872d8bb 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -47,7 +47,7 @@ pipeline { triggers { // From the doc: @midnight actually means some time between 12:00 AM and 2:59 AM. // This gives us automatic spreading out of jobs, so they don't cause load spikes. - cron('@midnight') + parameterizedCron(env.BRANCH_NAME =~ 'develop*' ? '@midnight % MERGE_UPSTREAM=true' : '@midnight') } environment { CONAN_USER_HOME = "${WORKSPACE}" @@ -184,8 +184,9 @@ pipeline { stage('Merge from upstream') { when { expression { - // Merge upstream on develop-prefixed branches if triggered by timer, or forced by parameter - env.BRANCH_NAME =~ 'develop*' && (currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause') || params.MERGE_UPSTREAM) + // Merge upstream on develop-prefixed branches if forced by parameter + // The parametrized Cron timer sets MERGE_UPSTREAM at appropriate times. + env.BRANCH_NAME =~ 'develop*' && params.MERGE_UPSTREAM } } steps { From d1994462ca218c1b15c7b7d43f0d79d63e3d6fd5 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Sun, 25 Sep 2022 10:13:17 -0500 Subject: [PATCH 119/173] merge-upstream: Make the merge message use the branch name ...instead of HEAD --- tasks/merging.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tasks/merging.py b/tasks/merging.py index da937766791c1..3279c3c8eac3f 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -180,7 +180,11 @@ def _merge_and_push(ctx, config): logger.info(f'Check out local {config.local_branch} branch...') ctx.run(f'git checkout --quiet --detach {config.local_remote_name}/{config.local_branch}') logger.info('Merge upstream branch...') - merge_result = ctx.run(f'git pull --no-ff --no-edit {config.cci_url} {config.cci_branch}', warn=True) + ctx.run(f'git fetch {config.cci_url} {config.cci_branch}') + # --into name sets the branch name so it says "...into develop" instead of "...into HEAD" + # Have to fetch and use FETCH_HEAD because --into-name isn't available on git pull + merge_result = ctx.run( + f'git merge --no-ff --no-edit --into-name {config.local_branch} FETCH_HEAD', warn=True) if merge_result.ok: # Check to see if a push is necessary by counting the number of revisions # that differ between current head and the push destination. From 6cd2f7865e780422684f323e7bc4c43475b1d763 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 26 Sep 2022 17:12:16 -0500 Subject: [PATCH 120/173] Add pylint to project - Pylint will be used on new code, especially where it would suggest a fix that would change the API. --- requirements.in | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.in b/requirements.in index 12d7d1aecead0..a997f60babdcc 100644 --- a/requirements.in +++ b/requirements.in @@ -12,3 +12,4 @@ pre-commit; sys_platform != 'aix' and sys_platform != 'sunos5' dl-pre-commit-hooks; sys_platform != 'aix' and sys_platform != 'sunos5' coloredlogs colorama; sys_platform == 'win32' +pylint From 04bc19f6612dcd117d68e2cd8463321cac2957f1 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Mon, 26 Sep 2022 11:58:48 -0500 Subject: [PATCH 121/173] Default pylintrc from pylint --generate-rcfile --- pylintrc | 618 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 618 insertions(+) create mode 100644 pylintrc diff --git a/pylintrc b/pylintrc new file mode 100644 index 0000000000000..f964b83848be4 --- /dev/null +++ b/pylintrc @@ -0,0 +1,618 @@ +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. +#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +extension-pkg-whitelist= + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. +ignore=CVS + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\' represents the directory delimiter on Windows systems, it +# can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. 
+limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.9 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. 
+logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the 'python-enchant' package. +spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. 
+defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=cls + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of names allowed to shadow builtins +allowed-redefined-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. 
+preferred-modules= + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=BaseException, + Exception + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. 
If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +#class-attribute-rgx= + +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. 
+variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. +#variable-rgx= From 3876eb0d9a7c748019523aedeffa0131783325f6 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 26 Sep 2022 17:13:05 -0500 Subject: [PATCH 122/173] Initial pylint configuration, ignored directories - Ignore the directories that flake8 does, plus mkenv.py and the tests. - Also turn off checks in the tasks/__init__.py - Pylint will be introduced to the existing code in separate pull requests. - For now, using pylint on the new tasks/merging.py module - Add pre-commit hooks to run pylint on the files in tasks --- .pre-commit-config.yaml | 17 +++++++++++++++++ pylintrc | 16 +++++++++++++--- tasks/__init__.py | 1 + 3 files changed, 31 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 670582590b7e3..f5c353b33e128 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -59,3 +59,20 @@ repos: - mdformat-frontmatter - mdformat-footnote - mdformat-toc + # pylint has to be run from inside the virtual environment, so it has access to all + # the modules imported by a given Python modules. + # See: https://pylint.pycqa.org/en/latest/user_guide/installation/pre-commit-integration.html#pre-commit-integration + - repo: local + hooks: + - id: pylint + name: pylint + entry: pylint + language: system + types: [ python ] + # only tasks are linted at present + files: ^tasks/.*$ + args: + [ + "-rn", # only display messages + "-sn", # don't display the score + ] diff --git a/pylintrc b/pylintrc index f964b83848be4..60823e8b7c9ed 100644 --- a/pylintrc +++ b/pylintrc @@ -42,7 +42,7 @@ fail-under=10 #from-stdin= # Files or directories to be skipped. They should be base names, not paths. -ignore=CVS +ignore=.git,.tox,.idea,.conan,recipes,.github,docs,linter,.mkenv,mkenv.py,.cache,tests # Add files or directories matching the regular expressions patterns to the # ignore-list. The regex matches against paths and can be in Posix or Windows @@ -53,7 +53,7 @@ ignore-paths= # Files or directories matching the regular expression patterns are skipped. # The regex matches against base names, not paths. The default value ignores # Emacs file locks -ignore-patterns=^\.# +ignore-patterns=^(\.#|python-env-) # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime @@ -78,6 +78,16 @@ limit-inference-results=100 # List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins= + pylint.extensions.confusing_elif, + pylint.extensions.for_any_all, + pylint.extensions.consider_ternary_expression, + pylint.extensions.check_elif, + pylint.extensions.private_import, + pylint.extensions.redefined_variable_type, + pylint.extensions.overlapping_exceptions, + pylint.extensions.redefined_loop_name, + pylint.extensions.set_membership, + pylint.extensions.typing # Pickle collected data for later comparisons. persistent=yes @@ -336,7 +346,7 @@ indent-after-paren=4 indent-string=' ' # Maximum number of characters on a single line. -max-line-length=100 +max-line-length=120 # Maximum number of lines in a module. 
max-module-lines=1000 diff --git a/tasks/__init__.py b/tasks/__init__.py index 6c3285699f1e8..27ba7ebb45e07 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -1,3 +1,4 @@ +# pylint: disable=all import importlib import io import os From 19ed7ef34ffc80a7e10d3adfaf92572833faf258 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 26 Sep 2022 18:36:28 -0500 Subject: [PATCH 123/173] merging: Refactor MergeUpstreamConfig into nested dataclasses - pylint was complaining that MergeUpstreamConfig has too many attributes. And that's correct; it was using prefixes to group attributes. - Use nested dataclasses instead. - Use dacite to create a MergeUpstreamConfig from the data in dlproject.yaml. dacite will check that all the fields are named correctly (strict), and check data types. - The resulting code is somewhat clearer to read, and the structure of the configuration makes more sense in YAML - Dump the config as a pretty-printed dictionary --- dlproject.yaml | 23 ++++++++ requirements.in | 1 + tasks/merging.py | 144 ++++++++++++++++++++++++++++++++--------------- 3 files changed, 123 insertions(+), 45 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 87d89e3b859ab..95526ae3d2af4 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -624,3 +624,26 @@ config: # Configs based on hostname. # These are applied last. Consider this for only the most extreme cases kamcentos6: +merge_upstream: + # Overrides the defaults in the MergeUpstreamConfig class in tasks/merge_upstream.py + # Note the use of nested dataclasses; use nested dictionaries here. + pull_request: + reviewers: [ kam ] + assignee: kam + # Defaults: + # cci: + # url: git@github.com:conan-io/conan-center-index.git + # branch: master + # upstream: + # host: octocat.dlogics.com + # organization: datalogics + # branch: develop + # remote_name: merge-upstream-remote + # pull_request: + # host: octocat.dlogics.com + # fork: + # merge_branch_name: merge-from-conan-io + # reviewers: [ ] + # assignee: null + # labels: + # - from-conan-io diff --git a/requirements.in b/requirements.in index a997f60babdcc..c8de0ac880574 100644 --- a/requirements.in +++ b/requirements.in @@ -13,3 +13,4 @@ dl-pre-commit-hooks; sys_platform != 'aix' and sys_platform != 'sunos5' coloredlogs colorama; sys_platform == 'win32' pylint +dacite diff --git a/tasks/merging.py b/tasks/merging.py index 3279c3c8eac3f..9e782692bebb6 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -12,6 +12,7 @@ from enum import Enum, auto from typing import Optional +import dacite import yaml from invoke import Exit, Task, UnexpectedExit @@ -38,27 +39,65 @@ class MergeStatus(Enum): @dataclasses.dataclass -class MergeUpstreamConfig: - """Configuration for the merge-upstream task.""" - cci_url: str = 'git@github.com:conan-io/conan-center-index.git' - cci_branch: str = 'master' - local_host: str = 'octocat.dlogics.com' - local_organization: str = 'kam' # TODO: datalogics - local_branch: str = 'develop' - local_remote_name: str = 'merge-local-remote' - local_fork: str = getpass.getuser() - merge_branch_name: str = 'merge-from-conan-io' - pr_reviewers: list[str] = dataclasses.field(default_factory=list) - pr_assignee: Optional[str] = None - pr_labels: list[str] = dataclasses.field(default_factory=lambda: ['from-conan-io']) +class ConanCenterIndexConfig: + """Configuration for Conan Center Index""" + url: str = 'git@github.com:conan-io/conan-center-index.git' + """URL for the Conan Center Index""" + branch: str = 'master' + """Branch to fetch from""" + + 
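# A minimal sketch (not part of this diff) of how the dacite-based loading described
# in the commit message assembles these nested dataclasses from the 'merge_upstream'
# mapping in dlproject.yaml. It assumes the MergeUpstreamConfig defined later in this
# file is importable (e.g. from tasks.merging); the sample dict mirrors the
# dlproject.yaml example shown earlier in this patch.
import dacite

sample = {'pull_request': {'reviewers': ['kam'], 'assignee': 'kam'}}
config = dacite.from_dict(data_class=MergeUpstreamConfig,
                          data=sample,
                          config=dacite.Config(strict=True))
assert config.pull_request.assignee == 'kam'   # value supplied in dlproject.yaml
assert config.cci.branch == 'master'           # omitted sections keep their defaults
# With strict=True, an unknown key such as 'reviewer' raises a dacite error, which
# create_from_dlproject() turns into MergeUpstreamConfig.ConfigurationError.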
+@dataclasses.dataclass +class UpstreamConfig: + """Configuration describing parameters for the upstream repo. (usually Datalogics)""" + host: str = 'octocat.dlogics.com' + """Host for the Datalogics upstream""" + organization: str = 'datalogics' + """Name of the upstream organization""" + branch: str = 'develop' + """Name of the branch that Conan Center Index is merged to""" + remote_name: str = 'merge-upstream-remote' + """Name of a temporary remote to create to do the work""" @property - def local_url(self) -> str: - return f'git@{self.local_host}:{self.local_organization}/conan-center-index.git' + def url(self) -> str: + """The URL for the upstream Git repository.""" + return f'git@{self.host}:{self.organization}/conan-center-index.git' + + +@dataclasses.dataclass +class PullRequestConfig: + """Configuration describing parameters for the pull request""" + host: str = 'octocat.dlogics.com' + """Host for the pull request""" + fork: str = getpass.getuser() + """The fork to create the pull request on.""" + merge_branch_name: str = 'merge-from-conan-io' + """The name of the head branch to create""" + reviewers: list[str] = dataclasses.field(default_factory=list) + """A list of usernames from which to request reviews""" + assignee: Optional[str] = None + """A username to be the assignee""" + labels: list[str] = dataclasses.field(default_factory=lambda: ['from-conan-io']) + """Labels to place on the pull request""" @property - def fork_url(self) -> str: - return f'git@{self.local_host}:{self.local_fork}/conan-center-index.git' + def url(self) -> str: + """Return the URL to push to for the pull request.""" + return f'git@{self.host}:{self.fork}/conan-center-index.git' + + +@dataclasses.dataclass +class MergeUpstreamConfig: + """Configuration for the merge-upstream task.""" + cci: ConanCenterIndexConfig = dataclasses.field(default_factory=ConanCenterIndexConfig) + """Configuration for Conan Center Index""" + upstream: UpstreamConfig = dataclasses.field(default_factory=UpstreamConfig) + """Configuration for the Datalogics upstream""" + pull_request: PullRequestConfig = dataclasses.field(default_factory=PullRequestConfig) + + class ConfigurationError(Exception): + """Configuration error when reading data.""" @classmethod def create_from_dlproject(cls): @@ -66,7 +105,20 @@ def create_from_dlproject(cls): with open('dlproject.yaml') as dlproject_file: dlproject = yaml.safe_load(dlproject_file) config_data = dlproject.get('merge_upstream', dict()) - return dataclasses.replace(cls(), **config_data) + try: + return dacite.from_dict(data_class=MergeUpstreamConfig, + data=config_data, + config=dacite.Config(strict=True)) + except dacite.DaciteError as exception: + raise cls.ConfigurationError( + f'Error reading merge_upstream from dlproject.yaml: {exception}') from exception + + def asyaml(self): + """Return a string containing the yaml for this dataclass, + in canonical form.""" + # sort_keys=False to preserve the ordering that's in the dataclasses + # dict objects preserve order since Python 3.7 + return yaml.dump(dataclasses.asdict(self), sort_keys=False, indent=4) @Task @@ -78,7 +130,7 @@ def merge_upstream(ctx): ''' config = MergeUpstreamConfig.create_from_dlproject() _check_preconditions(ctx, config) - logger.info(f'merge-upstream configuration: {config}') + logger.info('merge-upstream configuration:\n%s', config.asyaml()) # if anything fails past this point, the missing status file will also abort the Jenkins run. 
_remove_status_file() @@ -142,9 +194,9 @@ def _check_preconditions(ctx, config): raise Exit('The local worktree has uncommitted changes') if not shutil.which('gh'): raise Exit('This task requires the GitHub CLI. See installation instructions at https://cli.github.com/') - result = ctx.run(f'gh auth status --hostname {config.local_host}', warn=True) + result = ctx.run(f'gh auth status --hostname {config.upstream.host}', warn=True) if not result.ok: - raise Exit(f'GitHub CLI must be logged in to {config.local_host}, or a token supplied in GH_TOKEN; ' + raise Exit(f'GitHub CLI must be logged in to {config.upstream.host}, or a token supplied in GH_TOKEN; ' f'see https://cli.github.com/manual/gh_auth_login') @@ -156,16 +208,16 @@ def _merge_remote(ctx, config): Used as a context manager, cleans up the remote when done.''' try: logger.info('Create remote to refer to destination fork...') - result = ctx.run(f'git remote get-url {config.local_remote_name}', hide='both', warn=True, pty=False) + result = ctx.run(f'git remote get-url {config.upstream.remote_name}', hide='both', warn=True, pty=False) if result.ok and result.stdout.strip() != '': - ctx.run(f'git remote set-url {config.local_remote_name} {config.local_url}') + ctx.run(f'git remote set-url {config.upstream.remote_name} {config.upstream.url}') else: - ctx.run(f'git remote add {config.local_remote_name} {config.local_url}') - ctx.run(f'git remote update {config.local_remote_name}') + ctx.run(f'git remote add {config.upstream.remote_name} {config.upstream.url}') + ctx.run(f'git remote update {config.upstream.remote_name}') yield finally: logger.info('Remove remote...') - ctx.run(f'git remote remove {config.local_remote_name}', warn=True, hide='both') + ctx.run(f'git remote remove {config.upstream.remote_name}', warn=True, hide='both') def _branch_exists(ctx, branch): @@ -177,24 +229,24 @@ def _branch_exists(ctx, branch): def _merge_and_push(ctx, config): """Attempt to merge upstream branch and push it to the local repo.""" - logger.info(f'Check out local {config.local_branch} branch...') - ctx.run(f'git checkout --quiet --detach {config.local_remote_name}/{config.local_branch}') + logger.info(f'Check out local {config.upstream.branch} branch...') + ctx.run(f'git checkout --quiet --detach {config.upstream.remote_name}/{config.upstream.branch}') logger.info('Merge upstream branch...') - ctx.run(f'git fetch {config.cci_url} {config.cci_branch}') + ctx.run(f'git fetch {config.cci.url} {config.cci.branch}') # --into name sets the branch name so it says "...into develop" instead of "...into HEAD" # Have to fetch and use FETCH_HEAD because --into-name isn't available on git pull merge_result = ctx.run( - f'git merge --no-ff --no-edit --into-name {config.local_branch} FETCH_HEAD', warn=True) + f'git merge --no-ff --no-edit --into-name {config.upstream.branch} FETCH_HEAD', warn=True) if merge_result.ok: # Check to see if a push is necessary by counting the number of revisions # that differ between current head and the push destination. 
count_revs_result = ctx.run( - f'git rev-list {config.local_remote_name}/{config.local_branch}..HEAD --count', + f'git rev-list {config.upstream.remote_name}/{config.upstream.branch}..HEAD --count', hide='stdout', pty=False) needs_push = int(count_revs_result.stdout) != 0 if needs_push: logger.info('Push to local repo...') - ctx.run(f'git push {config.local_remote_name} HEAD:refs/heads/{config.local_branch}') + ctx.run(f'git push {config.upstream.remote_name} HEAD:refs/heads/{config.upstream.branch}') return MergeStatus.MERGED else: logger.info('Repo is already up to date') @@ -237,7 +289,7 @@ def _form_pr_body(ctx, config): ### Commits for conflict files, local {commits_local} - ''').format(local_branch=config.local_branch, + ''').format(local_branch=config.upstream.branch, conflict_files=conflict_files_result.stdout, commits_on_upstream=commits_on_upstream_result.stdout, commits_local=commits_local_result.stdout) @@ -249,10 +301,11 @@ def _create_pull_request(ctx, config, pr_body): """Create a pull request to merge in the data from upstream.""" logger.info('Create pull request from upstream branch...') # Get the upstream ref - ctx.run(f'git fetch {config.cci_url} {config.cci_branch}') + ctx.run(f'git fetch {config.cci.url} {config.cci.branch}') # Push it to the fork the PR will be on. Have to include refs/heads in case the branch didn't # already exist - ctx.run(f'git push --force {config.fork_url} FETCH_HEAD:refs/heads/{config.merge_branch_name}') + ctx.run(f'git push --force {config.pull_request.url} ' + f'FETCH_HEAD:refs/heads/{config.pull_request.merge_branch_name}') with tempfile.NamedTemporaryFile(prefix='pr-body', mode='w+', encoding='utf-8') as pr_body_file: pr_body_file.write(pr_body) # Before passing the filename to gh pr create, flush it so all the data is on the disk @@ -263,28 +316,29 @@ def _create_pull_request(ctx, config, pr_body): assert len(existing_prs) == 1 url = existing_prs[0]['url'] logger.info('Edit existing pull request...') - ctx.run(f'gh pr edit --repo {config.local_host}/{config.local_organization}/conan-center-index ' + ctx.run(f'gh pr edit --repo {config.upstream.host}/{config.upstream.organization}/conan-center-index ' f'{url} --body-file {pr_body_file.name}') else: logger.info('Create new pull request...') title = shlex.quote('Merge in changes from conan-io/master') - labels = f' --label {",".join(config.pr_labels)}' if config.pr_labels else '' - assignee = f' --assignee {config.pr_assignee}' if config.pr_assignee else '' - reviewer = f' --reviewer {",".join(config.pr_reviewers)}' if config.pr_reviewers else '' - ctx.run(f'gh pr create --repo {config.local_host}/{config.local_organization}/conan-center-index ' - f'--base {config.local_branch} ' + labels = f' --label {",".join(config.pull_request.labels)}' if config.pull_request.labels else '' + assignee = f' --assignee {config.pull_request.assignee}' if config.pull_request.assignee else '' + reviewer = f' --reviewer {",".join(config.pull_request.reviewers)}' if config.pull_request.reviewers else '' + ctx.run(f'gh pr create --repo {config.upstream.host}/{config.upstream.organization}/conan-center-index ' + f'--base {config.upstream.branch} ' f'--title {title} --body-file {pr_body_file.name} ' - f'--head {config.local_fork}:{config.merge_branch_name}' + f'--head {config.pull_request.fork}:{config.pull_request.merge_branch_name}' f'{labels}{assignee}{reviewer}') def _list_merge_pull_requests(ctx, config): logger.info('Check for existing pull requests...') - result = ctx.run(f'gh pr list --repo 
{config.local_host}/{config.local_organization}/conan-center-index ' + result = ctx.run(f'gh pr list --repo {config.upstream.host}/{config.upstream.organization}/conan-center-index ' '--json number,url,author,headRefName,headRepositoryOwner ', hide='stdout', pty=False) out = result.stdout.strip() requests = json.loads(out) if out else [] - return [r for r in requests if - r['headRefName'] == config.merge_branch_name and r['headRepositoryOwner']['login'] == config.local_fork] + branch_name = config.pull_request.merge_branch_name + fork = config.pull_request.fork + return [r for r in requests if r['headRefName'] == branch_name and r['headRepositoryOwner']['login'] == fork] From cd95efdb7297a2ae5956cb9fbf81e71639224aa1 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 26 Sep 2022 19:08:15 -0500 Subject: [PATCH 124/173] tasks/merging.py: Address issues raised by pylint - Add missing docstrings. - Adding UTF-8 encoding to open() calls. - Using dict literal {} instead of dict(). - Don't use else: when the if: block contains a return. - Don't do f-string interpolation for logging statements (the practice is to let the logger do the interpolation, which it does only if the logging level is enabled). --- tasks/merging.py | 40 +++++++++++++++++++--------------------- 1 file changed, 19 insertions(+), 21 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 9e782692bebb6..76f56a12a3e58 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -1,3 +1,4 @@ +"""Tasks and supporting functions related to merging branches.""" import contextlib import dataclasses import getpass @@ -24,7 +25,7 @@ class MergeHadConflicts(Exception): - pass + """Thrown when the merge had conflicts. Usually handled by making a pull request.""" class MergeStatus(Enum): @@ -102,9 +103,9 @@ class ConfigurationError(Exception): @classmethod def create_from_dlproject(cls): """Create a MergeUpstreamConfig with defaults updated from dlproject.yaml""" - with open('dlproject.yaml') as dlproject_file: + with open('dlproject.yaml', encoding='utf-8') as dlproject_file: dlproject = yaml.safe_load(dlproject_file) - config_data = dlproject.get('merge_upstream', dict()) + config_data = dlproject.get('merge_upstream', {}) try: return dacite.from_dict(data_class=MergeUpstreamConfig, data=config_data, @@ -157,8 +158,8 @@ def _remove_status_file(): def _write_status_file(merge_status): """Write the merge status to the status file.""" - logger.info(f'Write status {merge_status.name} to file {MERGE_UPSTREAM_STATUS}') - with open(MERGE_UPSTREAM_STATUS, 'w') as merge_upstream_status: + logger.info('Write status %s to file %s', merge_status.name, MERGE_UPSTREAM_STATUS) + with open(MERGE_UPSTREAM_STATUS, 'w', encoding='utf-8') as merge_upstream_status: merge_upstream_status.write(merge_status.name) @@ -222,14 +223,14 @@ def _merge_remote(ctx, config): def _branch_exists(ctx, branch): """Return true if the given branch exists locally""" - logger.info(f'Check if {branch} exists...') + logger.info('Check if %s exists...', branch) result = ctx.run(f'git rev-parse --quiet --verify {branch}', warn=True, hide='stdout') return result.ok def _merge_and_push(ctx, config): """Attempt to merge upstream branch and push it to the local repo.""" - logger.info(f'Check out local {config.upstream.branch} branch...') + logger.info('Check out local %s branch...', config.upstream.branch) ctx.run(f'git checkout --quiet --detach {config.upstream.remote_name}/{config.upstream.branch}') logger.info('Merge upstream branch...') ctx.run(f'git fetch 
{config.cci.url} {config.cci.branch}') @@ -243,22 +244,19 @@ def _merge_and_push(ctx, config): count_revs_result = ctx.run( f'git rev-list {config.upstream.remote_name}/{config.upstream.branch}..HEAD --count', hide='stdout', pty=False) - needs_push = int(count_revs_result.stdout) != 0 - if needs_push: - logger.info('Push to local repo...') - ctx.run(f'git push {config.upstream.remote_name} HEAD:refs/heads/{config.upstream.branch}') - return MergeStatus.MERGED - else: + if int(count_revs_result.stdout) == 0: logger.info('Repo is already up to date') return MergeStatus.UP_TO_DATE - else: - logger.info('Check for merge conflicts...') - # Check for merge conflicts: https://stackoverflow.com/a/27991004/11996393 - result = ctx.run('git ls-files -u', hide='stdout', warn=True, pty=False) - if result.ok and result.stdout.strip(): - raise MergeHadConflicts - # Something else went wrong with the merge - raise UnexpectedExit(merge_result) + logger.info('Push to local repo...') + ctx.run(f'git push {config.upstream.remote_name} HEAD:refs/heads/{config.upstream.branch}') + return MergeStatus.MERGED + logger.info('Check for merge conflicts...') + # Check for merge conflicts: https://stackoverflow.com/a/27991004/11996393 + result = ctx.run('git ls-files -u', hide='stdout', warn=True, pty=False) + if result.ok and result.stdout.strip(): + raise MergeHadConflicts + # Something else went wrong with the merge + raise UnexpectedExit(merge_result) def _form_pr_body(ctx, config): From 4d8033c7789135be3be1a2cb5a1823d9d2f77ba7 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 27 Sep 2022 16:42:05 -0500 Subject: [PATCH 125/173] Delete GitHub-related files we've taken over for our fork These are files from CCI, but we want to use our own files for our fork. --- .github/CODEOWNERS | 23 --- .github/ISSUE_TEMPLATE/center_conan_io.md | 8 - .github/ISSUE_TEMPLATE/package_bug.yml | 48 ------ .github/ISSUE_TEMPLATE/package_request.md | 14 -- .../ISSUE_TEMPLATE/package_upstream_update.md | 13 -- .github/ISSUE_TEMPLATE/question.md | 8 - .github/ISSUE_TEMPLATE/service.md | 8 - .github/stale.yml | 61 ------- .github/workflows/hooks-warnings.yml | 84 ---------- .github/workflows/linter-conan-v2.yml | 149 ------------------ .github/workflows/linter-yaml.yml | 93 ----------- .github/workflows/on-push-do-doco.yml | 30 ---- CONTRIBUTING.md | 59 ------- 13 files changed, 598 deletions(-) delete mode 100644 .github/CODEOWNERS delete mode 100644 .github/ISSUE_TEMPLATE/center_conan_io.md delete mode 100644 .github/ISSUE_TEMPLATE/package_bug.yml delete mode 100644 .github/ISSUE_TEMPLATE/package_request.md delete mode 100644 .github/ISSUE_TEMPLATE/package_upstream_update.md delete mode 100644 .github/ISSUE_TEMPLATE/question.md delete mode 100644 .github/ISSUE_TEMPLATE/service.md delete mode 100644 .github/stale.yml delete mode 100644 .github/workflows/hooks-warnings.yml delete mode 100644 .github/workflows/linter-conan-v2.yml delete mode 100644 .github/workflows/linter-yaml.yml delete mode 100644 .github/workflows/on-push-do-doco.yml delete mode 100644 CONTRIBUTING.md diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS deleted file mode 100644 index 902845de67eaf..0000000000000 --- a/.github/CODEOWNERS +++ /dev/null @@ -1,23 +0,0 @@ -# Conan Center Index - Recipe Watchers -# -# If you are interested to watch/subscribe any recipe from Conan Center Index and receive -# a notification when a new PR is created, add the recipe repo and your @, -# for instance: -# -# recipes/zlib @myusername -# -# NOTE: Github uses case-sensitive 
filesystem, so your name must be the same from your account. -# -# Github will add you as a reviewer and will notify you by e-mail. -# This feature is named 'CodeOwners', however, it's a Github's feature name. -# On Conan Center Index, there are no owners/maintainers, all recipes are owned by the -# community. This feature only helps people that are more interested in watching and following -# specific recipes, instead of receiving a new notification for each new pull request. -# -# Full reference: https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners -# - -.github @ericLemanissier -recipes/aaf @MartinDelille -recipes/gtk @ericLemanissier -recipes/qt @ericLemanissier diff --git a/.github/ISSUE_TEMPLATE/center_conan_io.md b/.github/ISSUE_TEMPLATE/center_conan_io.md deleted file mode 100644 index f95bb699d0607..0000000000000 --- a/.github/ISSUE_TEMPLATE/center_conan_io.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: 'ConanCenter: Web UI Bugs Or Requests' -about: 'Bugs or feature requests for the Web UI of ConanCenter at https://conan.io/center' -title: '[conan.io/center] SHORT DESCRIPTION' -labels: conan.io/center ---- - - diff --git a/.github/ISSUE_TEMPLATE/package_bug.yml b/.github/ISSUE_TEMPLATE/package_bug.yml deleted file mode 100644 index 8d4162d3beef4..0000000000000 --- a/.github/ISSUE_TEMPLATE/package_bug.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: 'Package: Bug Report' -description: 'Report a bug, something does not work as it supposed to' -title: '[package] /: SHORT DESCRIPTION' -labels: bug -body: -- type: markdown - attributes: - value: | - Please don't forget to update the issue title. - Include all applicable information to help us reproduce your problem. - -- type: textarea - attributes: - label: Package and Environment Details - description: include every applicable attribute) - value: | - * Package Name/Version: **zlib/1.2.8** - * Operating System+version: **Linux Ubuntu 18.04** - * Compiler+version: **GCC 8** - * Docker image: **conanio/gcc8** - * Conan version: **conan 1.18.0** - * Python version: **Python 3.7.4** - validations: - required: true - - -- type: textarea - attributes: - label: Conan profile - description: output of `conan profile show default` or `conan profile show ` if custom profile is in use - -- type: textarea - attributes: - label: Steps to reproduce - description: Include if Applicable - -- type: textarea - attributes: - label: Logs - description: Include/Attach if Applicable - value: | -
Click to expand log - - ``` - Put your log output here - ``` - -
diff --git a/.github/ISSUE_TEMPLATE/package_request.md b/.github/ISSUE_TEMPLATE/package_request.md deleted file mode 100644 index 2d23c97fae995..0000000000000 --- a/.github/ISSUE_TEMPLATE/package_request.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -name: 'Package: Completely New Recipe' -about: 'If would like to see a completely new recipe' -title: '[request] /' -labels: 'library request' ---- - -### Package Details - * Package Name/Version: **cmake/3.15.3** - * Website: **https://cmake.org/** - * Source code: **https://github.com/Kitware/CMake** - - -### Description Of The Library / Tool diff --git a/.github/ISSUE_TEMPLATE/package_upstream_update.md b/.github/ISSUE_TEMPLATE/package_upstream_update.md deleted file mode 100644 index 15db000618746..0000000000000 --- a/.github/ISSUE_TEMPLATE/package_upstream_update.md +++ /dev/null @@ -1,13 +0,0 @@ ---- -name: 'Package: New Version' -about: 'If an existing package recipe needs an update for a new upstream version' -title: '[request] /' -labels: 'upstream update' ---- - -### Package Details - * Package Name/Version: **cmake/3.15.3** - * Changelog: **https://cmake.org/cmake/help/latest/release/3.15.html** - - -The above mentioned version is newly released by the upstream project and not yet available as a recipe. Please add this version. diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md deleted file mode 100644 index 226701a9fa97c..0000000000000 --- a/.github/ISSUE_TEMPLATE/question.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: 'Question' -about: 'If something needs clarification' -title: '[question] SHORT DESCRIPTION' -labels: question ---- - - diff --git a/.github/ISSUE_TEMPLATE/service.md b/.github/ISSUE_TEMPLATE/service.md deleted file mode 100644 index d0b1cf7de1921..0000000000000 --- a/.github/ISSUE_TEMPLATE/service.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: 'Service: Infrastructure Bugs Or Requests' -about: 'Bug or feature requests for Conan Center Index itself' -title: '[service] SHORT DESCRIPTION' -labels: service ---- - - diff --git a/.github/stale.yml b/.github/stale.yml deleted file mode 100644 index f3a08a0c9f04e..0000000000000 --- a/.github/stale.yml +++ /dev/null @@ -1,61 +0,0 @@ -# Configuration for probot-stale - https://github.com/probot/stale - -# Number of days of inactivity before an Issue or Pull Request becomes stale -daysUntilStale: 30 - -# Number of days of inactivity before an Issue or Pull Request with the stale label is closed. -# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale. -daysUntilClose: 30 - -# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled) -onlyLabels: [] - -# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable -exemptLabels: - - blocked - - infrastructure - -# Set to true to ignore issues in a project (defaults to false) -exemptProjects: false - -# Set to true to ignore issues in a milestone (defaults to false) -exemptMilestones: false - -# Set to true to ignore issues with an assignee (defaults to false) -exemptAssignees: false - -# Label to use when marking as stale -staleLabel: stale - -# Comment to post when marking as stale. Set to `false` to disable -markComment: > - This pull request has been automatically marked as stale because it has not had - recent activity. It will be closed if no further activity occurs. Thank you - for your contributions. - -# Comment to post when removing the stale label. 
-# unmarkComment: > -# Your comment here. - -# Comment to post when closing a stale Issue or Pull Request. -closeComment: > - This pull request has been automatically closed because it has not had - recent activity. Thank you for your contributions. - -# Limit the number of actions per hour, from 1-30. Default is 30 -limitPerRun: 30 - -# Limit to only `issues` or `pulls` -only: pulls - -# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls': -# pulls: -# daysUntilStale: 30 -# markComment: > -# This pull request has been automatically marked as stale because it has not had -# recent activity. It will be closed if no further activity occurs. Thank you -# for your contributions. - -# issues: -# exemptLabels: -# - confirmed diff --git a/.github/workflows/hooks-warnings.yml b/.github/workflows/hooks-warnings.yml deleted file mode 100644 index e19b8ea189eb5..0000000000000 --- a/.github/workflows/hooks-warnings.yml +++ /dev/null @@ -1,84 +0,0 @@ -name: hooks warnings - -on: - issue_comment: - types: [created] - -jobs: - comment: - if: ${{ github.event.issue.pull_request && github.event.sender.login == 'conan-center-bot' }} - runs-on: ubuntu-latest - steps: - - uses: actions/github-script@v6 - with: - script: | - warnings_map = new Map() - const link_regex = /\[All logs\]\((?https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9@:%_\+.~#?&//=]*))\)/g; - for (const match of context.payload.comment.body.matchAll(link_regex)) - { - prefix = "https://c3i.jfrog.io/c3i/misc/summary.html?json=" - if(!match.groups.url.startsWith(prefix)) - continue; - url = match.groups.url.slice(prefix.length) - try { - result = await github.request({ - url: url, - }); - } catch(err) { - core.warning(`error ${err.status} for url ${err.request.url}`) - continue - } - for (const job of result.data) - { - if (job.build == null) - continue; - try { - log = await github.request({ - baseUrl: "https://c3i.jfrog.io/c3i/misc/", - url: job.build, - }); - } catch(err) { - core.warning(`error ${err.status} for url ${err.request.url}`) - continue - } - prefix = "[HOOK - conan-center.py] " - warnings = log.data.split("\n").filter(line => line.startsWith(prefix) && line.includes(" WARN: ")) - if(warnings.length == 0) - continue - if(!warnings_map.has(job.reference)) - warnings_map.set(job.reference, new Set()) - for(const warning of warnings) - { - msg = warning.slice(prefix.length) - warnings_map.get(job.reference).add(msg) - core.warning(msg) - } - } - } - if(warnings_map.size > 0) - { - comment = "
\n" - comment += "Hooks produced the following warnings" - const shaRegex = /(All green|Failure) in build \d+ \(`(?(\d|[a-z])+)`\):/g; - const shaMatch = shaRegex.exec(context.payload.comment.body) - if(shaMatch) - { - comment += " for commit " + shaMatch.groups.sha - } - comment += "\n\n" - for (const [ref, warnings] of warnings_map) - { - comment += "
\n" - comment += "" + ref + "\n\n```\n" - for(const warning of warnings) - comment += warning + "\n" - comment += "```\n
\n" - } - comment += "
\n" - github.rest.issues.createComment({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - body: comment - }) - } diff --git a/.github/workflows/linter-conan-v2.yml b/.github/workflows/linter-conan-v2.yml deleted file mode 100644 index d1f78543433e7..0000000000000 --- a/.github/workflows/linter-conan-v2.yml +++ /dev/null @@ -1,149 +0,0 @@ -name: "[linter] Conan v2 migration" - -on: - pull_request: - -env: - PYTHONPATH: ${{github.workspace}} - PYVER: "3.8" - SCORE_THRESHOLD: "9.5" - REQUIREMENTS: "pylint==2.14" - -jobs: - test_linter: - name: Test linter changes (v2 migration) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 2 - - name: Get changed files - uses: tj-actions/changed-files@v20 - id: changed_files - with: - files: | - linter/** - - name: Get Conan v1 version - id: parse_conan_v1_version - if: steps.changed_files.outputs.any_changed == 'true' - uses: mikefarah/yq@master - with: - cmd: yq '.conan.version' '.c3i/config_v1.yml' - - uses: actions/setup-python@v3 - if: steps.changed_files.outputs.any_changed == 'true' - with: - python-version: ${{ env.PYVER }} - - name: Install requirements - if: steps.changed_files.outputs.any_changed == 'true' - run: | - pip install ${{ env.REQUIREMENTS }} conan==${{ steps.parse_conan_v1_version.outputs.result }} - - - name: Execute linter over all recipes in the repository - id: linter_recipes - if: steps.changed_files.outputs.any_changed == 'true' - run: | - pylint --rcfile=linter/pylintrc_recipe recipes/*/*/conanfile.py --output-format=json --output=recipes.json --score=y --fail-under=${{ env.SCORE_THRESHOLD }} - - - name: Execute linter over all test_package/recipes in the repository - id: linter_test_package - if: steps.changed_files.outputs.any_changed == 'true' - run: | - pylint --rcfile=linter/pylintrc_testpackage recipes/*/*/test_*/conanfile.py --ignore-paths="recipes/[^/]*/[^/]*/test_v1[^/]*/conanfile.py" --output-format=json --output=test_package.json --score=y --fail-under=${{ env.SCORE_THRESHOLD }} - - - name: Archive production artifacts - if: steps.changed_files.outputs.any_changed == 'true' && always() - uses: actions/upload-artifact@v3 - with: - name: linter-output - path: | - recipes.json - test_package.json - - - name: Create report (recipes) - if: steps.changed_files.outputs.any_changed == 'true' && steps.linter_recipes.outcome != 'skipped' && always() - run: | - echo '## Linter summary (recipes)' >> $GITHUB_STEP_SUMMARY - jq 'map( select(.type=="error")) | group_by (.message)[] | {message: .[0].message, length: length}' recipes.json > recipes2.json - jq -r '" * \(.message): \(.length)"' recipes2.json >> $GITHUB_STEP_SUMMARY - - - name: Create report (test_package) - if: steps.changed_files.outputs.any_changed == 'true' && steps.linter_test_package.outcome != 'skipped' && always() - run: | - echo '## Linter summary (test_package)' >> $GITHUB_STEP_SUMMARY - jq 'map( select(.type=="error")) | group_by (.message)[] | {message: .[0].message, length: length}' test_package.json > test_package2.json - jq -r '" * \(.message): \(.length)"' test_package2.json >> $GITHUB_STEP_SUMMARY - - - name: Create report - if: steps.changed_files.outputs.any_changed == 'true' && always() - run: | - echo '> Note.- Check uploaded artifacts for a full report.' 
>> $GITHUB_STEP_SUMMARY - - conanfile_recipe: - name: Lint changed conanfile.py (v2 migration) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 2 - - name: Get changed files - id: changed-files - uses: tj-actions/changed-files@v20 - with: - files: | - recipes/*/*/conanfile.py - - name: Get Conan v1 version - id: parse_conan_v1_version - if: steps.changed-files.outputs.any_changed == 'true' - uses: mikefarah/yq@master - with: - cmd: yq '.conan.version' '.c3i/config_v1.yml' - - uses: actions/setup-python@v3 - if: steps.changed-files.outputs.any_changed == 'true' - with: - python-version: ${{ env.PYVER }} - - name: Install dependencies - if: steps.changed-files.outputs.any_changed == 'true' - run: | - pip install ${{ env.REQUIREMENTS }} conan==${{ steps.parse_conan_v1_version.outputs.result }} - - name: Run linter - if: steps.changed-files.outputs.any_changed == 'true' - run: | - echo "::add-matcher::linter/recipe_linter.json" - for file in ${{ steps.changed-files.outputs.all_changed_files }}; do - pylint --rcfile=linter/pylintrc_recipe --output-format=parseable ${file} - done - - conanfile_test_package: - name: Lint changed test_package/conanfile.py (v2 migration) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 2 - - name: Get changed files - id: changed-files - uses: tj-actions/changed-files@v20 - with: - files: | - recipes/*/*/test_*/conanfile.py - - name: Get Conan v1 version - id: parse_conan_v1_version - if: steps.changed-files.outputs.any_changed == 'true' - uses: mikefarah/yq@master - with: - cmd: yq '.conan.version' '.c3i/config_v1.yml' - - uses: actions/setup-python@v3 - if: steps.changed-files.outputs.any_changed == 'true' - with: - python-version: ${{ env.PYVER }} - - name: Install dependencies - if: steps.changed-files.outputs.any_changed == 'true' - run: | - pip install ${{ env.REQUIREMENTS }} conan==${{ steps.parse_conan_v1_version.outputs.result }} - - name: Run linter - if: steps.changed-files.outputs.any_changed == 'true' - run: | - echo "::add-matcher::linter/recipe_linter.json" - for file in ${{ steps.changed-files.outputs.all_changed_files }}; do - pylint --rcfile=linter/pylintrc_testpackage --ignore-paths="recipes/[^/]*/[^/]*/test_v1[^/]*/conanfile.py" --output-format=parseable ${file} - done diff --git a/.github/workflows/linter-yaml.yml b/.github/workflows/linter-yaml.yml deleted file mode 100644 index 511e654ab8bb0..0000000000000 --- a/.github/workflows/linter-yaml.yml +++ /dev/null @@ -1,93 +0,0 @@ -name: "[linter] YAML files" - -on: - pull_request: - -env: - PYTHONPATH: ${{github.workspace}} - PYVER: "3.8" - CONFIG_FILES_PATH: "recipes/**/config.yml" - CONANDATA_FILES_PATH: "recipes/**/**/conandata.yml" - -jobs: - test_linter: - # A job to run when the linter changes. 
We want to know in advance how many files will be broken - name: Test linter changes (YAML files) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 2 - - - name: Get changed files - uses: tj-actions/changed-files@v20 - id: changed_files - with: - files: | - linter/** - - - uses: actions/setup-python@v3 - if: steps.changed_files.outputs.any_changed == 'true' - with: - python-version: ${{ env.PYVER }} - - - name: Install dependencies - if: steps.changed_files.outputs.any_changed == 'true' - run: pip install yamllint - - - name: Run linter (config.yml) - if: steps.changed_files.outputs.any_changed == 'true' && always() - run: yamllint --config-file linter/yamllint_rules.yml -f parsable ${{ env.CONFIG_FILES_PATH }} - - - name: Run linter (conandata.yml) - if: steps.changed_files.outputs.any_changed == 'true' && always() - run: yamllint --config-file linter/yamllint_rules.yml -f parsable ${{ env.CONANDATA_FILES_PATH }} - - - lint_pr_files: - # Lint files modified in the pull_request - name: Lint changed files (YAML files) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 2 - - - uses: actions/setup-python@v3 - with: - python-version: ${{ env.PYVER }} - - - name: Install dependencies - run: pip install yamllint - - ## Work on config.yml files - - name: Get changed files (config) - id: changed_files_config - if: always() - uses: tj-actions/changed-files@v20 - with: - files: | - ${{ env.CONFIG_FILES_PATH }} - - - name: Run linter (config.yml) - if: steps.changed_files_config.outputs.any_changed == 'true' && always() - run: | - for file in ${{ steps.changed_files_config.outputs.all_changed_files }}; do - yamllint --config-file linter/yamllint_rules.yml ${file} - done - - ## Work on conandata.yml files - - name: Get changed files (conandata) - id: changed_files_conandata - if: always() - uses: tj-actions/changed-files@v20 - with: - files: | - ${{ env.CONANDATA_FILES_PATH }} - - - name: Run linter (conandata.yml) - if: steps.changed_files_conandata.outputs.any_changed == 'true' && always() - run: | - for file in ${{ steps.changed_files_conandata.outputs.all_changed_files }}; do - yamllint --config-file linter/yamllint_rules.yml ${file} - done diff --git a/.github/workflows/on-push-do-doco.yml b/.github/workflows/on-push-do-doco.yml deleted file mode 100644 index fe33f4984dfb3..0000000000000 --- a/.github/workflows/on-push-do-doco.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: docs_markdown_toc -on: - workflow_dispatch: - inputs: {} - push: - branches: - - master - paths: - - 'docs/**' - -jobs: - docs_markdown_toc: - runs-on: windows-latest - steps: - - uses: actions/checkout@v3 - - name: Run MarkdownSnippets - run: | - dotnet tool install --global MarkdownSnippets.Tool - mdsnippets ${GITHUB_WORKSPACE} \ - --convention InPlaceOverwrite \ - --exclude-directories 'recipes' \ - --toc-level 5 - shell: bash - - name: Create Pull Request - uses: peter-evans/create-pull-request@v3 - with: - branch: bot/action-doc-toc - commit-message: "[docs] Regenerate tables of contents" - title: "[docs] Regenerate tables of contents" - body: "Automatic update of the documentation TOCs." diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 22027379ce53b..0000000000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,59 +0,0 @@ -# Contributing to Conan Center Index - -The following summarizes the process for contributing to the CCI (Conan Center Index) project. 
- - -## Contents - - * [Community](#community) - * [Dev-flow & Pull Requests](#dev-flow--pull-requests) - * [Issues](#issues) - -## Community - -Conan Center Index is an Open Source MIT licensed project. -Conan Center Index is developed by the Conan maintainers and a great community of contributors. - -## Dev-flow & Pull Requests - -CCI follows the ["GitFlow"](https://datasift.github.io/gitflow/IntroducingGitFlow.html) branching model. -Issues are triaged and categorized mainly by type (package request, bug...) and priority (high, medium...) using GitHub - labels. - -To contribute follow the next steps: - -1. Comment in the corresponding issue that you want to contribute the package/fix proposed. If there is no open issue, we strongly suggest - to open one to gather feedback. -2. Check the [how_to_add_packages.md](docs/how_to_add_packages.md) if are - contributing for a new package. -3. Fork the [CCI repository](https://github.com/conan-io/conan-center-index) and create a `package/xxx` branch from the `master` branch and develop - your fix/packages as discussed in previous step. -4. Try to keep your branch updated with the `master` branch to avoid conflicts. -5. Add the text (besides other comments): "fixes #IssueNumber" in the body of the PR, referring to the issue of step 1. - -The ``conan-io`` organization maintainers will review and help with the packaging. - -## Issues - -If you think you found a bug in CCI or in a recipe, open an issue indicating the following: - -- Explain the Conan version, Operating System, compiler and any other tool that could be related to the issue. -- Explain, as detailed as possible, how to reproduce the issue. Use git repository to contain code/recipes to reproduce issues, or a snippet. -- Include the expected behavior as well as what actually happened. -- Provide output captures (as text). -- Feel free to attach a screenshot or video illustrating the issue if you think it will be helpful. - -For any suggestion, feature request or question open an issue indicating the following: - -- Questions and support requests are always welcome. -- Use the [question] or [suggestion] tags in the title (provided by github issues templates). -- Try to explain the motivation, what are you trying to do, what is the pain it tries to solve. -- What do you expect from CCI. - -We use the following tags to control the status of the issues: - -- **infrastructure**: Waiting on tools or services belonging to the infra. -- **library request**: Request a new package to be created. -- **question**: Further information is requested . -- **upstream update**: Bump a new package version. -- **conan.io/center**: Issues and features related to Web UI . From 846b47801c135b1c1248a119b87165593b3df345 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 27 Sep 2022 18:58:39 -0500 Subject: [PATCH 126/173] .gitattributes-merge: File to specify files belonging to DL - Files that upstream might change are marked in .gitattributes-merge as merge=ours. - In the merge-upstream task, supply this attributes file to the git merge, along with a merge merge driver that does nothing, which preserves our version. - Custom merge drivers don't work on GitHub, so these changes can't be in .gitattributes. 
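A quick way to confirm the attributes file is picked up, sketched here under the
assumption of an invoke context ctx like the one used throughout tasks/merging.py
(this snippet is illustrative and not part of the patch below): git check-attr,
given the same -c core.attributesFile override, should report merge=ours for a
protected path such as README.md.

# Confirm that the alternate attributes file marks README.md with the 'ours' merge driver.
result = ctx.run(
    'git -c core.attributesFile=.gitattributes-merge check-attr merge README.md',
    hide='stdout', pty=False)
print(result.stdout.strip())  # expected: "README.md: merge: ours"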
--- .gitattributes-merge | 5 +++++ tasks/merging.py | 18 +++++++++++++++++- 2 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 .gitattributes-merge diff --git a/.gitattributes-merge b/.gitattributes-merge new file mode 100644 index 0000000000000..d0ab3b95b53f7 --- /dev/null +++ b/.gitattributes-merge @@ -0,0 +1,5 @@ +# Conflicts that should always resolve in favor of Datalogics +# See the section "Merge Strategies" at the end of +# https://www.git-scm.com/book/en/v2/Customizing-Git-Git-Attributes +/README.md merge=ours +/.github/** merge=ours diff --git a/tasks/merging.py b/tasks/merging.py index 76f56a12a3e58..3904ac30e40ae 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -128,6 +128,9 @@ def merge_upstream(ctx): If the merge does not succeed, it will open a pull request against the destination repository, assigning the PR, and requesting reviewers. + + To make a file always keep DL's version in a merge, add it to .gitattributes-merge + with the attribute merge=ours. ''' config = MergeUpstreamConfig.create_from_dlproject() _check_preconditions(ctx, config) @@ -236,8 +239,21 @@ def _merge_and_push(ctx, config): ctx.run(f'git fetch {config.cci.url} {config.cci.branch}') # --into name sets the branch name so it says "...into develop" instead of "...into HEAD" # Have to fetch and use FETCH_HEAD because --into-name isn't available on git pull + # + # For files in .gitattributes-merge that have merge=ours, resolve in favor of + # our changes. These files have been "taken over" from GitHub. merge_result = ctx.run( - f'git merge --no-ff --no-edit --into-name {config.upstream.branch} FETCH_HEAD', warn=True) + 'git ' + # Add the attributes in .gitattributes-merge to the list of attributes, + # see https://www.git-scm.com/docs/git-config#Documentation/git-config.txt-coreattributesFile + '-c core.attributesFile=.gitattributes-merge ' + # Add a custom merge driver 'ours' which keeps just the file on HEAD, favoring + # our version of those files. + # See the section "Merge Strategies" at the end of + # https://www.git-scm.com/book/en/v2/Customizing-Git-Git-Attributes + '-c merge.ours.driver=true ' + f'merge --no-ff --no-edit --into-name {config.upstream.branch} FETCH_HEAD', + warn=True) if merge_result.ok: # Check to see if a push is necessary by counting the number of revisions # that differ between current head and the push destination. From 22e5a571e5627dbc4d9d97f96cff4525ba40ef5d Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 28 Sep 2022 09:52:27 -0500 Subject: [PATCH 127/173] Jenkinsfile: Allow setting pytest options - Can be used to limit what gets run/built, for instance -k swig to just build/test SWIG --- Jenkinsfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index c52f64872d8bb..9941e952e0db4 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -25,6 +25,8 @@ pipeline { 'windows-conan-center-index'], description: 'Run on specific platform') booleanParam defaultValue: false, description: 'Completely clean the workspace before building, including the Conan cache', name: 'CLEAN_WORKSPACE' + string(name: 'PYTEST_OPTIONS', defaultValue: '', + description: 'Additional parameters for pytest, for instance, work on just swig with -k swig. 
See: https://docs.pytest.org/en/7.1.x/how-to/usage.html') booleanParam name: 'UPLOAD_ALL_RECIPES', defaultValue: false, description: 'Upload all recipes, instead of only recipes that changed since the last merge' booleanParam name: 'FORCE_TOOL_BUILD', defaultValue: false, @@ -368,7 +370,7 @@ pipeline { } else { force_build = '' } - def pytest_command = "pytest -k build_tool ${force_build} ${upload} --junitxml=build-tools.xml --html=${short_node}-build-tools.html" + def pytest_command = "pytest -k build_tool ${force_build} ${upload} --junitxml=build-tools.xml --html=${short_node}-build-tools.html ${params.PYTEST_OPTIONS}" if (isUnix()) { catchError(message: 'pytest had errors', stageResult: 'FAILURE') { script { From 144159584a54d8849da5014a7510d2644a9b3204 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 28 Sep 2022 10:41:03 -0500 Subject: [PATCH 128/173] README.md: Remove CCI text and replace with DL README - Also enable formatting of README.md and files in .github/ - Markdown files should have 80 character lines, in .editorconfig - Remove Markdown files from .ecrc; any formatting fixes are handled by the mdformat pre-commit plugin. --- .ecrc | 2 +- .editorconfig | 3 ++ .github/PULL_REQUEST_TEMPLATE.md | 5 +- .pre-commit-config.yaml | 4 +- README.md | 80 +++----------------------------- 5 files changed, 14 insertions(+), 80 deletions(-) diff --git a/.ecrc b/.ecrc index 2292cd04e6e70..7ea080a63ebc7 100644 --- a/.ecrc +++ b/.ecrc @@ -5,7 +5,7 @@ "IgnoreDefaults": false, "SpacesAftertabs": false, "NoColor": false, - "Exclude": ["^.idea/","^.github/","^assets/","^docs/","^linter/","^recipes/","^CONTRIBUTING.md$","^README.md"], + "Exclude": ["^.idea/","^assets/","^docs/","^linter/","^recipes/","\\.md$"], "AllowedContentTypes": [], "PassedFiles": [], "Disable": { diff --git a/.editorconfig b/.editorconfig index 1c59c73ec3a94..ebe94002ed112 100644 --- a/.editorconfig +++ b/.editorconfig @@ -29,3 +29,6 @@ indent_size = 8 [*.{diff,patch,md}] trim_trailing_whitespace = false + +[*.md] +max_line_length = 80 diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 2ef2e17494a6c..961ab760931ec 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,14 +1,13 @@ - _List changes here_ -- +- #### Fulfills JIRA issue [EXAMPLE-1](https://jira.datalogics.com/browse/EXAMPLE-1) #### Checklist for approving this pull request -(**PR Author:** amend this with more conditions if necessary) +(**PR Author:** amend this with more conditions if necessary)\ (**PR Reviewer:** ensure all following items are fulfilled before merging) - [ ] The **Pull Request Title** has JIRA issue number, a space, and then a short but descriptive summary. - [ ] **Commit messages** are well formed: [A note about Git commit messages](http://www.tpope.net/node/106) - [ ] **Automated tests pass**. - diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f5c353b33e128..d9ce3da8c7ffb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,13 +1,11 @@ exclude: | (?x)^( .idea/.*| - .github/.*| assets/.*| docs/.*| linter/.*| recipes/.*| - CONTRIBUTING.md| - README.md + CONTRIBUTING.md )$ default_stages: [commit, manual] repos: diff --git a/README.md b/README.md index d9dc76350c51e..086a3de99c6ee 100644 --- a/README.md +++ b/README.md @@ -1,77 +1,11 @@ -

+# conan-center-index -Conan Center Index is the source index of recipes of the [ConanCenter](https://conan.io/center) package repository for [Conan](https://conan.io). +This is the Datalogics fork of +[conan-io/conan-center-index](https://github.com/conan-io/conan-center-index). -This repository includes a Continuous Integration system that will build automatically the Conan packages for the recipes submitted via -[Pull Request](https://github.com/conan-io/conan-center-index/pulls). +It contains curated branches, and Datalogics-local modifications of recipes. -### Add ConanCenter remote +It also has Invoke tasks and CI implementations that: -ConanCenter remote is configured by default in any Conan client installation. If, for any reason, you need to add it manually, just execute: - -``` -conan remote add conancenter https://center.conan.io -``` - -### How to consume recipes - -Starting to use recipes from this repository is as easy as running -one simple command after installing Conan: - -``` -conan install name/version@ [-g ] -``` - -Of course, we really encourage you to use a `conanfile.txt` or `conanfile.py` -to list all the requirements or your project and install them all together -(Conan will build a single graph and ensure congruency). - -:warning: It is very important to notice that recipes will evolve over time -and, while they are fixing some issues, they might introduce new features and -improvements, and your project can break if you upgrade them -([How to prevent these breaking changes in my project?](docs/consuming_recipes.md)). - - - -### Documentation - -All the documentation is available in this same repository in the [`docs/` subfolder](docs/README.md). - -This is a list of shortcuts to some interesting topics: - -* :rocket: If you want to learn how to **contribute new recipes**, please read [docs/how_to_add_packages.md](docs/how_to_add_packages.md). -* :speech_balloon: **FAQ**: most common questions are listed in [docs/faqs.md](docs/faqs.md). -* :warning: The conan-center **hook errors** reported by CCI Bot can be found in the [docs/error_knowledge_base.md](docs/error_knowledge_base.md). -* :hammer_and_wrench: The internal changes related to infrastructure can be checked in [docs/changelog.md](docs/changelog.md). -* :world_map: There are various community lead initiatives which are outlined in [docs/community_resources.md](docs/community_resources.md). -* :magic_wand: To start preparing your recipes for **Conan 2.0**, please check [docs/v2_migration.md](docs/v2_migration.md). - -### Reporting Issues - -You can open issues in the [issue tracker](https://github.com/conan-io/conan-center-index/issues) to: - -* :bug: Report **bugs/issues** in a package: - - Use the `[package]` tag in the title of the issue to help identifying them. - - If you detect any issue or missing feature in a package, for example, a build failure or a recipe that not support a specific configuration. - - Specify the name and version (`zlib/1.2.11`) and any relevant details about the fail configuration: Applied profile, building machine... - -* :bulb: Request a **new library** to be added: - - Use the `[request]` label to search the library in the issue tracker in case the it was already requested. - - If not, use the same `[request]` tag in the title of the issue to help identifying them. - - Indicate the name and the version of the library you would like to have in the repository. 
Also links to the project's website, - source download/repository and in general any relevant information that helps creating a recipe for it. - -* :robot: Report **a failure** in the CI system: - - If you open a Pull Request and get an unexpected error you might comment in the failing PR. - - If the service or repository is down or failing, use the `[service]` tag in the title of a new issue to help identifying them. - -If your issue is not appropriate for a public discussion, please contact us via e-mail at `info@conan.io`. Thanks! - - -### License - -All the Conan recipes in this repository are distributed under the [MIT](LICENSE) license. There -are other files, like patches or examples used to test the packages, that could use different licenses, -for those files specific license and credit must be checked in the file itself. +- Upload recipes to our own repositories on Artifactory. +- Pre-build tools with specific profiles and upload them to Artifactory. From 2e070c5f42eb68c6ca01e4944ef73c383753b764 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 28 Sep 2022 12:14:02 -0500 Subject: [PATCH 129/173] merge-upstream: Handle some merge conflicts - Handle merge conflicts where we deleted a file and CCI modified it. - Refactor the _merge_and_push method to reduce its complexity. - If the merge has conflicts, look for status DU, indicating that we Deleted the file and it's Unmerged (has a conflict). Delete those files (favoring our work). - This complements the .gitattributes-merge file, which tells Git, for certain files, to keep our changes in cases where both changed. --- tasks/merging.py | 85 ++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 67 insertions(+), 18 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 3904ac30e40ae..7d39c2cc7624e 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -122,6 +122,15 @@ def asyaml(self): return yaml.dump(dataclasses.asdict(self), sort_keys=False, indent=4) +@dataclasses.dataclass +class GitFileStatus: + """A Git status""" + status: str + """A Git short status string, see https://git-scm.com/docs/git-status#_short_format""" + path: str + """A file path, which may be two paths separated by -> if a rename or copy""" + + @Task def merge_upstream(ctx): '''Merge updated recipes and other files from conan-io/conan-center-index. @@ -252,27 +261,67 @@ def _merge_and_push(ctx, config): # See the section "Merge Strategies" at the end of # https://www.git-scm.com/book/en/v2/Customizing-Git-Git-Attributes '-c merge.ours.driver=true ' - f'merge --no-ff --no-edit --into-name {config.upstream.branch} FETCH_HEAD', + f'merge --no-ff --no-edit --no-verify --into-name {config.upstream.branch} FETCH_HEAD', warn=True) if merge_result.ok: - # Check to see if a push is necessary by counting the number of revisions - # that differ between current head and the push destination. 
- count_revs_result = ctx.run( - f'git rev-list {config.upstream.remote_name}/{config.upstream.branch}..HEAD --count', - hide='stdout', pty=False) - if int(count_revs_result.stdout) == 0: - logger.info('Repo is already up to date') - return MergeStatus.UP_TO_DATE - logger.info('Push to local repo...') - ctx.run(f'git push {config.upstream.remote_name} HEAD:refs/heads/{config.upstream.branch}') - return MergeStatus.MERGED - logger.info('Check for merge conflicts...') - # Check for merge conflicts: https://stackoverflow.com/a/27991004/11996393 - result = ctx.run('git ls-files -u', hide='stdout', warn=True, pty=False) - if result.ok and result.stdout.strip(): + return _maybe_push(ctx, config) + conflicts = _retrieve_merge_conflicts(ctx) + if not conflicts: + # Something else went wrong with the merge + raise UnexpectedExit(merge_result) + _remove_files_deleted_by_us(ctx, conflicts) + conflicts = _retrieve_merge_conflicts(ctx) + if conflicts: raise MergeHadConflicts - # Something else went wrong with the merge - raise UnexpectedExit(merge_result) + logger.info('Commit merge with resolved conflicts...') + # Finish the merge by committing. --no-verify is necessary to avoid running commit + # hooks, which aren't run on merge commits that succeed. + ctx.run('git commit --no-edit --no-verify') + return _maybe_push(ctx, config) + + +def _remove_files_deleted_by_us(ctx, conflicts): + """Examine conflicts for files deleted by us (status DU) and remove them with 'git rm'. + This may clear enough of the conflicts to allow auto-merging to continue.""" + logger.info('Removing conflict files deleted by us...') + for conflict in conflicts: + if conflict.status == 'DU': # we deleted, they modified (unmerged) + ctx.run(f'git rm {conflict.path}') + + +def _retrieve_merge_conflicts(ctx): + """Get a list of merge conflicts, from the current status. + Returns a tuple of (code, path), where code is a combination + of D (deleted) A (added) and U (unmerged). + + DD: unmerged, both deleted + AU: unmerged, added by us + UD: unmerged, deleted by them + UA: unmerged, added by them + DU: unmerged, deleted by us + AA: unmerged, both added + UU: unmerged, both modified + + See: https://git-scm.com/docs/git-status#_short_format""" + logger.info('Check for merge conflicts...') + result = ctx.run('git status --porcelain=v1', pty=False, hide='stdout') + status_entries = [GitFileStatus(*line.split(maxsplit=1)) for line in result.stdout.splitlines()] + conflict_statuses = {'DD', 'AU', 'UD', 'UA', 'DU', 'AA', 'UU'} + return [entry for entry in status_entries if entry.status in conflict_statuses] + + +def _maybe_push(ctx, config): + """Check to see if a push is necessary by counting the number of revisions + that differ between current head and the push destination. Push if necessary""" + count_revs_result = ctx.run( + f'git rev-list {config.upstream.remote_name}/{config.upstream.branch}..HEAD --count', + hide='stdout', pty=False) + if int(count_revs_result.stdout) == 0: + logger.info('Repo is already up to date') + return MergeStatus.UP_TO_DATE + logger.info('Push to local repo...') + ctx.run(f'git push {config.upstream.remote_name} HEAD:refs/heads/{config.upstream.branch}') + return MergeStatus.MERGED def _form_pr_body(ctx, config): From fa3d833a9320408ea9a2f4e59160c7e69dbc2755 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 28 Sep 2022 15:58:30 -0500 Subject: [PATCH 130/173] Add documentation for auto merge conflict resolution. 
--- README.md | 4 ++ dl-docs/auto-merge-conflict-resolution.md | 82 +++++++++++++++++++++++ 2 files changed, 86 insertions(+) create mode 100644 dl-docs/auto-merge-conflict-resolution.md diff --git a/README.md b/README.md index 086a3de99c6ee..91ec934e54272 100644 --- a/README.md +++ b/README.md @@ -9,3 +9,7 @@ It also has Invoke tasks and CI implementations that: - Upload recipes to our own repositories on Artifactory. - Pre-build tools with specific profiles and upload them to Artifactory. + +# DL Documentation + +- [Automatically Resolved Merge Conflicts](dl-docs/auto-merge-conflict-resolution.md) diff --git a/dl-docs/auto-merge-conflict-resolution.md b/dl-docs/auto-merge-conflict-resolution.md new file mode 100644 index 0000000000000..f0fb362f0a9eb --- /dev/null +++ b/dl-docs/auto-merge-conflict-resolution.md @@ -0,0 +1,82 @@ +# Automatically resolved merge conflicts + +conan-center-index at DL is a fork of the conan-io/conan-center-index repo run +by the Conan project. There is some project metadata that has the same names ( +such as the `.github` directory and `README.md`), and as such, there is +potential for merge conflicts. + +The `invoke merge-upstream` task can automatically resolve some of those merge +conflicts. + + + +- [Files that both conan-io and Datalogics modify](#files-that-both-conan-io-and-datalogics-modify) + - [Why `.gitattributes-merge` and not `.gitattributes`?](#why-gitattributes-merge-and-not-gitattributes) + - [Verifying the coverage of `.gitattributes-merge`](#verifying-the-coverage-of-gitattributes-merge) +- [Files that Datalogics has deleted](#files-that-datalogics-has-deleted) +- [References](#references) + + + +## Files that both conan-io and Datalogics modify + +For files that both conan-io and Datalogics modify, add them to the +`.gitattributes-merge` file with an attribute of `merge=ours`. The +`invoke merge-upstream` task arranges for there to be a merge driver called +"ours", which resolves modify/modify conflicts in favor of "our" branch, i.e., +Datalogics. + +As an example, this file currently contains: + +``` +# Conflicts that should always resolve in favor of Datalogics +# See the section "Merge Strategies" at the end of +# https://www.git-scm.com/book/en/v2/Customizing-Git-Git-Attributes +/README.md merge=ours +/.github/** merge=ours +``` + +### Why `.gitattributes-merge` and not `.gitattributes`? + +It's not possible to use custom merge drivers on GitHub, so if the `merge=ours` +attributes were put into the `.gitattributes` file, it would cause problems with +merges done by GitHub. To avoid breaking GitHub, this project uses the separate +file `.gitattributes-merge`, and uses the +[`core.attributesFile`](https://git-scm.com/docs/git-config#Documentation/git-config.txt-coreattributesFile) +configuration option to add it to the list of attributes files. + +### Verifying the coverage of `.gitattributes-merge` + +To verify the coverage of `.gitattributes-merge`, use the +[`git check-attr`](https://git-scm.com/docs/git-check-attr) command to search +for the merge attributes on files. +[`git ls-files`](https://git-scm.com/docs/git-ls-files) lists files that are +part of the repository, and use `grep` to ignore files that have unspecified +merge attributes. 
+ +```bash +$ git ls-files | git -c core.attributesFile=.gitattributes-merge check-attr --stdin merge | grep -v 'merge: unspecified' +.github/PULL_REQUEST_TEMPLATE.md: merge: ours +README.md: merge: ours +``` + +## Files that Datalogics has deleted + +If there are files that Datalogics has deleted, but conan-io has modified, those +files are automatically resolved in favor of Datalogics, by removing them. + +If we decide we need those files, one can bring them into the Datalogics fork by +using commands like the following: + +```shell +git remote add conan-io git@github.com:conan-io/conan-center-index.git +git fetch conan-io +git checkout conan-io/master -- +git commit +``` + +...and then making a pull request. + +## References + +- [Git documentation: 8.2 Customizing Git - Git Attributes](https://www.git-scm.com/book/en/v2/Customizing-Git-Git-Attributes) From b54ea6c5f3403eed9c72c676df054896e65f3498 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 29 Sep 2022 11:30:14 -0500 Subject: [PATCH 131/173] Jenkinsfile: Don't upload recipes or packages if test in job name - This lets jobs run in a multibranch pipeline or branch that has 'test' in it, without affecting the Artifactory repositories. - Also fix the syntax of the find (=~) comparisons. Currently they match 'develop' or 'master' anywhere in the branch name. The '*' was unnecessary. May consider using the match operator (==~), which would be used like branch ==~ 'develop.*', requiring the .* for the exact match. See: http://groovy-lang.org/operators.html#_find_operator See: http://groovy-lang.org/operators.html#_match_operator --- Jenkinsfile | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 9941e952e0db4..45f72d5ac33c4 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -12,6 +12,10 @@ def BUILD_TOOLS=[ 'windows-conan-center-index': true, ] def skipBuilding = false +// Don't upload things if the job name has 'test' in it +// Converting matcher to boolean with asBoolean() or find(): https://stackoverflow.com/a/35529715/11996393 +def upload_ok = ! (env.JOB_NAME =~ 'test').find() + pipeline { parameters { choice(name: 'PLATFORM_FILTER', @@ -49,7 +53,7 @@ pipeline { triggers { // From the doc: @midnight actually means some time between 12:00 AM and 2:59 AM. // This gives us automatic spreading out of jobs, so they don't cause load spikes. - parameterizedCron(env.BRANCH_NAME =~ 'develop*' ? '@midnight % MERGE_UPSTREAM=true' : '@midnight') + parameterizedCron(env.BRANCH_NAME =~ 'develop' ? '@midnight % MERGE_UPSTREAM=true' : '@midnight') } environment { CONAN_USER_HOME = "${WORKSPACE}" @@ -188,7 +192,7 @@ pipeline { expression { // Merge upstream on develop-prefixed branches if forced by parameter // The parametrized Cron timer sets MERGE_UPSTREAM at appropriate times. - env.BRANCH_NAME =~ 'develop*' && params.MERGE_UPSTREAM + env.BRANCH_NAME =~ 'develop' && params.MERGE_UPSTREAM } } steps { @@ -208,14 +212,14 @@ pipeline { stage('Upload new or changed recipes') { when { allOf { - expression { !skipBuilding } + expression { !skipBuilding && upload_ok } not { changeRequest() } } } steps { script { def remote - if (env.BRANCH_NAME =~ 'master*') { + if (env.BRANCH_NAME =~ 'master') { remote = 'conan-center-dl' } else { remote = 'conan-center-dl-staging' @@ -354,8 +358,8 @@ pipeline { steps { script { def upload = "" - if (env.CHANGE_ID == null) { // i.e. not a pull request - if (env.BRANCH_NAME =~ 'master*') { + if (env.CHANGE_ID == null && upload_ok) { // i.e. 
not a pull request, and uploads are permitted + if (env.BRANCH_NAME =~ 'master') { upload = '--upload-to conan-center-dl' } else { upload = '--upload-to conan-center-dl-staging' @@ -436,13 +440,13 @@ pipeline { void productionOrStaging() { if (env.CHANGE_ID == null) { - if (env.BRANCH_NAME =~ 'master*') { + if (env.BRANCH_NAME =~ 'master') { return 'production' } else { return 'staging' } } else { - if (env.CHANGE_BRANCH =~ 'master*') { + if (env.CHANGE_BRANCH =~ 'master') { return 'production' } else { return 'staging' From 8425a89162d75efeb759fb51b66eb93ba3e8cdac Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 29 Sep 2022 14:54:49 -0500 Subject: [PATCH 132/173] Add documentation for the merge-upstream task --- README.md | 5 +- dl-docs/merge-upstream.md | 102 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 105 insertions(+), 2 deletions(-) create mode 100644 dl-docs/merge-upstream.md diff --git a/README.md b/README.md index 91ec934e54272..76568fee4314e 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@ It also has Invoke tasks and CI implementations that: - Upload recipes to our own repositories on Artifactory. - Pre-build tools with specific profiles and upload them to Artifactory. -# DL Documentation +## DL Documentation -- [Automatically Resolved Merge Conflicts](dl-docs/auto-merge-conflict-resolution.md) +- [`merge-upstream` task](dl-docs/merge-upstream.md) + - [Automatically Resolved Merge Conflicts](dl-docs/auto-merge-conflict-resolution.md) diff --git a/dl-docs/merge-upstream.md b/dl-docs/merge-upstream.md new file mode 100644 index 0000000000000..2124769bf99d5 --- /dev/null +++ b/dl-docs/merge-upstream.md @@ -0,0 +1,102 @@ +# `merge-upstream` task + +The `invoke merge-upstream` task fetches the latest `master` branch from the +[`conan-io/conan-center-index`](https://github.com/conan-io/conan-center-index) +repository, and merges it into the `develop` branch. + + + +- [Operation](#operation) + - [Nuances](#nuances) +- [Configuration](#configuration) +- [When it runs](#when-it-runs) +- [See also](#see-also) + + + +## Operation + +1. Check for preconditions: The repository is not dirty, the `gh` + [GitHub CLI](https://cli.github.com/) command is installed, and the `gh` + command is logged in to Octocat. +2. Fetch the `master` branch from `conan-io/conan-center-index`. +3. Attempt to merge it into the `develop` branch, automatically resolving some + merge conflicts in favor of Datalogics' changes as specified in + `.gitattributes-merge`. +4. If there are any merge conflicts that resulted from the Conan project + modifying files that Datalogics deleted, resolve those in favor of the + Datalogics deletion. This means we can delete GitHub templates and the like, + so they don't affect the way we use our fork. +5. If merge conflicts remain, create a pull request in the current user's fork, + using a copy of the `master` branch at `conan-io/conan-center-index`. A + developer will have to review and resolve the merge conflicts, and approve + the PR. The assignee and reviewers for the pull request can be configured. +6. If there were no merge conflicts, then push the merge to the `develop` + branch. + +### Nuances + +- If there is already a pull request due to a merge conflict, and + `merge-upstream` discovers more new commits in `conan-io/conan-center-index`, + then it updates the pull request instead of making a new one. +- The credentials in the Jenkins job are passed by setting `GH_ENTERPRISE_TOKEN` + and `GH_HOST` in the environment. 
See the comment in the `Jenkinsfile` for how + to make a token and store it in Kepler. + +## Configuration + +The configuration is controlled by the `merge_upstream` key in `dlproject.yaml`. +Any unspecified values will get the following defaults. + +The defaults are: + +```yaml +merge_upstream: + cci: + url: git@github.com:conan-io/conan-center-index.git + branch: master + upstream: + host: octocat.dlogics.com + organization: datalogics + branch: develop + remote_name: merge-upstream-remote + pull_request: + host: octocat.dlogics.com + fork: + merge_branch_name: merge-from-conan-io + reviewers: [ ] + assignee: null + labels: + - from-conan-io +``` + +One use of this would be to use a personal fork for testing, to avoid polluting +the Datalogics organization repo: + +```yaml +merge_upstream: + upstream: + # Temporary overrides + organization: kam +``` + +## When it runs + +- When Jenkins builds the `develop` branch triggered by the `parameterizedCron` + statement in the `triggers` section of the `Jenkinsfile`, when the + `MERGE_UPSTREAM` parameter is set. As of this writing, the merge occurs + nightly. There's enough flexibility in the Cron triggers to permit, for + instance, only doing the merges on the weekends. +- By going to the `develop` branch of `conan-center-index` on Jenkins and doing + a **Build with parameters**, and clicking the **MERGE_UPSTREAM** parameter. +- By invoking `invoke merge-upstream` from the command line. This should only be + done when developing and testing; for everyday use, request the merge via + Jenkins. + +If Jenkins runs `invoke merge-upstream`, and the branch was successfully pushed, +it skips the rest of the job. Updating the `develop` branch will trigger a +following job with the new commits. + +## See also + +- [Automatically Resolved Merge Conflicts](auto-merge-conflict-resolution.md) From 64d5edcb9bd4ca023c8719af1101ac6f72ef9e97 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 30 Sep 2022 15:00:53 -0500 Subject: [PATCH 133/173] TestBuildTools: Fix uploading when no local packages - Make search_local_packages check the JSON more, and return the package list. - If there are no local packages, don't attempt uploading. - Fixes a problem where the tool came from the cache, and the requirements hadn't been downloaded, and looking for the packages' settings resulted in an IndexError. --- tests/test_tools.py | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/tests/test_tools.py b/tests/test_tools.py index d89c185f3d344..c669b4b95abba 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -1,3 +1,4 @@ +import collections.abc import json import os import platform @@ -152,14 +153,23 @@ def conan_env(msys_bin): class TestBuildTools(object): - def search_local_package(self, ref, conan_env, tmp_path): + def search_local_packages(self, ref, conan_env, tmp_path): search_json = tmp_path / 'search.json' args = ['conan', 'search', f'{ref}@', '-j', str(search_json)] print(f'Getting package information for {ref}: {" ".join(args)}') subprocess.run(args, check=True, stderr=subprocess.STDOUT, env=conan_env) with open(search_json) as json_file: search_data = json.load(json_file) - return search_data + assert search_data['results'], 'there should have been results' + results = search_data['results'] + assert results[0]['items'], 'there should have been an item in the results' + items = results[0]['items'][0] + # Note: checking for key, because it is ok for this function to return an empty package list. 
+ # Using abstract base class to check that something is a mapping (it might not subclass + # dict): https://stackoverflow.com/a/1278070/11996393 + assert isinstance(items, collections.abc.Mapping) + assert 'packages' in items, 'there should have been an package list in the first item' + return items['packages'] def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_tool_config, tool_recipe_folder, upload_to, force_build, tmp_path, conan_env): @@ -208,8 +218,11 @@ def test_build_tool(self, prebuilt_tool, prebuilt_tool_config_name, prebuilt_too if package == 'msys2': print(f'Not uploading {ref}, because it tends to modify itself during use.') continue - search_data = self.search_local_package(ref, conan_env, tmp_path) - settings = search_data['results'][0]['items'][0]['packages'][0]['settings'] + packages = self.search_local_packages(ref, conan_env, tmp_path) + if not packages: + print(f'Not uploading {ref} because there are no local packages') + continue + settings = packages[0].get('settings', {}) if platform.system() == 'Windows' and 'os' not in settings: # Don't upload OS-universal packages from Windows; this avoids packaging # script-based packages like autoconf without the proper mode bits From bb323478d6cd80b98f0b7ea2faf57a22f9500651 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 3 Oct 2022 14:50:10 -0500 Subject: [PATCH 134/173] merging: Extract Config subclass from MergeUpstreamConfig - Make a subclass so that common code for the MergeStagingToProductionConfig can also use it. - Isolate common functions create_from_dlproject() and asyaml() - Move the YAML key to a ClassVar so that each subclass has its own key. --- tasks/merging.py | 41 +++++++++++++++++++++++++++++------------ 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 7d39c2cc7624e..0c34f6e355e3e 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -10,6 +10,7 @@ import shutil import tempfile import textwrap +import typing from enum import Enum, auto from typing import Optional @@ -88,40 +89,56 @@ def url(self) -> str: return f'git@{self.host}:{self.fork}/conan-center-index.git' -@dataclasses.dataclass -class MergeUpstreamConfig: - """Configuration for the merge-upstream task.""" - cci: ConanCenterIndexConfig = dataclasses.field(default_factory=ConanCenterIndexConfig) - """Configuration for Conan Center Index""" - upstream: UpstreamConfig = dataclasses.field(default_factory=UpstreamConfig) - """Configuration for the Datalogics upstream""" - pull_request: PullRequestConfig = dataclasses.field(default_factory=PullRequestConfig) +class Config: + """Base class for Config dataclasses that read from dlproject.yaml""" + yaml_key = None class ConfigurationError(Exception): """Configuration error when reading data.""" + @classmethod + def _check_attributes(cls): + if cls.yaml_key is None: + raise NotImplementedError(f"Class {cls.__name__} must define 'yaml_key' as a 'ClassVar[str]' \n" + ' which indicates the key for the config in dlproject.yaml.') + @classmethod def create_from_dlproject(cls): - """Create a MergeUpstreamConfig with defaults updated from dlproject.yaml""" + """Create an instance of cls with defaults updated from dlproject.yaml""" + cls._check_attributes() with open('dlproject.yaml', encoding='utf-8') as dlproject_file: dlproject = yaml.safe_load(dlproject_file) - config_data = dlproject.get('merge_upstream', {}) + config_data = dlproject.get(cls.yaml_key, {}) try: - return dacite.from_dict(data_class=MergeUpstreamConfig, 
+ return dacite.from_dict(data_class=cls, data=config_data, config=dacite.Config(strict=True)) except dacite.DaciteError as exception: raise cls.ConfigurationError( - f'Error reading merge_upstream from dlproject.yaml: {exception}') from exception + f'Error reading {cls.yaml_key} from dlproject.yaml: {exception}') from exception def asyaml(self): """Return a string containing the yaml for this dataclass, in canonical form.""" + self._check_attributes() # sort_keys=False to preserve the ordering that's in the dataclasses # dict objects preserve order since Python 3.7 return yaml.dump(dataclasses.asdict(self), sort_keys=False, indent=4) +@dataclasses.dataclass +class MergeUpstreamConfig(Config): + """Configuration for the merge-upstream task.""" + cci: ConanCenterIndexConfig = dataclasses.field(default_factory=ConanCenterIndexConfig) + """Configuration for Conan Center Index""" + upstream: UpstreamConfig = dataclasses.field(default_factory=UpstreamConfig) + """Configuration for the Datalogics upstream""" + pull_request: PullRequestConfig = dataclasses.field(default_factory=PullRequestConfig) + """Configuration for the pull request""" + yaml_key: typing.ClassVar[str] = 'merge_upstream' + """Key for this configuration in dlproject.yaml.""" + + @dataclasses.dataclass class GitFileStatus: """A Git status""" From 55c0ad8b859d597867e698d579e2bb07401b17ff Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 3 Oct 2022 14:56:24 -0500 Subject: [PATCH 135/173] Add merge-staging-to-production task --- dlproject.yaml | 6 ++++++ tasks/__init__.py | 1 + tasks/merging.py | 38 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 45 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index 95526ae3d2af4..bd3501ad9160f 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -647,3 +647,9 @@ merge_upstream: # assignee: null # labels: # - from-conan-io +merge_staging_to_production: + # Defaults: + # host: octocat.dlogics.com + # organization: datalogics + # staging_branch: develop + # production_branch: master diff --git a/tasks/__init__.py b/tasks/__init__.py index 27ba7ebb45e07..d5b306b9e83e4 100644 --- a/tasks/__init__.py +++ b/tasks/__init__.py @@ -136,6 +136,7 @@ def upload_one_package_name(ctx, package_name, remote, upload=True): tasks = [] tasks.extend([v for v in locals().values() if isinstance(v, Task)]) tasks.append(merging.merge_upstream) +tasks.append(merging.merge_staging_to_production) conan_tasks = Collection() conan_tasks.add_task(conan.install_config) diff --git a/tasks/merging.py b/tasks/merging.py index 0c34f6e355e3e..7b550d1ee062d 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -139,6 +139,26 @@ class MergeUpstreamConfig(Config): """Key for this configuration in dlproject.yaml.""" +@dataclasses.dataclass +class MergeStagingToProductionConfig(Config): + """Configuration describing parameters for production merges in the upstream repo. 
(usually Datalogics)""" + host: str = 'octocat.dlogics.com' + """Host for the Datalogics upstream""" + organization: str = 'datalogics' + """Name of the upstream organization""" + staging_branch: str = 'develop' + """Name of the staging branch""" + production_branch: str = 'master' + """Name of the production branch""" + yaml_key: typing.ClassVar[str] = 'merge_staging_to_production' + """Key for this configuration in dlproject.yaml.""" + + @property + def url(self) -> str: + """The URL for the upstream Git repository.""" + return f'git@{self.host}:{self.organization}/conan-center-index.git' + + @dataclasses.dataclass class GitFileStatus: """A Git status""" @@ -178,6 +198,24 @@ def merge_upstream(ctx): _write_status_file(MergeStatus.PULL_REQUEST) +@Task +def merge_staging_to_production(ctx): + """Merge the staging branch to the production branch""" + config = MergeStagingToProductionConfig.create_from_dlproject() + logger.info('merge-staging-to-production configuration:\n%s', config.asyaml()) + with _preserving_branch_and_commit(ctx): + logger.info('Check out production branch...') + ctx.run(f'git fetch {config.url} {config.production_branch}') + ctx.run('git checkout --detach FETCH_HEAD') + + logger.info('Merge staging branch...') + ctx.run(f'git fetch {config.url} {config.staging_branch}') + ctx.run(f'git merge --no-ff --no-edit --no-verify --into-name {config.production_branch} FETCH_HEAD') + + logger.info('Push merged production branch...') + ctx.run(f'git push {config.url} HEAD:refs/heads/{config.production_branch}') + + def _remove_status_file(): try: os.remove(MERGE_UPSTREAM_STATUS) From 669cf4e808eae96bb1a9390799b3185e5708b2bc Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 3 Oct 2022 16:49:43 -0500 Subject: [PATCH 136/173] merging: Factor writing status file and counting revisions --- tasks/merging.py | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 7b550d1ee062d..6fc2c434b1c95 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -183,19 +183,19 @@ def merge_upstream(ctx): logger.info('merge-upstream configuration:\n%s', config.asyaml()) # if anything fails past this point, the missing status file will also abort the Jenkins run. 
- _remove_status_file() + _remove_status_file(MERGE_UPSTREAM_STATUS) # Nested context handlers; see https://docs.python.org/3.10/reference/compound_stmts.html#the-with-statement with _preserving_branch_and_commit(ctx), _merge_remote(ctx, config): # Try to merge from CCI try: - _write_status_file(_merge_and_push(ctx, config)) + _write_status_file(_merge_and_push(ctx, config), to_file=MERGE_UPSTREAM_STATUS) except MergeHadConflicts: try: pr_body = _form_pr_body(ctx, config) finally: ctx.run('git merge --abort') _create_pull_request(ctx, config, pr_body) - _write_status_file(MergeStatus.PULL_REQUEST) + _write_status_file(MergeStatus.PULL_REQUEST, to_file=MERGE_UPSTREAM_STATUS) @Task @@ -216,18 +216,18 @@ def merge_staging_to_production(ctx): ctx.run(f'git push {config.url} HEAD:refs/heads/{config.production_branch}') -def _remove_status_file(): +def _remove_status_file(filename): try: - os.remove(MERGE_UPSTREAM_STATUS) + os.remove(filename) except FileNotFoundError: pass -def _write_status_file(merge_status): +def _write_status_file(merge_status, to_file): """Write the merge status to the status file.""" - logger.info('Write status %s to file %s', merge_status.name, MERGE_UPSTREAM_STATUS) - with open(MERGE_UPSTREAM_STATUS, 'w', encoding='utf-8') as merge_upstream_status: - merge_upstream_status.write(merge_status.name) + logger.info('Write status %s to file %s', merge_status.name, to_file) + with open(to_file, 'w', encoding='utf-8') as status: + status.write(merge_status.name) @contextlib.contextmanager @@ -295,6 +295,13 @@ def _branch_exists(ctx, branch): return result.ok +def _count_revs(ctx, commit): + """Count the revisions in the given commit, which can be a range like branch..HEAD, or + other commit expression.""" + count_revs_result = ctx.run(f'git rev-list {commit} --count', hide='stdout', pty=False) + return int(count_revs_result.stdout) + + def _merge_and_push(ctx, config): """Attempt to merge upstream branch and push it to the local repo.""" logger.info('Check out local %s branch...', config.upstream.branch) @@ -368,10 +375,8 @@ def _retrieve_merge_conflicts(ctx): def _maybe_push(ctx, config): """Check to see if a push is necessary by counting the number of revisions that differ between current head and the push destination. Push if necessary""" - count_revs_result = ctx.run( - f'git rev-list {config.upstream.remote_name}/{config.upstream.branch}..HEAD --count', - hide='stdout', pty=False) - if int(count_revs_result.stdout) == 0: + + if _count_revs(ctx, f'{config.upstream.remote_name}/{config.upstream.branch}..HEAD') == 0: logger.info('Repo is already up to date') return MergeStatus.UP_TO_DATE logger.info('Push to local repo...') From e506f4f7ed8caf5e79471877afaad3a9b44544b2 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 3 Oct 2022 16:52:45 -0500 Subject: [PATCH 137/173] Add merge-staging-to-production to Jenkins - Add a parameter MERGE_STAGING_TO_PRODUCTION - Return the status from the Invoke task to Jenkins. - Skip the rest of the run if there were no further changes. 
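A minimal shell sketch of the status-file handshake this commit describes, assuming `invoke merge-staging-to-production` has already run and written `.merge-staging-to-production-status` (the pipeline logic added below implements this in Groovy):

```bash
# Read the status written by the Invoke task and decide whether the rest of
# the job can be skipped; a successful merge pushes to the production branch,
# which retriggers Jenkins with the new commits.
status=$(cat .merge-staging-to-production-status)
echo "merge-staging-to-production status is ${status}"
if [ "${status}" = "MERGED" ]; then
    echo "Skipping remaining stages"
fi
```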
--- .gitignore | 1 + Jenkinsfile | 23 +++++++++++++++++++++++ tasks/merging.py | 7 +++++++ 3 files changed, 31 insertions(+) diff --git a/.gitignore b/.gitignore index 55864f394a9f7..cf4c384a2baec 100644 --- a/.gitignore +++ b/.gitignore @@ -456,3 +456,4 @@ requirements.txt # outputs from invoke tasks /.merge-upstream-status +/.merge-staging-to-production-status diff --git a/Jenkinsfile b/Jenkinsfile index 45f72d5ac33c4..8ae9c79089ea4 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -39,6 +39,8 @@ pipeline { description: 'Force build of all tools, and their requirements. By default, Conan will download the tool and test it if it\'s already built' booleanParam name: 'MERGE_UPSTREAM', defaultValue: false, description: 'If building develop branch, merge changes from upstream, i.e., conan-io/conan-center-index' + booleanParam name: 'MERGE_STAGING_TO_PRODUCTION', defaultValue: false, + description: 'If building master branch, merge changes from the develop branch' } options{ buildDiscarder logRotator(artifactDaysToKeepStr: '4', artifactNumToKeepStr: '10', daysToKeepStr: '7', numToKeepStr: '10') @@ -209,6 +211,27 @@ pipeline { } } } + stage('Merge staging to production') { + when { + expression { + // Merge upstream on master-prefixed branches if forced by parameter + env.BRANCH_NAME =~ 'master' && params.MERGE_STAGING_TO_PRODUCTION + } + } + steps { + script { + sh """ + . ${ENV_LOC['noarch']}/bin/activate + invoke merge-staging-to-production + """ + def merge_staging_to_production_status = readFile(file: '.merge-staging-to-production-status') + echo "merge-staging-to-production status is ${merge_staging_to_production_status}" + // If the status of the merge is MERGED, then don't do anything + // else; Jenkins will notice the branch changed and re-run. + skipBuilding = merge_staging_to_production_status == 'MERGED' + } + } + } stage('Upload new or changed recipes') { when { allOf { diff --git a/tasks/merging.py b/tasks/merging.py index 6fc2c434b1c95..18d62397a4480 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -20,6 +20,7 @@ # Name of a status file MERGE_UPSTREAM_STATUS = '.merge-upstream-status' +MERGE_STAGING_TO_PRODUCTION_STATUS = '.merge-staging-to-production-status' logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) @@ -204,16 +205,22 @@ def merge_staging_to_production(ctx): config = MergeStagingToProductionConfig.create_from_dlproject() logger.info('merge-staging-to-production configuration:\n%s', config.asyaml()) with _preserving_branch_and_commit(ctx): + _remove_status_file(MERGE_STAGING_TO_PRODUCTION_STATUS) logger.info('Check out production branch...') ctx.run(f'git fetch {config.url} {config.production_branch}') ctx.run('git checkout --detach FETCH_HEAD') logger.info('Merge staging branch...') ctx.run(f'git fetch {config.url} {config.staging_branch}') + if _count_revs(ctx, 'HEAD..FETCH_HEAD') == 0: + logger.info('%s is up to date.', config.production_branch) + _write_status_file(MergeStatus.UP_TO_DATE, to_file=MERGE_STAGING_TO_PRODUCTION_STATUS) + return ctx.run(f'git merge --no-ff --no-edit --no-verify --into-name {config.production_branch} FETCH_HEAD') logger.info('Push merged production branch...') ctx.run(f'git push {config.url} HEAD:refs/heads/{config.production_branch}') + _write_status_file(MergeStatus.MERGED, to_file=MERGE_STAGING_TO_PRODUCTION_STATUS) def _remove_status_file(filename): From c667f742af9368a16a808df6827b43146cc93ec9 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Tue, 4 Oct 2022 11:33:02 -0500 Subject: [PATCH 138/173] dlproject.yaml: Use common build profiles from conan-config - Use the common build profiles from conan-config for the host profile for all the tools builders. - Don't use build profiles (profile_build) yet, because not all the recipes in Conan Center Index support them yet. To support build and host profiles, a recipe has to use the new Conan tools and environments. --- dlproject.yaml | 39 +++++++++++++-------------------------- 1 file changed, 13 insertions(+), 26 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index bd3501ad9160f..54e67c0e0a1f8 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -178,7 +178,7 @@ config: build_folder: build-release description: macOS Release profile_host: - - apple-clang-13.0-intel + - build-profile-macos-intel DebugTool: &macOSDebugTool include: - ReleaseTool @@ -230,7 +230,7 @@ config: build_folder: build-release description: macOS Release profile_host: - - apple-clang-13.0-arm + - build-profile-macos-arm DebugTool: include: - ReleaseTool @@ -292,7 +292,7 @@ config: build_folder: build-release-tool description: RedHat Release Tool profile_host: - - devtoolset-7 + - build-profile-linux-intel DebugTool: &redhatDebugTool include: ReleaseTool build_folder: build-debug-tool @@ -350,22 +350,18 @@ config: description: RedHat Debug settings: - build_type=Debug - ToolCommon: - env: - # This is necessary to get SWIG to link on ARM; for some reason, it's not automatic - - swig:LDFLAGS=-ldl ReleaseTool: build_folder: build-release-tool description: RedHat Release - include: - - Release - - ToolCommon + profile_host: + - build-profile-linux-arm DebugTool: build_folder: build-debug-tool description: RedHat Debug include: - - Debug - - ToolCommon + - ReleaseTool + settings: + - build_type=Debug prebuilt_tools: - cmake/[>=3.23.0] - doxygen/1.9.1 @@ -420,7 +416,7 @@ config: ReleaseTool: build_folder: build-release-tool description: Windows Release - profile_host: visual-studio-16 + profile_host: build-profile-windows-intel DebugTool: build_folder: build-debug-tool description: Windows Debug Tool @@ -493,12 +489,11 @@ config: <<: *aixCommon build_folder: build-release-tool description: AIX Release - profile_host: aix-xlc16-ppc + profile_host: build-profile-aix-ppc ReleaseToolGCC: build_folder: build-release-tool description: AIX Release Tool with GCC - include: - - Release + profile_host: build-profile-aix-ppc-gcc DebugTool: build_folder: build-debug-tool description: AIX Debug Tool @@ -577,11 +572,7 @@ config: ReleaseTool: build_folder: build-release-tool description: Sparc Release Tool - include: - - Release - env: - # Override to not contain -std=c99, which breaks m4 by turning off the 'asm' keyword - - CFLAGS=-pthread -m64 + profile_host: build-profile-solaris-sparc DebugTool: build_folder: build-debug-tool description: Sparc Debug Tool @@ -592,11 +583,7 @@ config: ReleaseTool32: build_folder: build-release-tool-32 description: Sparc Release 32 Tool 32 - include: - - Release32 - env: - # Override to not contain -std=c99, which breaks m4 by turning off the 'asm' keyword - - CFLAGS="-pthread -m32" + profile_host: build-profile-solaris-sparc-32 DebugTool32: build_folder: build-debug-tool-32 description: Sparc Debug 32 Tool From 5d656f7750c454819faf2bc62df64fd870e58242 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Wed, 5 Oct 2022 11:42:04 -0500 Subject: [PATCH 139/173] merging: asyaml() dumps configs with their main key --- tasks/merging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tasks/merging.py b/tasks/merging.py index 18d62397a4480..f6324918ee524 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -124,7 +124,7 @@ def asyaml(self): self._check_attributes() # sort_keys=False to preserve the ordering that's in the dataclasses # dict objects preserve order since Python 3.7 - return yaml.dump(dataclasses.asdict(self), sort_keys=False, indent=4) + return yaml.dump({self.yaml_key: dataclasses.asdict(self)}, sort_keys=False, indent=4) @dataclasses.dataclass From d90a8520d761ac944fb24fc58a77c663ebb79362 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 5 Oct 2022 16:52:31 -0500 Subject: [PATCH 140/173] test_tools: When installing msys2 from Conan, update --- tests/test_tools.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_tools.py b/tests/test_tools.py index c669b4b95abba..38fa0890bdd4c 100644 --- a/tests/test_tools.py +++ b/tests/test_tools.py @@ -103,8 +103,8 @@ def msys_env(release_tool_config, tmpdir_factory, upload_to): if platform.system() == 'Windows': msys2_dir = tmpdir_factory.mktemp('msys2_install') install_json = msys2_dir / 'install.json' - args = ['conan', 'install', 'msys2/cci.latest@', '-if', msys2_dir, '-g', 'json', '--build', 'missing', - '-j', install_json] + args = ['conan', 'install', '--update', 'msys2/cci.latest@', '-if', msys2_dir, '-g', 'json', + '--build', 'missing', '-j', install_json] args.extend(release_tool_config.install_options()) subprocess.run(args, check=True) From 19f9e3b93c6404e40cece0179cd715e7e97b5712 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 18 Oct 2022 12:45:30 -0500 Subject: [PATCH 141/173] innoextract: Bump requirement versions --- recipes/innoextract/all/conanfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/recipes/innoextract/all/conanfile.py b/recipes/innoextract/all/conanfile.py index 6fcd4d2d984be..a793b8328fc47 100644 --- a/recipes/innoextract/all/conanfile.py +++ b/recipes/innoextract/all/conanfile.py @@ -14,9 +14,9 @@ class InnoextractConan(ConanFile): url = "https://github.com/conan-io/conan-center-index" exports_sources = ["CMakeLists.txt", "patches/*"] requires = ( - ("boost/1.78.0", "private"), + ("boost/1.80.0", "private"), ("xz_utils/5.2.5", "private"), - ("libiconv/1.16", "private"), + ("libiconv/1.17", "private"), ) generators = "cmake", "cmake_find_package" settings = "os", "arch", "compiler", "build_type" From d60c1910e178a20aa370d88e56434612a36a6acf Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 18 Oct 2022 12:48:46 -0500 Subject: [PATCH 142/173] innoextract: There are no include directories - Silence hook error. --- recipes/innoextract/all/conanfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/recipes/innoextract/all/conanfile.py b/recipes/innoextract/all/conanfile.py index a793b8328fc47..1b5659d2be3ac 100644 --- a/recipes/innoextract/all/conanfile.py +++ b/recipes/innoextract/all/conanfile.py @@ -63,6 +63,7 @@ def package_id(self): self.info.requires.clear() def package_info(self): + self.cpp_info.includedirs = [] self.cpp_info.libdirs = [] bindir = os.path.join(self.package_folder, "bin") self.output.info("Appending PATH environment variable: {}" From 1231fcf05e456d349d514c998e728fa19a2f95d1 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Mon, 17 Oct 2022 17:20:22 -0500 Subject: [PATCH 143/173] merge-upstream: PR: pre-resolve the delete/modify conflicts - Remember the conflict list in the MergeHadConflicts exception. - When creating the PR, add a commit to the branch that deletes the files that had delete/modify conflicts because they were deleted by DL. - This increases the cases where the conflicts can be resolved on the web. --- tasks/merging.py | 29 +++++++++++++++++++++-------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index f6324918ee524..ae75eea135211 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -29,6 +29,14 @@ class MergeHadConflicts(Exception): """Thrown when the merge had conflicts. Usually handled by making a pull request.""" + def __init__(self, conflicts=None): + """ + Create the exception, with optional list of conflicts. + + @param conflicts: a list of conflicts that were found + """ + self.conflicts = conflicts + class MergeStatus(Enum): """The status of the attempted merge. The name of this status will be placed into the @@ -190,12 +198,12 @@ def merge_upstream(ctx): # Try to merge from CCI try: _write_status_file(_merge_and_push(ctx, config), to_file=MERGE_UPSTREAM_STATUS) - except MergeHadConflicts: + except MergeHadConflicts as merge_exception: try: pr_body = _form_pr_body(ctx, config) finally: ctx.run('git merge --abort') - _create_pull_request(ctx, config, pr_body) + _create_pull_request(ctx, config, pr_body, merge_exception.conflicts) _write_status_file(MergeStatus.PULL_REQUEST, to_file=MERGE_UPSTREAM_STATUS) @@ -334,14 +342,15 @@ def _merge_and_push(ctx, config): warn=True) if merge_result.ok: return _maybe_push(ctx, config) - conflicts = _retrieve_merge_conflicts(ctx) - if not conflicts: + original_conflicts = _retrieve_merge_conflicts(ctx) + if not original_conflicts: # Something else went wrong with the merge raise UnexpectedExit(merge_result) - _remove_files_deleted_by_us(ctx, conflicts) + _remove_files_deleted_by_us(ctx, original_conflicts) conflicts = _retrieve_merge_conflicts(ctx) if conflicts: - raise MergeHadConflicts + # Note: Raising with the original conflicts, which include the delete/delete conflicts. + raise MergeHadConflicts(original_conflicts) logger.info('Commit merge with resolved conflicts...') # Finish the merge by committing. --no-verify is necessary to avoid running commit # hooks, which aren't run on merge commits that succeed. @@ -427,15 +436,19 @@ def _form_pr_body(ctx, config): return body -def _create_pull_request(ctx, config, pr_body): +def _create_pull_request(ctx, config, pr_body, conflicts): """Create a pull request to merge in the data from upstream.""" logger.info('Create pull request from upstream branch...') # Get the upstream ref ctx.run(f'git fetch {config.cci.url} {config.cci.branch}') + ctx.run('git checkout --detach FETCH_HEAD') + # Remove files that DL deleted, but were modified by conan-io + _remove_files_deleted_by_us(ctx, conflicts) + ctx.run('git commit --no-verify -m "Delete conflicting files that were deleted by DL"') # Push it to the fork the PR will be on. 
Have to include refs/heads in case the branch didn't # already exist ctx.run(f'git push --force {config.pull_request.url} ' - f'FETCH_HEAD:refs/heads/{config.pull_request.merge_branch_name}') + f'HEAD:refs/heads/{config.pull_request.merge_branch_name}') with tempfile.NamedTemporaryFile(prefix='pr-body', mode='w+', encoding='utf-8') as pr_body_file: pr_body_file.write(pr_body) # Before passing the filename to gh pr create, flush it so all the data is on the disk From 84f13bafb67521d82f6db530139ebffa98205560 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 20 Oct 2022 11:23:52 -0500 Subject: [PATCH 144/173] merge-upstream: Don't show merge commits in PR message - Merge commits aren't where the real work happens, and listing them adds some confusion. --- tasks/merging.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index ae75eea135211..cff415e9ccf13 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -407,9 +407,11 @@ def _form_pr_body(ctx, config): logger.info('Create body of pull request message...') conflict_files_result = ctx.run('git diff --no-color --name-only --diff-filter=U', hide='stdout', pty=False) commits_on_upstream_result = ctx.run( - 'git log --no-color --merge HEAD..MERGE_HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', hide='stdout', pty=False) + 'git log --no-color --no-merges --merge HEAD..MERGE_HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', + hide='stdout', pty=False) commits_local_result = ctx.run( - 'git log --no-color --merge MERGE_HEAD..HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', hide='stdout', pty=False) + 'git log --no-color --no-merges --merge MERGE_HEAD..HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', + hide='stdout', pty=False) body = textwrap.dedent(''' Merge changes from conan-io/conan-center-index into {local_branch}. From 8e60193c3c85442751c53cd1c69b0df940380eb6 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 20 Oct 2022 11:56:01 -0500 Subject: [PATCH 145/173] merge-upstream: Include diffs of changed files in description - Diffs of each side may help in deciding how to resolve conflicts --- tasks/merging.py | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/tasks/merging.py b/tasks/merging.py index cff415e9ccf13..c80fb941b837a 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -409,9 +409,15 @@ def _form_pr_body(ctx, config): commits_on_upstream_result = ctx.run( 'git log --no-color --no-merges --merge HEAD..MERGE_HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', hide='stdout', pty=False) + files = conflict_files_result.stdout.strip().replace('\n', ' ') + # Note: 'git diff HEAD...MERGE_HEAD' is a diff of changes on MERGE_HEAD that are not on HEAD. + # It's the same as: git diff $(git merge-base HEAD MERGE_HEAD) MERGE_HEAD + # See: https://git-scm.com/docs/git-diff for more details + diff_on_upstream_result = ctx.run(f'git diff -U HEAD...MERGE_HEAD -- {files}', hide='stdout', pty=False) commits_local_result = ctx.run( 'git log --no-color --no-merges --merge MERGE_HEAD..HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', hide='stdout', pty=False) + diff_on_local_result = ctx.run(f'git diff -U MERGE_HEAD...HEAD -- {files}', hide='stdout', pty=False) body = textwrap.dedent(''' Merge changes from conan-io/conan-center-index into {local_branch}. @@ -427,13 +433,36 @@ def _form_pr_body(ctx, config): {commits_on_upstream} + #### Differences on `conan-io` + +
Click to reveal... + + ```diff + {diff_on_upstream} + ``` + +
+ ### Commits for conflict files, local {commits_local} + + #### Differences, local + +
Click to reveal... + + ```diff + {diff_on_local} + ``` + +
+ ''').format(local_branch=config.upstream.branch, conflict_files=conflict_files_result.stdout, commits_on_upstream=commits_on_upstream_result.stdout, - commits_local=commits_local_result.stdout) + diff_on_upstream=diff_on_upstream_result.stdout, + commits_local=commits_local_result.stdout, + diff_on_local=diff_on_local_result.stdout) return body From 783434b2c2d73fb3dbe83a3b1a12b0db57d4fce6 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 20 Oct 2022 13:57:22 -0500 Subject: [PATCH 146/173] merge-upstream: Remove decorations from list of commits in PR --- tasks/merging.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index c80fb941b837a..cacddbd1789a4 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -407,7 +407,7 @@ def _form_pr_body(ctx, config): logger.info('Create body of pull request message...') conflict_files_result = ctx.run('git diff --no-color --name-only --diff-filter=U', hide='stdout', pty=False) commits_on_upstream_result = ctx.run( - 'git log --no-color --no-merges --merge HEAD..MERGE_HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', + 'git log --no-color --no-merges --merge HEAD..MERGE_HEAD --pretty=format:"%h - %s (%cr) <%an>"', hide='stdout', pty=False) files = conflict_files_result.stdout.strip().replace('\n', ' ') # Note: 'git diff HEAD...MERGE_HEAD' is a diff of changes on MERGE_HEAD that are not on HEAD. @@ -415,7 +415,7 @@ def _form_pr_body(ctx, config): # See: https://git-scm.com/docs/git-diff for more details diff_on_upstream_result = ctx.run(f'git diff -U HEAD...MERGE_HEAD -- {files}', hide='stdout', pty=False) commits_local_result = ctx.run( - 'git log --no-color --no-merges --merge MERGE_HEAD..HEAD --pretty=format:"%h -%d %s (%cr) <%an>"', + 'git log --no-color --no-merges --merge MERGE_HEAD..HEAD --pretty=format:"%h - %s (%cr) <%an>"', hide='stdout', pty=False) diff_on_local_result = ctx.run(f'git diff -U MERGE_HEAD...HEAD -- {files}', hide='stdout', pty=False) body = textwrap.dedent(''' From 618bb0089f463efe669f6ddd264d4e3927ba2dc2 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 20 Oct 2022 16:20:23 -0500 Subject: [PATCH 147/173] merge-upstream: PR: Resolve conflicts that have merge=ours For files that have merge=ours in .gitattributes-merge, add a commit to the PR branch that sets those files to the DL version. That way, they won't show up in the merge resolution in GitHub. --- tasks/merging.py | 56 ++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 49 insertions(+), 7 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index cacddbd1789a4..9bc51ccbb9124 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -175,6 +175,8 @@ class GitFileStatus: """A Git short status string, see https://git-scm.com/docs/git-status#_short_format""" path: str """A file path, which may be two paths separated by -> if a rename or copy""" + merge_attr: str = 'unspecified' + """The merge attribute for this path from .gitattributes""" @Task @@ -349,8 +351,9 @@ def _merge_and_push(ctx, config): _remove_files_deleted_by_us(ctx, original_conflicts) conflicts = _retrieve_merge_conflicts(ctx) if conflicts: - # Note: Raising with the original conflicts, which include the delete/delete conflicts. - raise MergeHadConflicts(original_conflicts) + # There are still unresolved conflicts. Raise an exception, + # and the top level PR will turn it into a pull request. 
+ _raise_exception_for_conflicted_merge(ctx) logger.info('Commit merge with resolved conflicts...') # Finish the merge by committing. --no-verify is necessary to avoid running commit # hooks, which aren't run on merge commits that succeed. @@ -358,13 +361,26 @@ def _merge_and_push(ctx, config): return _maybe_push(ctx, config) +def _raise_exception_for_conflicted_merge(ctx): + """Redo the merge to get the complete list of conflicts, without the 'ours' merge + driver. Then, raise the MergeHadConflicts exception with the complete list.""" + # Redo the merge to get all the conflicts, including the ones we resolve as 'ours' + logger.info('Redoing merge to get complete conflict list') + ctx.run('git merge --abort') + ctx.run('git -c merge.rerere=false merge --no-commit --no-ff FETCH_HEAD', warn=True) + conflicts = _retrieve_merge_conflicts(ctx) + conflicts = _find_merge_attributes(ctx, conflicts) + raise MergeHadConflicts(conflicts) + + def _remove_files_deleted_by_us(ctx, conflicts): """Examine conflicts for files deleted by us (status DU) and remove them with 'git rm'. This may clear enough of the conflicts to allow auto-merging to continue.""" logger.info('Removing conflict files deleted by us...') - for conflict in conflicts: - if conflict.status == 'DU': # we deleted, they modified (unmerged) - ctx.run(f'git rm {conflict.path}') + paths = [conflict.path for conflict in conflicts if conflict.status == 'DU'] + for path in paths: + ctx.run(f'git rm {path}') + return paths def _retrieve_merge_conflicts(ctx): @@ -400,6 +416,23 @@ def _maybe_push(ctx, config): return MergeStatus.MERGED +def _find_merge_attributes(ctx, conflicts): + """Find the merge attributes for the conflicts""" + modify_conflicts = [conflict.path for conflict in conflicts if conflict.status == 'UU'] + check_attr = ctx.run( + f'git -c core.attributesFile=.gitattributes-merge check-attr merge -z -- {" ".join(modify_conflicts)}', + hide='stdout', pty=False) + # The -z means data fields separated by NUL + check_attr_data = check_attr.stdout.strip('\0').split('\0') + # Iterate in groups of three + # https://stackoverflow.com/a/18541496/11996393 + check_attr_iters = [iter(check_attr_data)] * 3 + path_attrs = {path: value for path, _, value in zip(*check_attr_iters)} + new_conflicts = [dataclasses.replace(conflict, merge_attr=path_attrs.get(conflict.path, 'unspecified')) + for conflict in conflicts] + return new_conflicts + + def _form_pr_body(ctx, config): """Create a body for the pull request summarizing information about the merge conflicts.""" # Note: pty=False to enforce not using a PTY; that makes sure that Git doesn't @@ -473,9 +506,18 @@ def _create_pull_request(ctx, config, pr_body, conflicts): # Get the upstream ref ctx.run(f'git fetch {config.cci.url} {config.cci.branch}') ctx.run('git checkout --detach FETCH_HEAD') + # Remove files that DL deleted, but were modified by conan-io - _remove_files_deleted_by_us(ctx, conflicts) - ctx.run('git commit --no-verify -m "Delete conflicting files that were deleted by DL"') + if _remove_files_deleted_by_us(ctx, conflicts): + ctx.run('git commit --no-verify -m "Delete conflicting files that were deleted by DL"') + + # Resolve files in our favor if they have the attribute merge=ours + merge_ours = [conflict.path for conflict in conflicts if conflict.status == 'UU' and conflict.merge_attr == 'ours'] + if merge_ours: + for path in merge_ours: + ctx.run(f'git checkout {config.upstream.remote_name}/{config.upstream.branch} -- {path}') + ctx.run('git commit --no-verify -m "Favor DL changes 
for files where merge=ours"') + # Push it to the fork the PR will be on. Have to include refs/heads in case the branch didn't # already exist ctx.run(f'git push --force {config.pull_request.url} ' From 423d0c9777b7d7b3ba9f212400ad5a8d15882248 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 20 Oct 2022 17:50:54 -0500 Subject: [PATCH 148/173] merging.py: Disable rerere on all 'git merge' operations - Don't want to either use nor record automatic merge conflict resolution. --- tasks/merging.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 9bc51ccbb9124..328a22a35bbed 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -18,6 +18,9 @@ import yaml from invoke import Exit, Task, UnexpectedExit +# Git config option to disable rerere, which tries to reuse merge conflict resolutions +DISABLE_RERERE = '-c rerere.enabled=false ' + # Name of a status file MERGE_UPSTREAM_STATUS = '.merge-upstream-status' MERGE_STAGING_TO_PRODUCTION_STATUS = '.merge-staging-to-production-status' @@ -226,7 +229,9 @@ def merge_staging_to_production(ctx): logger.info('%s is up to date.', config.production_branch) _write_status_file(MergeStatus.UP_TO_DATE, to_file=MERGE_STAGING_TO_PRODUCTION_STATUS) return - ctx.run(f'git merge --no-ff --no-edit --no-verify --into-name {config.production_branch} FETCH_HEAD') + ctx.run( + f'git {DISABLE_RERERE} merge --no-ff --no-edit --no-verify --into-name ' + f'{config.production_branch} FETCH_HEAD') logger.info('Push merged production branch...') ctx.run(f'git push {config.url} HEAD:refs/heads/{config.production_branch}') @@ -340,6 +345,7 @@ def _merge_and_push(ctx, config): # See the section "Merge Strategies" at the end of # https://www.git-scm.com/book/en/v2/Customizing-Git-Git-Attributes '-c merge.ours.driver=true ' + f'{DISABLE_RERERE} ' f'merge --no-ff --no-edit --no-verify --into-name {config.upstream.branch} FETCH_HEAD', warn=True) if merge_result.ok: @@ -367,7 +373,7 @@ def _raise_exception_for_conflicted_merge(ctx): # Redo the merge to get all the conflicts, including the ones we resolve as 'ours' logger.info('Redoing merge to get complete conflict list') ctx.run('git merge --abort') - ctx.run('git -c merge.rerere=false merge --no-commit --no-ff FETCH_HEAD', warn=True) + ctx.run(f'git {DISABLE_RERERE} merge --no-commit --no-ff FETCH_HEAD', warn=True) conflicts = _retrieve_merge_conflicts(ctx) conflicts = _find_merge_attributes(ctx, conflicts) raise MergeHadConflicts(conflicts) From 49399b66f431164ce9e3e3fb65813ba1142ddcbd Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 21 Oct 2022 10:52:25 -0500 Subject: [PATCH 149/173] merge-upstream: List only unresolvable files in the PR - Filter the list of conflicts in _form_pr_body(), and use that to create the PR description, instead of using all the conflicts. - This ensures that the description has information only about files that can't be autoresolved. - Files deleted by DL and modified by conan-io are autoresolved. - Files marked 'merge=ours' in .gitattributes-merge are also autoresolved. 
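As an illustration only (not part of the patch itself), and assuming `GitFileStatus` and `_unresolvable_conflicts()` can be imported from `tasks/merging.py` as defined below, the classification works roughly like this; the file paths are invented:

```python
# Illustration only -- hypothetical paths; GitFileStatus and
# _unresolvable_conflicts are the ones defined/used by this patch.
from tasks.merging import GitFileStatus, _unresolvable_conflicts

conflicts = [
    # Deleted by DL, modified by conan-io: auto-resolved by deleting the file.
    GitFileStatus(status='DU', path='recipes/example-a/all/conanfile.py'),
    # Modified on both sides, but marked merge=ours: auto-resolved in DL's favor.
    GitFileStatus(status='UU', path='README.md', merge_attr='ours'),
    # Modified on both sides with no merge attribute: needs manual resolution.
    GitFileStatus(status='UU', path='recipes/example-b/all/conanfile.py'),
]

# Only the last conflict is unresolvable, so only it is described in the PR body.
print(_unresolvable_conflicts(conflicts))
```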
--- tasks/merging.py | 34 +++++++++++++++++++++++++--------- 1 file changed, 25 insertions(+), 9 deletions(-) diff --git a/tasks/merging.py b/tasks/merging.py index 328a22a35bbed..ec054eb525113 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -205,7 +205,7 @@ def merge_upstream(ctx): _write_status_file(_merge_and_push(ctx, config), to_file=MERGE_UPSTREAM_STATUS) except MergeHadConflicts as merge_exception: try: - pr_body = _form_pr_body(ctx, config) + pr_body = _form_pr_body(ctx, config, merge_exception.conflicts) finally: ctx.run('git merge --abort') _create_pull_request(ctx, config, pr_body, merge_exception.conflicts) @@ -439,24 +439,40 @@ def _find_merge_attributes(ctx, conflicts): return new_conflicts -def _form_pr_body(ctx, config): +def _unresolvable_conflicts(conflicts): + """Filter the conflict list, returning the ones that are unresolvable""" + + def resolvable(conflict): + # DU conflicts (Datalogics deleted, conan-io modified) are resolvable + if conflict.status == 'DU': + return True + # merge=ours conflicts are resolvable + if conflict.status == 'UU' and conflict.merge_attr == 'ours': + return True + return False + + return [conflict for conflict in conflicts if not resolvable(conflict)] + + +def _form_pr_body(ctx, config, conflicts): """Create a body for the pull request summarizing information about the merge conflicts.""" # Note: pty=False to enforce not using a PTY; that makes sure that Git doesn't # see a terminal and put escapes into the output we want to format. logger.info('Create body of pull request message...') - conflict_files_result = ctx.run('git diff --no-color --name-only --diff-filter=U', hide='stdout', pty=False) + files = [conflict.path for conflict in _unresolvable_conflicts(conflicts)] + files_arg = ' '.join(files) commits_on_upstream_result = ctx.run( - 'git log --no-color --no-merges --merge HEAD..MERGE_HEAD --pretty=format:"%h - %s (%cr) <%an>"', + f'git log --no-color --no-merges --merge HEAD..MERGE_HEAD --pretty=format:"%h - %s (%cr) <%an>" -- {files_arg}', hide='stdout', pty=False) - files = conflict_files_result.stdout.strip().replace('\n', ' ') + # Get the paths of only the unresolvable conflicts # Note: 'git diff HEAD...MERGE_HEAD' is a diff of changes on MERGE_HEAD that are not on HEAD. # It's the same as: git diff $(git merge-base HEAD MERGE_HEAD) MERGE_HEAD # See: https://git-scm.com/docs/git-diff for more details - diff_on_upstream_result = ctx.run(f'git diff -U HEAD...MERGE_HEAD -- {files}', hide='stdout', pty=False) + diff_on_upstream_result = ctx.run(f'git diff -U HEAD...MERGE_HEAD -- {files_arg}', hide='stdout', pty=False) commits_local_result = ctx.run( - 'git log --no-color --no-merges --merge MERGE_HEAD..HEAD --pretty=format:"%h - %s (%cr) <%an>"', + f'git log --no-color --no-merges --merge MERGE_HEAD..HEAD --pretty=format:"%h - %s (%cr) <%an>" -- {files_arg}', hide='stdout', pty=False) - diff_on_local_result = ctx.run(f'git diff -U MERGE_HEAD...HEAD -- {files}', hide='stdout', pty=False) + diff_on_local_result = ctx.run(f'git diff -U MERGE_HEAD...HEAD -- {files_arg}', hide='stdout', pty=False) body = textwrap.dedent(''' Merge changes from conan-io/conan-center-index into {local_branch}. 
@@ -497,7 +513,7 @@ def _form_pr_body(ctx, config): ''').format(local_branch=config.upstream.branch, - conflict_files=conflict_files_result.stdout, + conflict_files='\n'.join(files), commits_on_upstream=commits_on_upstream_result.stdout, diff_on_upstream=diff_on_upstream_result.stdout, commits_local=commits_local_result.stdout, From 2d8f733e1c852407bb41193165f97a513f0ad776 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 24 Oct 2022 17:48:24 -0500 Subject: [PATCH 150/173] Add doxygen/1.9.2 - Same configs as doxygen/1.9.1 - We have some projects that use each --- dlproject.yaml | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index 54e67c0e0a1f8..1d2e6a6c5a1b4 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -191,9 +191,13 @@ config: prebuilt_tools: - cmake/[>=3.23.0] - doxygen/1.9.1 + - doxygen/1.9.2 - package: doxygen/1.9.1 options: - doxygen:enable_search=False + - package: doxygen/1.9.2 + options: + - doxygen:enable_search=False - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 @@ -243,9 +247,13 @@ config: prebuilt_tools: - cmake/[>=3.23.0] - doxygen/1.9.1 + - doxygen/1.9.2 - package: doxygen/1.9.1 options: - doxygen:enable_search=False + - package: doxygen/1.9.2 + options: + - doxygen:enable_search=False - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 @@ -314,9 +322,13 @@ config: prebuilt_tools: &redhat6Tools - package: cmake/[>=3.23.0] - package: doxygen/1.9.1 + - package: doxygen/1.9.2 - package: doxygen/1.9.1 options: - doxygen:enable_search=False + - package: doxygen/1.9.2 + options: + - doxygen:enable_search=False - package: ninja/[~1.10.0] - b2/4.8.0 - b2/4.9.2 @@ -365,9 +377,13 @@ config: prebuilt_tools: - cmake/[>=3.23.0] - doxygen/1.9.1 + - doxygen/1.9.2 - package: doxygen/1.9.1 options: - doxygen:enable_search=False + - package: doxygen/1.9.2 + options: + - doxygen:enable_search=False - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 @@ -445,9 +461,16 @@ config: configs: # xapian-core doesn't work for cross-building x86_64 to x86 - ReleaseTool + - package: doxygen/1.9.2 + configs: + # xapian-core doesn't work for cross-building x86_64 to x86 + - ReleaseTool - package: doxygen/1.9.1 options: - doxygen:enable_search=False + - package: doxygen/1.9.2 + options: + - doxygen:enable_search=False - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 @@ -518,6 +541,9 @@ config: - package: doxygen/1.9.1 options: - doxygen:enable_search=False + - package: doxygen/1.9.2 + options: + - doxygen:enable_search=False - package: ninja/[~1.10.0] configs: - ReleaseTool @@ -598,6 +624,9 @@ config: - package: doxygen/1.9.1 options: - doxygen:enable_search=False + - package: doxygen/1.9.2 + options: + - doxygen:enable_search=False - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 From dc2e0796c445bbd87adb48f7d70c5b17fd56a76c Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 24 Oct 2022 20:30:32 -0500 Subject: [PATCH 151/173] Don't build Doxygen 1.9.2 on Solaris 1.9.2 introduced new filesystem code that doesn't build on Solaris until Doxygen version 1.9.5. See the fix at https://github.com/doxygen/doxygen/pull/9244 Doxygen 1.9.1 is sufficient for DLE; 1.9.2 is only used by some OCR packaes, and thus, not on Solaris. 
--- dlproject.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 1d2e6a6c5a1b4..12f7e14c1d2be 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -624,9 +624,6 @@ config: - package: doxygen/1.9.1 options: - doxygen:enable_search=False - - package: doxygen/1.9.2 - options: - - doxygen:enable_search=False - ninja/[~1.10.0] - ninja/[>=1.0.0] - b2/4.8.0 From d0caef84b8385cad2d353a8cb21d1c6937fb69ec Mon Sep 17 00:00:00 2001 From: devauto Date: Sun, 20 Nov 2022 02:34:29 -0600 Subject: [PATCH 152/173] Delete conflicting files that were deleted by DL --- CONTRIBUTING.md | 56 ------------------------------------------------- 1 file changed, 56 deletions(-) delete mode 100644 CONTRIBUTING.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index e3e7f52de2da8..0000000000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,56 +0,0 @@ -# Contributing to Conan Center Index - -The following summarizes the process for contributing to the CCI (Conan Center Index) project. - - -## Contents - - * [Community](#community) - * [Dev-flow & Pull Requests](#dev-flow--pull-requests) - * [Issues](#issues) - -## Community - -ConanCenterIndex is an Open Source MIT licensed project; it is developed by the Conan maintainers and a great community of contributors. - -## Dev-flow & Pull Requests - -CCI follows the ["GitFlow"](https://datasift.github.io/gitflow/IntroducingGitFlow.html) branching model. -Issues are triaged and categorized mainly by type (package request, bug...) and priority (high, medium...) using GitHub -labels. - -To contribute follow the next steps: - -1. Comment in the corresponding issue that you want to contribute the package/fix proposed. If there is no open issue, we strongly suggest - opening one to gather feedback. -2. Make sure to [request access](docs/adding_packages/README.md#request-access) and be aware there is a [CLA](docs/CONTRIBUTOR_LICENSE_AGREEMENT.md). -3. Get setup by following the [Developing Recipes](docs/developing_recipes_locally.md) guide and learn the basic commands. -4. Check the [How To Add Packages](docs/adding_packages/README.md) page for the break down of ConanCenterIndex specific conventions and practices. -5. In your fork create a `package/xxx` branch from the `master` branch and develop - your fix/packages as discussed in previous step. -6. [Submit a pull request](docs/adding_packages/README.md#submitting-a-package) once you are ready. This can be when you - got everything working or even if you need help. Add the text to the issue body (besides other comments): "fixes #IssueNumber" - in the body of the PR, referring to the issue of step 1. - -The Conan Community works hard to review all the pull requests and provided assistance where need. -The [Review Process](docs/review_process.md) is partially automated with the help of @conan-center-index-bot :rocket: - -## Issues - -If you think you found a bug in CCI or in a recipe, open an issue indicating the following: - -- Explain the Conan version, Operating System, compiler and any other tool that could be related to the issue. -- Explain, as detailed as possible, how to reproduce the issue. Use git repository to contain code/recipes to reproduce issues, or a snippet. -- Include the expected behavior as well as what actually happened. -- Provide output captures (as text). -- Feel free to attach a screenshot or video illustrating the issue if you think it will be helpful. 
- -For any suggestion, feature request or question open an issue indicating the following: - -- Questions and support requests are always welcome. -- Use the [question] or [suggestion] tags in the title (provided by github issues templates). -- Try to explain the motivation, what are you trying to do, what is the pain it tries to solve. -- What do you expect from CCI. - -We use the following tags to control the status of the issues and pull requests, you can learn more in [Labels](docs/labels.md) document -which details the important one and their roles. From 838d8a4da099915626ff83f93b214a3e654cc14f Mon Sep 17 00:00:00 2001 From: devauto Date: Sun, 20 Nov 2022 02:34:29 -0600 Subject: [PATCH 153/173] Favor DL changes for files where merge=ours --- .github/PULL_REQUEST_TEMPLATE.md | 17 ++++--- README.md | 81 ++++---------------------------- 2 files changed, 20 insertions(+), 78 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index a999449576ae4..961ab760931ec 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,10 +1,13 @@ -Specify library name and version: **lib/1.0** +- _List changes here_ +- -This is also a good place to share with all of us **why you are submitting this PR** (specially if it is a new addition to ConanCenter): is it a dependency of other libraries you want to package? Are you the author of the library? Thanks! +#### Fulfills JIRA issue [EXAMPLE-1](https://jira.datalogics.com/browse/EXAMPLE-1) ---- +#### Checklist for approving this pull request -- [ ] I've read the [guidelines](https://github.com/conan-io/conan-center-index/blob/master/docs/adding_packages/README.md) for contributing. -- [ ] I've followed the [PEP8](https://www.python.org/dev/peps/pep-0008/) style guides for Python code in the recipes. -- [ ] I've used the [latest](https://github.com/conan-io/conan/releases/latest) Conan client version. -- [ ] I've tried at least one configuration locally with the [conan-center hook](https://github.com/conan-io/hooks.git) activated. +(**PR Author:** amend this with more conditions if necessary)\ +(**PR Reviewer:** ensure all following items are fulfilled before merging) + +- [ ] The **Pull Request Title** has JIRA issue number, a space, and then a short but descriptive summary. +- [ ] **Commit messages** are well formed: [A note about Git commit messages](http://www.tpope.net/node/106) +- [ ] **Automated tests pass**. diff --git a/README.md b/README.md index 516167a9ba258..76568fee4314e 100644 --- a/README.md +++ b/README.md @@ -1,77 +1,16 @@ -


+# conan-center-index -Conan Center Index is the source index of recipes of the [ConanCenter](https://conan.io/center) package repository for [Conan](https://conan.io). +This is the Datalogics fork of +[conan-io/conan-center-index](https://github.com/conan-io/conan-center-index). -This repository includes a Continuous Integration system that will build automatically the Conan packages for the recipes submitted via -[Pull Request](https://github.com/conan-io/conan-center-index/pulls). +It contains curated branches, and Datalogics-local modifications of recipes. -### Add ConanCenter remote +It also has Invoke tasks and CI implementations that: -ConanCenter remote is configured by default in any Conan client installation. If, for any reason, you need to add it manually, just execute: +- Upload recipes to our own repositories on Artifactory. +- Pre-build tools with specific profiles and upload them to Artifactory. -``` -conan remote add conancenter https://center.conan.io -``` +## DL Documentation -### How to consume recipes - -Starting to use recipes from this repository is as easy as running -one simple command after installing Conan: - -``` -conan install name/version@ [-g ] -``` - -Of course, we really encourage you to use a `conanfile.txt` or `conanfile.py` -to list all the requirements or your project and install them all together -(Conan will build a single graph and ensure congruency). - -:warning: It is very important to notice that recipes will evolve over time -and, while they are fixing some issues, they might introduce new features and -improvements, and your project can break if you upgrade them -([How to prevent these breaking changes in my project?](docs/consuming_recipes.md)). - - - -### Documentation - -All the documentation is available in this same repository in the [`docs/` subfolder](docs/README.md). - -This is a list of shortcuts to some interesting topics: - -* :rocket: If you want to learn how to **contribute new recipes**, please read [docs/adding_packages/](docs/adding_packages/README.md). -* :speech_balloon: **FAQ**: most common questions are listed in [docs/faqs.md](docs/faqs.md). -* :warning: The conan-center **hook errors** reported by CCI Bot can be found in the [docs/error_knowledge_base.md](docs/error_knowledge_base.md). -* :hammer_and_wrench: The internal changes related to infrastructure can be checked in [docs/changelog.md](docs/changelog.md). -* :world_map: There are various community lead initiatives which are outlined in [docs/community_resources.md](docs/community_resources.md). -* :magic_wand: To start preparing your recipes for **Conan 2.0**, please check [docs/v2_migration.md](docs/v2_migration.md). - -### Reporting Issues - -You can open issues in the [issue tracker](https://github.com/conan-io/conan-center-index/issues) to: - -* :bug: Report **bugs/issues** in a package: - - Use the `[package]` tag in the title of the issue to help identifying them. - - If you detect any issue or missing feature in a package, for example, a build failure or a recipe that not support a specific configuration. - - Specify the name and version (`zlib/1.2.11`) and any relevant details about the fail configuration: Applied profile, building machine... - -* :bulb: Request a **new library** to be added: - - Use the `[request]` label to search the library in the issue tracker in case the it was already requested. - - If not, use the same `[request]` tag in the title of the issue to help identifying them. 
- - Indicate the name and the version of the library you would like to have in the repository. Also links to the project's website, - source download/repository and in general any relevant information that helps creating a recipe for it. - -* :robot: Report **a failure** in the CI system: - - If you open a Pull Request and get an unexpected error you might comment in the failing PR. - - If the service or repository is down or failing, use the `[service]` tag in the title of a new issue to help identifying them. - -If your issue is not appropriate for a public discussion, please contact us via e-mail at `info@conan.io`. Thanks! - - -### License - -All the Conan recipes in this repository are distributed under the [MIT](LICENSE) license. There -are other files, like patches or examples used to test the packages, that could use different licenses, -for those files specific license and credit must be checked in the file itself. +- [`merge-upstream` task](dl-docs/merge-upstream.md) + - [Automatically Resolved Merge Conflicts](dl-docs/auto-merge-conflict-resolution.md) From ce07db1caa42f80a4e624ecf4ea672b2faca37fd Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 18 Nov 2022 10:06:08 -0600 Subject: [PATCH 154/173] pre-commit: Use flake8 from GitHub - Needed to get this PR to pass CI --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d9ce3da8c7ffb..30c434af64316 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,7 +20,7 @@ repos: - id: double-quote-string-fixer - id: check-docstring-first - id: debug-statements - - repo: https://gitlab.com/pycqa/flake8 + - repo: https://github.com/PyCQA/flake8 rev: 3.9.2 hooks: - id: flake8 From c18ceb4a0afb3d1c3fbec2511a49409ec4c1752c Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 18 Nov 2022 10:26:02 -0600 Subject: [PATCH 155/173] doxygen: update zlib to 1.2.13 to match xapian core - Needed to get CI to run on the merge. - Also needed because we're keeping zlib private in the DL repo. 
--- recipes/doxygen/all/conanfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/recipes/doxygen/all/conanfile.py b/recipes/doxygen/all/conanfile.py index 8b903fcbac59b..319d913d5acc5 100644 --- a/recipes/doxygen/all/conanfile.py +++ b/recipes/doxygen/all/conanfile.py @@ -58,7 +58,7 @@ def configure(self): def requirements(self): if self.options.enable_search: self.requires("xapian-core/1.4.19", private=True) - self.requires("zlib/1.2.12", private=True) + self.requires("zlib/1.2.13", private=True) def build_requirements(self): if self._settings_build.os == "Windows": From 4e5fb55e889e3b87de1f599c8be3cb60144e46c9 Mon Sep 17 00:00:00 2001 From: devauto Date: Mon, 28 Nov 2022 02:37:07 -0600 Subject: [PATCH 156/173] Delete conflicting files that were deleted by DL --- .github/workflows/linter-conan-v2.yml | 143 -------------------------- .github/workflows/linter-yaml.yml | 118 --------------------- 2 files changed, 261 deletions(-) delete mode 100644 .github/workflows/linter-conan-v2.yml delete mode 100644 .github/workflows/linter-yaml.yml diff --git a/.github/workflows/linter-conan-v2.yml b/.github/workflows/linter-conan-v2.yml deleted file mode 100644 index 146c807267d26..0000000000000 --- a/.github/workflows/linter-conan-v2.yml +++ /dev/null @@ -1,143 +0,0 @@ -name: "[linter] Conan v2 migration" - -on: - pull_request: - -env: - PYTHONPATH: ${{github.workspace}} - PYVER: "3.8" - SCORE_THRESHOLD: "9.5" - REQUIREMENTS: "pylint==2.14" - -jobs: - test_linter: - name: Test linter changes (v2 migration) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Get changed files - uses: ./.github/actions/pr_changed_files - id: changed_files - with: - files: | - linter/** - - name: Get Conan v1 version - id: parse_conan_v1_version - if: steps.changed_files.outputs.any_changed == 'true' - uses: mikefarah/yq@master - with: - cmd: yq '.conan.version' '.c3i/config_v1.yml' - - uses: actions/setup-python@v4 - if: steps.changed_files.outputs.any_changed == 'true' - with: - python-version: ${{ env.PYVER }} - - name: Install requirements - if: steps.changed_files.outputs.any_changed == 'true' - run: | - pip install ${{ env.REQUIREMENTS }} conan==${{ steps.parse_conan_v1_version.outputs.result }} - - - name: Execute linter over all recipes in the repository - id: linter_recipes - if: steps.changed_files.outputs.any_changed == 'true' - run: | - pylint --rcfile=linter/pylintrc_recipe recipes/*/*/conanfile.py --output-format=json --output=recipes.json --score=y --fail-under=${{ env.SCORE_THRESHOLD }} - - - name: Execute linter over all test_package/recipes in the repository - id: linter_test_package - if: steps.changed_files.outputs.any_changed == 'true' - run: | - pylint --rcfile=linter/pylintrc_testpackage recipes/*/*/test_*/conanfile.py --ignore-paths="recipes/[^/]*/[^/]*/test_v1[^/]*/conanfile.py" --output-format=json --output=test_package.json --score=y --fail-under=${{ env.SCORE_THRESHOLD }} - - - name: Archive production artifacts - if: steps.changed_files.outputs.any_changed == 'true' && always() - uses: actions/upload-artifact@v3 - with: - name: linter-output - path: | - recipes.json - test_package.json - - - name: Create report (recipes) - if: steps.changed_files.outputs.any_changed == 'true' && steps.linter_recipes.outcome != 'skipped' && always() - run: | - echo '## Linter summary (recipes)' >> $GITHUB_STEP_SUMMARY - jq 'map( select(.type=="error")) | group_by (.message)[] | {message: .[0].message, length: length}' recipes.json > recipes2.json - jq -r '" * 
\(.message): \(.length)"' recipes2.json >> $GITHUB_STEP_SUMMARY - - - name: Create report (test_package) - if: steps.changed_files.outputs.any_changed == 'true' && steps.linter_test_package.outcome != 'skipped' && always() - run: | - echo '## Linter summary (test_package)' >> $GITHUB_STEP_SUMMARY - jq 'map( select(.type=="error")) | group_by (.message)[] | {message: .[0].message, length: length}' test_package.json > test_package2.json - jq -r '" * \(.message): \(.length)"' test_package2.json >> $GITHUB_STEP_SUMMARY - - - name: Create report - if: steps.changed_files.outputs.any_changed == 'true' && always() - run: | - echo '> Note.- Check uploaded artifacts for a full report.' >> $GITHUB_STEP_SUMMARY - - conanfile_recipe: - name: Lint changed conanfile.py (v2 migration) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Get changed files - id: changed-files - uses: ./.github/actions/pr_changed_files - with: - files: | - recipes/*/*/conanfile.py - - name: Get Conan v1 version - id: parse_conan_v1_version - if: steps.changed-files.outputs.any_changed == 'true' - uses: mikefarah/yq@master - with: - cmd: yq '.conan.version' '.c3i/config_v1.yml' - - uses: actions/setup-python@v4 - if: steps.changed-files.outputs.any_changed == 'true' - with: - python-version: ${{ env.PYVER }} - - name: Install dependencies - if: steps.changed-files.outputs.any_changed == 'true' - run: | - pip install ${{ env.REQUIREMENTS }} conan==${{ steps.parse_conan_v1_version.outputs.result }} - - name: Run linter - if: steps.changed-files.outputs.any_changed == 'true' - run: | - echo "::add-matcher::linter/recipe_linter.json" - for file in ${{ steps.changed-files.outputs.all_changed_files }}; do - pylint --rcfile=linter/pylintrc_recipe --output-format=parseable ${file} - done - - conanfile_test_package: - name: Lint changed test_package/conanfile.py (v2 migration) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Get changed files - id: changed-files - uses: ./.github/actions/pr_changed_files - with: - files: | - recipes/*/*/test_*/conanfile.py - - name: Get Conan v1 version - id: parse_conan_v1_version - if: steps.changed-files.outputs.any_changed == 'true' - uses: mikefarah/yq@master - with: - cmd: yq '.conan.version' '.c3i/config_v1.yml' - - uses: actions/setup-python@v4 - if: steps.changed-files.outputs.any_changed == 'true' - with: - python-version: ${{ env.PYVER }} - - name: Install dependencies - if: steps.changed-files.outputs.any_changed == 'true' - run: | - pip install ${{ env.REQUIREMENTS }} conan==${{ steps.parse_conan_v1_version.outputs.result }} - - name: Run linter - if: steps.changed-files.outputs.any_changed == 'true' - run: | - echo "::add-matcher::linter/recipe_linter.json" - for file in ${{ steps.changed-files.outputs.all_changed_files }}; do - pylint --rcfile=linter/pylintrc_testpackage --ignore-paths="recipes/[^/]*/[^/]*/test_v1[^/]*/conanfile.py" --output-format=parseable ${file} - done diff --git a/.github/workflows/linter-yaml.yml b/.github/workflows/linter-yaml.yml deleted file mode 100644 index d7d4050e8071d..0000000000000 --- a/.github/workflows/linter-yaml.yml +++ /dev/null @@ -1,118 +0,0 @@ -name: "[linter] YAML files" - -on: - pull_request: - -env: - PYTHONPATH: ${{github.workspace}} - PYVER: "3.8" - CONFIG_FILES_PATH: "recipes/*/config.yml" - CONANDATA_FILES_PATH: "recipes/*/*/conandata.yml" - -jobs: - test_linter: - # A job to run when the linter changes. 
We want to know in advance how many files will be broken - name: Test linter changes (YAML files) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Get changed files - uses: ./.github/actions/pr_changed_files - id: changed_files - with: - files: | - linter/** - - - uses: actions/setup-python@v4 - if: steps.changed_files.outputs.any_changed == 'true' - with: - python-version: ${{ env.PYVER }} - - - name: Install dependencies - if: steps.changed_files.outputs.any_changed == 'true' - run: pip install yamllint strictyaml argparse - - - name: Run linter (config.yml) - if: steps.changed_files.outputs.any_changed == 'true' && always() - run: | - echo "::add-matcher::linter/yamllint_matcher.json" - yamllint --config-file linter/yamllint_rules.yml -f standard ${{ env.CONFIG_FILES_PATH }} - echo "::remove-matcher owner=yamllint_matcher::" - - - name: Run schema check (config.yml) - if: steps.changed_files.outputs.any_changed == 'true' && always() - run: | - for file in ${{ env.CONFIG_FILES_PATH }}; do - python3 linter/config_yaml_linter.py ${file} - done - - - name: Run linter (conandata.yml) - if: steps.changed_files.outputs.any_changed == 'true' && always() - run: | - echo "::add-matcher::linter/yamllint_matcher.json" - yamllint --config-file linter/yamllint_rules.yml -f standard ${{ env.CONANDATA_FILES_PATH }} - echo "::remove-matcher owner=yamllint_matcher::" - - - name: Run schema check (conandata.yml) - if: steps.changed_files.outputs.any_changed == 'true' && always() - run: | - for file in ${{ env.CONANDATA_FILES_PATH }}; do - python3 linter/conandata_yaml_linter.py ${file} - done - - lint_pr_files: - # Lint files modified in the pull_request - name: Lint changed files (YAML files) - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYVER }} - - - name: Install dependencies - run: pip install yamllint strictyaml argparse - - ## Work on config.yml files - - name: Get changed files (config) - id: changed_files_config - if: always() - uses: ./.github/actions/pr_changed_files - with: - files: | - ${{ env.CONFIG_FILES_PATH }} - - - name: Run linter (config.yml) - if: steps.changed_files_config.outputs.any_changed == 'true' && always() - run: | - echo "::add-matcher::linter/yamllint_matcher.json" - for file in ${{ steps.changed_files_config.outputs.all_changed_files }}; do - yamllint --config-file linter/yamllint_rules.yml -f standard ${file} - done - echo "::remove-matcher owner=yamllint_matcher::" - - for file in ${{ steps.changed_files_conandata.outputs.all_changed_files }}; do - python3 linter/config_yaml_linter.py ${file} - done - - ## Work on conandata.yml files - - name: Get changed files (conandata) - id: changed_files_conandata - if: always() - uses: ./.github/actions/pr_changed_files - with: - files: | - ${{ env.CONANDATA_FILES_PATH }} - - - name: Run linter (conandata.yml) - if: steps.changed_files_conandata.outputs.any_changed == 'true' && always() - run: | - echo "::add-matcher::linter/yamllint_matcher.json" - for file in ${{ steps.changed_files_conandata.outputs.all_changed_files }}; do - yamllint --config-file linter/yamllint_rules.yml -f standard ${file} - done - echo "::remove-matcher owner=yamllint_matcher::" - - for file in ${{ steps.changed_files_conandata.outputs.all_changed_files }}; do - python3 linter/conandata_yaml_linter.py ${file} - done From 0edc37857e00c2745684d8799998e9d60bca48af Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Mon, 28 Nov 2022 00:16:54 -0600 Subject: [PATCH 157/173] Remove GitHub actions and workflows from conan-io - We don't use those at DL - Avoid pre-commit and merge problems --- .github/actions/pr_changed_files/action.yml | 49 --------------------- .github/workflows/marldown-links.yml | 25 ----------- .github/workflows/mlc_config.json | 13 ------ 3 files changed, 87 deletions(-) delete mode 100644 .github/actions/pr_changed_files/action.yml delete mode 100644 .github/workflows/marldown-links.yml delete mode 100644 .github/workflows/mlc_config.json diff --git a/.github/actions/pr_changed_files/action.yml b/.github/actions/pr_changed_files/action.yml deleted file mode 100644 index 4dc73a981748b..0000000000000 --- a/.github/actions/pr_changed_files/action.yml +++ /dev/null @@ -1,49 +0,0 @@ -name: 'Changed files in PR' -description: 'Get all changed files in a Pull Request' -author: 'ericLemanissier' -inputs: - files: - description: "Check for changes using only this list of files (Defaults to the entire repo)" - required: false - default: "" - -outputs: - all_changed_files: - description: List of all copied, modified, and added files. - value: ${{ steps.changed-files.outputs.all_changed_files }} - any_changed: - description: Return true only when any files provided using the files input have changed. - value: ${{ steps.changed-files.outputs.any_changed }} -runs: - using: "composite" - steps: - - uses: actions/setup-python@v4 - with: - python-version: ${{ env.PYVER }} - - name: Get changed files - id: changed-files - shell: python - env: - GITHUB_TOKEN: ${{ github.token }} - run: | - import json - import subprocess - import fnmatch - import os - from pathlib import Path - - patterns = [Path(p).parts for p in '''${{ inputs.files }}'''.splitlines()] - - res = subprocess.run(["gh", "api", "/repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files", "--paginate"], capture_output=True, check=True) - files = [] - for f in json.loads(res.stdout): - filename = Path(f["filename"]).parts - for pattern in patterns: - if len(pattern) != len(filename): - continue - if all(fnmatch.fnmatch(filename[i], pattern[i]) for i in range(len(pattern))): - files.append(f["filename"]) - break - with open(os.getenv("GITHUB_OUTPUT"), "a") as output_file: - output_file.write(f"any_changed={'true' if files else 'false'}\n") - output_file.write(f"all_changed_files={' '.join(files)}\n") diff --git a/.github/workflows/marldown-links.yml b/.github/workflows/marldown-links.yml deleted file mode 100644 index 63b30bcfe82a8..0000000000000 --- a/.github/workflows/marldown-links.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: Check Markdown links - -on: [push, pull_request] - -jobs: - markdown-link-check-push: - if: github.event_name == 'push' && github.repository_owner != 'conan-io' # We do not want to see red in CCI - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: gaurav-nelson/github-action-markdown-link-check@v1 - with: - config-file: .github/workflows/mlc_config.json - - markdown-link-check-pr: - if: github.event_name == 'pull_request' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: gaurav-nelson/github-action-markdown-link-check@v1 - with: - config-file: .github/workflows/mlc_config.json - use-quiet-mode: 'yes' - use-verbose-mode: 'yes' - check-modified-files-only: 'yes' diff --git a/.github/workflows/mlc_config.json b/.github/workflows/mlc_config.json deleted file mode 100644 index d16005bb86641..0000000000000 --- 
a/.github/workflows/mlc_config.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "retryOn429": true, - "retryCount": 5, - "fallbackRetryDelay": "30s", - "httpHeaders": [ - { - "urls": ["https://github.com/", "https://guides.github.com/", "https://help.github.com/", "https://docs.github.com/"], - "headers": { - "Accept-Encoding": "zstd, br, gzip, deflate" - } - } - ] -} From 26e0733556511e3ee291364d81dc2a17a35a5df0 Mon Sep 17 00:00:00 2001 From: SpaceIm <30052553+SpaceIm@users.noreply.github.com> Date: Sat, 3 Dec 2022 09:27:23 +0100 Subject: [PATCH 158/173] fix libdeflate installation for conan >=1.55.0 --- recipes/libdeflate/all/conanfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/recipes/libdeflate/all/conanfile.py b/recipes/libdeflate/all/conanfile.py index 5f72c64aad8c6..47e0d742be3d1 100644 --- a/recipes/libdeflate/all/conanfile.py +++ b/recipes/libdeflate/all/conanfile.py @@ -106,7 +106,7 @@ def _package_make(self): autotools = Autotools(self) with chdir(self, self.source_folder): # Note: not actually an autotools project, is a Makefile project. - autotools.install(args=[f"PREFIX={unix_path(self, self.package_folder)}"]) + autotools.install(args=[f"DESTDIR={unix_path(self, self.package_folder)}", "PREFIX=/"]) rmdir(self, os.path.join(self.package_folder, "bin")) rmdir(self, os.path.join(self.package_folder, "lib", "pkgconfig")) rm(self, "*.a" if self.options.shared else "*.[so|dylib]*", os.path.join(self.package_folder, "lib") ) From 6211b3ed9db2af275ef6e01d0f2e122dce08dd68 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 5 Dec 2022 17:00:08 -0600 Subject: [PATCH 159/173] dlproject.yaml: Build SWIG without pcre using bzip2 - DLE started settings pcre:with_bzip2=False - This ensures that the correct packages are created for the tools, so that the DLE build doesn't try building SWIG during 'conan install' - TODO: Consider whether this should be in dlproject.yaml or in a profile. --- dlproject.yaml | 56 +++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 44 insertions(+), 12 deletions(-) diff --git a/dlproject.yaml b/dlproject.yaml index 12f7e14c1d2be..f7a26a8395c5b 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -202,8 +202,16 @@ config: - ninja/[>=1.0.0] - b2/4.8.0 - b2/4.9.2 - - swig/1.3.40+dl.1 - - swig/4.0.2+dl.2 + # Here, and afterward, building pcre without bzip2, because DLE + # sets that option in its conanfile.py. Currently, the only + # tool being built that uses bzip2 or pcre is SWIG. + # TODO: Should this be in the build-profile-* ? 
+ - package: swig/1.3.40+dl.1 + options: + - pcre:with_bzip2=False + - package: swig/4.0.2+dl.2 + options: + - pcre:with_bzip2=False prebuilt_tools_configs: - ReleaseTool @@ -258,8 +266,12 @@ config: - ninja/[>=1.0.0] - b2/4.8.0 - b2/4.9.2 - - swig/1.3.40+dl.1 - - swig/4.0.2+dl.2 + - package: swig/1.3.40+dl.1 + options: + - pcre:with_bzip2=False + - package: swig/4.0.2+dl.2 + options: + - pcre:with_bzip2=False prebuilt_tools_configs: - ReleaseTool @@ -332,8 +344,12 @@ config: - package: ninja/[~1.10.0] - b2/4.8.0 - b2/4.9.2 - - swig/1.3.40+dl.1 - - swig/4.0.2+dl.2 + - package: swig/1.3.40+dl.1 + options: + - pcre:with_bzip2=False + - package: swig/4.0.2+dl.2 + options: + - pcre:with_bzip2=False prebuilt_tools_configs: # Build 64-bit tools on RHEL 6, because they're used for 64-bit DLE for APDFL 15 - ReleaseTool @@ -388,8 +404,12 @@ config: - ninja/[>=1.0.0] - b2/4.8.0 - b2/4.9.2 - - swig/1.3.40+dl.1 - - swig/4.0.2+dl.2 + - package: swig/1.3.40+dl.1 + options: + - pcre:with_bzip2=False + - package: swig/4.0.2+dl.2 + options: + - pcre:with_bzip2=False prebuilt_tools_configs: - ReleaseTool @@ -475,8 +495,12 @@ config: - ninja/[>=1.0.0] - b2/4.8.0 - b2/4.9.2 - - swig/1.3.40+dl.1 - - swig/4.0.2+dl.2 + - package: swig/1.3.40+dl.1 + options: + - pcre:with_bzip2=False + - package: swig/4.0.2+dl.2 + options: + - pcre:with_bzip2=False - innoextract/1.9.0 prebuilt_tools_configs: - ReleaseTool @@ -557,9 +581,13 @@ config: configs: - ReleaseToolGCC - package: swig/1.3.40+dl.1 + options: + - pcre:with_bzip2=False configs: - ReleaseTool - package: swig/4.0.2+dl.2 + options: + - pcre:with_bzip2=False configs: - ReleaseTool prebuilt_tools_configs: @@ -628,8 +656,12 @@ config: - ninja/[>=1.0.0] - b2/4.8.0 - b2/4.9.2 - - swig/1.3.40+dl.1 - - swig/4.0.2+dl.2 + - package: swig/1.3.40+dl.1 + options: + - pcre:with_bzip2=False + - package: swig/4.0.2+dl.2 + options: + - pcre:with_bzip2=False prebuilt_tools_configs: - ReleaseTool From 1bc1d18ade60b5beeee73b73b9bc404366381fda Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 12 Dec 2022 15:06:08 -0600 Subject: [PATCH 160/173] cmake: Remove the private tag from the openssl requirement - It's trigging a Conan bug with software that uses libxml2, such as PDFAlchemist. - The problem is that the private requirement is skipped, and there's no cpp_info, and that's not checked when doing an internal Conan conversion. 
See: https://github.com/conan-io/conan/issues/10439 --- recipes/cmake/3.x.x/conanfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/recipes/cmake/3.x.x/conanfile.py b/recipes/cmake/3.x.x/conanfile.py index c2365d76d2530..e0d41c4a94794 100644 --- a/recipes/cmake/3.x.x/conanfile.py +++ b/recipes/cmake/3.x.x/conanfile.py @@ -35,7 +35,7 @@ def config_options(self): def requirements(self): if self.options.with_openssl: - self.requires("openssl/1.1.1s", private=True) + self.requires("openssl/1.1.1s") def validate(self): if self.settings.os == "Macos" and self.settings.arch == "x86": From a557f5a41c78152a4c3496efff75b81ab8759e19 Mon Sep 17 00:00:00 2001 From: Elizabeth Date: Wed, 21 Dec 2022 11:10:14 -0600 Subject: [PATCH 161/173] Add cmake 3.24.3 to tool build --- dlproject.yaml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index f7a26a8395c5b..a79bb91f22ba9 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -190,6 +190,7 @@ config: # else the entry can be a dictionary of package name and options prebuilt_tools: - cmake/[>=3.23.0] + - cmake/3.24.3 - doxygen/1.9.1 - doxygen/1.9.2 - package: doxygen/1.9.1 @@ -253,6 +254,7 @@ config: # If the entry is a string, it's taken to be the package name, # else the entry can be a dictionary of package name and options prebuilt_tools: + - cmake/3.24.3 - cmake/[>=3.23.0] - doxygen/1.9.1 - doxygen/1.9.2 @@ -333,6 +335,7 @@ config: - build_type=Debug prebuilt_tools: &redhat6Tools - package: cmake/[>=3.23.0] + - package: cmake/3.24.3 - package: doxygen/1.9.1 - package: doxygen/1.9.2 - package: doxygen/1.9.1 @@ -392,6 +395,7 @@ config: - build_type=Debug prebuilt_tools: - cmake/[>=3.23.0] + - cmake/3.24.3 - doxygen/1.9.1 - doxygen/1.9.2 - package: doxygen/1.9.1 @@ -477,6 +481,7 @@ config: default: *windowsDebug prebuilt_tools: - cmake/[>=3.23.0] + - cmake/3.24.3 - package: doxygen/1.9.1 configs: # xapian-core doesn't work for cross-building x86_64 to x86 @@ -557,6 +562,11 @@ config: include: - Debug32 prebuilt_tools: + - package: cmake/3.24.3 + options: + - cmake:with_openssl=False + configs: + - ReleaseTool - package: cmake/[>=3.23.0] options: - cmake:with_openssl=False @@ -646,6 +656,9 @@ config: settings: - build_type=Debug prebuilt_tools: + - package: cmake/3.24.3 + options: + - cmake:with_openssl=False - package: cmake/[>=3.23.0] options: - cmake:with_openssl=False From 1761ef6ad2246b4377317259d203c2615aa39e64 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Mon, 19 Dec 2022 13:03:10 -0600 Subject: [PATCH 162/173] README.md: Outline for operations and troubleshooting doc - From notes and discussions --- README.md | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/README.md b/README.md index 76568fee4314e..d2189ee16dc1e 100644 --- a/README.md +++ b/README.md @@ -12,5 +12,45 @@ It also has Invoke tasks and CI implementations that: ## DL Documentation +### Configuration and daily operations + +- Using the Curated Conan Center Index Conan repositories + - Building against the staging repository + - Using standard build profiles +- Updating a recipe + - Adding a new version of a package + - In conjunction with a contribution to `conan-io/conan-center-index` + - At DL only + - Datalogics-only modifications to recipes +- Specifying automatic builds of tools + - Configurations for tools + - Standard build profiles + - Using specific compilers + - Specifying which tools to build + - Using a dictionary + - Limiting which tool configs to use + - Specifying options for building the tool + - Using version ranges +- Jenkins jobs + - Nightly tool builds + - Requesting a full build + - Building individual tools + - Recipe uploads + - Merges from `conan-io/conan-center-index` to `develop` + - Controlling the interval of automated merges + - Requesting a merge + - Merging `develop` to `master` to put recipes into production + +### Troubleshooting + +- Analyzing build failures +- Using pytest to run the tools builders +- Resolving merge conflicts from the upstream repo +- Requesting a full build of the tools and their requirements +- Requesting a full (non-incremental) recipe upload + +### Reference + - [`merge-upstream` task](dl-docs/merge-upstream.md) - [Automatically Resolved Merge Conflicts](dl-docs/auto-merge-conflict-resolution.md) +- `merge-staging-to-production` task From af9e5d0cac9050b09dcfb1b8447af30f525deacd Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 19 Dec 2022 13:08:46 -0600 Subject: [PATCH 163/173] Fix example issue link in pull request template --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 961ab760931ec..f80dbd21a29d8 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,7 +1,7 @@ - _List changes here_ - -#### Fulfills JIRA issue [EXAMPLE-1](https://jira.datalogics.com/browse/EXAMPLE-1) +#### Fulfills JIRA issue [EXAMPLE-1](https://datalogics-jira.atlassian.net/browse/EXAMPLE-1) #### Checklist for approving this pull request From 181fa375b6918b5b40d880ed6e3e93673386533f Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Fri, 6 Jan 2023 15:42:26 -0600 Subject: [PATCH 164/173] Allow empty keys in dlproject.yaml for merging tasks - These empty keys will read as None, rather than an empty dict. - They'll also not trigger the default empty dict that would be used if they're missing. - Check for None, and in that case substitute an empty dict. This fixes the problem with merge-staging-to-production getting a key error. 
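A minimal sketch of the behavior being fixed (assuming PyYAML, which is already used to read dlproject.yaml):

```python
# Illustration only: an empty key parses as None, so dict.get()'s default
# never applies, and dacite would later receive None instead of a mapping.
import yaml

dlproject = yaml.safe_load('merge_staging_to_production:\n')
config_data = dlproject.get('merge_staging_to_production', {})
print(config_data)  # None -- the key exists, so the {} default is not used

# The fix in create_from_dlproject(): substitute an empty dict explicitly.
if config_data is None:
    config_data = {}
```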
--- tasks/merging.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tasks/merging.py b/tasks/merging.py index ec054eb525113..1e24f7d60599f 100644 --- a/tasks/merging.py +++ b/tasks/merging.py @@ -121,6 +121,10 @@ def create_from_dlproject(cls): with open('dlproject.yaml', encoding='utf-8') as dlproject_file: dlproject = yaml.safe_load(dlproject_file) config_data = dlproject.get(cls.yaml_key, {}) + # If dlproject.yaml has an empty key, then the config_data will be None, + # and it won't get replaced by the empty dict. Check for that. + if config_data is None: + config_data = {} try: return dacite.from_dict(data_class=cls, data=config_data, From 902949ef9c71af3edb5154ca4cacd7c8187dc70d Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 19 Dec 2022 16:30:37 -0600 Subject: [PATCH 165/173] Document the merge-staging-to-production task. --- README.md | 2 +- dl-docs/merge-staging-to-production.md | 62 ++++++++++++++++++++++++++ 2 files changed, 63 insertions(+), 1 deletion(-) create mode 100644 dl-docs/merge-staging-to-production.md diff --git a/README.md b/README.md index d2189ee16dc1e..7be5c5354abe4 100644 --- a/README.md +++ b/README.md @@ -53,4 +53,4 @@ It also has Invoke tasks and CI implementations that: - [`merge-upstream` task](dl-docs/merge-upstream.md) - [Automatically Resolved Merge Conflicts](dl-docs/auto-merge-conflict-resolution.md) -- `merge-staging-to-production` task +- [`merge-staging-to-production` task](dl-docs/merge-staging-to-production.md) diff --git a/dl-docs/merge-staging-to-production.md b/dl-docs/merge-staging-to-production.md new file mode 100644 index 0000000000000..9f7d908f7032a --- /dev/null +++ b/dl-docs/merge-staging-to-production.md @@ -0,0 +1,62 @@ +# `merge-staging-to-production` task + +The `invoke merge-upstream` task fetches the latest `master` branch from the +[`conan-io/conan-center-index`](https://github.com/conan-io/conan-center-index) +repository, and merges it into the `develop` branch. + + + +- [Operation](#operation) +- [Configuration](#configuration) +- [When it runs](#when-it-runs) + + + +## Operation + +1. Check out the `master` branch from `datalogics/conan-center-index`. +2. Fetch the `develop` branch from `datalogics/conan-center-index`. +3. If there are any new changes on the `develop` branch, merge it into the + `master` branch. +4. If there was a successful merge, push the `master` back up to the + `datalogics/conan-center-index` repo. + +Since there are no contributions to the `master` branch that doesn't come from +the `develop` branch, any merge conflicts are unexpected. + +## Configuration + +The configuration is controlled by the `merge-staging-to-production` key in +`dlproject.yaml`. Any unspecified values will get the following defaults. + +The defaults are: + +```yaml +merge_staging_to_production: + host: octocat.dlogics.com + organization: datalogics + staging_branch: develop + production_branch: master +``` + +One use of this would be to use a personal fork for testing, to avoid polluting +the Datalogics organization repo: + +```yaml +merge_staging_to_production: + # Temporary overrides + organization: kam +``` + +## When it runs + +A merge of staging to production is a manual task done only after proving that +the current staging repo (based on the `develop` branch) will build a set of +important projects. + +- By going to the `master` branch of `conan-center-index` on Jenkins and doing a + **Build with parameters**, and clicking the **MERGE_STAGING_TO_PRODUCTION** + parameter. 
+- By invoking `invoke merge-staging-to-production` from the command line. This + should only be done when developing and testing; for everyday use, request the + merge via Jenkins. From 90dac7e7e8b25752a4b54ef4e1c21cf9d3b3d438 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Thu, 22 Dec 2022 18:51:28 -0600 Subject: [PATCH 166/173] Add doc for the tool builder --- README.md | 25 ++-- dl-docs/automatic-tool-builds.md | 215 +++++++++++++++++++++++++++++++ 2 files changed, 230 insertions(+), 10 deletions(-) create mode 100644 dl-docs/automatic-tool-builds.md diff --git a/README.md b/README.md index 7be5c5354abe4..23528c4f6cb93 100644 --- a/README.md +++ b/README.md @@ -22,20 +22,25 @@ It also has Invoke tasks and CI implementations that: - In conjunction with a contribution to `conan-io/conan-center-index` - At DL only - Datalogics-only modifications to recipes -- Specifying automatic builds of tools - - Configurations for tools - - Standard build profiles - - Using specific compilers - - Specifying which tools to build - - Using a dictionary - - Limiting which tool configs to use - - Specifying options for building the tool - - Using version ranges +- [Specifying automatic builds of tools](dl-docs/automatic-tool-builds.md) + - [Configurations for tools](dl-docs/automatic-tool-builds.md#configurations-for-tools) + - [Standard build profiles](dl-docs/automatic-tool-builds.md#standard-build-profiles) + - [Using specific compilers](dl-docs/automatic-tool-builds.md#using-specific-compilers) + - [Specifying which tools to build](dl-docs/automatic-tool-builds.md#specifying-which-tools-to-build) + - [Using a dictionary](dl-docs/automatic-tool-builds.md#using-a-dictionary) + - [Limiting which tool configs to use](dl-docs/automatic-tool-builds.md#limiting-which-tool-configs-to-use) + - [Specifying options for building the tool](dl-docs/automatic-tool-builds.md#specifying-options-for-building-the-tool) + - [Using version ranges](dl-docs/automatic-tool-builds.md#using-version-ranges) + - [Configurations for tools](dl-docs/automatic-tool-builds.md#configurations-for-tools) + - [Standard build profiles](dl-docs/automatic-tool-builds.md#standard-build-profiles) + - [Using specific compilers](dl-docs/automatic-tool-builds.md#using-specific-compilers) + - [Using version ranges](dl-docs/automatic-tool-builds.md#using-version-ranges) - Jenkins jobs + - Recipe uploads + - Forcing an upload of all recipes - Nightly tool builds - Requesting a full build - Building individual tools - - Recipe uploads - Merges from `conan-io/conan-center-index` to `develop` - Controlling the interval of automated merges - Requesting a merge diff --git a/dl-docs/automatic-tool-builds.md b/dl-docs/automatic-tool-builds.md new file mode 100644 index 0000000000000..1999290163937 --- /dev/null +++ b/dl-docs/automatic-tool-builds.md @@ -0,0 +1,215 @@ +# Specifying automatic builds of tools + + + +- [Configurations for tools](#configurations-for-tools) + - [Standard build profiles](#standard-build-profiles) + - [Using specific compilers](#using-specific-compilers) +- [Specifying which tools to build](#specifying-which-tools-to-build) + - [Using a dictionary](#using-a-dictionary) + - [Limiting which tool configs to use](#limiting-which-tool-configs-to-use) + - [Specifying options for building the tool](#specifying-options-for-building-the-tool) + - [Using version ranges](#using-version-ranges) + + + +You can specify that tools will be built on the various platforms. 
To do this, +add two dictionary keys to each platform section in `dlproject.yaml`: + +- `prebuilt_tools` specifies a list of tools to build, by Conan reference +- `prebuilt_tools_configs` specifies a list of configs to use to build the + tools, from the `configs` key in the platform. + +The `build_tools` test in pytest will build the _cartesian product_ of all the +items in these two lists, in other words, by default, it will build each tool +with each config. This product can be pared down by limiting which configs are +used for which tools, see below. + +Because the tools are built with pytest, the results of building the tools will +appear in the individual build page. Detailed information is available in the +HTML results in the Build Artifacts. These HTML results contain pass/fail +information, and each entry can be expanded to show the detailed log. + +See also: [Nightly tool builds](jenkins-jobs.md#nightly-tool-builds) + +## Configurations for tools + +In the `config` section for the platform, create a config named `ReleaseTool`. +(You may see that we have configs for `DebugTool` as well, but currently the +project only builds release versions of tools). In this section, put in a host +profile, as well as a build folder, description, and request to build missing +packages. + +The host profile for a tool should be set to one of the standard build profiles +from the list in the next section. + +Example: + +```yaml + macos-x86_64: + common: &macOSCommon + build: + - missing + config: + # ...other configs... + ReleaseTool: &macOSReleaseTool + <<: *macOSCommon + build_folder: build-release + description: macOS Release + profile_host: + - build-profile-macos-intel +``` + +Note there that a YAML reference is used to include the `macOSCommon` section +into every config on macOS; this is just a way of saving typing. + +From the standpoint of a project that uses split build/host profiles, the build +profile would be used for tools, but when building the tools _directly_, the +build profile is also used as the host profile. + +The configs don't specify a build profile yet because not all Conan Center +packages are able to be built with split build and host files yet. + +### Standard build profiles + +The following standard build profiles are in the `curated-conan-center-index` +branch of the `conan-config` repository. They should be used as the build +profiles for projects that consume Conan packages, and they should be used for +building tools in this project: + +- `build-profile-aix-ppc` +- `build-profile-aix-ppc-gcc` +- `build-profile-linux-arm` +- `build-profile-linux-intel` +- `build-profile-macos-arm` +- `build-profile-macos-intel` +- `build-profile-solaris-sparc` +- `build-profile-solaris-sparc-32` +- `build-profile-windows-intel` + +### Using specific compilers + +If `dlproject.yaml` uses the dictionary form of a `prebuilt_tools` entry, then +the configs to use can be specified on a per-tool basis. _Each config that in +use must also be in `prebuilt_tools_configs`._ + +On AIX, in particular, not all the tools can be built with the same compiler. 
To +get around this, there are two tool profiles: + +```yaml + aix: + common: &aixCommon + build: + - missing + config: + ReleaseTool: + <<: *aixCommon + build_folder: build-release-tool + description: AIX Release + profile_host: build-profile-aix-ppc + ReleaseToolGCC: + <<: *aixCommon + build_folder: build-release-tool + description: AIX Release Tool with GCC + profile_host: build-profile-aix-ppc-gcc +``` + +...and both profiles are in the `prebuilt_tools_configs` list: + +```yaml + prebuilt_tools_configs: + - ReleaseTool + - ReleaseToolGCC +``` + +Individual tools specify which of the tool configs to use. + +```yaml + prebuilt_tools: + - package: b2/4.9.2 + configs: + - ReleaseToolGCC + - package: swig/1.3.40+dl.1 + options: + - pcre:with_bzip2=False + configs: + - ReleaseTool +``` + +**Note:** Jenkins does not currently run the AIX builds for this project, +because work on AIX had been suspended. If work on AIX starts again, CI should +be turned back on, and the tool builds checked to ensure they work. + +## Specifying which tools to build + +The tools to build are specified in a list under `prebuilt_tools`. Entries in +this list can either be a string, or a dictionary. A string uses the configs in +`prebuilt_tools_configs`, and default options. + +Example of using a string: + +```yaml + prebuilt_tools: + - b2/4.9.2 +``` + +### Using a dictionary + +Using a dictionary for a `prebuilt_tools` entry allows more configuration. The +fields in the dictionary are: + +- `package`: the package ref +- `options`: a list of key=value option strings that are passed to the + `conan create` command; these should be options from the tool's Conanfile, or + `package:key=value` may be used to specify an option for one of the + requirements of the tool. +- `configs`: a list of configs to build for the particular tool in question. Any + config in this list must _also_ be in `prebuilt_tools_configs`. + +#### Limiting which tool configs to use + +Specify a list of configs, for instance, to build `b2` with gcc: + +```yaml + prebuilt_tools: + - package: b2/4.9.2 + configs: + - ReleaseToolGCC +``` + +#### Specifying options for building the tool + +Specify a list of options, for instance to build Doxygen with search turned off: + +```yaml + prebuilt_tools: + - package: doxygen/1.9.1 + options: + - doxygen:enable_search=False +``` + +Or, to build SWIG but telling `pcre` to not use `bzip2`: + +```yaml + prebuilt_tools: + - package: swig/4.0.2+dl.2 + options: + - pcre:with_bzip2=False +``` + +### Using version ranges + +Any tool reference can be specified with a version range. The version range will +be resolved to the _latest_ available version that satisfies the version range. + +For instance, the `build_tools` profile in the `curated-conan-center-index` +branch of `conan-config` specifies the latest CMake >= 3.23.0. To ensure that +the latest CMake after 3.23 is built, this `dlproject.yaml` has entries like: + +```yaml + prebuilt_tools: + - cmake/[>=3.23.0] +``` + +Although the tool builder will only build the latest version that matches the +range, previous versions are still left in the Conan repository on Artifactory. From 120c0f96db1038f0ba046ce0e450c012b7936c30 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Thu, 22 Dec 2022 18:51:46 -0600 Subject: [PATCH 167/173] dlproject.yaml: Fix missing cross-reference to build missing --- dlproject.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dlproject.yaml b/dlproject.yaml index a79bb91f22ba9..619d9ea558e32 100644 --- a/dlproject.yaml +++ b/dlproject.yaml @@ -543,6 +543,8 @@ config: description: AIX Release profile_host: build-profile-aix-ppc ReleaseToolGCC: + <<: *aixCommon + build_folder: build-release-tool build_folder: build-release-tool description: AIX Release Tool with GCC profile_host: build-profile-aix-ppc-gcc From 418a272f816ee2cc5010709585a854e8dfbd2721 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Wed, 4 Jan 2023 14:43:54 -0600 Subject: [PATCH 168/173] Add the main doc on how to use CCCI --- README.md | 6 +- dl-docs/using-the-ccci-repositories.md | 95 ++++++++++++++++++++++++++ 2 files changed, 98 insertions(+), 3 deletions(-) create mode 100644 dl-docs/using-the-ccci-repositories.md diff --git a/README.md b/README.md index 23528c4f6cb93..10fbc2b488d73 100644 --- a/README.md +++ b/README.md @@ -14,9 +14,9 @@ It also has Invoke tasks and CI implementations that: ### Configuration and daily operations -- Using the Curated Conan Center Index Conan repositories - - Building against the staging repository - - Using standard build profiles +- [Using the Curated Conan Center Index Conan repositories](dl-docs/using-the-ccci-repositories.md) + - [Building against the staging repository](dl-docs/using-the-ccci-repositories.md#building-against-the-staging-repository) + - [Using standard build profiles](dl-docs/using-the-ccci-repositories.md#using-standard-build-profiles) - Updating a recipe - Adding a new version of a package - In conjunction with a contribution to `conan-io/conan-center-index` diff --git a/dl-docs/using-the-ccci-repositories.md b/dl-docs/using-the-ccci-repositories.md new file mode 100644 index 0000000000000..7f33187aa10c8 --- /dev/null +++ b/dl-docs/using-the-ccci-repositories.md @@ -0,0 +1,95 @@ +# Using the Curated Conan Center Index Conan repositories + + + +- [Building against the staging repository](#building-against-the-staging-repository) +- [Using standard build profiles](#using-standard-build-profiles) + + + +To use the Curated Conan Center Index Conan repositories, add the following +repositories to Conan, and disable the one not currently in use: + +```text +conan-center-dl: http://artifactory.dlogics.com:8081/artifactory/api/conan/conan-center-dl [Verify SSL: True, Disabled: True] +conan-center-dl-staging: http://artifactory.dlogics.com:8081/artifactory/api/conan/conan-center-dl-staging [Verify SSL: True] +``` + +The best way to do this is to add the repositories to `remotes.txt` in a Conan +configuration repo, which is already done in the `curated-conan-center-index` +branch of the `conan-config` Git repository: + +```text +conan-local http://artifactory.dlogics.com:8081/artifactory/api/conan/conan-local True +conan-alias-production http://artifactory.dlogics.com:8081/artifactory/api/conan/conan-alias-production True +conan-alias-staging http://artifactory.dlogics.com:8081/artifactory/api/conan/conan-alias-staging True +conan-center-dl http://artifactory.dlogics.com:8081/artifactory/api/conan/conan-center-dl True +conan-center-dl-staging http://artifactory.dlogics.com:8081/artifactory/api/conan/conan-center-dl-staging True +``` + +Add the `curated-conan-center-index` to your `dlconfig.yaml` by adding +`--branch curated-conan-center-index` to the `config_args` key: + +```yaml 
+config: + # Basic configuration variables + global: + # Base configurations, may be overridden by platform + + # Conan configuration. `conan config install` installs configuration file from this URL. + # See: https://docs.conan.io/en/latest/reference/commands/consumer/config.html#conan-config-install + # This is usually a pointer to a Git repo, from which it clones the default branch + config_url: git@octocat.dlogics.com:datalogics/conan-config.git + config_args: --branch curated-conan-center-index +``` + +dl-conan-build-tools will use `curated-conan-center-index` by default once the +Curated Conan Center Index goes live. + +The [`invoke conan.install-config`][install-config] task in +`dl-conan-build-tools` will figure out if you're using the production repository +(the default) or the staging repository (see below), and disable the repository +that is not in use. To ensure consistency, it will also remove any packages from +the local cache that were from a now-missing-or-disabled remote. +`conan.install-config` is called from `conan.login` and `bootstrap`. + +## Building against the staging repository + +The staging repository corresponds to the `develop` branch of the Curated Conan +Center Index. It is the first repository to receive new changes: + +- Packages created or updated at Datalogics should be merged to `develop`. +- The `merge-upstream` task runs automatically in Jenkins to bring in changes + from https://github.com/conan-io/conan-center-index. + +The easiest way to build against the staging repository is to define the +environment variable `DL_CONAN_CENTER_INDEX=staging`. This can be done in your +command shell, or in an environment variable in a `Jenkinsfile`, perhaps +controlled by a parameter. + +## Using standard build profiles + +The `curated-conan-center-index` branch of `conan-config` introduces some +standard build profiles. Build profiles are used to obtain Conan packages that +are in the build environment: these packages are usually tools like CMake or +Doxygen. The Curated Conan Center Index repositories contain +[pre-built versions of tools](automatic-tool-builds.md) that use these profiles. + +The standard build profiles are named after operating system and architecture. + +It is recommended that projects that use split build/host profiles use the +standard build profiles. + +The profiles are: + +- `build-profile-aix-ppc` +- `build-profile-aix-ppc-gcc` +- `build-profile-linux-arm` +- `build-profile-linux-intel` +- `build-profile-macos-arm` +- `build-profile-macos-intel` +- `build-profile-solaris-sparc` +- `build-profile-solaris-sparc-32` +- `build-profile-windows-intel` + +[install-config]: https://octocat.dlogics.com/pages/datalogics/dl-conan-build-tools/conan_install_config.html From 63227a446b27035f59d9a6322fa675b40dfa5f68 Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Fri, 6 Jan 2023 12:32:07 -0600 Subject: [PATCH 169/173] Add documentation for updating a recipe --- README.md | 11 ++--- dl-docs/updating-a-recipe.md | 78 ++++++++++++++++++++++++++++++++++++ 2 files changed, 84 insertions(+), 5 deletions(-) create mode 100644 dl-docs/updating-a-recipe.md diff --git a/README.md b/README.md index 10fbc2b488d73..a1b021ef65e76 100644 --- a/README.md +++ b/README.md @@ -17,11 +17,12 @@ It also has Invoke tasks and CI implementations that: - [Using the Curated Conan Center Index Conan repositories](dl-docs/using-the-ccci-repositories.md) - [Building against the staging repository](dl-docs/using-the-ccci-repositories.md#building-against-the-staging-repository) - [Using standard build profiles](dl-docs/using-the-ccci-repositories.md#using-standard-build-profiles) -- Updating a recipe - - Adding a new version of a package - - In conjunction with a contribution to `conan-io/conan-center-index` - - At DL only - - Datalogics-only modifications to recipes +- [Updating a recipe](dl-docs/updating-a-recipe.md) + - [Adding a new revision of a recipe](dl-docs/updating-a-recipe.md#adding-a-new-revision-of-a-recipe) + - [In conjunction with a contribution to `conan-io/conan-center-index`](dl-docs/updating-a-recipe.md#in-conjunction-with-a-contribution-to-conan-ioconan-center-index) + - [At DL only](dl-docs/updating-a-recipe.md#at-dl-only) + - [Testing the updated recipe with DL projects](dl-docs/updating-a-recipe.md#testing-the-updated-recipe-with-dl-projects) + - [Bringing updates to production](dl-docs/updating-a-recipe.md#bringing-updates-to-production) - [Specifying automatic builds of tools](dl-docs/automatic-tool-builds.md) - [Configurations for tools](dl-docs/automatic-tool-builds.md#configurations-for-tools) - [Standard build profiles](dl-docs/automatic-tool-builds.md#standard-build-profiles) diff --git a/dl-docs/updating-a-recipe.md b/dl-docs/updating-a-recipe.md new file mode 100644 index 0000000000000..1845058b4b990 --- /dev/null +++ b/dl-docs/updating-a-recipe.md @@ -0,0 +1,78 @@ +# Updating a recipe + + + +- [Adding a new revision of a recipe](#adding-a-new-revision-of-a-recipe) + - [In conjunction with a contribution to `conan-io/conan-center-index`](#in-conjunction-with-a-contribution-to-conan-ioconan-center-index) + - [At DL only](#at-dl-only) +- [Testing the updated recipe with DL projects](#testing-the-updated-recipe-with-dl-projects) +- [Bringing updates to production](#bringing-updates-to-production) + + + +This document will cover updating a recipe, but the same steps apply to making a +new recipe; it just starts from nothing rather than modifying a recipe that's +there. + +## Adding a new revision of a recipe + +### In conjunction with a contribution to `conan-io/conan-center-index` + +You can do this procedure work with packages you are contributing upstream, or +to get changes that others are making before they get fully approved by the +conan-io team. + +This general procedure is the same whether making fixes to a recipe, or adding a +new version of a software package. If you're adding a new version of the +software package, see how versions are specified in the +[recipe files structure](https://github.com/conan-io/conan-center-index/tree/master/docs/adding_packages#recipe-files-structure) +at conan-io. + +1. 
First, make the changes in a fork of the
+   [conan-io/conan-center-index](https://github.com/conan-io/conan-center-index)
+   repository, according to the requirements of the
+   [CONTRIBUTING.md](https://github.com/conan-io/conan-center-index/blob/master/CONTRIBUTING.md)
+   document there. You'll need to use a fork on github.com for this.
+2. Create a pull request at `conan-io/conan-center-index`.
+3. Create a branch from `upstream/develop` (where `upstream` is the
+   `datalogics/conan-center-index` repository on Octocat).
+4. Get the HTTPS URL of the fork with the changes. From the PR on github.com,
+   you can click on the "from" branch in the PR header to get to the repository.
+   Then click on **Code**, choose **HTTPS**, and click the button to the right
+   of the URL to copy the URL.
+5. Do a `git pull --no-ff <HTTPS URL of the fork> <branch>`. A
+   remote is really shorthand for a URL of a remote repository. It's not
+   necessary to create a remote to pull a branch from a remote repository.
+6. Push the changes up to your `conan-center-index` fork on _Octocat_.
+7. Open a PR on Octocat against `develop`.
+
+An example of this procedure can be found in the pull request
+[datalogics/conan-center-index#32](https://octocat.dlogics.com/datalogics/conan-center-index/pull/32).
+That pull request had changes to libdeflate that we needed to bring in early.
+
+The `git pull` command in the case of datalogics/conan-center-index#32 was:
+
+```bash
+git pull --no-ff https://github.com/SpaceIm/conan-center-index.git fix/libdeflate-install
+```
+
+### At DL only
+
+1. Check out your fork of `conan-center-index` on Octocat.
+2. Create a branch based on `upstream/develop`.
+3. Make changes to a recipe as needed.
+4. Push the changes up to Octocat.
+5. Open a PR on Octocat against `develop`.
+
+## Testing the updated recipe with DL projects
+
+Before bringing the updates into production, test with existing DL projects. To
+do this, create a draft PR in one or more projects, and alter the `Jenkinsfile`
+to set the environment variable `DL_CONAN_CENTER_INDEX=staging`. Also see the
+[documentation](using-the-ccci-repositories.md#building-against-the-staging-repository)
+on using the staging repository.
+
+## Bringing updates to production
+
+To make an update available to production,
+[run the appropriate Jenkins job to merge staging to production](jenkins-jobs.md#merging-develop-to-master-to-put-recipes-into-production).

From 380e0f432481718a136537625f13d11d62eedfc Mon Sep 17 00:00:00 2001
From: "Kevin A.
Mitchell" Date: Fri, 6 Jan 2023 12:35:02 -0600 Subject: [PATCH 170/173] Add documentation about the Jenkins jobs --- README.md | 22 ++--- dl-docs/jenkins-jobs.md | 193 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 205 insertions(+), 10 deletions(-) create mode 100644 dl-docs/jenkins-jobs.md diff --git a/README.md b/README.md index a1b021ef65e76..5be131f672645 100644 --- a/README.md +++ b/README.md @@ -36,16 +36,18 @@ It also has Invoke tasks and CI implementations that: - [Standard build profiles](dl-docs/automatic-tool-builds.md#standard-build-profiles) - [Using specific compilers](dl-docs/automatic-tool-builds.md#using-specific-compilers) - [Using version ranges](dl-docs/automatic-tool-builds.md#using-version-ranges) -- Jenkins jobs - - Recipe uploads - - Forcing an upload of all recipes - - Nightly tool builds - - Requesting a full build - - Building individual tools - - Merges from `conan-io/conan-center-index` to `develop` - - Controlling the interval of automated merges - - Requesting a merge - - Merging `develop` to `master` to put recipes into production +- [Jenkins jobs](dl-docs/jenkins-jobs.md) + - [Recipe uploads](dl-docs/jenkins-jobs.md#recipe-uploads) + - [Forcing an upload of all recipes](dl-docs/jenkins-jobs.md#forcing-an-upload-of-all-recipes) + - [Nightly tool builds](dl-docs/jenkins-jobs.md#nightly-tool-builds) + - [Requesting a full rebuild](dl-docs/jenkins-jobs.md#requesting-a-full-rebuild) + - [Building individual tools](dl-docs/jenkins-jobs.md#building-individual-tools) + - [Merges from `conan-io/conan-center-index` to `develop`](dl-docs/jenkins-jobs.md#merges-from-conan-ioconan-center-index-to-develop) + - [Controlling the interval of automated merges](dl-docs/jenkins-jobs.md#controlling-the-interval-of-automated-merges) + - [Requesting a merge](dl-docs/jenkins-jobs.md#requesting-a-merge) + - [Merging `develop` to `master` to put recipes into production](dl-docs/jenkins-jobs.md#merging-develop-to-master-to-put-recipes-into-production) + - [Criteria](dl-docs/jenkins-jobs.md#criteria) + - [Performing the merge](dl-docs/jenkins-jobs.md#performing-the-merge) ### Troubleshooting diff --git a/dl-docs/jenkins-jobs.md b/dl-docs/jenkins-jobs.md new file mode 100644 index 0000000000000..2e6dc8e05f559 --- /dev/null +++ b/dl-docs/jenkins-jobs.md @@ -0,0 +1,193 @@ +# Jenkins jobs + + + +- [Recipe uploads](#recipe-uploads) + - [Forcing an upload of all recipes](#forcing-an-upload-of-all-recipes) +- [Nightly tool builds](#nightly-tool-builds) + - [Requesting a full rebuild](#requesting-a-full-rebuild) + - [Building individual tools](#building-individual-tools) +- [Merges from `conan-io/conan-center-index` to `develop`](#merges-from-conan-ioconan-center-index-to-develop) + - [Controlling the interval of automated merges](#controlling-the-interval-of-automated-merges) + - [Requesting a merge](#requesting-a-merge) +- [Merging `develop` to `master` to put recipes into production](#merging-develop-to-master-to-put-recipes-into-production) + - [Criteria](#criteria) + - [Performing the merge](#performing-the-merge) + + + +## Recipe uploads + +Recipe uploads are done automatically on the `develop` and `master` branches +whenever the branch builds. + +As an optimization, recipes are uploaded incrementally. The Jenkins job looks +for any recipe changes added to the branch in question: + +1. ...since before the most recent merge commit of any kind to the branch. +2. 
...in the last _two_ merges that ultimately came from `conan-io/master`, in + other words, two updates worth of changes from `conan-io`. + +For each package, every recipe version is exported and then the recipes are +uploaded to the appropriate repo: + +| Branch | Destination repo | +| --------- | ------------------------- | +| `develop` | `conan-center-dl-staging` | +| `master` | `conan-center-dl` | + +Conan will only upload the recipes if they've changed, based on a checksum of +the recipe. This saves time, and avoids removing packages that have been built +and cached. + +### Forcing an upload of all recipes + +If there is a lack of confidence in the incremental upload, it's possible to run +a job to upload _all_ the recipes. Even though recipes are generated and +uploaded in parallel, this could take more than 20 minutes, which is why the +optimization is normally used. + +To force an upload of all recipes: + +1. Go to the build page for the branch in question: + [`develop`](http://kepler.dlogics.com:8080/job/Datalogics/job/conan-center-index/job/develop/build) + or + [`master`](http://kepler.dlogics.com:8080/job/Datalogics/job/conan-center-index/job/master/build). +2. Check the **UPLOAD_ALL_RECIPES** checkbox. +3. Click the **Build** button. + +## Nightly tool builds + +Every night, Jenkins runs pytest to do a build of the tools. The results of the +tool builds appear in the Jenkins run page, and in HTML files that are collected +as build artifacts. + +The tests build the tools incrementally using `conan create`. If there were no +changes to cause the tool to build, the test package is built and run, verifying +that the cached tool at least runs. + +If a tool build fails, check the HTML results. The full log of the build attempt +is in the HTML result file, and is segregated for each tool. + +### Requesting a full rebuild + +You can force a full rebuild of the tools. To do this: + +1. Go to the build page for the branch in question: + [`develop`](http://kepler.dlogics.com:8080/job/Datalogics/job/conan-center-index/job/develop/build) + or + [`master`](http://kepler.dlogics.com:8080/job/Datalogics/job/conan-center-index/job/master/build). +2. If you want to just build the tools, check the **FORCE_TOOL_BUILD** checkbox. +3. If you want to build the tools and also force the tools' _requirements_ to be + built, check the **FORCE_TOOL_BUILD_WITH_REQUIREMENTS** checkbox. **Note:** + This can take quite a while, as _all_ the requirements will be built for + _each_ tool. +4. Click the **Build** button. + +### Building individual tools + +You can also force an individual tool to be rebuilt. + +1. Go to the build page for the branch in question: + [`develop`](http://kepler.dlogics.com:8080/job/Datalogics/job/conan-center-index/job/develop/build) + or + [`master`](http://kepler.dlogics.com:8080/job/Datalogics/job/conan-center-index/job/master/build). +2. In the **PYTEST_OPTIONS** field, enter `-k` followed by the name of the tool, + i.e., for SWIG, add `-k swig`. What you're doing here is adding an option to + the `pytest` command, asking to only run tests with the tool's name in the + command. For more information on options for pytest, see + [the pytest doc](https://docs.pytest.org/en/7.1.x/how-to/usage.html). +3. If you want to just build the tool, check the **FORCE_TOOL_BUILD** checkbox. +4. If you want to build the tool and also force the tool's _requirements_ to be + built, check the **FORCE_TOOL_BUILD_WITH_REQUIREMENTS** checkbox. 
**Note:** + This can take quite a while, as _all_ the requirements will be built for + _each_ tool. +5. Click the **Build** button. + +## Merges from `conan-io/conan-center-index` to `develop` + +The nightly Jenkins run automatically retrieves changes from +`conan-io/conan-center-index`, by running the +[`merge-upstream`](merge-upstream.md) task. See that documentation page for the +details. + +Normally, the merge runs without needing any attention. The task is set up to +automatically resolve merge conflicts where DL has overridden files from +`conan-io`, either deliberately, or by removing the files. + +If the merge fails, the task will instead make a PR for the merge, and assign +and request reviews for that PR based on +[the configuration](merge-upstream.md#configuration) in `dlproject.yaml`. + +### Controlling the interval of automated merges + +Currently, the upstream merge happens nightly. This is done by setting the +parameter `MERGE_UPSTREAM=true` for the develop branch in the `Jenkinsfile`: + +```groovy + triggers { + // From the doc: @midnight actually means some time between 12:00 AM and 2:59 AM. + // This gives us automatic spreading out of jobs, so they don't cause load spikes. + parameterizedCron(env.BRANCH_NAME =~ 'develop' ? '@midnight % MERGE_UPSTREAM=true' : '@midnight') + } +``` + +The interval could be changed; for instance, there could be one +`parameterizedCron` statement to run the nightlies every night between midnight +and 3 AM, and another `parameterizedCron` statement to run the `develop` branch +with the `MERGE_UPSTREAM=true` parameter at 3:30 AM Saturday morning. The +following example should work, but has not been tested. + +```groovy + triggers { + // From the doc: @midnight actually means some time between 12:00 AM and 2:59 AM. + // This gives us automatic spreading out of jobs, so they don't cause load spikes. + parameterizedCron(env.BRANCH_NAME =~ 'develop' + ? ''' + @midnight + 30 3 * * 6 % MERGE_UPSTREAM=true + ''' + : '@midnight') + } +``` + +### Requesting a merge + +One might want to get recent recipe changes from `conan-io`, perhaps after an +important bugfix gets merged. To request an upstream merge manually: + +1. Go to the + [build page](http://kepler.dlogics.com:8080/view/All%20branches/job/Datalogics/job/conan-center-index/job/develop/build) + in Jenkins for the `develop` branch. +2. Check the **MERGE_UPSTREAM** checkbox. +3. Click the **Build** button. + +The upstream merge will run, followed by a job that uploads the recipes to the +staging repo and builds the tools. + +## Merging `develop` to `master` to put recipes into production + +Merging the `develop` branch of the Curated Conan Center Index to `master` puts +recipes into production, where they will be used by everyday builds in the +company. + +The action is automatic, though the choice to take that action is one that is +manually made. + +### Criteria + +The choice to put recipes into production should be made by testing +representative projects against the `conan-center-dl-staging` repo, using the +techniques in +[Building against the staging repository](using-the-ccci-repositories.md#building-against-the-staging-repository). + +Building those projects is done outside the `conan-center-index` job system, and +is outside the scope of this documentation. + +### Performing the merge + +1. Go to the + [build page](http://kepler.dlogics.com:8080/view/All%20branches/job/Datalogics/job/conan-center-index/job/master/build) + in Jenkins for the `master` branch. +2. 
Check the **MERGE_STAGING_TO_PRODUCTION** checkbox. +3. Click the **Build** button. From 48a55e19e4a606a0c445eab3ee25b9821fc0f781 Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Mon, 9 Jan 2023 19:33:05 -0600 Subject: [PATCH 171/173] auto-merge-conflict-resolution.md: Fix heading so link works --- dl-docs/auto-merge-conflict-resolution.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dl-docs/auto-merge-conflict-resolution.md b/dl-docs/auto-merge-conflict-resolution.md index f0fb362f0a9eb..b3af1cadd78ef 100644 --- a/dl-docs/auto-merge-conflict-resolution.md +++ b/dl-docs/auto-merge-conflict-resolution.md @@ -11,7 +11,7 @@ conflicts. - [Files that both conan-io and Datalogics modify](#files-that-both-conan-io-and-datalogics-modify) - - [Why `.gitattributes-merge` and not `.gitattributes`?](#why-gitattributes-merge-and-not-gitattributes) + - [Why `.gitattributes-merge` and not `.gitattributes`](#why-gitattributes-merge-and-not-gitattributes) - [Verifying the coverage of `.gitattributes-merge`](#verifying-the-coverage-of-gitattributes-merge) - [Files that Datalogics has deleted](#files-that-datalogics-has-deleted) - [References](#references) @@ -36,7 +36,7 @@ As an example, this file currently contains: /.github/** merge=ours ``` -### Why `.gitattributes-merge` and not `.gitattributes`? +### Why `.gitattributes-merge` and not `.gitattributes` It's not possible to use custom merge drivers on GitHub, so if the `merge=ours` attributes were put into the `.gitattributes` file, it would cause problems with From ff4749efe198f68b0c3aa27165e43390030b51bf Mon Sep 17 00:00:00 2001 From: "Kevin A. Mitchell" Date: Tue, 24 Jan 2023 12:04:24 -0600 Subject: [PATCH 172/173] Add doc for build failure analysis and requesting builds --- README.md | 8 +- dl-docs/troubleshooting.md | 161 +++++++++++++++++++++++++++++++++++++ 2 files changed, 165 insertions(+), 4 deletions(-) create mode 100644 dl-docs/troubleshooting.md diff --git a/README.md b/README.md index 5be131f672645..0a9534447b77a 100644 --- a/README.md +++ b/README.md @@ -51,11 +51,11 @@ It also has Invoke tasks and CI implementations that: ### Troubleshooting -- Analyzing build failures -- Using pytest to run the tools builders +- [Analyzing build failures](dl-docs/troubleshooting.md#analyzing-build-failures) +- [Using pytest to run the tools builders](dl-docs/troubleshooting.md#using-pytest-to-run-the-tools-builders) - Resolving merge conflicts from the upstream repo -- Requesting a full build of the tools and their requirements -- Requesting a full (non-incremental) recipe upload +- [Requesting a full build of the tools and their requirements](dl-docs/jenkins-jobs.md#requesting-a-full-rebuild) +- [Requesting a full (non-incremental) recipe upload](dl-docs/jenkins-jobs.md#forcing-an-upload-of-all-recipes) ### Reference diff --git a/dl-docs/troubleshooting.md b/dl-docs/troubleshooting.md new file mode 100644 index 0000000000000..3d13312b90712 --- /dev/null +++ b/dl-docs/troubleshooting.md @@ -0,0 +1,161 @@ +# Troubleshooting + +Most of the time, the Curated Conan Center Index jobs run without incident. +Rarely something might fail. See the sections in this document for some +solutions to common problems. 
+ + + +- [Analyzing build failures](#analyzing-build-failures) +- [Using pytest to run the tools builders](#using-pytest-to-run-the-tools-builders) + - [Preparation](#preparation) + - [Finding the list of available builders](#finding-the-list-of-available-builders) + - [Building a specific tool](#building-a-specific-tool) +- [Resolving merge conflicts from the upstream repo](#resolving-merge-conflicts-from-the-upstream-repo) + + + +## Analyzing build failures + +If the nightly tools build fails, the logs may be hard to understand. Instead, +look in the artifacts for a report ending in `-build-tools.html`. It'll be named +with the name of the platform, with the architecture included if there are +different architectures. + +The file is a pytest HTML report. It'll have an entry for each tool it tried to +build. The tests are named with the name of the package, its version number, the +config used to build it, and any additional options. + +If you view details, the log of the build will be shown. Note that the stdout +and stderr streams are separate, so make sure to read both to find the commands +and error messages that will be run. + +The first thing you should see in the output is the `conan create` command, +which might look like this: + +```text +Creating package cmake/3.24.3: conan create recipes/cmake/3.x.x cmake/3.24.3@ --update --json /private/var/folders/hv/kv3kzcwd65d4swrd637s65_w0000gp/T/pytest-of-devauto/pytest-15/test_build_tool_cmake_3_24_3_R0/create.json --profile:host build-profile-macos-intel --build missing +``` + +Things to note that you will see in the log following that: + +- The configuration of the build, showing the settings, options, configuration + settings, and environment. +- The list of requirements and build requirements, and their packages. That may + be useful information if the tool failed to build due to a change. + +Examine the log to determine the source of the error. Fixing it may require +examining what changed in the Conan recipes or the upstream sources. + +In order to view recent changes to the recipe that could have caused problems, +make sure to checkout the branch corresponding to the build (most likely +`upstream/develop`), and then use `git log` to look at patches to a single +recipe like this (assuming `cmake`): + +```shell +git log --patch -- recipes/cmake +``` + +## Using pytest to run the tools builders + +### Preparation + +Make sure to run `mkenv.py` and activate the virtual environment. + +Before running the tool builders, make sure that you have selected the +appropriate Conan repository. If you're working against `upstream/develop`, then +make sure to select the staging repository: + +```bash +$ export DL_CONAN_CENTER_INDEX=staging +$ invoke conan.login +``` + +If you want to prevent conflicts with other work by using an alternate Conan +cache, you can do so by first assigning `CONAN_USER_HOME` to a different +directory, where the `.conan` directory will be created. Often the current +directory is a good choice. + +```bash +$ export CONAN_USER_HOME=$PWD +$ export DL_CONAN_CENTER_INDEX=staging +$ invoke conan.login +``` + +### Finding the list of available builders + +To find the tools that are built, use the pytest option `--collect-only` like +this: + +```bash +$ pytest --collect-only +... 
+collected 12 items + + + + + + + + + + + + + + + + +``` + +### Building a specific tool + +You can build a tool and see the output by using the following pytest options: + +- `--capture=no` to display the output on the console instead of in the test log +- `-k` to use keywords, which can be a combination of strings and Python logical + expressions. See the + [doc](https://docs.pytest.org/en/7.2.x/how-to/usage.html#specifying-which-tests-to-run) + for more information. +- `--force-build=package` will force the package to be built. Normally, Conan + will try to download a built package and run the `test_package` against it. +- `--force-build=with-requirements` will force Conan to build not only the + package, but all its requirements. This is an advanced option that is only + necessary if there are deep problems with building the package. + +From the usage help for pytest: + +> Only run tests which match the given substring expression. An expression is a +> Python evaluatable expression where all names are substring-matched against +> test names and their parent classes. Example: `-k 'test_method or test_other'` +> matches all test functions and classes whose name contains 'test_method' or +> 'test_other', while `-k 'not test_method'` matches those that don't contain +> 'test_method' in their names. `-k 'not test_method and not test_other'` will +> eliminate the matches. Additionally keywords are matched to classes and +> functions containing extra names in their 'extra_keyword_matches' set, as well +> as functions which have names assigned directly to them. The matching is +> case-insensitive. + +For instance, to build Doxygen 1.9.2 with the enable_search option: + +```bash +$ pytest --capture=no -k 'doxygen and 1.9.2 and enable_search' --force-build=package +tests/test_tools.py::TestBuildTools::test_build_tool[doxygen/1.9.2_doxygen:enable_search=False-ReleaseTool] Creating package doxygen/1.9.2: conan create recipes/doxygen/all doxygen/1.9.2@ --update --json /private/var/folders/03/f8w5w_3s0xg5m1jphq243j_r0000gx/T/pytest-of-kam/pytest-1/test_build_tool_doxygen_1_9_2_0/create.json --profile:host build-profile-macos-intel --options:host doxygen:enable_search=False --build doxygen --build missing +...build... +``` + +If you want to build the package with `conan-create` directly, you can copy the +`conan create` command from the test output and run it. When you do this, remove +the `--json /create.json` option; the tests are using that to get +information about the package that was built. + +```bash +$ conan create recipes/doxygen/all doxygen/1.9.2@ --update --profile:host build-profile-macos-intel --options:host doxygen:enable_search=False --build doxygen --build missing +...build... +``` + +**Note:** When running on Windows, remember to use double quotes for quoting +strings. + +## Resolving merge conflicts from the upstream repo From 2ae008f98ab7b57cda05e1ed4065a7b4d30475da Mon Sep 17 00:00:00 2001 From: "Kevin A. 
Mitchell" Date: Tue, 24 Jan 2023 15:48:41 -0600 Subject: [PATCH 173/173] Add doc for resolving merge conflicts --- README.md | 2 +- dl-docs/troubleshooting.md | 81 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 0a9534447b77a..11d039fdf2c94 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,7 @@ It also has Invoke tasks and CI implementations that: - [Analyzing build failures](dl-docs/troubleshooting.md#analyzing-build-failures) - [Using pytest to run the tools builders](dl-docs/troubleshooting.md#using-pytest-to-run-the-tools-builders) -- Resolving merge conflicts from the upstream repo +- [Resolving merge conflicts from the upstream repo](dl-docs/troubleshooting.md#resolving-merge-conflicts-from-the-upstream-repo) - [Requesting a full build of the tools and their requirements](dl-docs/jenkins-jobs.md#requesting-a-full-rebuild) - [Requesting a full (non-incremental) recipe upload](dl-docs/jenkins-jobs.md#forcing-an-upload-of-all-recipes) diff --git a/dl-docs/troubleshooting.md b/dl-docs/troubleshooting.md index 3d13312b90712..155a233beadfd 100644 --- a/dl-docs/troubleshooting.md +++ b/dl-docs/troubleshooting.md @@ -12,6 +12,7 @@ solutions to common problems. - [Finding the list of available builders](#finding-the-list-of-available-builders) - [Building a specific tool](#building-a-specific-tool) - [Resolving merge conflicts from the upstream repo](#resolving-merge-conflicts-from-the-upstream-repo) + - [Resolving merge conflicts locally](#resolving-merge-conflicts-locally) @@ -159,3 +160,83 @@ $ conan create recipes/doxygen/all doxygen/1.9.2@ --update --profile:host build- strings. ## Resolving merge conflicts from the upstream repo + +Most of the time, the +[automated merges](jenkins-jobs.md#merges-from-conan-ioconan-center-index-to-develop) +work without incident, as they fetch from `conan-io/conan-center-index` and +[resolve merge conflicts automatically](auto-merge-conflict-resolution.md). + +Sometimes, in the rare case that Datalogics has a local modification to a +recipe, and `conan-io` makes a change in the same bit of code, there will be a +merge conflict. + +When that happens, the automated merging will give up, and instead create a pull +request containing the changes from `conan-io`. It will assign the pull request +and request reviews from the Octocat users mentioned in the `reviewers` and +`assignee` keys in the `pull_requests` key of `merge_upstream` as seen in the +[configuration documentation](merge-upstream.md#configuration). + +To resolve the conflict, open the pull request, and then follow the instructions +in +[Resolving a merge conflict on GitHub](https://docs.github.com/en/enterprise-server@3.7/pull-requests/collaborating-with-pull-requests/addressing-merge-conflicts/resolving-a-merge-conflict-on-github) +in the GitHub documentation. Often, you will be able to edit the conflicts right +on Octocat using the web. + +### Resolving merge conflicts locally + +If you can't resolve the conflicts in the web editor, then resolve them locally. +I will illustrate this with commands that assume that you've installed the +[GitHub CLI](https://cli.github.com/), which supplies the `gh` command. + +If you haven't done so already, make sure you're authorized with Octocat: + +```shell +gh auth login -h octocat.dlogics.com -p ssh +``` + +If you haven't done so already, fork the `conan-center-index` repo on Octocat. 
+ +If you have a checkout of your fork with `upstream` set to the Datalogics repo, +you can skip this step, otherwise: + +```shell +gh repo clone octocat.dlogics.com/your-user-id/conan-center-index +cd conan-center-index +``` + +Make sure all the remotes are up to date + +```shell +git remote update +``` + +Get the master branch from `conan-io` + +```shell +git fetch https://github.com/conan-io/conan-center-index.git master +``` + +Create a branch for doing the merge + +```shell +git checkout -b merge-from-conan-io FETCH_HEAD +``` + +Merge the `develop` branch + +```shell +git pull --no-ff upstream develop +``` + +At this point, resolve any merge conflicts, add the resolutions with `git add` +and commit with `git commit`. + +Then, open a pull request: + +```bash +gh --repo octocat.dlogics.com/datalogics/conan-center-index pr create --web +``` + +`gh` will ask where to push; select your own fork and press RETURN. + +Your web browser will open. Complete the pull request in the web browser.