From 91fb865058afa55e8245e16d5751bff141c5503e Mon Sep 17 00:00:00 2001 From: Yifan Li <109183385+yf711@users.noreply.github.com> Date: Wed, 19 Jun 2024 20:10:58 -0700 Subject: [PATCH] [ORT 1.18.1 Release] Cherry pick 1st round (#21105) ### Description ### Motivation and Context --------- Co-authored-by: Jian Chen Co-authored-by: Yi Zhang Co-authored-by: Changming Sun Co-authored-by: Ye Wang <52801275+wangyems@users.noreply.github.com> Co-authored-by: Your Name --- ...anch.Nuget-WindowsAI-Pipeline.Official.yml | 23 +- cmake/CMakeLists.txt | 20 +- cmake/onnxruntime_providers_migraphx.cmake | 2 +- cmake/onnxruntime_providers_tensorrt.cmake | 2 +- java/build.gradle | 87 +++-- java/settings.xml | 9 + .../cuda/quantization/moe_quantization.cc | 9 + .../tensorrt_execution_provider_utils.h | 3 +- .../models/llama/requirements.txt | 2 +- .../transformers/models/phi2/requirements.txt | 2 +- .../models/whisper/requirements.txt | 2 +- setup.py | 1 + tools/ci_build/build.py | 1 + ...arm64-v8a-QNN-crosscompile-ci-pipeline.yml | 2 +- .../azure-pipelines/bigmodels-ci-pipeline.yml | 10 +- .../c-api-noopenmp-packaging-pipelines.yml | 181 ++-------- .../cuda-packaging-pipeline.yml | 135 ++++++++ .../azure-pipelines/linux-ci-pipeline.yml | 20 +- .../linux-cpu-minimal-build-ci-pipeline.yml | 8 - .../linux-dnnl-ci-pipeline.yml | 8 - .../azure-pipelines/linux-gpu-ci-pipeline.yml | 71 +--- .../linux-gpu-tensorrt-ci-pipeline.yml | 42 +-- .../nodejs/templates/test_linux.yml | 2 +- .../nodejs/templates/test_macos.yml | 2 +- .../nodejs/templates/test_win.yml | 2 +- .../npm-packaging-pipeline.yml | 4 +- .../nuget-cuda-publishing-pipeline.yml | 20 +- .../nuget/templates/dml-vs-2022.yml | 2 +- .../nuget/templates/test_linux.yml | 4 +- .../nuget/templates/test_macos.yml | 2 +- .../nuget/templates/test_win.yml | 2 +- .../orttraining-linux-ci-pipeline.yml | 15 +- ...ortmodule-distributed-test-ci-pipeline.yml | 8 +- .../orttraining-pai-ci-pipeline.yml | 1 + 
.../orttraining-py-packaging-pipeline-cpu.yml | 106 +----- ...orttraining-py-packaging-pipeline-cuda.yml | 8 - ...ttraining-py-packaging-pipeline-cuda12.yml | 8 - .../github/azure-pipelines/publish-nuget.yml | 2 +- .../py-cuda-packaging-pipeline.yml | 8 - .../py-cuda-publishing-pipeline.yml | 40 +-- .../py-package-test-pipeline.yml | 22 +- .../azure-pipelines/py-packaging-pipeline.yml | 8 - .../stages/download-java-tools-stage.yml | 30 ++ .../stages/java-cuda-packaging-stage.yml | 10 +- .../stages/java-cuda-publishing-stage.yml | 56 +++ .../jobs/linux-gpu-tensorrt-packaging-job.yml | 108 ------ .../jobs/py-linux-cuda-package-test-job.yml | 2 +- .../stages/nuget-combine-cuda-stage.yml | 323 ++++++------------ .../stages/nuget-cuda-packaging-stage.yml | 235 +++++++++++++ .../stages/nuget-cuda-publishing-stage.yml | 4 +- .../nuget-linux-cuda-packaging-stage.yml | 156 ++++++--- .../stages/nuget-win-cuda-packaging-stage.yml | 15 +- .../stages/py-cuda-packaging-stage.yml | 4 +- .../stages/py-cuda-publishing-stage.yml | 73 ++-- .../stages/set_packaging_variables_stage.yml | 46 +++ .../android-binary-size-check-stage.yml | 2 +- .../templates/android-java-api-aar.yml | 1 + .../azure-pipelines/templates/c-api-cpu.yml | 10 +- .../templates/c-api-linux-cpu.yml | 27 +- ...t-governance-component-detection-steps.yml | 18 +- .../azure-pipelines/templates/esrp_nuget.yml | 47 +-- .../templates/final-jar-testing.yml | 2 +- .../templates/get-docker-image-steps.yml | 5 +- .../jobs/download_training_test_data.yml | 8 + .../jobs/download_win_gpu_library.yml | 16 +- .../templates/mac-cpu-packing-jobs.yml | 4 +- .../templates/mac-esrp-dylib.yml | 42 +-- .../templates/make_java_win_binaries.yml | 14 +- ...device-training-cpu-packaging-pipeline.yml | 2 +- ...orttraining-linux-gpu-test-ci-pipeline.yml | 16 +- .../templates/py-linux-gpu.yml | 17 +- .../azure-pipelines/templates/py-linux.yml | 18 +- .../templates/py-packaging-linux-test-cpu.yml | 18 +- .../py-packaging-linux-test-cuda.yml | 39 
++- .../py-packaging-selectable-stage.yml | 2 +- .../templates/py-packaging-stage.yml | 23 +- ...py-packaging-training-cuda-stage-steps.yml | 34 +- .../azure-pipelines/templates/py-win-gpu.yml | 2 +- .../github/azure-pipelines/templates/rocm.yml | 1 + .../azure-pipelines/templates/win-ci.yml | 45 +-- .../templates/win-esrp-dll.yml | 45 +-- .../azure-pipelines/win-ci-pipeline.yml | 2 +- .../win-gpu-reduce-op-ci-pipeline.yml | 4 +- .../win-gpu-tensorrt-ci-pipeline.yml | 6 +- .../github/linux/build_cuda_c_api_package.sh | 3 +- tools/ci_build/github/linux/build_cuda_ci.sh | 46 +++ .../linux/build_linux_python_package.sh | 6 + .../linux/build_tensorrt_c_api_package.sh | 5 +- .../github/linux/build_tensorrt_ci.sh | 46 +++ .../delete_unused_files_before_upload.sh | 7 + .../docker/Dockerfile.manylinux2014_aten_cpu | 4 +- .../linux/docker/Dockerfile.manylinux2_28_cpu | 160 +-------- .../docker/Dockerfile.manylinux2_28_cuda | 155 +-------- ...Dockerfile.manylinux2_28_training_cuda11_8 | 162 +-------- ...Dockerfile.manylinux2_28_training_cuda12_2 | 161 +-------- .../Dockerfile.package_ubi8_cuda_tensorrt10_0 | 11 +- .../docker/Dockerfile.package_ubuntu_2004_gpu | 4 +- .../inference/aarch64/python/cpu/Dockerfile | 11 + .../python/cpu/scripts/install_centos.sh | 0 .../python/cpu/scripts/install_deps.sh | 0 .../python/cpu/scripts/install_protobuf.sh | 0 .../python/cpu/scripts/requirements.txt | 0 .../inference/x64/default/gpu/Dockerfile | 19 -- .../x64/default/gpu/scripts/install_centos.sh | 9 - .../python/cpu/Dockerfile.manylinux2_28_cpu | 163 --------- .../{x64 => x86_64}/default/cpu/Dockerfile | 0 .../default/cpu/scripts/install_centos.sh | 10 +- .../default/cpu/scripts/install_deps.sh | 0 .../x86_64/default/cuda11/Dockerfile | 46 +++ .../default/cuda11}/scripts/install_deps.sh | 4 +- .../x86_64/default/cuda12/Dockerfile | 48 +++ .../default/cuda12/scripts/install_deps.sh | 68 ++++ .../inference/x86_64/python/cpu/Dockerfile | 11 + .../python/cpu/scripts/install_centos.sh | 20 
++ .../x86_64/python/cpu/scripts/install_deps.sh | 36 ++ .../python/cpu/scripts/install_protobuf.sh | 108 ++++++ .../python/cpu/scripts/requirements.txt | 11 + .../inference/x86_64/python/cuda/Dockerfile | 46 +++ .../python/cuda/scripts/install_centos.sh | 20 ++ .../python/cuda/scripts/install_deps.sh | 36 ++ .../python/cuda/scripts/install_protobuf.sh | 108 ++++++ .../python/cuda/scripts/requirements.txt | 11 + .../scripts/manylinux/install_centos.sh | 8 +- .../build_ort_and_check_binary_size.py | 2 +- .../github/linux/run_python_dockerbuild.sh | 2 + .../ci_build/github/linux/run_python_tests.sh | 4 + .../windows/extract_nuget_files_gpu.ps1 | 8 +- tools/ci_build/github/windows/helpers.ps1 | 8 +- .../windows/install_third_party_deps.ps1 | 2 +- 129 files changed, 1907 insertions(+), 2120 deletions(-) create mode 100644 java/settings.xml create mode 100644 tools/ci_build/github/azure-pipelines/cuda-packaging-pipeline.yml create mode 100644 tools/ci_build/github/azure-pipelines/stages/download-java-tools-stage.yml create mode 100644 tools/ci_build/github/azure-pipelines/stages/java-cuda-publishing-stage.yml delete mode 100644 tools/ci_build/github/azure-pipelines/stages/jobs/linux-gpu-tensorrt-packaging-job.yml create mode 100644 tools/ci_build/github/azure-pipelines/stages/nuget-cuda-packaging-stage.yml create mode 100644 tools/ci_build/github/azure-pipelines/stages/set_packaging_variables_stage.yml create mode 100644 tools/ci_build/github/azure-pipelines/templates/jobs/download_training_test_data.yml create mode 100755 tools/ci_build/github/linux/build_cuda_ci.sh create mode 100755 tools/ci_build/github/linux/build_tensorrt_ci.sh create mode 100755 tools/ci_build/github/linux/delete_unused_files_before_upload.sh create mode 100644 tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/Dockerfile rename tools/ci_build/github/linux/docker/inference/{x64 => aarch64}/python/cpu/scripts/install_centos.sh (100%) rename 
tools/ci_build/github/linux/docker/inference/{x64 => aarch64}/python/cpu/scripts/install_deps.sh (100%) rename tools/ci_build/github/linux/docker/inference/{x64 => aarch64}/python/cpu/scripts/install_protobuf.sh (100%) rename tools/ci_build/github/linux/docker/inference/{x64 => aarch64}/python/cpu/scripts/requirements.txt (100%) delete mode 100644 tools/ci_build/github/linux/docker/inference/x64/default/gpu/Dockerfile delete mode 100755 tools/ci_build/github/linux/docker/inference/x64/default/gpu/scripts/install_centos.sh delete mode 100644 tools/ci_build/github/linux/docker/inference/x64/python/cpu/Dockerfile.manylinux2_28_cpu rename tools/ci_build/github/linux/docker/inference/{x64 => x86_64}/default/cpu/Dockerfile (100%) rename tools/ci_build/github/linux/docker/inference/{x64 => x86_64}/default/cpu/scripts/install_centos.sh (53%) rename tools/ci_build/github/linux/docker/inference/{x64 => x86_64}/default/cpu/scripts/install_deps.sh (100%) create mode 100644 tools/ci_build/github/linux/docker/inference/x86_64/default/cuda11/Dockerfile rename tools/ci_build/github/linux/docker/inference/{x64/default/gpu => x86_64/default/cuda11}/scripts/install_deps.sh (89%) create mode 100644 tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/Dockerfile create mode 100755 tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/scripts/install_deps.sh create mode 100644 tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/Dockerfile create mode 100755 tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_centos.sh create mode 100755 tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_deps.sh create mode 100755 tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_protobuf.sh create mode 100644 tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/requirements.txt create mode 100644 
tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/Dockerfile create mode 100755 tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_centos.sh create mode 100755 tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_deps.sh create mode 100755 tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_protobuf.sh create mode 100644 tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/requirements.txt diff --git a/.pipelines/OneBranch.Nuget-WindowsAI-Pipeline.Official.yml b/.pipelines/OneBranch.Nuget-WindowsAI-Pipeline.Official.yml index fd3b7266d30f..909e3dc753cf 100644 --- a/.pipelines/OneBranch.Nuget-WindowsAI-Pipeline.Official.yml +++ b/.pipelines/OneBranch.Nuget-WindowsAI-Pipeline.Official.yml @@ -173,7 +173,7 @@ extends: $arm64_static_runtime_nupkg_unzipped_directory = [System.IO.Path]::Combine($arm64_static_runtime_nupkg_unzipped_directory_root, 'binaries', [System.IO.Path]::GetFileNameWithoutExtension($arm64_static_runtime_nuget_package)) [System.IO.Compression.ZipFile]::ExtractToDirectory($arm64_static_runtime_nuget_package, $arm64_static_runtime_nupkg_unzipped_directory) - + $x64_static_runtime_path_old = [System.IO.Path]::Combine($x64_static_runtime_nupkg_unzipped_directory, 'runtimes', 'win-x64', '_native') $x64_static_runtime_path_new = [System.IO.Path]::Combine($x64_nupkg_unzipped_directory, 'runtimes', 'win-x64', '_native', 'static') @@ -185,7 +185,7 @@ extends: $arm64_runtime_path_new = [System.IO.Path]::Combine($x64_nupkg_unzipped_directory, 'runtimes', 'win-arm64', '_native') $arm64_static_runtime_path_old = [System.IO.Path]::Combine($arm64_static_runtime_nupkg_unzipped_directory, 'runtimes', 'win-arm64', '_native') $arm64_static_runtime_path_new = [System.IO.Path]::Combine($x64_nupkg_unzipped_directory, 'runtimes', 'win-arm64', '_native', 'static') - + $uap_build_path_old = 
[System.IO.Path]::Combine($x64_static_runtime_nupkg_unzipped_directory, 'build', 'native') $uap_build_path_new = [System.IO.Path]::Combine($x64_nupkg_unzipped_directory, 'build', 'uap10.0') @@ -262,7 +262,7 @@ extends: $x86_runtime_path_new = [System.IO.Path]::Combine($x64_nupkg_unzipped_directory, 'runtimes', 'win-x86', '_native') $arm64_runtime_path_old = [System.IO.Path]::Combine($arm64_nupkg_unzipped_directory, 'runtimes', 'win-arm64', '_native') $arm64_runtime_path_new = [System.IO.Path]::Combine($x64_nupkg_unzipped_directory, 'runtimes', 'win-arm64', '_native') - + New-Item -Path $x86_runtime_path_new -ItemType Directory New-Item -Path $arm64_runtime_path_new -ItemType Directory @@ -293,12 +293,21 @@ extends: - script: | dir $(Build.SourcesDirectory)\unzipped\runtimes\win-x64\_native - - task: EsrpCodeSigning@2 + - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5 displayName: "Sign Nuget package" inputs: - ConnectedServiceName: 'OnnxRuntime CodeSign 20190817' + ConnectedServiceName: 'OnnxrunTimeCodeSign_20240611' + AppRegistrationClientId: '53d54d02-978d-4305-8572-583cf6711c4f' + AppRegistrationTenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47' + AuthAKVName: 'buildkeyvault' + AuthCertName: '53d54d02-SSL-AutoRotate' + AuthSignCertName: '53d54d02-978d-4305-8572-583cf6711c4f' + FolderPath: $(Build.ArtifactStagingDirectory) Pattern: '*.nupkg' + SessionTimeout: 90 + ServiceEndpointUrl: 'https://api.esrp.microsoft.com/api/v2' + MaxConcurrency: 25 signConfigType: inlineSignParams inlineOperation: | [ @@ -307,14 +316,14 @@ extends: "operationSetCode": "NuGetSign", "parameters": [ ], "toolName": "sign", - "toolVersion": "1.0" + "toolVersion": "6.2.9304.0" }, { "keyCode": "CP-401405", "operationSetCode": "NuGetVerify", "parameters": [ ], "toolName": "sign", - "toolVersion": "1.0" + "toolVersion": "6.2.9304.0" } ] diff --git a/cmake/CMakeLists.txt b/cmake/CMakeLists.txt index 8edbb6ad6f25..550238b192a8 100644 --- a/cmake/CMakeLists.txt +++ 
b/cmake/CMakeLists.txt @@ -59,8 +59,8 @@ if (NOT CMAKE_BUILD_TYPE) set(CMAKE_BUILD_TYPE "RelWithDebInfo" CACHE STRING "Choose build type: Debug Release RelWithDebInfo MinSizeRel." FORCE) endif() -if("${CMAKE_C_COMPILER_ID}" STREQUAL "GNU" AND CMAKE_C_COMPILER_VERSION VERSION_LESS 8) - message(FATAL_ERROR "GCC version must be greater than or equal to 8") +if("${CMAKE_C_COMPILER_ID}" STREQUAL "GNU" AND CMAKE_C_COMPILER_VERSION VERSION_LESS 9) + message(FATAL_ERROR "GCC version must be greater than or equal to 9") endif() # Options @@ -1294,12 +1294,6 @@ if (onnxruntime_USE_TVM) list(APPEND onnxruntime_EXTERNAL_DEPENDENCIES tvm) endif() -# needs to link with stdc++fs in Linux -if (UNIX AND "${CMAKE_C_COMPILER_ID}" STREQUAL "GNU" AND CMAKE_C_COMPILER_VERSION VERSION_LESS 9) - set(FS_STDLIB stdc++fs) -endif() -list(APPEND onnxruntime_EXTERNAL_LIBRARIES ${FS_STDLIB}) - # onnxruntime-extensions if (onnxruntime_USE_EXTENSIONS) include(extensions) @@ -1468,16 +1462,6 @@ if (onnxruntime_USE_CUDA) endif() endif() -if (onnxruntime_USE_TENSORRT) - # needs to link with stdc++fs in Linux - if (UNIX) - if (NOT APPLE) - set(FS_STDLIB stdc++fs) - endif() - endif() - list(APPEND onnxruntime_EXTERNAL_LIBRARIES ${FS_STDLIB}) -endif() - if (onnxruntime_USE_MIGRAPHX) if (WIN32) message(FATAL_ERROR "MIGraphX does not support build in Windows!") diff --git a/cmake/onnxruntime_providers_migraphx.cmake b/cmake/onnxruntime_providers_migraphx.cmake index 91ac66a40721..01c4f8b2c871 100644 --- a/cmake/onnxruntime_providers_migraphx.cmake +++ b/cmake/onnxruntime_providers_migraphx.cmake @@ -49,7 +49,7 @@ target_compile_options(onnxruntime_providers_migraphx PRIVATE -Wno-error=sign-compare) set_property(TARGET onnxruntime_providers_migraphx APPEND_STRING PROPERTY COMPILE_FLAGS "-Wno-deprecated-declarations") set_property(TARGET onnxruntime_providers_migraphx APPEND_STRING PROPERTY LINK_FLAGS "-Xlinker --version-script=${ONNXRUNTIME_ROOT}/core/providers/migraphx/version_script.lds -Xlinker 
--gc-sections") - target_link_libraries(onnxruntime_providers_migraphx PRIVATE nsync::nsync_cpp stdc++fs) + target_link_libraries(onnxruntime_providers_migraphx PRIVATE nsync::nsync_cpp) include(CheckLibraryExists) check_library_exists(migraphx::c "migraphx_program_run_async" "/opt/rocm/migraphx/lib" HAS_STREAM_SYNC) diff --git a/cmake/onnxruntime_providers_tensorrt.cmake b/cmake/onnxruntime_providers_tensorrt.cmake index 1e8f388548fa..e56de0c7124d 100644 --- a/cmake/onnxruntime_providers_tensorrt.cmake +++ b/cmake/onnxruntime_providers_tensorrt.cmake @@ -206,7 +206,7 @@ elseif(UNIX) set_property(TARGET onnxruntime_providers_tensorrt APPEND_STRING PROPERTY COMPILE_FLAGS "-Wno-deprecated-declarations") set_property(TARGET onnxruntime_providers_tensorrt APPEND_STRING PROPERTY LINK_FLAGS "-Xlinker --version-script=${ONNXRUNTIME_ROOT}/core/providers/tensorrt/version_script.lds -Xlinker --gc-sections") - target_link_libraries(onnxruntime_providers_tensorrt PRIVATE nsync::nsync_cpp stdc++fs) + target_link_libraries(onnxruntime_providers_tensorrt PRIVATE nsync::nsync_cpp) elseif(WIN32) set_property(TARGET onnxruntime_providers_tensorrt APPEND_STRING PROPERTY LINK_FLAGS "-DEF:${ONNXRUNTIME_ROOT}/core/providers/tensorrt/symbols.def") else() diff --git a/java/build.gradle b/java/build.gradle index fd66ec220b78..cebf67e08544 100644 --- a/java/build.gradle +++ b/java/build.gradle @@ -4,6 +4,7 @@ plugins { id 'signing' id 'jacoco' id "com.diffplug.spotless" version "6.25.0" + id "net.linguica.maven-settings" version "0.5" } allprojects { @@ -19,6 +20,12 @@ version = rootProject.file('../VERSION_NUMBER').text.trim() def cmakeBuildDir = System.properties['cmakeBuildDir'] def useCUDA = System.properties['USE_CUDA'] def useROCM = System.properties['USE_ROCM'] + +def adoArtifact = project.findProperty('adoArtifact') +def adoAccessToken = project.findProperty('adoAccessToken') +// Only publish to ADO feed if all two properties are set +def publishToAdo = adoArtifact != null && 
adoAccessToken != null + boolean enableTrainingApis = (System.properties['ENABLE_TRAINING_APIS'] ?: "0") == "1" def cmakeJavaDir = "${cmakeBuildDir}/java" def cmakeNativeLibDir = "${cmakeJavaDir}/native-lib" @@ -37,6 +44,11 @@ def trainingDescription = 'ONNX Runtime Training is a training and inference pac '(Open Neural Network Exchange) models. This package is targeted for Learning on The Edge aka On-Device Training ' + 'See https://github.com/microsoft/onnxruntime-training-examples/tree/master/on_device_training for more details.' +// We need to have a custom settings.xml so codeql can bypass the need for settings.security.xml +mavenSettings { + userSettingsFileName = "${projectDir}/settings.xml" +} + java { sourceCompatibility = JavaVersion.VERSION_1_8 targetCompatibility = JavaVersion.VERSION_1_8 @@ -48,7 +60,8 @@ jar { } // Add explicit sources jar with pom file. -task sourcesJar(type: Jar, dependsOn: classes) { +tasks.register('sourcesJar', Jar) { + dependsOn classes archiveClassifier = "sources" from sourceSets.main.allSource into("META-INF/maven/$project.group/$mavenArtifactId") { @@ -58,7 +71,8 @@ task sourcesJar(type: Jar, dependsOn: classes) { } // Add explicit javadoc jar with pom file -task javadocJar(type: Jar, dependsOn: javadoc) { +tasks.register('javadocJar', Jar) { + dependsOn javadoc archiveClassifier = "javadoc" from javadoc.destinationDir into("META-INF/maven/$project.group/$mavenArtifactId") { @@ -82,7 +96,7 @@ spotless { compileJava { dependsOn spotlessJava - options.compilerArgs += ["-h", "${project.buildDir}/headers/"] + options.compilerArgs += ["-h", "${layout.buildDirectory.get().toString()}/headers/"] if (!JavaVersion.current().isJava8()) { // Ensures only methods present in Java 8 are used options.compilerArgs.addAll(['--release', '8']) @@ -128,7 +142,7 @@ if (cmakeBuildDir != null) { // generate tasks to be called from cmake // Overwrite jar location - task allJar(type: Jar) { + tasks.register('allJar', Jar) { manifest { 
attributes('Automatic-Module-Name': project.group, 'Implementation-Title': 'onnxruntime', @@ -143,23 +157,20 @@ if (cmakeBuildDir != null) { from cmakeNativeLibDir } - task cmakeBuild(type: Copy) { - from project.buildDir + tasks.register('cmakeBuild', Copy) { + from layout.buildDirectory.get() include 'libs/**' include 'docs/**' into cmakeBuildOutputDir + dependsOn(allJar, sourcesJar, javadocJar, javadoc) } - cmakeBuild.dependsOn allJar - cmakeBuild.dependsOn sourcesJar - cmakeBuild.dependsOn javadocJar - cmakeBuild.dependsOn javadoc - task cmakeCheck(type: Copy) { - from project.buildDir + tasks.register('cmakeCheck', Copy) { + from layout.buildDirectory.get() include 'reports/**' into cmakeBuildOutputDir + dependsOn(check) } - cmakeCheck.dependsOn check } dependencies { @@ -198,7 +209,7 @@ jacocoTestReport { reports { xml.required = true csv.required = true - html.destination file("${buildDir}/jacocoHtml") + html.outputLocation = layout.buildDirectory.dir("jacocoHtml") } } @@ -206,9 +217,14 @@ publishing { publications { maven(MavenPublication) { groupId = project.group - artifactId = mavenArtifactId - - from components.java + if(publishToAdo) { + artifactId = 'onnxruntime_gpu' + artifact (adoArtifact) + } else { + artifactId = mavenArtifactId + from components.java + } + version = project.version pom { name = enableTrainingApis ? 'onnxruntime-training' : 'onnx-runtime' description = enableTrainingApis ? 
trainingDescription : defaultDescription @@ -239,24 +255,41 @@ publishing { } } repositories { - maven { - url 'https://oss.sonatype.org/service/local/staging/deploy/maven2/' - credentials { - username mavenUser - password mavenPwd + if (publishToAdo) { + maven { + url "https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/${System.getenv('ADOFeedName')}/maven/v1" + name System.getenv('ADOFeedName') + authentication { + basic(BasicAuthentication) + } + credentials { + username 'aiinfra' + password "${project.findProperty('adoAccessToken')}" + } + } + } else { + maven { + url 'https://oss.sonatype.org/service/local/staging/deploy/maven2/' + credentials { + username mavenUser + password mavenPwd + } } } } } - // Generates a task signMavenPublication that will // build all artifacts. signing { // Queries env vars: // ORG_GRADLE_PROJECT_signingKey // ORG_GRADLE_PROJECT_signingPassword but can be changed to properties - def signingKey = findProperty("signingKey") - def signingPassword = findProperty("signingPassword") - useInMemoryPgpKeys(signingKey, signingPassword) - sign publishing.publications.maven + def signingKey = findProperty("signingKey") + def signingPassword = findProperty("signingPassword") + // Skip signing if no key is provided + if (signingKey != null && signingPassword != null) { + useInMemoryPgpKeys(signingKey, signingPassword) + sign publishing.publications.maven + sign publishing.publications.mavenAdo + } } diff --git a/java/settings.xml b/java/settings.xml new file mode 100644 index 000000000000..21d7d0fb56c9 --- /dev/null +++ b/java/settings.xml @@ -0,0 +1,9 @@ + + + + + + \ No newline at end of file diff --git a/onnxruntime/contrib_ops/cuda/quantization/moe_quantization.cc b/onnxruntime/contrib_ops/cuda/quantization/moe_quantization.cc index 7bb0945615d3..571cc59dec75 100644 --- a/onnxruntime/contrib_ops/cuda/quantization/moe_quantization.cc +++ b/onnxruntime/contrib_ops/cuda/quantization/moe_quantization.cc @@ -54,6 +54,11 @@ Status 
QMoE::ComputeInternal(OpKernelContext* context) const { const Tensor* fc3_scales_optional = context->Input(9); const Tensor* fc3_experts_bias_optional = context->Input(10); +#if defined(__GNUC__) +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wmaybe-uninitialized" // Mute "maybe used uninitialized" warning for MoEParameters. +#endif + MoEParameters moe_params; MoEQuantType quant_type = MoEQuantType::UINT4; ORT_RETURN_IF_ERROR(CheckInputs(moe_params, quant_type, input, router_probs, fc1_experts_weights, @@ -135,6 +140,10 @@ Status QMoE::ComputeInternal(OpKernelContext* context) const { reinterpret_cast(expert_for_source_row.get()), static_cast(moe_params.num_rows), static_cast(moe_params.hidden_size), static_cast(k_), Stream(context)); +#if defined(__GNUC__) +#pragma GCC diagnostic pop +#endif + return Status::OK(); } diff --git a/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider_utils.h b/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider_utils.h index a54b728c17c4..df12d9033878 100644 --- a/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider_utils.h +++ b/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider_utils.h @@ -8,7 +8,6 @@ #include #include #include -#include #include "flatbuffers/idl.h" #include "ort_trt_int8_cal_table.fbs.h" #include @@ -16,7 +15,7 @@ #include "core/common/path_string.h" #include "core/framework/murmurhash3.h" -namespace fs = std::experimental::filesystem; +namespace fs = std::filesystem; namespace onnxruntime { diff --git a/onnxruntime/python/tools/transformers/models/llama/requirements.txt b/onnxruntime/python/tools/transformers/models/llama/requirements.txt index e991c2f27a1a..ce4b3f6a09ba 100644 --- a/onnxruntime/python/tools/transformers/models/llama/requirements.txt +++ b/onnxruntime/python/tools/transformers/models/llama/requirements.txt @@ -1,7 +1,7 @@ optimum>=1.14.1 transformers>=4.33.2,<= 4.37.2 torch>=2.2.0 -onnx>=1.14.0 +onnx==1.16.0 datasets>=2.8.0 
protobuf==3.20.2 psutil \ No newline at end of file diff --git a/onnxruntime/python/tools/transformers/models/phi2/requirements.txt b/onnxruntime/python/tools/transformers/models/phi2/requirements.txt index af6f441c149d..0b2ea0df93a9 100644 --- a/onnxruntime/python/tools/transformers/models/phi2/requirements.txt +++ b/onnxruntime/python/tools/transformers/models/phi2/requirements.txt @@ -1,3 +1,3 @@ -onnx>=1.15.0 +onnx==1.16.0 transformers>=4.36.2 onnxscript>=0.1.0.dev20240126 diff --git a/onnxruntime/python/tools/transformers/models/whisper/requirements.txt b/onnxruntime/python/tools/transformers/models/whisper/requirements.txt index 9bbe0d738040..6dd46f973a5f 100644 --- a/onnxruntime/python/tools/transformers/models/whisper/requirements.txt +++ b/onnxruntime/python/tools/transformers/models/whisper/requirements.txt @@ -7,7 +7,7 @@ soundfile librosa optimum onnxruntime-extensions>=0.9.0 -onnx>=1.15.0 +onnx==1.16.0 protobuf==3.20.2 numpy==1.23.3 psutil diff --git a/setup.py b/setup.py index 3c7844aa2094..3203993e0c4d 100644 --- a/setup.py +++ b/setup.py @@ -198,6 +198,7 @@ def run(self): "libcudart.so.11.0", "libcudart.so.12", "libcudnn.so.8", + "libcudnn.so.9", "libcufft.so.10", "libcufft.so.11", "libcurand.so.10", diff --git a/tools/ci_build/build.py b/tools/ci_build/build.py index 3df351711ae4..e39689dc26b5 100644 --- a/tools/ci_build/build.py +++ b/tools/ci_build/build.py @@ -1585,6 +1585,7 @@ def generate_build_tree( cuda_compile_flags_str = cuda_compile_flags_str + " " + compile_flag if len(cuda_compile_flags_str) != 0: cudaflags.append('-Xcompiler="%s"' % cuda_compile_flags_str) + cudaflags.append("-allow-unsupported-compiler") elif is_linux() or is_macOS(): if is_linux(): ldflags = ["-Wl,-Bsymbolic-functions", "-Wl,-z,relro", "-Wl,-z,now", "-Wl,-z,noexecstack"] diff --git a/tools/ci_build/github/azure-pipelines/android-arm64-v8a-QNN-crosscompile-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/android-arm64-v8a-QNN-crosscompile-ci-pipeline.yml index 
51a506bfc289..f488398293b7 100644 --- a/tools/ci_build/github/azure-pipelines/android-arm64-v8a-QNN-crosscompile-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/android-arm64-v8a-QNN-crosscompile-ci-pipeline.yml @@ -66,7 +66,7 @@ jobs: - script: | set -e -x rm -rf /tmp/scripts - cp -r tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts /tmp + cp -r tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts /tmp /tmp/scripts/install_protobuf.sh -p $(Build.BinariesDirectory)/installed -d cmake/deps.txt python3 tools/ci_build/build.py \ --config Release \ diff --git a/tools/ci_build/github/azure-pipelines/bigmodels-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/bigmodels-ci-pipeline.yml index c324236d3078..01eddb4d9c52 100644 --- a/tools/ci_build/github/azure-pipelines/bigmodels-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/bigmodels-ci-pipeline.yml @@ -40,12 +40,6 @@ parameters: resources: repositories: - - repository: manylinux - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 - - repository: LLaMa2Onnx type: Github endpoint: Microsoft @@ -55,7 +49,7 @@ resources: variables: - template: templates/common-variables.yml - name: docker_base_image - value: nvidia/cuda:11.8.0-cudnn8-devel-ubi8 + value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1 - name: linux_trt_version value: 10.0.1.6-1.cuda11.8 - name: Repository @@ -112,7 +106,7 @@ stages: inputs: script: | mkdir -p $HOME/.onnx - docker run -e CFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -fstack-clash-protection -fcf-protection -O3 -Wl,--strip-all" -e CXXFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -fstack-clash-protection -fcf-protection -O3 -Wl,--strip-all" --rm \ + docker run -e --rm \ --volume /data/onnx:/data/onnx:ro \ --volume 
$(Build.SourcesDirectory):/onnxruntime_src \ --volume $(Build.BinariesDirectory):/build \ diff --git a/tools/ci_build/github/azure-pipelines/c-api-noopenmp-packaging-pipelines.yml b/tools/ci_build/github/azure-pipelines/c-api-noopenmp-packaging-pipelines.yml index 4f645085c290..a0915d82c6c3 100644 --- a/tools/ci_build/github/azure-pipelines/c-api-noopenmp-packaging-pipelines.yml +++ b/tools/ci_build/github/azure-pipelines/c-api-noopenmp-packaging-pipelines.yml @@ -88,11 +88,6 @@ variables: value: nvidia/cuda:11.8.0-cudnn8-devel-ubi8 ${{ if eq(parameters.CudaVersion, '12.2') }}: value: nvidia/cuda:12.2.2-cudnn8-devel-ubi8 -- name: linux_trt_version - ${{ if eq(parameters.CudaVersion, '11.8') }}: - value: 10.0.1.6-1.cuda11.8 - ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: 10.0.1.6-1.cuda12.4 - name: win_trt_version ${{ if eq(parameters.CudaVersion, '11.8') }}: value: 11.8 @@ -111,54 +106,18 @@ variables: value: $(Agent.TempDirectory)\v12.2 stages: -- stage: Setup - jobs: - - job: Set_Variables - pool: - vmImage: ubuntu-latest - steps: - - checkout: none - - task: mspremier.PostBuildCleanup.PostBuildCleanup-task.PostBuildCleanup@3 - displayName: 'Clean Agent Directories' - condition: always() - - bash: | - # Do not output ##vso[] commands with `set -x` or they may be parsed again and include a trailing quote. 
- set +x - if [[ "${{ parameters.IsReleaseBuild }}" = True && "${{ parameters.PreReleaseVersionSuffixString }}" != "none" ]]; then - if [[ "${{ parameters.PreReleaseVersionSuffixNumber }}" -eq 0 ]]; then - echo "##vso[task.setvariable variable=ReleaseVersionSuffix;isOutput=true]-${{ parameters.PreReleaseVersionSuffixString }}" - else - echo "##vso[task.setvariable variable=ReleaseVersionSuffix;isOutput=true]-${{ parameters.PreReleaseVersionSuffixString }}.${{ parameters.PreReleaseVersionSuffixNumber }}" - fi - else - echo "##vso[task.setvariable variable=ReleaseVersionSuffix;isOutput=true]" - fi - name: Set_Release_Version_Suffix - - script: | - # Extracting hours and minutes - date=$(date +'%Y%m%d') - # Set the hhmm value as a pipeline variable - echo "##vso[task.setvariable variable=BuildDate;isOutput=true]$date" - displayName: 'Set Start Date as Variable' - name: Set_Build_Date - - - script: | - # Extracting hours and minutes - hhmm=$(date +'%H%M') - # Set the hhmm value as a pipeline variable - echo "##vso[task.setvariable variable=BuildTime;isOutput=true]$hhmm" - displayName: 'Set Start Time as Variable' - name: Set_Build_Time - - template: templates/component-governance-component-detection-steps.yml - parameters : - condition : 'succeeded' +- template: stages/set_packaging_variables_stage.yml + parameters: + IsReleaseBuild: ${{ parameters.IsReleaseBuild }} + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} - stage: Debug dependsOn: Setup jobs: - job: D1 pool: - vmImage: ubuntu-latest + name: 'onnxruntime-Ubuntu2204-AMD-CPU' variables: MyVar: $[stageDependencies.Setup.Set_Variables.outputs['Set_Release_Version_Suffix.ReleaseVersionSuffix']] BuildDate: $[stageDependencies.Setup.Set_Variables.outputs['Set_Build_Date.BuildDate']] @@ -175,35 +134,7 @@ stages: parameters : condition : 'succeeded' -- stage: Download_Java_Tools - dependsOn: [] - jobs: - - 
job: Download_Java_Tools - pool: - vmImage: ubuntu-latest - steps: - - checkout: none - - task: mspremier.PostBuildCleanup.PostBuildCleanup-task.PostBuildCleanup@3 - displayName: 'Clean Agent Directories' - condition: always() - - task: CmdLine@2 - displayName: Download Java Tools - inputs: - script: | - mkdir -p java-tools - pushd java-tools - wget --tries=3 https://oss.sonatype.org/service/local/repositories/releases/content/org/junit/platform/junit-platform-console-standalone/1.6.2/junit-platform-console-standalone-1.6.2.jar -P ./ - wget --tries=3 https://oss.sonatype.org/service/local/repositories/releases/content/com/google/protobuf/protobuf-java/3.21.7/protobuf-java-3.21.7.jar -P ./ - popd - workingDirectory: '$(Agent.TempDirectory)' - - task: PublishPipelineArtifact@1 - displayName: 'Publish Pipeline Java Tools Artifact' - inputs: - targetPath: '$(Agent.TempDirectory)/java-tools' - artifact: 'onnxruntime-java-tools' - - template: templates/component-governance-component-detection-steps.yml - parameters : - condition : 'succeeded' +- template: stages/download-java-tools-stage.yml - template: templates/c-api-cpu.yml parameters: @@ -232,19 +163,33 @@ stages: AdditionalWinBuildFlags: '--enable_onnx_tests --enable_wcos' BuildVariant: 'default' -- template: stages/nuget-linux-cuda-packaging-stage.yml +- template: stages/java-cuda-packaging-stage.yml parameters: CudaVersion: ${{ parameters.CudaVersion }} - docker_base_image: ${{ variables.docker_base_image }} - linux_trt_version: ${{ variables.linux_trt_version }} - buildJava: true - buildNodejs: true + SpecificArtifact: ${{ parameters.SpecificArtifact }} + BuildId: ${{ parameters.BuildId }} + +- template: stages/nuget-combine-cuda-stage.yml + parameters: + DoCompliance: ${{ parameters.DoCompliance }} + CudaVersion: ${{ parameters.CudaVersion }} + docker_base_image: ${{ variables.docker_base_image }} + RunOnnxRuntimeTests: ${{ parameters.RunOnnxRuntimeTests }} + UseIncreasedTimeoutForTests: ${{ 
parameters.UseIncreasedTimeoutForTests }} + win_trt_home: ${{ variables.win_trt_home }} + win_cuda_home: ${{ variables.win_cuda_home }} + DoEsrp: ${{ parameters.DoEsrp }} + IsReleaseBuild: ${{ parameters.IsReleaseBuild }} + buildJava: true + buildNodejs: true + SpecificArtifact: ${{ parameters.SpecificArtifact }} + BuildId: ${{ parameters.BuildId }} # ROCm - stage: Linux_C_API_Packaging_ROCm_x64 dependsOn: [] jobs: - - job: + - job: Linux_C_API_Packaging_ROCm_x64 workspace: clean: all timeoutInMinutes: 120 @@ -272,6 +217,7 @@ stages: --build-arg PREPEND_PATH=/opt/rh/gcc-toolset-12/root/usr/bin: --build-arg LD_LIBRARY_PATH_ARG=/opt/rh/gcc-toolset-12/root/usr/lib64:/opt/rh/gcc-toolset-12/root/usr/lib:/opt/rh/gcc-toolset-12/root/usr/lib64/dyninst:/opt/rh/gcc-toolset-12/root/usr/lib/dyninst:/usr/local/lib64:/usr/local/lib Repository: onnxruntimetrainingrocmbuild-rocm$(RocmVersion) + CheckOutManyLinux: true - template: templates/set-version-number-variables-step.yml @@ -306,29 +252,6 @@ stages: condition: 'succeeded' - template: templates/clean-agent-build-directory-step.yml -- template: stages/java-cuda-packaging-stage.yml - parameters: - CudaVersion: ${{ parameters.CudaVersion }} - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} - -- template: stages/nuget-win-cuda-packaging-stage.yml - parameters: - CudaVersion: ${{ parameters.CudaVersion }} - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} - DoEsrp: ${{ parameters.DoEsrp }} - DoCompliance: ${{ parameters.DoCompliance }} - UseIncreasedTimeoutForTests: ${{ parameters.UseIncreasedTimeoutForTests }} - RunOnnxRuntimeTests: ${{ parameters.RunOnnxRuntimeTests }} - win_trt_home: ${{ variables.win_trt_home }} - win_cuda_home: ${{ variables.win_cuda_home }} - -- template: stages/nuget-combine-cuda-stage.yml - parameters: - DoEsrp: ${{ parameters.DoEsrp }} - DoCompliance: ${{ parameters.DoCompliance }} - IsReleaseBuild: ${{ 
parameters.IsReleaseBuild }} - stage: NuGet_Packaging_ROCm dependsOn: @@ -336,7 +259,7 @@ stages: - Linux_C_API_Packaging_ROCm_x64 condition: succeeded() jobs: - - job: + - job: NuGet_Packaging_ROCm workspace: clean: all # we need to use the 2022 pool to create the nuget package with both pre-net6+Xamarin and net6 targets. @@ -551,50 +474,6 @@ stages: displayName: 'Clean Agent Directories' condition: always() -- template: nuget/templates/test_win.yml - parameters: - AgentPool: 'onnxruntime-Win2022-GPU-A10' - NugetPackageName: 'Microsoft.ML.OnnxRuntime.Gpu' - ArtifactSuffix: 'GPU' - StageSuffix: 'GPU' - Skipx86Tests: 'true' - CudaVersion: ${{ parameters.CudaVersion }} - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} - -- template: nuget/templates/test_win.yml - parameters: - AgentPool: 'onnxruntime-Win2022-GPU-A10' - NugetPackageName: 'Microsoft.ML.OnnxRuntime.Gpu.Windows' - ArtifactSuffix: 'GPU' - StageSuffix: 'GPU' - MoreSuffix: '_Windows' - Skipx86Tests: 'true' - CudaVersion: ${{ parameters.CudaVersion }} - SpecificArtifact: ${{ parameters.SpecificArtifact }} - BuildId: ${{ parameters.BuildId }} - -- template: nuget/templates/test_linux.yml - parameters: - AgentPool: Onnxruntime-Linux-GPU-A10 - ArtifactSuffix: 'GPU' - StageSuffix: 'GPU' - NugetPackageName : 'Microsoft.ML.OnnxRuntime.Gpu' - CudaVersion: ${{ parameters.CudaVersion }} - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - -- template: nuget/templates/test_linux.yml - parameters: - AgentPool: Onnxruntime-Linux-GPU-A10 - ArtifactSuffix: 'GPU' - StageSuffix: 'GPU' - MoreSuffix: '_Linux' - NugetPackageName : 'Microsoft.ML.OnnxRuntime.Gpu.Linux' - CudaVersion: ${{ parameters.CudaVersion }} - SpecificArtifact: ${{ parameters.specificArtifact }} - BuildId: ${{ parameters.BuildId }} - - template: nuget/templates/test_linux.yml parameters: AgentPool: AMD-GPU @@ -680,7 +559,7 @@ stages: - Windows_CI_GPU_DML_Dev_arm64 condition: 
succeeded() jobs: - - job: + - job: NuGet_Packaging_DML workspace: clean: all pool: 'onnxruntime-Win2022-GPU-dml-A10' diff --git a/tools/ci_build/github/azure-pipelines/cuda-packaging-pipeline.yml b/tools/ci_build/github/azure-pipelines/cuda-packaging-pipeline.yml new file mode 100644 index 000000000000..daf95af438d2 --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/cuda-packaging-pipeline.yml @@ -0,0 +1,135 @@ +parameters: + - name: RunOnnxRuntimeTests + displayName: Run Tests? + type: boolean + default: true + + - name: UseIncreasedTimeoutForTests + displayName: Increase timeout for tests? Set it to false if you are doing an Onnx Runtime release. + type: boolean + default: false + + - name: DoCompliance + displayName: Run Compliance Tasks? + type: boolean + default: true + + - name: DoEsrp + displayName: Run code sign tasks? Must be true if you are doing an ONNX Runtime release + type: boolean + default: true + + - name: IsReleaseBuild + displayName: Is a release build? Set it to true if you are doing an ONNX Runtime release. + type: boolean + default: false + + - name: PreReleaseVersionSuffixString + displayName: Suffix added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the type of pre-release package. + type: string + values: + - alpha + - beta + - rc + - none + default: none + + - name: PreReleaseVersionSuffixNumber + displayName: Number added to pre-release package version. Only used if IsReleaseBuild is true. Denotes the sequence of a pre-release package. + type: number + default: 0 + + # these 2 parameters are used for debugging. 
+ - name: SpecificArtifact + displayName: Use Specific Artifact (Debugging only) + type: boolean + default: false + + - name: BuildId + displayName: Pipeline BuildId, you could find it in the URL + type: string + default: '0' + + - name: CudaVersion + displayName: CUDA version + type: string + default: '12.2' + values: + - 11.8 + - 12.2 + +variables: + - name: ReleaseVersionSuffix + value: '' + - name: docker_base_image + ${{ if eq(parameters.CudaVersion, '11.8') }}: + value: nvidia/cuda:11.8.0-cudnn8-devel-ubi8 + ${{ if eq(parameters.CudaVersion, '12.2') }}: + value: nvidia/cuda:12.2.2-cudnn8-devel-ubi8 + - name: win_trt_home + ${{ if eq(parameters.CudaVersion, '11.8') }}: + value: $(Agent.TempDirectory)\TensorRT-10.0.1.6.Windows10.x86_64.cuda-11.8 + ${{ if eq(parameters.CudaVersion, '12.2') }}: + value: $(Agent.TempDirectory)\TensorRT-10.0.1.6.Windows10.x86_64.cuda-12.4 + - name: win_cuda_home + ${{ if eq(parameters.CudaVersion, '11.8') }}: + value: $(Agent.TempDirectory)\v11.8 + ${{ if eq(parameters.CudaVersion, '12.2') }}: + value: $(Agent.TempDirectory)\v12.2 +resources: + repositories: + - repository: onnxruntime-inference-examples # The name used to reference this repository in the checkout step + type: github + endpoint: ort-examples + name: microsoft/onnxruntime-inference-examples + - repository: manylinux + type: Github + endpoint: Microsoft + name: pypa/manylinux + ref: 5eda9aded5462201e6310105728d33016e637ea7 + +stages: + # Set ReleaseVersionSuffix + - template: stages/set_packaging_variables_stage.yml + parameters: + IsReleaseBuild: ${{ parameters.IsReleaseBuild }} + PreReleaseVersionSuffixString: ${{ parameters.PreReleaseVersionSuffixString }} + PreReleaseVersionSuffixNumber: ${{ parameters.PreReleaseVersionSuffixNumber }} + + # this is needed for certain artifacts to be published + - stage: Linux_C_API_Packaging_CPU + dependsOn: [ ] + jobs: + - template: templates/c-api-linux-cpu.yml + parameters: + BaseImage: 'registry.access.redhat.com/ubi8/ubi' + 
OnnxruntimeArch: 'x64' + OnnxruntimeNodejsBindingArch: 'x64' + PoolName: 'onnxruntime-Ubuntu2204-AMD-CPU' + PackageJava: false + PackageNodeJS: false + + # Nuget Packaging + - template: stages/nuget-combine-cuda-stage.yml + parameters: + DoCompliance: ${{ parameters.DoCompliance }} + CudaVersion: ${{ parameters.CudaVersion }} + docker_base_image: ${{ variables.docker_base_image }} + RunOnnxRuntimeTests: ${{ parameters.RunOnnxRuntimeTests }} + UseIncreasedTimeoutForTests: ${{ parameters.UseIncreasedTimeoutForTests }} + win_trt_home: ${{ variables.win_trt_home }} + win_cuda_home: ${{ variables.win_cuda_home }} + DoEsrp: ${{ parameters.DoEsrp }} + IsReleaseBuild: ${{ parameters.IsReleaseBuild }} + buildJava: true + buildNodejs: false + SpecificArtifact: ${{ parameters.SpecificArtifact }} + BuildId: ${{ parameters.BuildId }} + + - template: stages/download-java-tools-stage.yml + + - template: stages/java-cuda-packaging-stage.yml + parameters: + CudaVersion: ${{ parameters.CudaVersion }} + SpecificArtifact: ${{ parameters.SpecificArtifact }} + BuildId: ${{ parameters.BuildId }} diff --git a/tools/ci_build/github/azure-pipelines/linux-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/linux-ci-pipeline.yml index 82e571bf6519..b5155aebc00f 100644 --- a/tools/ci_build/github/azure-pipelines/linux-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/linux-ci-pipeline.yml @@ -1,3 +1,4 @@ + ##### start trigger Don't edit it manually, Please do edit set-trigger-rules.py #### trigger: branches: @@ -27,13 +28,6 @@ pr: - 'onnxruntime/core/providers/js' #### end trigger #### -resources: - repositories: - - repository: manylinux # The name used to reference this repository in the checkout step - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 stages: - stage: x64 dependsOn: [] @@ -58,8 +52,8 @@ stages: - template: templates/get-docker-image-steps.yml parameters: - Dockerfile: 
tools/ci_build/github/linux/docker/inference/x64/default/cpu/Dockerfile - Context: tools/ci_build/github/linux/docker/inference/x64/default/cpu + Dockerfile: tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile + Context: tools/ci_build/github/linux/docker/inference/x86_64/default/cpu DockerBuildArgs: "--build-arg BUILD_UID=$( id -u ) --build-arg BASEIMAGE=registry.access.redhat.com/ubi8/ubi" Repository: onnxruntimecpubuildcentos8x64 @@ -305,10 +299,6 @@ stages: parameters: arch: 'aarch64' machine_pool: 'onnxruntime-linux-ARM64-CPU-2019' - base_image: 'arm64v8/almalinux:8' - devtoolset_rootpath: /opt/rh/gcc-toolset-12/root - ld_library_path_arg: /opt/rh/gcc-toolset-12/root/usr/lib64:/opt/rh/gcc-toolset-12/root/usr/lib:/opt/rh/gcc-toolset-12/root/usr/lib64/dyninst:/opt/rh/gcc-toolset-12/root/usr/lib/dyninst:/usr/local/lib64 - prepend_path: '/opt/rh/gcc-toolset-12/root/usr/bin:' with_cache: true cmake_build_type: Release @@ -319,7 +309,3 @@ stages: parameters: arch: 'aarch64' machine_pool: 'onnxruntime-linux-ARM64-CPU-2019' - base_image: 'arm64v8/almalinux:8' - devtoolset_rootpath: /opt/rh/gcc-toolset-12/root - ld_library_path_arg: /opt/rh/gcc-toolset-12/root/usr/lib64:/opt/rh/gcc-toolset-12/root/usr/lib:/opt/rh/gcc-toolset-12/root/usr/lib64/dyninst:/opt/rh/gcc-toolset-12/root/usr/lib/dyninst:/usr/local/lib64 - prepend_path: '/opt/rh/gcc-toolset-12/root/usr/bin:' diff --git a/tools/ci_build/github/azure-pipelines/linux-cpu-minimal-build-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/linux-cpu-minimal-build-ci-pipeline.yml index bbea7a0d114e..2d3260a13f13 100644 --- a/tools/ci_build/github/azure-pipelines/linux-cpu-minimal-build-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/linux-cpu-minimal-build-ci-pipeline.yml @@ -46,14 +46,6 @@ pr: - BUILD.md - 'js/web' - 'onnxruntime/core/providers/js' -resources: - repositories: - - repository: manylinux - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 
5eda9aded5462201e6310105728d33016e637ea7 - jobs: - job: Linux_CPU_Minimal_Build_E2E timeoutInMinutes: 120 diff --git a/tools/ci_build/github/azure-pipelines/linux-dnnl-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/linux-dnnl-ci-pipeline.yml index 1c6d8bbfe7fb..a64a65622c90 100644 --- a/tools/ci_build/github/azure-pipelines/linux-dnnl-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/linux-dnnl-ci-pipeline.yml @@ -27,14 +27,6 @@ pr: - 'onnxruntime/core/providers/js' #### end trigger #### -resources: - repositories: - - repository: manylinux - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 - jobs: - job: Linux_py_Wheels timeoutInMinutes: 180 diff --git a/tools/ci_build/github/azure-pipelines/linux-gpu-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/linux-gpu-ci-pipeline.yml index aa47c91c71d5..8890a9c4bf56 100644 --- a/tools/ci_build/github/azure-pipelines/linux-gpu-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/linux-gpu-ci-pipeline.yml @@ -45,32 +45,18 @@ parameters: type: string default: '0' -resources: - repositories: - - repository: manylinux - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 - variables: - name: docker_base_image ${{ if eq(parameters.CudaVersion, '11.8') }}: - value: nvidia/cuda:11.8.0-cudnn8-devel-ubi8 - ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: nvidia/cuda:12.2.2-cudnn8-devel-ubi8 - - - name: linux_trt_version - ${{ if eq(parameters.CudaVersion, '11.8') }}: - value: 10.0.1.6-1.cuda11.8 + value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1 ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: 10.0.1.6-1.cuda12.4 + value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20240610.1 - name: Repository ${{ if eq(parameters.CudaVersion, '11.8') }}: - value: 
'onnxruntimecuda11build' + value: 'onnxruntimecuda11manylinuxbuild' ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: 'onnxruntimecuda12build' + value: 'onnxruntimecuda12manylinuxbuild' stages: - stage: Linux_Build @@ -97,12 +83,7 @@ stages: parameters: Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda Context: tools/ci_build/github/linux/docker - DockerBuildArgs: " - --network=host - --build-arg BASEIMAGE=$(docker_base_image) - --build-arg TRT_VERSION=$(linux_trt_version) - --build-arg BUILD_UID=$( id -u ) - " + DockerBuildArgs: "--build-arg BASEIMAGE=$(docker_base_image) --build-arg BUILD_UID=$( id -u )" Repository: $(Repository) - task: Cache@2 @@ -123,7 +104,7 @@ stages: - script: | set -e -x mkdir -p $HOME/.onnx - docker run -e CFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -fstack-clash-protection -fcf-protection -O3 -Wl,--strip-all" -e CXXFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -fstack-clash-protection -fcf-protection -O3 -Wl,--strip-all" --rm \ + docker run --rm \ --volume /data/onnx:/data/onnx:ro \ --volume $(Build.SourcesDirectory):/onnxruntime_src \ --volume $(Build.BinariesDirectory):/build \ @@ -133,39 +114,12 @@ stages: -e ALLOW_RELEASED_ONNX_OPSET_ONLY=0 \ -e NIGHTLY_BUILD \ -e BUILD_BUILDNUMBER \ - -e CCACHE_DIR=/cache \ - $(Repository) \ - /bin/bash -c " - set -ex; \ - env; \ - ccache -s; \ - /opt/python/cp38-cp38/bin/python3 /onnxruntime_src/tools/ci_build/build.py \ - --build_dir /build --cmake_generator Ninja \ - --config Release --update --build \ - --skip_submodule_sync \ - --build_shared_lib \ - --parallel --use_binskim_compliant_compile_flags \ - --build_wheel \ - --enable_onnx_tests --use_cuda --cuda_version=${{parameters.CudaVersion}} --cuda_home=/usr/local/cuda-${{parameters.CudaVersion}} --cudnn_home=/usr/local/cuda-${{parameters.CudaVersion}} \ - --enable_cuda_profiling --enable_cuda_nhwc_ops \ - --enable_pybind --build_java \ - 
--use_cache \ - --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=75 \ - --cmake_extra_defines onnxruntime_BUILD_UNIT_TESTS=ON \ - --cmake_extra_defines onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON; \ - ccache -sv; \ - ccache -z" + -e CCACHE_DIR=/cache -w /onnxruntime_src \ + $(Repository) tools/ci_build/github/linux/build_cuda_ci.sh workingDirectory: $(Build.SourcesDirectory) displayName: Build Onnxruntime - - task: CmdLine@2 - inputs: - script: | - rm -rf $(Build.BinariesDirectory)/Release/onnxruntime $(Build.BinariesDirectory)/Release/pybind11 - rm -f $(Build.BinariesDirectory)/Release/models - find $(Build.BinariesDirectory)/Release/_deps -mindepth 1 ! -regex '^$(Build.BinariesDirectory)/Release/_deps/onnx-src\(/.*\)?' -delete - cd $(Build.BinariesDirectory)/Release - find -executable -type f > $(Build.BinariesDirectory)/Release/perms.txt + - script: $(Build.SourcesDirectory)/tools/ci_build/github/linux/delete_unused_files_before_upload.sh - task: PublishPipelineArtifact@0 displayName: 'Publish Pipeline Artifact' @@ -203,12 +157,7 @@ stages: parameters: Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda Context: tools/ci_build/github/linux/docker - DockerBuildArgs: " - --network=host - --build-arg BASEIMAGE=$(docker_base_image) - --build-arg TRT_VERSION=$(linux_trt_version) - --build-arg BUILD_UID=$( id -u ) - " + DockerBuildArgs: "--build-arg BASEIMAGE=$(docker_base_image) --build-arg BUILD_UID=$( id -u )" Repository: $(Repository) - task: CmdLine@2 diff --git a/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-ci-pipeline.yml index 52a45fd812b8..5f63339fb0d0 100644 --- a/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-ci-pipeline.yml @@ -34,29 +34,19 @@ parameters: values: - 11.8 - 12.2 - - 12.4 -resources: - repositories: - - repository: manylinux - type: Github - 
endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 + variables: - name: docker_base_image ${{ if eq(parameters.CudaVersion, '11.8') }}: - value: nvidia/cuda:11.8.0-cudnn8-devel-ubi8 + value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1 ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: nvidia/cuda:12.2.2-cudnn8-devel-ubi8 - ${{ if eq(parameters.CudaVersion, '12.4') }}: - value: nvidia/cuda:12.4.1-cudnn-devel-ubi8 + value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20240610.1 - name: linux_trt_version ${{ if eq(parameters.CudaVersion, '11.8') }}: value: 10.0.1.6-1.cuda11.8 ${{ if eq(parameters.CudaVersion, '12.2') }}: value: 10.0.1.6-1.cuda12.4 - ${{ if eq(parameters.CudaVersion, '12.4') }}: - value: 10.0.1.6-1.cuda12.4 + jobs: - job: Linux_Build timeoutInMinutes: 180 @@ -99,7 +89,7 @@ jobs: - task: CmdLine@2 inputs: script: | - docker run --gpus all -e CFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -fstack-clash-protection -fcf-protection -O3 -Wl,--strip-all" -e CXXFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -fstack-clash-protection -fcf-protection -O3 -Wl,--strip-all" --rm \ + docker run --gpus all --rm \ --volume /data/onnx:/data/onnx:ro \ --volume $(Build.SourcesDirectory):/onnxruntime_src \ --volume $(Build.BinariesDirectory):/build \ @@ -109,26 +99,8 @@ jobs: -e ALLOW_RELEASED_ONNX_OPSET_ONLY=0 \ -e NIGHTLY_BUILD \ -e BUILD_BUILDNUMBER \ - -e CCACHE_DIR=/cache \ - onnxruntimetensorrt86gpubuild \ - /bin/bash -c " - set -ex; \ - ccache -s; \ - /opt/python/cp38-cp38/bin/python3 /onnxruntime_src/tools/ci_build/build.py \ - --build_dir /build --cmake_generator Ninja \ - --config Release \ - --skip_submodule_sync \ - --build_shared_lib \ - --parallel --use_binskim_compliant_compile_flags \ - --build_wheel \ - --enable_onnx_tests \ - --use_cuda 
--cuda_home=/usr/local/cuda-${{ parameters.CudaVersion }} --cudnn_home=/usr/lib64/ \ - --enable_pybind --build_java \ - --use_tensorrt --tensorrt_home /usr \ - --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=75 \ - --use_cache; \ - ccache -sv; \ - ccache -z" + -e CCACHE_DIR=/cache -w /onnxruntime_src \ + onnxruntimetensorrt86gpubuild tools/ci_build/github/linux/build_tensorrt_ci.sh workingDirectory: $(Build.SourcesDirectory) - template: templates/explicitly-defined-final-tasks.yml diff --git a/tools/ci_build/github/azure-pipelines/nodejs/templates/test_linux.yml b/tools/ci_build/github/azure-pipelines/nodejs/templates/test_linux.yml index 7b03c0e82f4b..1d3e92056ebe 100644 --- a/tools/ci_build/github/azure-pipelines/nodejs/templates/test_linux.yml +++ b/tools/ci_build/github/azure-pipelines/nodejs/templates/test_linux.yml @@ -7,7 +7,7 @@ stages: - Nodejs_Packaging condition: succeeded() jobs: - - job: + - job: Nodejs_Test_${{ parameters.StageSuffix }} workspace: clean: all timeoutInMinutes: 120 diff --git a/tools/ci_build/github/azure-pipelines/nodejs/templates/test_macos.yml b/tools/ci_build/github/azure-pipelines/nodejs/templates/test_macos.yml index f66c7d9938ec..53923e0b4432 100644 --- a/tools/ci_build/github/azure-pipelines/nodejs/templates/test_macos.yml +++ b/tools/ci_build/github/azure-pipelines/nodejs/templates/test_macos.yml @@ -6,7 +6,7 @@ stages: - Nodejs_Packaging condition: succeeded() jobs: - - job: + - job: Nodejs_Test_MacOS_${{ parameters.StageSuffix }} workspace: clean: all timeoutInMinutes: 120 diff --git a/tools/ci_build/github/azure-pipelines/nodejs/templates/test_win.yml b/tools/ci_build/github/azure-pipelines/nodejs/templates/test_win.yml index 9b3c61b2d3d8..667c4f2e70a6 100644 --- a/tools/ci_build/github/azure-pipelines/nodejs/templates/test_win.yml +++ b/tools/ci_build/github/azure-pipelines/nodejs/templates/test_win.yml @@ -7,7 +7,7 @@ stages: - Nodejs_Packaging condition: succeeded() jobs: - - job: + - job: Nodejs_Test_${{ 
parameters.StageSuffix }} workspace: clean: all timeoutInMinutes: 120 diff --git a/tools/ci_build/github/azure-pipelines/npm-packaging-pipeline.yml b/tools/ci_build/github/azure-pipelines/npm-packaging-pipeline.yml index 21fc205c72e8..2ccf3d095e18 100644 --- a/tools/ci_build/github/azure-pipelines/npm-packaging-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/npm-packaging-pipeline.yml @@ -10,10 +10,10 @@ parameters: default: 'nightly (@dev)' variables: - # pipeline should define the following varaibles + # pipeline should define the following variables # ExtraBuildArgs # VersionSuffix - + ComponentDetection.Timeout: 1800 ${{ if eq(parameters.NpmPublish, 'nightly (@dev)') }}: NpmPackagingMode: 'dev' ${{ if eq(parameters.NpmPublish, 'release candidate (@rc)') }}: diff --git a/tools/ci_build/github/azure-pipelines/nuget-cuda-publishing-pipeline.yml b/tools/ci_build/github/azure-pipelines/nuget-cuda-publishing-pipeline.yml index 2801466e5253..b0cc253ae097 100644 --- a/tools/ci_build/github/azure-pipelines/nuget-cuda-publishing-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/nuget-cuda-publishing-pipeline.yml @@ -9,14 +9,22 @@ resources: branch: main parameters: - - name: nightly + - name: isReleaseBuild type: boolean - default: true + default: false + +variables: + - name: ArtifactFeed + ${{ if eq(parameters.isReleaseBuild, false) }}: + value: ort-cuda-12-nightly + ${{ else }}: + value: onnxruntime-cuda-12 stages: - template: stages/nuget-cuda-publishing-stage.yml parameters: - ${{ if ne(parameters.nightly, true) }}: - artifact_feed: onnxruntime-cuda-12 - ${{ else }}: - artifact_feed: ort-cuda-12-nightly \ No newline at end of file + artifact_feed: $(ArtifactFeed) + +- template: stages/java-cuda-publishing-stage.yml + parameters: + artifact_feed: $(ArtifactFeed) diff --git a/tools/ci_build/github/azure-pipelines/nuget/templates/dml-vs-2022.yml b/tools/ci_build/github/azure-pipelines/nuget/templates/dml-vs-2022.yml index cc1e798e6cd2..5994ed8f3bec 
100644 --- a/tools/ci_build/github/azure-pipelines/nuget/templates/dml-vs-2022.yml +++ b/tools/ci_build/github/azure-pipelines/nuget/templates/dml-vs-2022.yml @@ -26,7 +26,7 @@ stages: - stage: ${{ parameters.StageName }} dependsOn: Setup jobs: - - job: + - job: ${{ parameters.StageName }} timeoutInMinutes: 200 strategy: maxParallel: 2 diff --git a/tools/ci_build/github/azure-pipelines/nuget/templates/test_linux.yml b/tools/ci_build/github/azure-pipelines/nuget/templates/test_linux.yml index 58449a9c4466..b9a538383644 100644 --- a/tools/ci_build/github/azure-pipelines/nuget/templates/test_linux.yml +++ b/tools/ci_build/github/azure-pipelines/nuget/templates/test_linux.yml @@ -16,7 +16,7 @@ stages: - NuGet_Packaging_${{ parameters.StageSuffix }} condition: succeeded() jobs: - - job: + - job: NuGet_Test_Linux_${{ parameters.StageSuffix }}${{ parameters.MoreSuffix }} workspace: clean: all timeoutInMinutes: 120 @@ -60,7 +60,7 @@ stages: Context: tools/ci_build/github/linux/docker/ ${{ if eq(parameters.CudaVersion, '12.2') }}: DockerBuildArgs: " - --build-arg BASEIMAGE=nvidia/cuda:12.2.2-cudnn8-devel-ubuntu20.04 + --build-arg BASEIMAGE=nvidia/cuda:12.2.2-devel-ubuntu20.04 --build-arg TRT_VERSION=10.0.1.6-1+cuda12.4 --build-arg BUILD_UID=$( id -u ) " diff --git a/tools/ci_build/github/azure-pipelines/nuget/templates/test_macos.yml b/tools/ci_build/github/azure-pipelines/nuget/templates/test_macos.yml index 4dcec0f8cf3e..c977e17aada9 100644 --- a/tools/ci_build/github/azure-pipelines/nuget/templates/test_macos.yml +++ b/tools/ci_build/github/azure-pipelines/nuget/templates/test_macos.yml @@ -7,7 +7,7 @@ stages: - NuGet_Packaging_${{ parameters.ArtifactSuffix }} condition: succeeded() jobs: - - job: + - job: NuGet_Test_MacOS workspace: clean: all pool: diff --git a/tools/ci_build/github/azure-pipelines/nuget/templates/test_win.yml b/tools/ci_build/github/azure-pipelines/nuget/templates/test_win.yml index 102a037a4a58..c582a836c7db 100644 --- 
a/tools/ci_build/github/azure-pipelines/nuget/templates/test_win.yml +++ b/tools/ci_build/github/azure-pipelines/nuget/templates/test_win.yml @@ -20,7 +20,7 @@ stages: - NuGet_Packaging_${{ parameters.StageSuffix }} condition: succeeded() jobs: - - job: + - job: NuGet_Test_Win_${{ parameters.StageSuffix }}${{ parameters.MoreSuffix }} workspace: clean: all pool: ${{ parameters.AgentPool }} diff --git a/tools/ci_build/github/azure-pipelines/orttraining-linux-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/orttraining-linux-ci-pipeline.yml index d8f02054a321..96e2e0a7580d 100644 --- a/tools/ci_build/github/azure-pipelines/orttraining-linux-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/orttraining-linux-ci-pipeline.yml @@ -27,14 +27,6 @@ pr: - 'onnxruntime/core/providers/js' #### end trigger #### -resources: - repositories: - - repository: manylinux # The name used to reference this repository in the checkout step - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 - jobs: - job: Linux_Build timeoutInMinutes: 180 @@ -61,9 +53,9 @@ jobs: - template: templates/get-docker-image-steps.yml parameters: - Dockerfile: tools/ci_build/github/linux/docker/inference/x64/python/cpu/Dockerfile.manylinux2_28_cpu - Context: tools/ci_build/github/linux/docker/inference/x64/python/cpu - DockerBuildArgs: "--build-arg BUILD_UID=$( id -u ) --build-arg BASEIMAGE=registry.access.redhat.com/ubi8/ubi --build-arg PLATFORM=x86_64 --build-arg PREPEND_PATH=/opt/rh/gcc-toolset-12/root/usr/bin: --build-arg LD_LIBRARY_PATH_ARG=/opt/rh/gcc-toolset-12/root/usr/lib64:/opt/rh/gcc-toolset-12/root/usr/lib:/opt/rh/gcc-toolset-12/root/usr/lib64/dyninst:/opt/rh/gcc-toolset-12/root/usr/lib/dyninst:/usr/local/lib64 --build-arg DEVTOOLSET_ROOTPATH=/opt/rh/gcc-toolset-12/root" + Dockerfile: tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/Dockerfile + Context: tools/ci_build/github/linux/docker/inference/x86_64/python/cpu 
+ DockerBuildArgs: "--build-arg BUILD_UID=$( id -u )" Repository: onnxruntimecpubuildpythonx86_64 - task: Cache@2 @@ -80,6 +72,7 @@ jobs: displayName: 'build' inputs: script: | + set -e -x mkdir -p $HOME/.onnx mkdir -p $(Pipeline.Workspace)/ccache docker run --rm \ diff --git a/tools/ci_build/github/azure-pipelines/orttraining-linux-gpu-ortmodule-distributed-test-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/orttraining-linux-gpu-ortmodule-distributed-test-ci-pipeline.yml index 654bc0921556..2c6b6183a9aa 100644 --- a/tools/ci_build/github/azure-pipelines/orttraining-linux-gpu-ortmodule-distributed-test-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/orttraining-linux-gpu-ortmodule-distributed-test-ci-pipeline.yml @@ -41,6 +41,8 @@ stages: clean: true submodules: recursive + - template: templates/jobs/download_training_test_data.yml + - template: templates/run-docker-build-steps.yml parameters: RunDockerBuildArgs: | @@ -58,10 +60,6 @@ stages: -e DisplayName: 'Build' - - bash: tools/ci_build/github/linux/docker/scripts/training/azure_scale_set_vm_mount_test_data.sh -p $(orttrainingtestdatascus-storage-key) -s "//orttrainingtestdatascus.file.core.windows.net/mnist" -d "/mnist" - displayName: 'Mount MNIST' - condition: succeededOrFailed() - # Entry point for all ORTModule distributed tests # Refer to orttraining/orttraining/test/python/how_to_add_ortmodule_distributed_ci_pipeline_tests.md for guidelines on how to add new tests to this pipeline. 
- script: | @@ -71,7 +69,7 @@ stages: --rm \ --volume $(Build.SourcesDirectory):/onnxruntime_src \ --volume $(Build.BinariesDirectory):/build \ - --volume /mnist:/mnist \ + --volume $(Agent.TempDirectory)/mnist:/mnist \ onnxruntime_ortmodule_distributed_tests_image \ bash -c "rm -rf /build/RelWithDebInfo/onnxruntime/ && python3 -m pip install /build/RelWithDebInfo/dist/onnxruntime*.whl && python3 -m onnxruntime.training.ortmodule.torch_cpp_extensions.install && /build/RelWithDebInfo/launch_test.py --cmd_line_with_args 'python orttraining_ortmodule_distributed_tests.py --mnist /mnist' --cwd /build/RelWithDebInfo" \ displayName: 'Run orttraining_ortmodule_distributed_tests.py' diff --git a/tools/ci_build/github/azure-pipelines/orttraining-pai-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/orttraining-pai-ci-pipeline.yml index 71b224b65964..f18a8960a92b 100644 --- a/tools/ci_build/github/azure-pipelines/orttraining-pai-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/orttraining-pai-ci-pipeline.yml @@ -62,6 +62,7 @@ jobs: --build-arg PREPEND_PATH=/opt/rh/gcc-toolset-12/root/usr/bin: --build-arg LD_LIBRARY_PATH_ARG=/opt/rh/gcc-toolset-12/root/usr/lib64:/opt/rh/gcc-toolset-12/root/usr/lib:/opt/rh/gcc-toolset-12/root/usr/lib64/dyninst:/opt/rh/gcc-toolset-12/root/usr/lib/dyninst:/usr/local/lib64:/usr/local/lib Repository: onnxruntimetrainingrocm-cibuild-rocm$(RocmVersion)-manylinux-build + CheckOutManyLinux: true - task: Cache@2 inputs: diff --git a/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cpu.yml b/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cpu.yml index 4ca122f63955..5fa80bf7ff6d 100644 --- a/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cpu.yml +++ b/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cpu.yml @@ -1,103 +1,23 @@ -trigger: none +parameters: +- name: cmake_build_type + type: string + displayName: 'Linux packages cmake build type. 
Linux Only.' + default: 'Release' + values: + - Debug + - Release + - RelWithDebInfo + - MinSizeRel -resources: - repositories: - - repository: manylinux - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 +trigger: none stages: -- stage: Python_Packaging_Linux_Training_CPU - - jobs: - - job: Linux_Training_CPU_Wheels - timeoutInMinutes: 180 - workspace: - clean: all - pool: onnxruntime-Ubuntu2204-AMD-CPU - - strategy: - matrix: - Python38: - PythonVersion: '3.8' - Python39: - PythonVersion: '3.9' - Python310: - PythonVersion: '3.10' - Python311: - PythonVersion: '3.11' - Python312: - PythonVersion: '3.12' - - steps: - - checkout: self - clean: true - submodules: recursive - - - template: templates/set-python-manylinux-variables-step.yml - - - template: templates/get-docker-image-steps.yml - parameters: - Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu - Context: tools/ci_build/github/linux/docker - DockerBuildArgs: >- - --build-arg PYTHON_VERSION=$(PythonVersion) - --build-arg INSTALL_DEPS_EXTRA_ARGS=-tu - --build-arg BUILD_UID=$(id -u) - Repository: onnxruntimetrainingcpubuild_$(PythonVersion) - - - task: CmdLine@2 - displayName: 'build onnxruntime' - inputs: - script: | - mkdir -p $HOME/.onnx - docker run --rm \ - --volume /data/onnx:/data/onnx:ro \ - --volume $(Build.SourcesDirectory):/onnxruntime_src \ - --volume $(Build.BinariesDirectory):/build \ - --volume /data/models:/build/models:ro \ - --volume $HOME/.onnx:/home/onnxruntimedev/.onnx \ - -e NIGHTLY_BUILD \ - -e BUILD_BUILDNUMBER \ - -e ORT_DISABLE_PYTHON_PACKAGE_LOCAL_VERSION \ - -e DEFAULT_TRAINING_PACKAGE_DEVICE \ - onnxruntimetrainingcpubuild_$(PythonVersion) \ - $(PythonManylinuxDir)/bin/python3 /onnxruntime_src/tools/ci_build/build.py \ - --build_dir /build --cmake_generator Ninja \ - --config Debug Release \ - --skip_submodule_sync \ - --build_shared_lib \ - --parallel --use_binskim_compliant_compile_flags \ - 
--build_wheel \ - --enable_onnx_tests \ - --enable_pybind --enable_training - workingDirectory: $(Build.SourcesDirectory) - - - task: CopyFiles@2 - displayName: 'Copy Python Wheel to: $(Build.ArtifactStagingDirectory)' - inputs: - SourceFolder: '$(Build.BinariesDirectory)' - Contents: 'Release/dist/*.whl' - TargetFolder: '$(Build.ArtifactStagingDirectory)' - - - task: PublishBuildArtifacts@1 - displayName: 'Publish Artifact: ONNXRuntime python wheel and documentation' - inputs: - ArtifactName: onnxruntime_training_cpu - - - template: templates/component-governance-component-detection-steps.yml - parameters: - condition: 'succeeded' - - - template: templates/clean-agent-build-directory-step.yml - - template: templates/py-packaging-stage.yml parameters: build_py_parameters: --enable_training + cmake_build_type: ${{ parameters.cmake_build_type }} enable_linux_gpu: false - enable_linux_cpu: false + enable_linux_cpu: true enable_windows_cpu: true enable_windows_gpu: false enable_mac_cpu: true diff --git a/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cuda.yml b/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cuda.yml index 2c6543247192..be3f67ba450b 100644 --- a/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cuda.yml +++ b/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cuda.yml @@ -1,13 +1,5 @@ trigger: none -resources: - repositories: - - repository: manylinux - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 - parameters: - name: SpecificArtifact displayName: Use Specific Artifact diff --git a/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cuda12.yml b/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cuda12.yml index 8628ae3de4d7..265db420b1af 100644 --- a/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cuda12.yml +++ 
b/tools/ci_build/github/azure-pipelines/orttraining-py-packaging-pipeline-cuda12.yml @@ -1,13 +1,5 @@ trigger: none -resources: - repositories: - - repository: manylinux - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 - stages: - template: templates/py-packaging-training-cuda-stage.yml parameters: diff --git a/tools/ci_build/github/azure-pipelines/publish-nuget.yml b/tools/ci_build/github/azure-pipelines/publish-nuget.yml index 19ede05eb12b..e2c73c52a0bb 100644 --- a/tools/ci_build/github/azure-pipelines/publish-nuget.yml +++ b/tools/ci_build/github/azure-pipelines/publish-nuget.yml @@ -11,7 +11,7 @@ resources: stages: - stage: Publish_NuGet_Package_And_Report jobs: - - job: + - job: Publish_NuGet_Package_And_Report workspace: clean: all variables: diff --git a/tools/ci_build/github/azure-pipelines/py-cuda-packaging-pipeline.yml b/tools/ci_build/github/azure-pipelines/py-cuda-packaging-pipeline.yml index 20646d3ba4a2..3503857a9233 100644 --- a/tools/ci_build/github/azure-pipelines/py-cuda-packaging-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/py-cuda-packaging-pipeline.yml @@ -31,14 +31,6 @@ parameters: type: string default: '0' -resources: - repositories: - - repository: manylinux - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 - stages: - template: stages/py-cuda-packaging-stage.yml parameters: diff --git a/tools/ci_build/github/azure-pipelines/py-cuda-publishing-pipeline.yml b/tools/ci_build/github/azure-pipelines/py-cuda-publishing-pipeline.yml index 7f99f7f803d0..50e0ca3708d2 100644 --- a/tools/ci_build/github/azure-pipelines/py-cuda-publishing-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/py-cuda-publishing-pipeline.yml @@ -1,24 +1,26 @@ +resources: + pipelines: + - pipeline: build + source: 'Python-CUDA-Packaging-Pipeline' + trigger: + branches: + include: + - main + branch: main + parameters: - - name: nightly - 
type: string - default: '1' - - name: build_id - type: string - default: 'latest' - - name: project - type: string - default: 'Lotus' - - name: pipeline - type: string - default: 'Python-CUDA-Packaging-Pipeline' + - name: isReleaseBuild + type: boolean + default: false + +variables: + - name: ArtifactFeed + ${{ if eq(parameters.isReleaseBuild, false) }}: + value: ort-cuda-12-nightly + ${{ else }}: + value: onnxruntime-cuda-12 stages: - template: stages/py-cuda-publishing-stage.yml parameters: - build_id: ${{ parameters.build_id }} - project: ${{ parameters.project }} - pipeline: ${{ parameters.pipeline }} - ${{ if ne(parameters.nightly, '1') }}: - artifact_feed: onnxruntime-cuda-12 - ${{ else }}: - artifact_feed: ort-cuda-12-nightly \ No newline at end of file + artifact_feed: $(ArtifactFeed) \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/py-package-test-pipeline.yml b/tools/ci_build/github/azure-pipelines/py-package-test-pipeline.yml index acec6f501ed2..63e70fa8e648 100644 --- a/tools/ci_build/github/azure-pipelines/py-package-test-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/py-package-test-pipeline.yml @@ -4,13 +4,6 @@ resources: source: 'Python packaging pipeline' trigger: true branch: main # branch to pick the artifact, Used only for manual triggered pipeline runs for testing the pipeline itself - #TODO: Remove the following dependency. Running python tests should not need to use manylinux. 
- repositories: - - repository: manylinux # The name used to reference this repository in the checkout step - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 stages: - stage: Linux_Test_CPU_x86_64_stage @@ -19,10 +12,7 @@ stages: parameters: arch: 'x86_64' machine_pool: 'onnxruntime-Ubuntu2204-AMD-CPU' - base_image: 'registry.access.redhat.com/ubi8/ubi' - devtoolset_rootpath: /opt/rh/gcc-toolset-12/root - ld_library_path_arg: /opt/rh/gcc-toolset-12/root/usr/lib64:/opt/rh/gcc-toolset-12/root/usr/lib:/opt/rh/gcc-toolset-12/root/usr/lib64/dyninst:/opt/rh/gcc-toolset-12/root/usr/lib/dyninst:/usr/local/lib64 - prepend_path: '/opt/rh/gcc-toolset-12/root/usr/bin:' + - stage: Linux_Test_CPU_aarch64_stage dependsOn: [] @@ -31,10 +21,6 @@ stages: parameters: arch: 'aarch64' machine_pool: 'onnxruntime-linux-ARM64-CPU-2019' - base_image: 'arm64v8/almalinux:8' - devtoolset_rootpath: /opt/rh/gcc-toolset-12/root - ld_library_path_arg: /opt/rh/gcc-toolset-12/root/usr/lib64:/opt/rh/gcc-toolset-12/root/usr/lib:/opt/rh/gcc-toolset-12/root/usr/lib64/dyninst:/opt/rh/gcc-toolset-12/root/usr/lib/dyninst:/usr/local/lib64 - prepend_path: '/opt/rh/gcc-toolset-12/root/usr/bin:' - stage: Packages_Somking_Test dependsOn: [] @@ -50,7 +36,7 @@ stages: job_name: Test_LINUX_x86_64_Wheels itemPattern: '*/*manylinux*x86_64.whl' machine_pool: - vmImage: 'ubuntu-22.04' + name: 'onnxruntime-Ubuntu2204-AMD-CPU' # ****The following Stage depend on all previous tags. 
*** @@ -66,9 +52,11 @@ stages: parameters: arch: 'x86_64' machine_pool: 'Onnxruntime-Linux-GPU' - device: 'GPU' python_wheel_suffix: '_gpu' timeout: 480 + docker_base_image: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1 + trt_version: '10.0.1.6-1.cuda11.8' + cuda_version: '11.8' # if final job not extecuted, it will not run nightlly build diff --git a/tools/ci_build/github/azure-pipelines/py-packaging-pipeline.yml b/tools/ci_build/github/azure-pipelines/py-packaging-pipeline.yml index 062a6ca9990c..1273194753ce 100644 --- a/tools/ci_build/github/azure-pipelines/py-packaging-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/py-packaging-pipeline.yml @@ -63,14 +63,6 @@ parameters: trigger: none -resources: - repositories: - - repository: manylinux - type: Github - endpoint: Microsoft - name: pypa/manylinux - ref: 5eda9aded5462201e6310105728d33016e637ea7 - stages: - template: templates/py-packaging-stage.yml parameters: diff --git a/tools/ci_build/github/azure-pipelines/stages/download-java-tools-stage.yml b/tools/ci_build/github/azure-pipelines/stages/download-java-tools-stage.yml new file mode 100644 index 000000000000..40a4885e6c18 --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/stages/download-java-tools-stage.yml @@ -0,0 +1,30 @@ +stages: +- stage: Download_Java_Tools + dependsOn: [] + jobs: + - job: Download_Java_Tools + pool: + name: 'onnxruntime-Ubuntu2204-AMD-CPU' + steps: + - checkout: none + - task: mspremier.PostBuildCleanup.PostBuildCleanup-task.PostBuildCleanup@3 + displayName: 'Clean Agent Directories' + condition: always() + - task: CmdLine@2 + displayName: Download Java Tools + inputs: + script: | + mkdir -p java-tools + pushd java-tools + wget --tries=3 https://oss.sonatype.org/service/local/repositories/releases/content/org/junit/platform/junit-platform-console-standalone/1.6.2/junit-platform-console-standalone-1.6.2.jar -P ./ + wget --tries=3 
https://oss.sonatype.org/service/local/repositories/releases/content/com/google/protobuf/protobuf-java/3.21.7/protobuf-java-3.21.7.jar -P ./ + popd + workingDirectory: '$(Agent.TempDirectory)' + - task: PublishPipelineArtifact@1 + displayName: 'Publish Pipeline Java Tools Artifact' + inputs: + targetPath: '$(Agent.TempDirectory)/java-tools' + artifact: 'onnxruntime-java-tools' + - template: ../templates/component-governance-component-detection-steps.yml + parameters : + condition : 'succeeded' \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/stages/java-cuda-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/stages/java-cuda-packaging-stage.yml index 8c81972d607e..22264fc670cf 100644 --- a/tools/ci_build/github/azure-pipelines/stages/java-cuda-packaging-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/java-cuda-packaging-stage.yml @@ -7,9 +7,9 @@ parameters: type: string stages: -- stage: Java_GPU_Packaging +- stage: Jar_Packaging_GPU dependsOn: - - Linux_C_API_Packaging_Combined_CUDA + - Linux_C_API_Packaging_GPU - Windows_Packaging_CUDA - Windows_Packaging_TensorRT - Download_Java_Tools @@ -81,7 +81,7 @@ stages: Jar_Packaging_GPU workspace: clean: all - pool: 'onnxruntime-Win2022-GPU-T4' + pool: 'onnxruntime-Win2022-GPU-A10' timeoutInMinutes: 60 variables: - name: runCodesignValidationInjection @@ -139,9 +139,9 @@ stages: value: false - name: docker_base_image ${{ if eq(parameters.CudaVersion, '11.8') }}: - value: nvidia/cuda:11.8.0-cudnn8-devel-ubi8 + value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1 ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: nvidia/cuda:12.2.2-cudnn8-devel-ubi8 + value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20240610.1 timeoutInMinutes: 60 steps: diff --git a/tools/ci_build/github/azure-pipelines/stages/java-cuda-publishing-stage.yml 
b/tools/ci_build/github/azure-pipelines/stages/java-cuda-publishing-stage.yml new file mode 100644 index 000000000000..70d92286b396 --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/stages/java-cuda-publishing-stage.yml @@ -0,0 +1,56 @@ +parameters: +- name: artifact_feed + type: string + +stages: +- stage: JAR_Publishing_GPU + dependsOn: [] + jobs: + - job: JAR_Publishing_GPU + #TD-DO: figure out a way to package nightly jar. Currently Java version are set from VERSION_NUMBER file + condition: ${{ eq(parameters.artifact_feed, 'onnxruntime-cuda-12') }} + workspace: + clean: all + pool: 'onnxruntime-Win-CPU-2022' + variables: + - name: SYSTEM_ACCESSTOKEN + value: $(System.AccessToken) + - name: ADOFeedName + value: ${{ parameters.artifact_feed }} + - name: GDN_CODESIGN_TARGETDIRECTORY + value: '$(Build.BinariesDirectory)/final-package' + - name: artifactName + value: onnxruntime-java-gpu + + steps: + - script: mkdir "$(GDN_CODESIGN_TARGETDIRECTORY)" + + - download: build + displayName: 'Download Pipeline Artifact - $(artifactName)' + artifact: '$(artifactName)' + + - task: CopyFiles@2 + inputs: + SourceFolder: '$(Pipeline.Workspace)/build/$(artifactName)' + Contents: | + onnxruntime_gpu*.jar + onnxruntime_gpu*.pom + TargetFolder: '$(GDN_CODESIGN_TARGETDIRECTORY)' + CleanTargetFolder: true + + - task: PowerShell@2 + displayName: 'Bundle Jar and POM files into a single jar file' + inputs: + targetType: 'inline' + script: | + jar cvf bundle.jar ` + onnxruntime_gpu-*.pom ` + onnxruntime_gpu-*.jar + workingDirectory: '$(GDN_CODESIGN_TARGETDIRECTORY)' + + - task: Gradle@3 + inputs: + gradleWrapperFile: '$(Build.SourcesDirectory)/java/gradlew.bat' + workingDirectory: '$(Build.SourcesDirectory)/java' + tasks: 'publish' + options: '-PadoAccessToken=$(SYSTEM_ACCESSTOKEN) -PadoArtifact=$(GDN_CODESIGN_TARGETDIRECTORY)/bundle.jar' diff --git a/tools/ci_build/github/azure-pipelines/stages/jobs/linux-gpu-tensorrt-packaging-job.yml 
b/tools/ci_build/github/azure-pipelines/stages/jobs/linux-gpu-tensorrt-packaging-job.yml deleted file mode 100644 index 541ab1ac7487..000000000000 --- a/tools/ci_build/github/azure-pipelines/stages/jobs/linux-gpu-tensorrt-packaging-job.yml +++ /dev/null @@ -1,108 +0,0 @@ -parameters: -- name: artifactName - type: string - default: 'onnxruntime-linux-x64-gpu-tensorrt-$(OnnxRuntimeVersion)' - -- name: artifactNameNoVersionString - type: string - default: 'onnxruntime-linux-x64-gpu-tensorrt' - -- name: buildJava - type: boolean - default: false - -- name: buildJavaOption - type: string - default: '' - -- name: buildNodejs - type: boolean - default: true - -- name: buildNodejsOption - type: string - default: '' - -- name: CudaVersion - displayName: CUDA version - type: string - default: '11.8' - values: - - 11.8 - - 12.2 - -jobs: -- job: Linux_C_API_Packaging_TensorRT - dependsOn: [] - workspace: - clean: all - timeoutInMinutes: 180 - pool: 'Onnxruntime-Linux-GPU' - variables: - - name: CUDA_VERSION_MAJOR - ${{ if eq(parameters.CudaVersion, '11.8') }}: - value: '11' - ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: '12' - - name: CUDA_VERSION - value: ${{ parameters.CudaVersion }} - - name: linux_trt_version - ${{ if eq(parameters.CudaVersion, '11.8') }}: - value: 10.0.1.6-1.cuda11.8 - ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: 10.0.1.6-1.cuda12.4 - - name: docker_base_image - ${{ if eq(parameters.CudaVersion, '11.8') }}: - value: nvidia/cuda:11.8.0-cudnn8-devel-ubi8 - ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: nvidia/cuda:12.2.2-cudnn8-devel-ubi8 - steps: - - checkout: self - clean: true - submodules: recursive - - template: ../../templates/get-docker-image-steps.yml - parameters: - Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda - Context: tools/ci_build/github/linux/docker - DockerBuildArgs: " - --network=host - --build-arg BASEIMAGE=${{ variables.docker_base_image }} - --build-arg TRT_VERSION=${{ 
variables.linux_trt_version }} - --build-arg BUILD_UID=$( id -u ) - " - Repository: onnxruntimecuda${{ variables.CUDA_VERSION_MAJOR }}xtrt86build - - template: ../../templates/set-version-number-variables-step.yml - - - script: $(Build.SourcesDirectory)/tools/ci_build/github/linux/build_tensorrt_c_api_package.sh - workingDirectory: $(Build.SourcesDirectory) - displayName: 'Build and Test' - - - ${{ if eq(parameters.buildJava, true) }}: - - template: ../../templates/java-api-artifacts-package-and-publish-steps-posix.yml - parameters: - arch: 'linux-x64' - buildConfig: 'Release' - artifactName: 'onnxruntime-java-linux-x64-tensorrt' - version: '$(OnnxRuntimeVersion)' - libraryName: 'libonnxruntime.so' - nativeLibraryName: 'libonnxruntime4j_jni.so' - - - ${{ if eq(parameters.buildNodejs, 'true') }}: - - template: ../../templates/nodejs-artifacts-package-and-publish-steps-posix.yml - parameters: - arch: 'x64' - os: 'linux' - artifactName: 'drop-onnxruntime-nodejs-linux-x64-tensorrt' - - - template: ../../templates/c-api-artifacts-package-and-publish-steps-posix.yml - parameters: - buildConfig: 'Release' - artifactName: ${{ parameters.artifactName }} - artifactNameNoVersionString: ${{ parameters.artifactNameNoVersionString }} - libraryName: 'libonnxruntime.so.$(OnnxRuntimeVersion)' - - - - template: ../../templates/component-governance-component-detection-steps.yml - parameters: - condition: 'succeeded' - - template: ../../templates/clean-agent-build-directory-step.yml diff --git a/tools/ci_build/github/azure-pipelines/stages/jobs/py-linux-cuda-package-test-job.yml b/tools/ci_build/github/azure-pipelines/stages/jobs/py-linux-cuda-package-test-job.yml index 760f5591fef9..b6943f9e1b77 100644 --- a/tools/ci_build/github/azure-pipelines/stages/jobs/py-linux-cuda-package-test-job.yml +++ b/tools/ci_build/github/azure-pipelines/stages/jobs/py-linux-cuda-package-test-job.yml @@ -46,7 +46,7 @@ jobs: ${{ if eq(parameters.CudaVersion, '11.8') }}: value: 
nvidia/cuda:11.8.0-cudnn8-devel-ubi8 ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: nvidia/cuda:12.2.2-cudnn8-devel-ubi8 + value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20240610.1 - name: linux_trt_version ${{ if eq(parameters.CudaVersion, '11.8') }}: value: 10.0.1.6-1.cuda11.8 diff --git a/tools/ci_build/github/azure-pipelines/stages/nuget-combine-cuda-stage.yml b/tools/ci_build/github/azure-pipelines/stages/nuget-combine-cuda-stage.yml index 9f5ca3db3170..0d5aa7387b8b 100644 --- a/tools/ci_build/github/azure-pipelines/stages/nuget-combine-cuda-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/nuget-combine-cuda-stage.yml @@ -8,232 +8,99 @@ parameters: - name: IsReleaseBuild type: boolean +- name: CudaVersion + type: string + +- name: docker_base_image + type: string + +- name: win_trt_home + type: string + +- name: win_cuda_home + type: string + +- name: UseIncreasedTimeoutForTests + type: boolean + +- name: RunOnnxRuntimeTests + type: boolean + +- name: buildJava + type: boolean + +- name: buildNodejs + type: boolean + +- name: SpecificArtifact + type: boolean + +- name: BuildId + type: string + stages: -######## Nuget ######## -# Win/Linux CUDA Combined packaging -- stage: NuGet_Packaging_GPU - dependsOn: - - Setup - - Windows_Packaging_CUDA - - Windows_Packaging_TensorRT - - Linux_C_API_Packaging_Combined_CUDA - condition: succeeded() - jobs: - - job: - workspace: - clean: all - pool: 'Onnxruntime-Win-CPU-2022' - variables: - breakCodesignValidationInjection: ${{ parameters.DoEsrp }} - ReleaseVersionSuffix: $[stageDependencies.Setup.Set_Variables.outputs['Set_Release_Version_Suffix.ReleaseVersionSuffix']] - BuildDate: $[format('{0:yyyyMMdd}', pipeline.startTime)] - BuildTime: $[format('{0:HHmm}', pipeline.startTime)] - - steps: - - checkout: self - submodules: true - - template: ../templates/flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Pipeline Artifact - NuGet' - 
ArtifactName: 'onnxruntime-win-x64-cuda' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - - - template: ../templates/flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Pipeline Artifact - NuGet' - ArtifactName: 'onnxruntime-win-x64-tensorrt' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - - - template: ../templates/flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Pipeline Artifact - NuGet' - ArtifactName: 'onnxruntime-linux-x64-cuda' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - - - template: ../templates/flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Pipeline Artifact - NuGet' - ArtifactName: 'onnxruntime-linux-x64-tensorrt' - TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' - - - template: ../templates/flex-downloadPipelineArtifact.yml - parameters: - StepName: 'Download Pipeline Artifact - NuGet' - ArtifactName: 'drop-extra' - TargetPath: '$(Build.BinariesDirectory)/extra-artifact' - - # Reconstruct the build dir - - task: PowerShell@2 - displayName: 'PS: Extract nuget files gpu' - inputs: - targetType: filePath - filePath: $(Build.SourcesDirectory)\tools\ci_build\github\windows\extract_nuget_files_gpu.ps1 - - - script: | - dir - workingDirectory: '$(Build.BinariesDirectory)/nuget-artifact' - displayName: 'List artifacts' - - - script: | - mklink /D /J models C:\local\models - workingDirectory: '$(Build.BinariesDirectory)' - displayName: 'Create models link' - - - task: NuGetToolInstaller@0 - displayName: Use Nuget 6.2.1 - inputs: - versionSpec: 6.2.1 - - - task: PowerShell@2 - displayName: Install .NET 6 workloads - inputs: - targetType: 'inline' - script: | - dotnet workload install android ios macos - workingDirectory: '$(Build.SourcesDirectory)\csharp' - - - task: PowerShell@2 - displayName: Build .NET 6 targets using dotnet - inputs: - targetType: 'inline' - # we don't specify 'Any CPU' as the platform here because if we do it gets added to the output 
path - # e.g. csharp\src\Microsoft.ML.OnnxRuntime\bin\Any CPU\RelWithDebInfo\net6.0-ios\ - # which is inconsistent with the msbuild output path for the pre-.net6 targets - # e.g. csharp\src\Microsoft.ML.OnnxRuntime\bin\RelWithDebInfo\monoandroid11.0 - # and makes it harder to do the packing - # - # 'Any CPU' is the default (first 'mixed' platform specified in the csproj) so this should be fine. - script: | - dotnet build .\src\Microsoft.ML.OnnxRuntime\Microsoft.ML.OnnxRuntime.csproj -p:SelectedTargets=Net6 -p:Configuration=RelWithDebInfo -p:OnnxRuntimeBuildDirectory="$(Build.BinariesDirectory)" -p:OrtPackageId="Microsoft.ML.OnnxRuntime.Gpu" -p:IsReleaseBuild=${{ parameters.IsReleaseBuild }} -p:ReleaseVersionSuffix=$(ReleaseVersionSuffix) - workingDirectory: '$(Build.SourcesDirectory)\csharp' - - - task: MSBuild@1 - displayName: 'Restore NuGet Packages and create project.assets.json for pre-.net6 targets' - inputs: - solution: '$(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.sln' - platform: 'Any CPU' - configuration: RelWithDebInfo - msbuildArguments: '-t:restore -p:SelectedTargets=PreNet6 -p:OrtPackageId="Microsoft.ML.OnnxRuntime.Gpu"' - workingDirectory: '$(Build.SourcesDirectory)\csharp' - - - task: MSBuild@1 - displayName: 'Build C# for pre-.net6 targets' - inputs: - solution: '$(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.sln' - configuration: RelWithDebInfo - platform: 'Any CPU' - msbuildArguments: '-p:SelectedTargets=PreNet6 -p:OnnxRuntimeBuildDirectory="$(Build.BinariesDirectory)" -p:OrtPackageId="Microsoft.ML.OnnxRuntime.Gpu" -p:IsReleaseBuild=${{ parameters.IsReleaseBuild }} -p:ReleaseVersionSuffix=$(ReleaseVersionSuffix)' - workingDirectory: '$(Build.SourcesDirectory)\csharp' - - - template: ../templates/win-esrp-dll.yml - parameters: - FolderPath: '$(Build.SourcesDirectory)\csharp\src\Microsoft.ML.OnnxRuntime\bin\RelWithDebInfo' - DisplayName: 'ESRP - Sign C# dlls' - DoEsrp: ${{ parameters.DoEsrp }} - - - task: MSBuild@1 - displayName: Update 
projects.assets.json with combined list of all target frameworks - inputs: - solution: '$(Build.SourcesDirectory)\csharp\src\Microsoft.ML.OnnxRuntime\Microsoft.ML.OnnxRuntime.csproj' - platform: 'Any CPU' - configuration: RelWithDebInfo - msbuildArguments: '-t:restore -p:SelectedTargets=All -p:OrtPackageId=Microsoft.ML.OnnxRuntime.Gpu' - workingDirectory: '$(Build.SourcesDirectory)\csharp' - - - task: MSBuild@1 - displayName: 'Build Nuget Packages' - inputs: - solution: '$(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.proj' - configuration: RelWithDebInfo - platform: 'Any CPU' - msbuildArguments: '-t:CreatePackage -p:OnnxRuntimeBuildDirectory="$(Build.BinariesDirectory)" -p:OrtPackageId=Microsoft.ML.OnnxRuntime.Gpu -p:IsReleaseBuild=${{ parameters.IsReleaseBuild }} - -p:ReleaseVersionSuffix=$(ReleaseVersionSuffix) -p:CurrentDate=$(BuildDate) -p:CurrentTime=$(BuildTime)' - workingDirectory: '$(Build.SourcesDirectory)\csharp' - - - task: BatchScript@1 - displayName: 'Add TensorRT header file to the native nuGet package' - inputs: - filename: $(Build.SourcesDirectory)\tools\ci_build\github\windows\bundle_nuget_with_native_headers.bat - workingFolder: $(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo - - - task: CopyFiles@2 - displayName: 'Copy nuget packages to: $(Build.ArtifactStagingDirectory)' - inputs: - SourceFolder: '$(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo' - Contents: '*.snupkg' - TargetFolder: '$(Build.ArtifactStagingDirectory)' - - - task: CopyFiles@2 - displayName: 'Copy nuget packages to: $(Build.ArtifactStagingDirectory)' - inputs: - SourceFolder: '$(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo' - Contents: '*.nupkg' - TargetFolder: '$(Build.ArtifactStagingDirectory)' - - - task: CopyFiles@2 - displayName: 'Copy nuget packages to: $(Build.ArtifactStagingDirectory)' - inputs: - SourceFolder: '$(Build.SourcesDirectory)\csharp\src\Microsoft.ML.OnnxRuntime\bin\RelWithDebInfo' - Contents: '*.nupkg' - TargetFolder: 
'$(Build.ArtifactStagingDirectory)' - - - template: ../templates/esrp_nuget.yml - parameters: - DisplayName: 'ESRP - sign NuGet package' - FolderPath: '$(Build.ArtifactStagingDirectory)' - DoEsrp: ${{ parameters.DoEsrp }} - - - template: ../templates/validate-package.yml - parameters: - PackageType: 'nuget' - PackagePath: '$(Build.ArtifactStagingDirectory)' - PlatformsSupported: 'win-x64,linux-x64' - # 1* stands for version number. we use it to filter Gpu.Windows and Gpu.Linux packages - PackageName: 'Microsoft.ML.OnnxRuntime.Gpu.1*nupkg' - VerifyNugetSigning: false - - - template: ../templates/validate-package.yml - parameters: - PackageType: 'nuget' - PackagePath: '$(Build.ArtifactStagingDirectory)' - PackageName: 'Microsoft.ML.OnnxRuntime.Gpu.Windows.*nupkg' - PlatformsSupported: 'win-x64' - VerifyNugetSigning: false - - - template: ../templates/validate-package.yml - parameters: - PackageType: 'nuget' - PackagePath: '$(Build.ArtifactStagingDirectory)' - PackageName: 'Microsoft.ML.OnnxRuntime.Gpu.Linux.*nupkg' - PlatformsSupported: 'linux-x64' - VerifyNugetSigning: false - - - task: MSBuild@1 - displayName: 'Clean C#' - inputs: - solution: '$(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.sln' - platform: 'Any CPU' - configuration: RelWithDebInfo - msbuildArguments: '-t:Clean -p:OnnxRuntimeBuildDirectory="$(Build.BinariesDirectory)" -p:OrtPackageId=Microsoft.ML.OnnxRuntime.Gpu' - workingDirectory: '$(Build.SourcesDirectory)\csharp' - - - task: RoslynAnalyzers@2 - displayName: 'Run Roslyn Analyzers' - inputs: - userProvideBuildInfo: msBuildInfo - msBuildCommandline: '"C:\Program Files\Microsoft Visual Studio\2022\Enterprise\MSBuild\Current\Bin\msbuild.exe" $(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.sln -p:configuration="RelWithDebInfo" -p:Platform="Any CPU" -p:OnnxRuntimeBuildDirectory="$(Build.BinariesDirectory)" -p:OrtPackageId=Microsoft.ML.OnnxRuntime.Gpu' - condition: and(succeeded(), eq('${{ parameters.DoCompliance }}', true)) - - - template: 
../templates/component-governance-component-detection-steps.yml - parameters: - condition: 'succeeded' - - - task: PublishPipelineArtifact@0 - displayName: 'Publish Pipeline NuGet Artifact' - inputs: - artifactName: 'drop-signed-nuget-GPU' - targetPath: '$(Build.ArtifactStagingDirectory)' - - - task: mspremier.PostBuildCleanup.PostBuildCleanup-task.PostBuildCleanup@3 - displayName: 'Clean Agent Directories' - condition: always() \ No newline at end of file +- template: nuget-linux-cuda-packaging-stage.yml + parameters: + CudaVersion: ${{ parameters.CudaVersion }} + docker_base_image: ${{ parameters.docker_base_image }} + buildJava: ${{ parameters.buildJava }} + buildNodejs: ${{ parameters.buildNodejs }} + +- template: nuget-win-cuda-packaging-stage.yml + parameters: + RunOnnxRuntimeTests: ${{ parameters.RunOnnxRuntimeTests }} + UseIncreasedTimeoutForTests: ${{ parameters.UseIncreasedTimeoutForTests }} + CudaVersion: ${{ parameters.CudaVersion }} + win_trt_home: ${{ parameters.win_trt_home }} + win_cuda_home: ${{ parameters.win_cuda_home }} + buildJava: ${{ parameters.buildJava }} + +- template: nuget-cuda-packaging-stage.yml + parameters: + DoCompliance: ${{ parameters.DoCompliance }} + DoEsrp: ${{ parameters.DoEsrp }} + IsReleaseBuild: ${{ parameters.IsReleaseBuild }} + +- template: ../nuget/templates/test_win.yml + parameters: + AgentPool : 'onnxruntime-Win2022-GPU-A10' + NugetPackageName : 'Microsoft.ML.OnnxRuntime.Gpu' + ArtifactSuffix: 'GPU' + StageSuffix: 'GPU' + Skipx86Tests: 'true' + CudaVersion: ${{ parameters.CudaVersion }} + SpecificArtifact: ${{ parameters.SpecificArtifact }} + BuildId: ${{ parameters.BuildId }} + +- template: ../nuget/templates/test_win.yml + parameters: + AgentPool : 'onnxruntime-Win2022-GPU-A10' + NugetPackageName : 'Microsoft.ML.OnnxRuntime.Gpu.Windows' + ArtifactSuffix: 'GPU' + StageSuffix: 'GPU' + MoreSuffix: '_Windows' + Skipx86Tests: 'true' + CudaVersion: ${{ parameters.CudaVersion }} + SpecificArtifact: ${{ 
parameters.SpecificArtifact }} + BuildId: ${{ parameters.BuildId }} + +- template: ../nuget/templates/test_linux.yml + parameters: + AgentPool : Onnxruntime-Linux-GPU-A10 + ArtifactSuffix: 'GPU' + StageSuffix: 'GPU' + NugetPackageName : 'Microsoft.ML.OnnxRuntime.Gpu' + CudaVersion: ${{ parameters.CudaVersion }} + SpecificArtifact: ${{ parameters.specificArtifact }} + BuildId: ${{ parameters.BuildId }} + +- template: ../nuget/templates/test_linux.yml + parameters: + AgentPool : Onnxruntime-Linux-GPU-A10 + ArtifactSuffix: 'GPU' + StageSuffix: 'GPU' + MoreSuffix: '_Linux' + NugetPackageName : 'Microsoft.ML.OnnxRuntime.Gpu.Linux' + CudaVersion: ${{ parameters.CudaVersion }} + SpecificArtifact: ${{ parameters.specificArtifact }} + BuildId: ${{ parameters.BuildId }} \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/stages/nuget-cuda-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/stages/nuget-cuda-packaging-stage.yml new file mode 100644 index 000000000000..6e60cccb60a8 --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/stages/nuget-cuda-packaging-stage.yml @@ -0,0 +1,235 @@ +parameters: +- name: DoCompliance + type: boolean + +- name: DoEsrp + type: boolean + +- name: IsReleaseBuild + type: boolean + +stages: +######## Nuget ######## +# Win/Linux CUDA Combined packaging +- stage: NuGet_Packaging_GPU + dependsOn: + - Setup + - Windows_Packaging_CUDA + - Windows_Packaging_TensorRT + - Linux_C_API_Packaging_GPU +# This is need for Download Linux CustomOp TestData + - Linux_C_API_Packaging_CPU + condition: succeeded() + jobs: + - job: NuGet_Packaging_GPU + workspace: + clean: all + pool: 'Onnxruntime-Win-CPU-2022' + variables: + breakCodesignValidationInjection: ${{ parameters.DoEsrp }} + ReleaseVersionSuffix: $[stageDependencies.Setup.Set_Variables.outputs['Set_Release_Version_Suffix.ReleaseVersionSuffix']] + BuildDate: $[format('{0:yyyyMMdd}', pipeline.startTime)] + BuildTime: $[format('{0:HHmm}', pipeline.startTime)] + + 
steps: + - checkout: self + submodules: true + - template: ../templates/flex-downloadPipelineArtifact.yml + parameters: + StepName: 'Download Pipeline Artifact - NuGet' + ArtifactName: 'onnxruntime-win-x64-cuda' + TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' + + - template: ../templates/flex-downloadPipelineArtifact.yml + parameters: + StepName: 'Download Pipeline Artifact - NuGet' + ArtifactName: 'onnxruntime-win-x64-tensorrt' + TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' + + - template: ../templates/flex-downloadPipelineArtifact.yml + parameters: + StepName: 'Download Pipeline Artifact - NuGet' + ArtifactName: 'onnxruntime-linux-x64-cuda' + TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' + + - template: ../templates/flex-downloadPipelineArtifact.yml + parameters: + StepName: 'Download Pipeline Artifact - NuGet' + ArtifactName: 'onnxruntime-linux-x64-tensorrt' + TargetPath: '$(Build.BinariesDirectory)/nuget-artifact' + + # Reconstruct the build dir + - task: PowerShell@2 + displayName: 'PS: Extract nuget files gpu' + inputs: + targetType: filePath + filePath: $(Build.SourcesDirectory)\tools\ci_build\github\windows\extract_nuget_files_gpu.ps1 + + - script: | + dir + workingDirectory: '$(Build.BinariesDirectory)/nuget-artifact' + displayName: 'List artifacts' + + - script: | + mklink /D /J models C:\local\models + workingDirectory: '$(Build.BinariesDirectory)' + displayName: 'Create models link' + + - task: NuGetToolInstaller@0 + displayName: Use Nuget 6.2.1 + inputs: + versionSpec: 6.2.1 + + - task: PowerShell@2 + displayName: Install .NET 6 workloads + inputs: + targetType: 'inline' + script: | + dotnet workload install android ios macos + workingDirectory: '$(Build.SourcesDirectory)\csharp' + + - task: PowerShell@2 + displayName: Build .NET 6 targets using dotnet + inputs: + targetType: 'inline' + # we don't specify 'Any CPU' as the platform here because if we do it gets added to the output path + # e.g. 
csharp\src\Microsoft.ML.OnnxRuntime\bin\Any CPU\RelWithDebInfo\net6.0-ios\ + # which is inconsistent with the msbuild output path for the pre-.net6 targets + # e.g. csharp\src\Microsoft.ML.OnnxRuntime\bin\RelWithDebInfo\monoandroid11.0 + # and makes it harder to do the packing + # + # 'Any CPU' is the default (first 'mixed' platform specified in the csproj) so this should be fine. + script: | + dotnet build .\src\Microsoft.ML.OnnxRuntime\Microsoft.ML.OnnxRuntime.csproj -p:SelectedTargets=Net6 -p:Configuration=RelWithDebInfo -p:OnnxRuntimeBuildDirectory="$(Build.BinariesDirectory)" -p:OrtPackageId="Microsoft.ML.OnnxRuntime.Gpu" -p:IsReleaseBuild=${{ parameters.IsReleaseBuild }} -p:ReleaseVersionSuffix=$(ReleaseVersionSuffix) + workingDirectory: '$(Build.SourcesDirectory)\csharp' + + - task: MSBuild@1 + displayName: 'Restore NuGet Packages and create project.assets.json for pre-.net6 targets' + inputs: + solution: '$(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.sln' + platform: 'Any CPU' + configuration: RelWithDebInfo + msbuildArguments: '-t:restore -p:SelectedTargets=PreNet6 -p:OrtPackageId="Microsoft.ML.OnnxRuntime.Gpu"' + workingDirectory: '$(Build.SourcesDirectory)\csharp' + + - task: MSBuild@1 + displayName: 'Build C# for pre-.net6 targets' + inputs: + solution: '$(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.sln' + configuration: RelWithDebInfo + platform: 'Any CPU' + msbuildArguments: '-p:SelectedTargets=PreNet6 -p:OnnxRuntimeBuildDirectory="$(Build.BinariesDirectory)" -p:OrtPackageId="Microsoft.ML.OnnxRuntime.Gpu" -p:IsReleaseBuild=${{ parameters.IsReleaseBuild }} -p:ReleaseVersionSuffix=$(ReleaseVersionSuffix)' + workingDirectory: '$(Build.SourcesDirectory)\csharp' + + - template: ../templates/win-esrp-dll.yml + parameters: + FolderPath: '$(Build.SourcesDirectory)\csharp\src\Microsoft.ML.OnnxRuntime\bin\RelWithDebInfo' + DisplayName: 'ESRP - Sign C# dlls' + DoEsrp: ${{ parameters.DoEsrp }} + + - task: MSBuild@1 + displayName: Update 
projects.assets.json with combined list of all target frameworks + inputs: + solution: '$(Build.SourcesDirectory)\csharp\src\Microsoft.ML.OnnxRuntime\Microsoft.ML.OnnxRuntime.csproj' + platform: 'Any CPU' + configuration: RelWithDebInfo + msbuildArguments: '-t:restore -p:SelectedTargets=All -p:OrtPackageId=Microsoft.ML.OnnxRuntime.Gpu' + workingDirectory: '$(Build.SourcesDirectory)\csharp' + + - task: MSBuild@1 + displayName: 'Build Nuget Packages' + inputs: + solution: '$(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.proj' + configuration: RelWithDebInfo + platform: 'Any CPU' + msbuildArguments: '-t:CreatePackage -p:OnnxRuntimeBuildDirectory="$(Build.BinariesDirectory)" -p:OrtPackageId=Microsoft.ML.OnnxRuntime.Gpu -p:IsReleaseBuild=${{ parameters.IsReleaseBuild }} + -p:ReleaseVersionSuffix=$(ReleaseVersionSuffix) -p:CurrentDate=$(BuildDate) -p:CurrentTime=$(BuildTime)' + workingDirectory: '$(Build.SourcesDirectory)\csharp' + + - task: BatchScript@1 + displayName: 'Add TensorRT header file to the native nuGet package' + inputs: + filename: $(Build.SourcesDirectory)\tools\ci_build\github\windows\bundle_nuget_with_native_headers.bat + workingFolder: $(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo + + - task: CopyFiles@2 + displayName: 'Copy nuget packages to: $(Build.ArtifactStagingDirectory)' + inputs: + SourceFolder: '$(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo' + Contents: '*.snupkg' + TargetFolder: '$(Build.ArtifactStagingDirectory)' + + - task: CopyFiles@2 + displayName: 'Copy nuget packages to: $(Build.ArtifactStagingDirectory)' + inputs: + SourceFolder: '$(Build.BinariesDirectory)\RelWithDebInfo\RelWithDebInfo' + Contents: '*.nupkg' + TargetFolder: '$(Build.ArtifactStagingDirectory)' + + - task: CopyFiles@2 + displayName: 'Copy nuget packages to: $(Build.ArtifactStagingDirectory)' + inputs: + SourceFolder: '$(Build.SourcesDirectory)\csharp\src\Microsoft.ML.OnnxRuntime\bin\RelWithDebInfo' + Contents: '*.nupkg' + TargetFolder: 
'$(Build.ArtifactStagingDirectory)' + + - template: ../templates/esrp_nuget.yml + parameters: + DisplayName: 'ESRP - sign NuGet package' + FolderPath: '$(Build.ArtifactStagingDirectory)' + DoEsrp: ${{ parameters.DoEsrp }} + + - template: ../templates/validate-package.yml + parameters: + PackageType: 'nuget' + PackagePath: '$(Build.ArtifactStagingDirectory)' + PlatformsSupported: 'win-x64,linux-x64' + # 1* stands for version number. we use it to filter Gpu.Windows and Gpu.Linux packages + PackageName: 'Microsoft.ML.OnnxRuntime.Gpu.1*nupkg' + VerifyNugetSigning: false + + - template: ../templates/validate-package.yml + parameters: + PackageType: 'nuget' + PackagePath: '$(Build.ArtifactStagingDirectory)' + PackageName: 'Microsoft.ML.OnnxRuntime.Gpu.Windows.*nupkg' + PlatformsSupported: 'win-x64' + VerifyNugetSigning: false + + - template: ../templates/validate-package.yml + parameters: + PackageType: 'nuget' + PackagePath: '$(Build.ArtifactStagingDirectory)' + PackageName: 'Microsoft.ML.OnnxRuntime.Gpu.Linux.*nupkg' + PlatformsSupported: 'linux-x64' + VerifyNugetSigning: false + + - task: MSBuild@1 + displayName: 'Clean C#' + inputs: + solution: '$(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.sln' + platform: 'Any CPU' + configuration: RelWithDebInfo + msbuildArguments: '-t:Clean -p:OnnxRuntimeBuildDirectory="$(Build.BinariesDirectory)" -p:OrtPackageId=Microsoft.ML.OnnxRuntime.Gpu' + workingDirectory: '$(Build.SourcesDirectory)\csharp' + + - task: RoslynAnalyzers@2 + displayName: 'Run Roslyn Analyzers' + inputs: + userProvideBuildInfo: msBuildInfo + msBuildCommandline: '"C:\Program Files\Microsoft Visual Studio\2022\Enterprise\MSBuild\Current\Bin\msbuild.exe" $(Build.SourcesDirectory)\csharp\OnnxRuntime.CSharp.sln -p:configuration="RelWithDebInfo" -p:Platform="Any CPU" -p:OnnxRuntimeBuildDirectory="$(Build.BinariesDirectory)" -p:OrtPackageId=Microsoft.ML.OnnxRuntime.Gpu' + condition: and(succeeded(), eq('${{ parameters.DoCompliance }}', true)) + + - template: 
../templates/component-governance-component-detection-steps.yml + parameters: + condition: 'succeeded' + + - task: PublishPipelineArtifact@0 + displayName: 'Publish Pipeline NuGet Artifact' + inputs: + artifactName: 'drop-signed-nuget-GPU' + targetPath: '$(Build.ArtifactStagingDirectory)' + + - task: mspremier.PostBuildCleanup.PostBuildCleanup-task.PostBuildCleanup@3 + displayName: 'Clean Agent Directories' + condition: always() \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/stages/nuget-cuda-publishing-stage.yml b/tools/ci_build/github/azure-pipelines/stages/nuget-cuda-publishing-stage.yml index 252b96e54bab..b802dd43f905 100644 --- a/tools/ci_build/github/azure-pipelines/stages/nuget-cuda-publishing-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/nuget-cuda-publishing-stage.yml @@ -6,7 +6,7 @@ parameters: stages: - stage: NuGet_Publishing_GPU jobs: - - job: + - job: NuGet_Publishing_GPU workspace: clean: all variables: @@ -64,7 +64,7 @@ stages: inputs: command: push packagesToPush: '$(Build.BinariesDirectory)/nuget-artifact/final-package/*.nupkg' - publishVstsFeed: '2692857e-05ef-43b4-ba9c-ccf1c22c437c/d3daa2b0-aa56-45ac-8145-2c3dc0661c87' + publishVstsFeed: 'PublicPackages/${{ parameters.artifact_feed }}' allowPackageConflicts: true diff --git a/tools/ci_build/github/azure-pipelines/stages/nuget-linux-cuda-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/stages/nuget-linux-cuda-packaging-stage.yml index d42f89b26774..cca53e36ebab 100644 --- a/tools/ci_build/github/azure-pipelines/stages/nuget-linux-cuda-packaging-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/nuget-linux-cuda-packaging-stage.yml @@ -4,24 +4,20 @@ parameters: default: '11.8' - name: docker_base_image type: string -- name: linux_trt_version - type: string - name: buildJava type: boolean - default: false - name: buildNodejs type: boolean - default: false stages: -- stage: Linux_C_API_Packaging_Combined_CUDA +- stage: 
Linux_C_API_Packaging_GPU dependsOn: [] jobs: - job: Linux_C_API_Packaging_CUDA workspace: clean: all timeoutInMinutes: 150 - pool: 'Onnxruntime-Linux-GPU' + pool: 'onnxruntime-Ubuntu2204-AMD-CPU' variables: - name: CUDA_VERSION_MAJOR ${{ if eq(parameters.CudaVersion, '11.8') }}: @@ -30,34 +26,31 @@ stages: value: '12' - name: CUDA_VERSION value: ${{ parameters.CudaVersion }} - - name: docker_base_image - ${{ if eq(parameters.CudaVersion, '11.8') }}: - value: nvidia/cuda:11.8.0-cudnn8-devel-ubi8 - ${{ if eq(parameters.CudaVersion, '12.2') }}: - value: nvidia/cuda:12.2.2-cudnn8-devel-ubi8 steps: - template: ../templates/set-version-number-variables-step.yml - template: ../templates/get-docker-image-steps.yml parameters: - Dockerfile: tools/ci_build/github/linux/docker/inference/x64/default/gpu/Dockerfile - Context: tools/ci_build/github/linux/docker/inference/x64/default/gpu + Dockerfile: tools/ci_build/github/linux/docker/inference/x86_64/default/cuda${{ variables.CUDA_VERSION_MAJOR }}/Dockerfile + Context: tools/ci_build/github/linux/docker/inference/x86_64/default/cuda${{ variables.CUDA_VERSION_MAJOR }} DockerBuildArgs: " --build-arg BUILD_UID=$( id -u ) - --build-arg BASEIMAGE=${{ parameters.docker_base_image }} " Repository: onnxruntimecuda${{ variables.CUDA_VERSION_MAJOR }}build - script: $(Build.SourcesDirectory)/tools/ci_build/github/linux/build_cuda_c_api_package.sh workingDirectory: $(Build.SourcesDirectory) - displayName: 'Build and Test' - - template: ../templates/java-api-artifacts-package-and-publish-steps-posix.yml - parameters: - arch: 'linux-x64' - buildConfig: 'Release' - artifactName: 'onnxruntime-java-linux-x64-cuda' - version: '$(OnnxRuntimeVersion)' - libraryName: 'libonnxruntime.so' - nativeLibraryName: 'libonnxruntime4j_jni.so' + displayName: 'Build CUDA C API Package' + + - ${{ if eq(parameters.buildJava, true) }}: + - template: ../templates/java-api-artifacts-package-and-publish-steps-posix.yml + parameters: + arch: 'linux-x64' + 
buildConfig: 'Release' + artifactName: 'onnxruntime-java-linux-x64-cuda' + version: '$(OnnxRuntimeVersion)' + libraryName: 'libonnxruntime.so' + nativeLibraryName: 'libonnxruntime4j_jni.so' + - template: ../templates/c-api-artifacts-package-and-publish-steps-posix.yml parameters: buildConfig: 'Release' @@ -70,20 +63,80 @@ stages: condition: 'succeeded' - template: ../templates/clean-agent-build-directory-step.yml # Linux CUDA with TensorRT Packaging - - template: jobs/linux-gpu-tensorrt-packaging-job.yml - parameters: - artifactName: 'onnxruntime-linux-x64-tensorrt-$(OnnxRuntimeVersion)' - artifactNameNoVersionString: 'onnxruntime-linux-x64-tensorrt' - buildJava: ${{ parameters.buildJava }} - buildJavaOption: '--build_java' - buildNodejs: ${{ parameters.buildNodejs }} - buildNodejsOption: '--build_nodejs' - CudaVersion: ${{ parameters.CudaVersion }} + - job: Linux_C_API_Packaging_TensorRT + dependsOn: [] + workspace: + clean: all + timeoutInMinutes: 180 + pool: 'onnxruntime-Ubuntu2204-AMD-CPU' + variables: + - name: CUDA_VERSION_MAJOR + ${{ if eq(parameters.CudaVersion, '11.8') }}: + value: '11' + ${{ if eq(parameters.CudaVersion, '12.2') }}: + value: '12' + - name: CUDA_VERSION + value: ${{ parameters.CudaVersion }} + + - name: linux_trt_version + ${{ if eq(parameters.CudaVersion, '11.8') }}: + value: 10.0.1.6-1.cuda11.8 + ${{ if eq(parameters.CudaVersion, '12.2') }}: + value: 10.0.1.6-1.cuda12.4 + steps: + - checkout: self + clean: true + submodules: recursive + - template: ../templates/get-docker-image-steps.yml + parameters: + Dockerfile: tools/ci_build/github/linux/docker/inference/x86_64/default/cuda${{ variables.CUDA_VERSION_MAJOR }}/Dockerfile + Context: tools/ci_build/github/linux/docker/inference/x86_64/default/cuda${{ variables.CUDA_VERSION_MAJOR }} + DockerBuildArgs: " + --build-arg TRT_VERSION=${{ variables.linux_trt_version }} + --build-arg BUILD_UID=$( id -u ) + " + Repository: onnxruntimecuda${{ variables.CUDA_VERSION_MAJOR }}xtrt86build + - 
template: ../templates/set-version-number-variables-step.yml + + - script: $(Build.SourcesDirectory)/tools/ci_build/github/linux/build_tensorrt_c_api_package.sh + workingDirectory: $(Build.SourcesDirectory) + displayName: 'Build TensorRT C API Package' + + - ${{ if eq(parameters.buildJava, true) }}: + - template: ../templates/java-api-artifacts-package-and-publish-steps-posix.yml + parameters: + arch: 'linux-x64' + buildConfig: 'Release' + artifactName: 'onnxruntime-java-linux-x64-tensorrt' + version: '$(OnnxRuntimeVersion)' + libraryName: 'libonnxruntime.so' + nativeLibraryName: 'libonnxruntime4j_jni.so' + + - ${{ if eq(parameters.buildNodejs, 'true') }}: + - template: ../templates/nodejs-artifacts-package-and-publish-steps-posix.yml + parameters: + arch: 'x64' + os: 'linux' + artifactName: 'drop-onnxruntime-nodejs-linux-x64-tensorrt' + + - template: ../templates/c-api-artifacts-package-and-publish-steps-posix.yml + parameters: + buildConfig: 'Release' + artifactName: 'onnxruntime-linux-x64-tensorrt-$(OnnxRuntimeVersion)' + artifactNameNoVersionString: 'onnxruntime-linux-x64-tensorrt' + libraryName: 'libonnxruntime.so.$(OnnxRuntimeVersion)' + + + - template: ../templates/component-governance-component-detection-steps.yml + parameters: + condition: 'succeeded' + - template: ../templates/clean-agent-build-directory-step.yml # Linux CUDA Combined Testing and Publishing +- stage: Linux_Packaging_combined_CUDA + dependsOn: + - Linux_C_API_Packaging_GPU + jobs: - job: Linux_Packaging_combined_CUDA - dependsOn: - - Linux_C_API_Packaging_CUDA - - Linux_C_API_Packaging_TensorRT condition: succeeded() workspace: clean: all @@ -94,38 +147,29 @@ stages: value: '11' ${{ if eq(parameters.CudaVersion, '12.2') }}: value: '12' + - name: linux_trt_version + ${{ if eq(parameters.CudaVersion, '11.8') }}: + value: 10.0.1.6-1.cuda11.8 + ${{ if eq(parameters.CudaVersion, '12.2') }}: + value: 10.0.1.6-1.cuda12.4 steps: - checkout: self # due to checkout multiple repos, the root directory 
is $(Build.SourcesDirectory)/onnxruntime submodules: false - checkout: onnxruntime-inference-examples # due to checkout multiple repos, the root directory is $(Build.SourcesDirectory)/onnxruntime-inference-examples submodules: false - - checkout: manylinux # due to checkout multiple repos, the root directory is $(Build.SourcesDirectory)/manylinux - submodules: false - task: mspremier.PostBuildCleanup.PostBuildCleanup-task.PostBuildCleanup@3 displayName: 'Clean Agent Directories' condition: always() - - script: | - set -e -x - cd $(Build.SourcesDirectory) - mv manylinux onnxruntime - ls - - - template: ../templates/with-container-registry-steps.yml + - template: ../templates/get-docker-image-steps.yml parameters: - Steps: - - script: | - tools/ci_build/get_docker_image.py \ - --dockerfile tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda \ - --context tools/ci_build/github/linux/docker \ - --docker-build-args "--network=host --build-arg BASEIMAGE=${{ parameters.docker_base_image }} --build-arg TRT_VERSION=${{ parameters.linux_trt_version }} --build-arg BUILD_UID=$( id -u )" \ - --container-registry onnxruntimebuildcache \ - --multiple_repos \ - --repository onnxruntimecuda${{ variables.CUDA_VERSION_MAJOR }}xtrt86build - displayName: "Get onnxruntimecuda${{ variables.CUDA_VERSION_MAJOR }}xtrt86build image for tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda" - workingDirectory: $(Build.SourcesDirectory)/onnxruntime - ContainerRegistry: onnxruntimebuildcache + ScriptName: $(Build.SourcesDirectory)/onnxruntime/tools/ci_build/get_docker_image.py + Dockerfile: $(Build.SourcesDirectory)/onnxruntime/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda${{ variables.CUDA_VERSION_MAJOR }}/Dockerfile + Context: $(Build.SourcesDirectory)/onnxruntime/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda${{ variables.CUDA_VERSION_MAJOR }} + DockerBuildArgs: "--build-arg BASEIMAGE=${{ variables.docker_base_image }} --build-arg 
TRT_VERSION=${{ variables.linux_trt_version }} --build-arg BUILD_UID=$( id -u )" + Repository: onnxruntimecuda${{ variables.CUDA_VERSION_MAJOR }}xtrt86build + UpdateDepsTxt: false - template: ../templates/set-version-number-variables-step.yml parameters: diff --git a/tools/ci_build/github/azure-pipelines/stages/nuget-win-cuda-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/stages/nuget-win-cuda-packaging-stage.yml index ad5c41b9fbd1..445066f08995 100644 --- a/tools/ci_build/github/azure-pipelines/stages/nuget-win-cuda-packaging-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/nuget-win-cuda-packaging-stage.yml @@ -34,12 +34,15 @@ parameters: displayName: Specific Artifact's BuildId type: string default: '0' + +- name: buildJava + type: boolean stages: # Windows CUDA without TensorRT Packaging - template: ../templates/win-ci.yml parameters: - ort_build_pool_name: 'onnxruntime-Win2022-GPU-T4' + ort_build_pool_name: 'onnxruntime-Win2022-GPU-A10' DoCompliance: ${{ parameters.DoCompliance }} DoEsrp: ${{ parameters.DoEsrp }} stage_name_suffix: CUDA @@ -49,7 +52,7 @@ stages: CudaVersion: ${{ parameters.CudaVersion }} buildparameter: --use_cuda --cuda_home=${{ parameters.win_cuda_home }} --enable_onnx_tests --enable_wcos --cmake_extra_defines "CMAKE_CUDA_ARCHITECTURES=60;61;70;75;80" runTests: ${{ parameters.RunOnnxRuntimeTests }} - buildJava: true + buildJava: ${{ parameters.buildJava }} java_artifact_id: onnxruntime_gpu UseIncreasedTimeoutForTests: ${{ parameters.UseIncreasedTimeoutForTests }} SpecificArtifact: ${{ parameters.SpecificArtifact }} @@ -57,7 +60,7 @@ stages: # Windows CUDA with TensorRT Packaging - template: ../templates/win-ci.yml parameters: - ort_build_pool_name: 'onnxruntime-Win2022-GPU-T4' + ort_build_pool_name: 'onnxruntime-Win2022-GPU-A10' DoCompliance: ${{ parameters.DoCompliance }} DoEsrp: ${{ parameters.DoEsrp }} stage_name_suffix: TensorRT @@ -67,7 +70,7 @@ stages: packageName: x64-tensorrt buildparameter: --use_tensorrt 
--tensorrt_home=${{ parameters.win_trt_home }} --cuda_home=${{ parameters.win_cuda_home }} --enable_onnx_tests --enable_wcos --cmake_extra_defines "CMAKE_CUDA_ARCHITECTURES=60;61;70;75;80" runTests: ${{ parameters.RunOnnxRuntimeTests }} - buildJava: true + buildJava: ${{ parameters.buildJava }} java_artifact_id: onnxruntime_gpu UseIncreasedTimeoutForTests: ${{ parameters.UseIncreasedTimeoutForTests }} SpecificArtifact: ${{ parameters.SpecificArtifact }} @@ -81,10 +84,10 @@ stages: condition: succeeded() jobs: - - job: + - job: Windows_Packaging_combined_GPU workspace: clean: all - pool: 'onnxruntime-Win2022-GPU-T4' + pool: 'onnxruntime-Win2022-GPU-A10' variables: CUDA_MODULE_LOADINGL: 'LAZY' GRADLE_OPTS: '-Dorg.gradle.daemon=false' diff --git a/tools/ci_build/github/azure-pipelines/stages/py-cuda-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/stages/py-cuda-packaging-stage.yml index d0c3ca2e1eef..01f0337be771 100644 --- a/tools/ci_build/github/azure-pipelines/stages/py-cuda-packaging-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/py-cuda-packaging-stage.yml @@ -78,8 +78,8 @@ stages: cmake_build_type: ${{ parameters.cmake_build_type }} cuda_version: ${{ parameters.cuda_version }} ${{ if eq(parameters.cuda_version, '11.8') }}: - docker_base_image: nvidia/cuda:11.8.0-cudnn8-devel-ubi8 + docker_base_image: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1 trt_version: 10.0.1.6-1.cuda11.8 ${{ if eq(parameters.cuda_version, '12.2') }}: - docker_base_image: nvidia/cuda:12.2.2-cudnn8-devel-ubi8 + docker_base_image: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20240610.1 trt_version: 10.0.1.6-1.cuda12.4 diff --git a/tools/ci_build/github/azure-pipelines/stages/py-cuda-publishing-stage.yml b/tools/ci_build/github/azure-pipelines/stages/py-cuda-publishing-stage.yml index 2a4debcf9fba..85bd5de5b7eb 100644 --- 
a/tools/ci_build/github/azure-pipelines/stages/py-cuda-publishing-stage.yml +++ b/tools/ci_build/github/azure-pipelines/stages/py-cuda-publishing-stage.yml @@ -1,51 +1,30 @@ parameters: - - name: build_id - type: string - - name: project - type: string - - name: pipeline - type: string - - name: artifact_feed - type: string - default: 'onnxruntime-cuda-12' - - name: dependencies - type: string - default: 'none' +- name: artifact_feed + type: string + default: 'onnxruntime-cuda-12' stages: - - stage: Python_Publishing - ${{ if ne(parameters.dependencies, 'none') }}: - dependsOn: ${{ parameters.dependencies }} - ${{ if eq(parameters.dependencies, 'none') }}: - dependsOn: [] - jobs: - - job: - pool: 'onnxruntime-Ubuntu2204-AMD-CPU' - steps: - - checkout: none - - task: DownloadPipelineArtifact@2 - inputs: - artifact: 'onnxruntime_gpu' - targetPath: '$(Build.SourcesDirectory)/onnxruntime-gpu' - ${{ if ne(parameters.build_id, 'latest') }}: - buildType: 'specific' - project: '${{ parameters.project }}' - pipeline: '${{ parameters.pipeline }}' - buildVersionToDownload: 'specific' - buildId: '${{ parameters.build_id }}' - displayName: 'Download Build Artifacts - onnxruntime-gpu' - - task: UsePythonVersion@0 - displayName: 'Use Python 3.x' - - script: 'pip install twine==3.4.2' - displayName: 'Install Twine' - - task: TwineAuthenticate@1 - displayName: 'Twine Authenticate ' - inputs: - artifactFeed: PublicPackages/${{ parameters.artifact_feed }} - - script: 'python -m twine upload -r ${{ parameters.artifact_feed }} --config-file $(PYPIRC_PATH) --non-interactive --skip-existing *.whl' - workingDirectory: '$(Build.SourcesDirectory)/onnxruntime-gpu' - displayName: 'Uploading wheels to ${{ parameters.artifact_feed }}' - retryCountOnTaskFailure: 3 - env: - SYSTEM_ACCESSTOKEN: $(System.AccessToken) +- stage: Python_Publishing_GPU + jobs: + - job: Python_Publishing_GPU + pool: 'onnxruntime-Ubuntu2204-AMD-CPU' + steps: + - checkout: none + - download: build + displayName: 'Download 
Pipeline Artifact - onnxruntime_gpu' + artifact: 'onnxruntime_gpu' + - task: UsePythonVersion@0 + displayName: 'Use Python 3.x' + - script: 'pip install twine==3.4.2' + displayName: 'Install Twine' + - task: TwineAuthenticate@1 + displayName: 'Twine Authenticate ' + inputs: + artifactFeed: PublicPackages/${{ parameters.artifact_feed }} + - script: 'python -m twine upload -r ${{ parameters.artifact_feed }} --config-file $(PYPIRC_PATH) --non-interactive --skip-existing *.whl' + workingDirectory: '$(Pipeline.Workspace)/build/onnxruntime_gpu' + displayName: 'Uploading wheels to ${{ parameters.artifact_feed }}' + retryCountOnTaskFailure: 3 + env: + SYSTEM_ACCESSTOKEN: $(System.AccessToken) diff --git a/tools/ci_build/github/azure-pipelines/stages/set_packaging_variables_stage.yml b/tools/ci_build/github/azure-pipelines/stages/set_packaging_variables_stage.yml new file mode 100644 index 000000000000..3e2b3b585df9 --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/stages/set_packaging_variables_stage.yml @@ -0,0 +1,46 @@ +parameters: + IsReleaseBuild: false + PreReleaseVersionSuffixString: 'none' + PreReleaseVersionSuffixNumber: 0 +stages: +- stage: Setup + jobs: + - job: Set_Variables + pool: + name: 'onnxruntime-Ubuntu2204-AMD-CPU' + steps: + - checkout: none + - task: mspremier.PostBuildCleanup.PostBuildCleanup-task.PostBuildCleanup@3 + displayName: 'Clean Agent Directories' + condition: always() + - bash: | + # Do not output ##vso[] commands with `set -x` or they may be parsed again and include a trailing quote. 
+ set +x + if [[ "${{ parameters.IsReleaseBuild }}" = True && "${{ parameters.PreReleaseVersionSuffixString }}" != "none" ]]; then + if [[ "${{ parameters.PreReleaseVersionSuffixNumber }}" -eq 0 ]]; then + echo "##vso[task.setvariable variable=ReleaseVersionSuffix;isOutput=true]-${{ parameters.PreReleaseVersionSuffixString }}" + else + echo "##vso[task.setvariable variable=ReleaseVersionSuffix;isOutput=true]-${{ parameters.PreReleaseVersionSuffixString }}.${{ parameters.PreReleaseVersionSuffixNumber }}" + fi + else + echo "##vso[task.setvariable variable=ReleaseVersionSuffix;isOutput=true]" + fi + name: Set_Release_Version_Suffix + - script: | + # Extracting hours and minutes + date=$(date +'%Y%m%d') + # Set the hhmm value as a pipeline variable + echo "##vso[task.setvariable variable=BuildDate;isOutput=true]$date" + displayName: 'Set Start Date as Variable' + name: Set_Build_Date + + - script: | + # Extracting hours and minutes + hhmm=$(date +'%H%M') + # Set the hhmm value as a pipeline variable + echo "##vso[task.setvariable variable=BuildTime;isOutput=true]$hhmm" + displayName: 'Set Start Time as Variable' + name: Set_Build_Time + - template: ../templates/component-governance-component-detection-steps.yml + parameters: + condition: 'succeeded' diff --git a/tools/ci_build/github/azure-pipelines/templates/android-binary-size-check-stage.yml b/tools/ci_build/github/azure-pipelines/templates/android-binary-size-check-stage.yml index a96abee85388..acce2a4098ed 100644 --- a/tools/ci_build/github/azure-pipelines/templates/android-binary-size-check-stage.yml +++ b/tools/ci_build/github/azure-pipelines/templates/android-binary-size-check-stage.yml @@ -38,7 +38,7 @@ stages: submodules: none - template: use-android-ndk.yml - + #TODO: use a different docker file since this job doesn't need to rely on manylinux - template: get-docker-image-steps.yml parameters: Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu diff --git 
a/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml b/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml index 509fea45ebe5..b0506e936eb7 100644 --- a/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml +++ b/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml @@ -64,6 +64,7 @@ jobs: mkdir -p $(artifacts_directory) workingDirectory: $(Build.BinariesDirectory) + #TODO: use a different docker file since this job doesn't need to rely on manylinux - template: get-docker-image-steps.yml parameters: Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu diff --git a/tools/ci_build/github/azure-pipelines/templates/c-api-cpu.yml b/tools/ci_build/github/azure-pipelines/templates/c-api-cpu.yml index b103bef31bac..4abd51750755 100644 --- a/tools/ci_build/github/azure-pipelines/templates/c-api-cpu.yml +++ b/tools/ci_build/github/azure-pipelines/templates/c-api-cpu.yml @@ -97,7 +97,7 @@ stages: - stage: iOS_Full_xcframework dependsOn: [] jobs: - - job: + - job: iOS_Full_xcframework workspace: clean: all pool: @@ -208,7 +208,7 @@ stages: - Download_Java_Tools condition: succeeded() jobs: - - job: + - job: Jar_Packaging workspace: clean: all pool: 'onnxruntime-Win-CPU-2022' @@ -298,7 +298,7 @@ stages: - iOS_Full_xcframework condition: succeeded() jobs: - - job: + - job: NuGet_Packaging_CPU workspace: clean: all pool: 'onnxruntime-Win-CPU-2022' @@ -519,11 +519,11 @@ stages: - Windows_CI_GPU_DML_Dev - Windows_CI_GPU_DML_Dev_arm64 - Linux_C_API_Packaging_CPU - - Linux_C_API_Packaging_Combined_CUDA + - Linux_C_API_Packaging_GPU - MacOS_C_API_Package_Publish condition: succeeded() jobs: - - job: + - job: Nodejs_Packaging workspace: clean: all pool: 'onnxruntime-Win-CPU-2022' diff --git a/tools/ci_build/github/azure-pipelines/templates/c-api-linux-cpu.yml b/tools/ci_build/github/azure-pipelines/templates/c-api-linux-cpu.yml index 2da3b8a9bc7b..041ea623ecf6 100644 --- 
a/tools/ci_build/github/azure-pipelines/templates/c-api-linux-cpu.yml +++ b/tools/ci_build/github/azure-pipelines/templates/c-api-linux-cpu.yml @@ -38,9 +38,6 @@ jobs: workspace: clean: all - variables: - - name: skipComponentGovernanceDetection - value: ${{eq('${{parameters.OnnxruntimeNodejsBindingArch}}', 'arm64')}} timeoutInMinutes: 210 pool: ${{parameters.PoolName}} steps: @@ -48,14 +45,22 @@ jobs: clean: true submodules: none - template: set-version-number-variables-step.yml - - template: get-docker-image-steps.yml - parameters: - Dockerfile: tools/ci_build/github/linux/docker/inference/${{parameters.OnnxruntimeArch}}/default/cpu/Dockerfile - Context: tools/ci_build/github/linux/docker/inference/${{parameters.OnnxruntimeArch}}/default/cpu - DockerBuildArgs: "--build-arg BUILD_UID=$( id -u ) --build-arg BASEIMAGE=${{parameters.BaseImage}}" - Repository: onnxruntimecpubuildcentos8${{parameters.OnnxruntimeArch}} - ${{ if eq(parameters.OnnxruntimeArch, 'aarch64') }}: - UpdateDepsTxt: false + - ${{ if eq(parameters.OnnxruntimeArch, 'x64') }}: + - template: get-docker-image-steps.yml + parameters: + Dockerfile: tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile + Context: tools/ci_build/github/linux/docker/inference/x86_64/default/cpu + DockerBuildArgs: "--build-arg BUILD_UID=$( id -u ) --build-arg BASEIMAGE=${{parameters.BaseImage}}" + Repository: onnxruntimecpubuildcentos8${{parameters.OnnxruntimeArch}} + + - ${{ if eq(parameters.OnnxruntimeArch, 'aarch64') }}: + - template: get-docker-image-steps.yml + parameters: + Dockerfile: tools/ci_build/github/linux/docker/inference/aarch64/default/cpu/Dockerfile + Context: tools/ci_build/github/linux/docker/inference/aarch64/default/cpu + DockerBuildArgs: "--build-arg BUILD_UID=$( id -u ) --build-arg BASEIMAGE=${{parameters.BaseImage}}" + Repository: onnxruntimecpubuildcentos8${{parameters.OnnxruntimeArch}} + UpdateDepsTxt: false - task: CmdLine@2 inputs: diff --git 
a/tools/ci_build/github/azure-pipelines/templates/component-governance-component-detection-steps.yml b/tools/ci_build/github/azure-pipelines/templates/component-governance-component-detection-steps.yml index 3d128fdb78ee..a78697498ab9 100644 --- a/tools/ci_build/github/azure-pipelines/templates/component-governance-component-detection-steps.yml +++ b/tools/ci_build/github/azure-pipelines/templates/component-governance-component-detection-steps.yml @@ -12,11 +12,25 @@ steps: - task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 displayName: 'Component Detection' + continueOnError: true condition: or(or(and(eq('${{parameters.condition}}', 'ci_only'), and(succeeded(), in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI', 'Scheduled'))), and(eq('${{parameters.condition}}', 'always'), always())), and(eq('${{parameters.condition}}', 'succeeded'), succeeded())) inputs: - # ignore dmlc-core tracker for its CI, which is not used in onnxruntime build # ignore unit tests in emscripten. emscripten unit tests are not used in onnxruntime build - ignoreDirectories: '$(Build.SourcesDirectory)/cmake/external/emsdk/upstream/emscripten/tests' + # ignore onnx third_party directory. onnx third_party directory is not responsible for onnxruntime build + # because sometime there are multiple repo is checked out in the same pipeline, we also need to add the repo name, such as onnxruntime/ to the ignoreDirectories + # ignore onnxruntime-extensions directory. onnxruntime-extensions directory is not responsible for onnxruntime build + # ignore react_native e2e node_modules directory. react_native e2e node_modules directory is generated by react_native e2e tests + # ignore github directory. github directory is used for github actions, not for onnxruntime package + # ignore onnxruntime-inference-examples directory. 
onnxruntime-inference-examples directory is used for inference examples, not for onnxruntime package + # ignore BinariesDirectory. BinariesDirectory is used for build output, not for onnxruntime package + ignoreDirectories: + '$(Build.Repository.LocalPath)/cmake/external/emsdk/upstream/emscripten/tests, + $(Build.Repository.LocalPath)/cmake/external/onnx/third_party/benchmark, + $(Build.Repository.LocalPath)/cmake/external/onnx/third_party/pybind11, + $(Build.Repository.LocalPath)/cmake/external/onnxruntime-extensions, + $(Build.Repository.LocalPath)/js/react_native/e2e/node_modules, + $(Build.SourcesDirectory)/onnxruntime-inference-examples, + $(Build.BinariesDirectory)' \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/templates/esrp_nuget.yml b/tools/ci_build/github/azure-pipelines/templates/esrp_nuget.yml index b699f2c7f1da..79cceb7a0251 100644 --- a/tools/ci_build/github/azure-pipelines/templates/esrp_nuget.yml +++ b/tools/ci_build/github/azure-pipelines/templates/esrp_nuget.yml @@ -5,27 +5,36 @@ parameters: steps: - ${{ if eq(parameters['DoEsrp'], 'true') }}: - - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@2 - displayName: ${{ parameters.DisplayName }} + - task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5 + displayName: 'ESRP CodeSigning' inputs: - ConnectedServiceName: 'OnnxRuntime CodeSign 20190817' + ConnectedServiceName: 'OnnxrunTimeCodeSign_20240611' + AppRegistrationClientId: '53d54d02-978d-4305-8572-583cf6711c4f' + AppRegistrationTenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47' + AuthAKVName: 'buildkeyvault' + AuthCertName: '53d54d02-SSL-AutoRotate' + AuthSignCertName: '53d54d02-978d-4305-8572-583cf6711c4f' + FolderPath: ${{ parameters.FolderPath }} Pattern: '*.nupkg' + SessionTimeout: 90 + ServiceEndpointUrl: 'https://api.esrp.microsoft.com/api/v2' + MaxConcurrency: 25 signConfigType: inlineSignParams inlineOperation: | - [ - { - "keyCode": "CP-401405", - "operationSetCode": "NuGetSign", - 
"parameters": [ ], - "toolName": "sign", - "toolVersion": "1.0" - }, - { - "keyCode": "CP-401405", - "operationSetCode": "NuGetVerify", - "parameters": [ ], - "toolName": "sign", - "toolVersion": "1.0" - } - ] + [ + { + "keyCode": "CP-401405", + "operationSetCode": "NuGetSign", + "parameters": [ ], + "toolName": "sign", + "toolVersion": "6.2.9304.0" + }, + { + "keyCode": "CP-401405", + "operationSetCode": "NuGetVerify", + "parameters": [ ], + "toolName": "sign", + "toolVersion": "6.2.9304.0" + } + ] diff --git a/tools/ci_build/github/azure-pipelines/templates/final-jar-testing.yml b/tools/ci_build/github/azure-pipelines/templates/final-jar-testing.yml index d618d05d4859..31519a2cef37 100644 --- a/tools/ci_build/github/azure-pipelines/templates/final-jar-testing.yml +++ b/tools/ci_build/github/azure-pipelines/templates/final-jar-testing.yml @@ -21,7 +21,7 @@ stages: dependsOn: Jar_Packaging jobs: - - job: + - job: Final_Jar_Testing_${{parameters.OS}} workspace: clean: all ${{ if eq(parameters.OS, 'MacOS') }}: diff --git a/tools/ci_build/github/azure-pipelines/templates/get-docker-image-steps.yml b/tools/ci_build/github/azure-pipelines/templates/get-docker-image-steps.yml index e4f467da45d5..94cdf042ec62 100644 --- a/tools/ci_build/github/azure-pipelines/templates/get-docker-image-steps.yml +++ b/tools/ci_build/github/azure-pipelines/templates/get-docker-image-steps.yml @@ -16,6 +16,9 @@ parameters: - name: UsePipelineCache type: boolean default: false +- name: CheckOutManyLinux + type: boolean + default: false - name: ScriptName type: string default: "tools/ci_build/get_docker_image.py" @@ -31,7 +34,7 @@ steps: - ${{ if eq(parameters.UpdateDepsTxt, true)}}: - template: download-deps.yml -- ${{ if contains(parameters.Dockerfile, 'manylinux') }}: +- ${{ if eq(parameters.CheckOutManyLinux, true) }}: - checkout: manylinux - script: | set -e -x diff --git a/tools/ci_build/github/azure-pipelines/templates/jobs/download_training_test_data.yml 
b/tools/ci_build/github/azure-pipelines/templates/jobs/download_training_test_data.yml new file mode 100644 index 000000000000..8f6434f7ac40 --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/templates/jobs/download_training_test_data.yml @@ -0,0 +1,8 @@ +steps: + - script: | + azcopy cp --recursive https://lotusscus.blob.core.windows.net/orttrainingtestdatascus/mnist/ $(Agent.TempDirectory) + displayName: 'Download Training Test Data MNIST' + + - script: | + ls -al $(Agent.TempDirectory)/mnist + displayName: 'Print contents of Training Test Data MNIST' diff --git a/tools/ci_build/github/azure-pipelines/templates/jobs/download_win_gpu_library.yml b/tools/ci_build/github/azure-pipelines/templates/jobs/download_win_gpu_library.yml index 9784aaf3bc28..0dd9ffd5282e 100644 --- a/tools/ci_build/github/azure-pipelines/templates/jobs/download_win_gpu_library.yml +++ b/tools/ci_build/github/azure-pipelines/templates/jobs/download_win_gpu_library.yml @@ -35,19 +35,19 @@ steps: - ${{ if eq(parameters.DownloadTRT, true) }}: - ${{ if eq(parameters.CudaVersion, '11.8') }}: - - bash: | - echo "##vso[task.setvariable variable=trtCudaVersion]11.8" + - powershell: | + Write-Host "##vso[task.setvariable variable=trtCudaVersion;]11.8" displayName: Set trtCudaVersion - ${{ if and(eq(parameters.CudaVersion, '12.2'), eq(parameters.TrtVersion, '8.6.1.6')) }}: - - bash: | - echo "##vso[task.setvariable variable=trtCudaVersion]12.0" + - powershell: | + Write-Host "##vso[task.setvariable variable=trtCudaVersion;]12.0" displayName: Set trtCudaVersion - ${{ if and(eq(parameters.CudaVersion, '12.2'), eq(parameters.TrtVersion, '10.0.1.6')) }}: - - bash: | - echo "##vso[task.setvariable variable=trtCudaVersion]12.4" + - powershell: | + Write-Host "##vso[task.setvariable variable=trtCudaVersion;]12.4" displayName: Set trtCudaVersion - - bash: | + - script: | echo $(trtCudaVersion) && echo TensorRT-${{ parameters.TrtVersion }}.Windows10.x86_64.cuda-$(trtCudaVersion) displayName: Get 
trtCudaVersion and Directory Name @@ -63,4 +63,4 @@ steps: inputs: script: | echo %PATH% - displayName: 'Print PATH after download TensorRT' \ No newline at end of file + displayName: 'Print PATH after download TensorRT' diff --git a/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packing-jobs.yml b/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packing-jobs.yml index 5035eb9e5de7..01ec3b5a2f8c 100644 --- a/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packing-jobs.yml +++ b/tools/ci_build/github/azure-pipelines/templates/mac-cpu-packing-jobs.yml @@ -77,14 +77,14 @@ jobs: parameters: WithCache: ${{ parameters.WithCache }} Today: $(TODAY) - AdditionalKey: ' protobuf | "$(Agent.OS)" | $(Build.SourcesDirectory)/cmake/deps.txt, $(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts/install_protobuf.sh' + AdditionalKey: ' protobuf | "$(Agent.OS)" | $(Build.SourcesDirectory)/cmake/deps.txt, $(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_protobuf.sh' CacheDir: $(PROTO_CACHE_DIR) ChangeEveryCommit: false BuildStep: - script: | set -e -x pushd . 
- $(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts/install_protobuf.sh -d $(Build.SourcesDirectory)/cmake/deps.txt -p $(Build.BinariesDirectory)/installed + $(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_protobuf.sh -d $(Build.SourcesDirectory)/cmake/deps.txt -p $(Build.BinariesDirectory)/installed popd export PATH=$(Build.BinariesDirectory)/installed/bin:$PATH export ONNX_ML=1 diff --git a/tools/ci_build/github/azure-pipelines/templates/mac-esrp-dylib.yml b/tools/ci_build/github/azure-pipelines/templates/mac-esrp-dylib.yml index a9a0d87a30e3..aeebf2a39c8e 100644 --- a/tools/ci_build/github/azure-pipelines/templates/mac-esrp-dylib.yml +++ b/tools/ci_build/github/azure-pipelines/templates/mac-esrp-dylib.yml @@ -16,42 +16,28 @@ parameters: default: '*.zip' steps: -- task: EsrpCodeSigning@3 - displayName: ${{ parameters.DisplayName }} - condition: and(succeeded(), eq('${{ parameters.DoEsrp }}', true)) +- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5 + displayName: 'ESRP CodeSigning' inputs: - ConnectedServiceName: 'OnnxRuntime CodeSign 20190817' + ConnectedServiceName: 'OnnxrunTimeCodeSign_20240611' + AppRegistrationClientId: '53d54d02-978d-4305-8572-583cf6711c4f' + AppRegistrationTenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47' + AuthAKVName: 'buildkeyvault' + AuthCertName: '53d54d02-SSL-AutoRotate' + AuthSignCertName: '53d54d02-978d-4305-8572-583cf6711c4f' + FolderPath: ${{ parameters.FolderPath }} - Pattern: ${{ parameters.Pattern }} + Pattern: '*.nupkg' + SessionTimeout: 90 + ServiceEndpointUrl: 'https://api.esrp.microsoft.com/api/v2' + MaxConcurrency: 25 signConfigType: inlineSignParams inlineOperation: | [ { "keyCode": "CP-401337-Apple", "operationSetCode": "MacAppDeveloperSign", - "parameters": [ - { - "parameterName": "OpusName", - "parameterValue": "Microsoft" - }, - { - "parameterName": "OpusInfo", - "parameterValue": 
"http://www.microsoft.com" - }, - { - "parameterName": "PageHash", - "parameterValue": "/NPH" - }, - { - "parameterName": "FileDigest", - "parameterValue": "/fd sha256" - }, - { - "parameterName": "TimeStamp", - "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256" - } - ], "toolName": "sign", - "toolVersion": "1.0" + "toolVersion": "6.2.9304.0" } ] diff --git a/tools/ci_build/github/azure-pipelines/templates/make_java_win_binaries.yml b/tools/ci_build/github/azure-pipelines/templates/make_java_win_binaries.yml index 756a7a48343a..9a666155028c 100644 --- a/tools/ci_build/github/azure-pipelines/templates/make_java_win_binaries.yml +++ b/tools/ci_build/github/azure-pipelines/templates/make_java_win_binaries.yml @@ -3,16 +3,24 @@ parameters: type: string - name: java_artifact_id type: string + - name: buildOnly + type: boolean steps: + - task: CmdLine@2 + displayName: 'Gradle cmakeCheck' + continueOnError: ${{ parameters.buildOnly }} + inputs: + script: | + @echo on + call gradlew.bat cmakeCheck -DcmakeBuildDir=$(Build.BinariesDirectory)\RelWithDebInfo --warning-mode all + workingDirectory: $(Build.SourcesDirectory)\java + - task: CmdLine@2 displayName: 'Add symbols and notices to Java' inputs: script: | @echo on - cd $(Build.SourcesDirectory)\java - call $(Build.SourcesDirectory)\java\gradlew.bat cmakeCheck -DcmakeBuildDir=$(Build.BinariesDirectory)\RelWithDebInfo - if %errorlevel% neq 0 exit /b %errorlevel% cd $(Build.BinariesDirectory)\RelWithDebInfo set NATIVE_FOLDER=$(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}\stage\ai\onnxruntime\native\win-x64 mkdir %NATIVE_FOLDER% diff --git a/tools/ci_build/github/azure-pipelines/templates/ondevice-training-cpu-packaging-pipeline.yml b/tools/ci_build/github/azure-pipelines/templates/ondevice-training-cpu-packaging-pipeline.yml index bfee58e6e5ef..5ab452be2bc1 100644 --- 
a/tools/ci_build/github/azure-pipelines/templates/ondevice-training-cpu-packaging-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/templates/ondevice-training-cpu-packaging-pipeline.yml @@ -116,7 +116,7 @@ stages: - Android_Java_API_AAR_Packaging_Training_Full condition: succeeded() jobs: - - job: + - job: NuGet_Packaging_Training_CPU workspace: clean: all # we need to use the 2022 pool to create the nuget package with both pre-net6+Xamarin and net6 targets. diff --git a/tools/ci_build/github/azure-pipelines/templates/orttraining-linux-gpu-test-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/templates/orttraining-linux-gpu-test-ci-pipeline.yml index 5dc156e30135..f832315c1f0d 100644 --- a/tools/ci_build/github/azure-pipelines/templates/orttraining-linux-gpu-test-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/templates/orttraining-linux-gpu-test-ci-pipeline.yml @@ -6,17 +6,7 @@ parameters: steps: -- bash: tools/ci_build/github/linux/docker/scripts/training/azure_scale_set_vm_mount_test_data.sh -p $(orttrainingtestdatascus-storage-key) -s "//orttrainingtestdatascus.file.core.windows.net/mnist" -d "/mnist" - displayName: 'Mount MNIST' - condition: succeededOrFailed() - -- bash: tools/ci_build/github/linux/docker/scripts/training/azure_scale_set_vm_mount_test_data.sh -p $(orttrainingtestdatascus-storage-key) -s "//orttrainingtestdatascus.file.core.windows.net/bert-data" -d "/bert_data" - displayName: 'Mount bert-data' - condition: succeededOrFailed() - -- bash: tools/ci_build/github/linux/docker/scripts/training/azure_scale_set_vm_mount_test_data.sh -p $(orttrainingtestdatascus-storage-key) -s "//orttrainingtestdatascus.file.core.windows.net/hf-models-cache" -d "/hf_models_cache" - displayName: 'Mount hf-models-cache' - condition: succeededOrFailed() +- template: jobs/download_training_test_data.yml # Entry point for all ORTModule tests # The onnxruntime folder is deleted in the build directory @@ -29,9 +19,7 @@ steps: --rm \ --volume 
$(Build.SourcesDirectory):/onnxruntime_src \ --volume $(Build.BinariesDirectory)/${{ parameters.BuildConfig }}:/build \ - --volume /mnist:/mnist \ - --volume /bert_data:/bert_data \ - --volume /hf_models_cache:/hf_models_cache \ + --volume $(Agent.TempDirectory)/mnist:/mnist \ ${{ parameters.DockerImageTag }} \ bash -c "rm -rf /build/onnxruntime/ && python3 -m pip install /build/dist/onnxruntime*.whl && python3 -m onnxruntime.training.ortmodule.torch_cpp_extensions.install && /build/launch_test.py --cmd_line_with_args 'python orttraining_ortmodule_tests.py --mnist /mnist --bert_data /bert_data/hf_data/glue_data/CoLA/original/raw' --cwd /build" \ displayName: 'Run orttraining_ortmodule_tests.py' diff --git a/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu.yml b/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu.yml index 6e1adba47480..97f95797be1f 100644 --- a/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu.yml +++ b/tools/ci_build/github/azure-pipelines/templates/py-linux-gpu.yml @@ -19,10 +19,7 @@ parameters: - MinSizeRel - name: docker_base_image type: string - default: 'nvidia/cuda:11.8.0-cudnn8-devel-ubi8' - values: - - nvidia/cuda:11.8.0-cudnn8-devel-ubi8 - - nvidia/cuda:12.2.2-cudnn8-devel-ubi8 + - name: trt_version type: string default: '10.0.1.6-1.cuda11.8' @@ -63,15 +60,9 @@ stages: - template: get-docker-image-steps.yml parameters: - Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda - Context: tools/ci_build/github/linux/docker - DockerBuildArgs: " - --network=host - --build-arg BASEIMAGE=${{ parameters.docker_base_image }} - --build-arg TRT_VERSION=${{ parameters.trt_version }} - --build-arg BUILD_UID=$( id -u ) - --build-arg PLATFORM=${{ parameters.arch }} - " + Dockerfile: tools/ci_build/github/linux/docker/inference/${{ parameters.arch }}/python/cuda/Dockerfile + Context: tools/ci_build/github/linux/docker/inference/${{ parameters.arch }}/python/cuda + DockerBuildArgs: "--build-arg 
BASEIMAGE=${{ parameters.docker_base_image }} --build-arg TRT_VERSION=${{ parameters.trt_version }} --build-arg BUILD_UID=$( id -u )" Repository: onnxruntimecuda${{ replace(parameters.cuda_version, '.', '') }}xtrt86build${{ parameters.arch }} diff --git a/tools/ci_build/github/azure-pipelines/templates/py-linux.yml b/tools/ci_build/github/azure-pipelines/templates/py-linux.yml index 2adcbb13dbeb..dd9d2412f8f9 100644 --- a/tools/ci_build/github/azure-pipelines/templates/py-linux.yml +++ b/tools/ci_build/github/azure-pipelines/templates/py-linux.yml @@ -5,18 +5,6 @@ parameters: - name: machine_pool type: string -- name: base_image - type: string - -- name: devtoolset_rootpath - type: string - -- name: ld_library_path_arg - type: string - -- name: prepend_path - type: string - - name: cmake_build_type type: string default: 'Release' @@ -70,9 +58,9 @@ jobs: - template: get-docker-image-steps.yml parameters: - Dockerfile: tools/ci_build/github/linux/docker/inference/x64/python/cpu/Dockerfile.manylinux2_28_cpu - Context: tools/ci_build/github/linux/docker/inference/x64/python/cpu - DockerBuildArgs: "--build-arg POLICY=manylinux_2_28 --build-arg BUILD_UID=$( id -u ) --build-arg BASEIMAGE=${{ parameters.base_image }} --build-arg PLATFORM=${{ parameters.arch }} --build-arg PREPEND_PATH=${{ parameters.prepend_path }} --build-arg LD_LIBRARY_PATH_ARG=${{ parameters.ld_library_path_arg }} --build-arg DEVTOOLSET_ROOTPATH=${{ parameters.devtoolset_rootpath }}" + Dockerfile: tools/ci_build/github/linux/docker/inference/${{ parameters.arch }}/python/cpu/Dockerfile + Context: tools/ci_build/github/linux/docker/inference/${{ parameters.arch }}/python/cpu + DockerBuildArgs: "--build-arg BUILD_UID=$( id -u )" Repository: onnxruntimecpubuildpython${{ parameters.arch }} ${{ if eq(parameters.arch, 'aarch64') }}: UpdateDepsTxt: false diff --git a/tools/ci_build/github/azure-pipelines/templates/py-packaging-linux-test-cpu.yml 
b/tools/ci_build/github/azure-pipelines/templates/py-packaging-linux-test-cpu.yml index cc90085e184d..0c7c356393b5 100644 --- a/tools/ci_build/github/azure-pipelines/templates/py-packaging-linux-test-cpu.yml +++ b/tools/ci_build/github/azure-pipelines/templates/py-packaging-linux-test-cpu.yml @@ -2,18 +2,6 @@ parameters: - name: arch type: string -- name: base_image - type: string - -- name: devtoolset_rootpath - type: string - -- name: ld_library_path_arg - type: string - -- name: prepend_path - type: string - - name: machine_pool type: string @@ -98,9 +86,9 @@ jobs: - template: get-docker-image-steps.yml parameters: - Dockerfile: tools/ci_build/github/linux/docker/inference/x64/python/cpu/Dockerfile.manylinux2_28_cpu - Context: tools/ci_build/github/linux/docker/inference/x64/python/cpu - DockerBuildArgs: "--build-arg POLICY=manylinux_2_28 --build-arg BUILD_UID=$( id -u ) --build-arg BASEIMAGE=${{ parameters.base_image }} --build-arg PLATFORM=${{ parameters.arch }} --build-arg PREPEND_PATH=${{ parameters.prepend_path }} --build-arg LD_LIBRARY_PATH_ARG=${{ parameters.ld_library_path_arg }} --build-arg DEVTOOLSET_ROOTPATH=${{ parameters.devtoolset_rootpath }}" + Dockerfile: tools/ci_build/github/linux/docker/inference/${{ parameters.arch }}/python/cpu/Dockerfile + Context: tools/ci_build/github/linux/docker/inference/${{ parameters.arch }}/python/cpu + DockerBuildArgs: "--build-arg BUILD_UID=$( id -u )" Repository: onnxruntimecpubuildpython${{ parameters.arch }} ${{ if eq(parameters.arch, 'aarch64') }}: UpdateDepsTxt: false diff --git a/tools/ci_build/github/azure-pipelines/templates/py-packaging-linux-test-cuda.yml b/tools/ci_build/github/azure-pipelines/templates/py-packaging-linux-test-cuda.yml index 40904dda0431..3081624225b1 100644 --- a/tools/ci_build/github/azure-pipelines/templates/py-packaging-linux-test-cuda.yml +++ b/tools/ci_build/github/azure-pipelines/templates/py-packaging-linux-test-cuda.yml @@ -2,12 +2,6 @@ parameters: - name: arch type: string -- 
name: device - type: string - values: - - CPU - - GPU - - name: machine_pool type: string @@ -19,6 +13,21 @@ parameters: type: string default: '' +- name: docker_base_image + type: string + +- name: trt_version + type: string + default: '10.0.1.6-1.cuda11.8' + values: + - 10.0.1.6-1.cuda11.8 + - 10.0.1.6-1.cuda12.4 +- name: cuda_version + type: string + default: '11.8' + values: + - 11.8 + - 12.2 # TODO: Ideally it should fetch information from the build that triggers it - name: cmake_build_type @@ -79,18 +88,12 @@ jobs: # GdnBreakPolicy: M365 # GdnBreakPolicyMinSev: Error - - template: get-docker-image-steps.yml - parameters: - Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda - Context: tools/ci_build/github/linux/docker - DockerBuildArgs: " - --network=host - --build-arg BASEIMAGE=nvidia/cuda:11.8.0-cudnn8-devel-ubi8 - --build-arg TRT_VERSION=10.0.1.6-1.cuda11.8 - --build-arg BUILD_UID=$( id -u ) - --build-arg PLATFORM=${{ parameters.arch }} - " - Repository: onnxruntimecuda118xtrt86build${{ parameters.arch }} + - template: get-docker-image-steps.yml + parameters: + Dockerfile: tools/ci_build/github/linux/docker/inference/${{ parameters.arch }}/python/cuda/Dockerfile + Context: tools/ci_build/github/linux/docker/inference/${{ parameters.arch }}/python/cuda + DockerBuildArgs: "--build-arg BASEIMAGE=${{ parameters.docker_base_image }} --build-arg TRT_VERSION=${{ parameters.trt_version }} --build-arg BUILD_UID=$( id -u )" + Repository: onnxruntimecuda${{ replace(parameters.cuda_version, '.', '') }}xtrt86build${{ parameters.arch }} - task: Bash@3 displayName: 'Bash Script' diff --git a/tools/ci_build/github/azure-pipelines/templates/py-packaging-selectable-stage.yml b/tools/ci_build/github/azure-pipelines/templates/py-packaging-selectable-stage.yml index 2e5f6ce8ad22..cc07df59da61 100644 --- a/tools/ci_build/github/azure-pipelines/templates/py-packaging-selectable-stage.yml +++ 
b/tools/ci_build/github/azure-pipelines/templates/py-packaging-selectable-stage.yml @@ -387,7 +387,7 @@ stages: - job: Windows_py_GPU_Wheels workspace: clean: all - pool: 'onnxruntime-Win2022-GPU-T4' + pool: 'onnxruntime-Win2022-GPU-A10' timeoutInMinutes: 300 variables: CUDA_VERSION: '11.8' diff --git a/tools/ci_build/github/azure-pipelines/templates/py-packaging-stage.yml b/tools/ci_build/github/azure-pipelines/templates/py-packaging-stage.yml index b0a66eadfc9c..7e49fcbcd7d2 100644 --- a/tools/ci_build/github/azure-pipelines/templates/py-packaging-stage.yml +++ b/tools/ci_build/github/azure-pipelines/templates/py-packaging-stage.yml @@ -283,7 +283,7 @@ stages: - ${{ if eq(parameters.enable_windows_gpu, true) }}: - template: py-win-gpu.yml parameters: - MACHINE_POOL: 'onnxruntime-Win2022-GPU-T4' + MACHINE_POOL: 'onnxruntime-Win2022-GPU-A10' PYTHON_VERSION: '3.8' EP_BUILD_FLAGS: --use_tensorrt --tensorrt_home="$(Agent.TempDirectory)\TensorRT-10.0.1.6.Windows10.x86_64.cuda-11.8" --cuda_home="$(Agent.TempDirectory)\v11.8" --cmake_extra_defines "CMAKE_CUDA_ARCHITECTURES=52;60;61;70;75;80" ENV_SETUP_SCRIPT: setup_env_gpu.bat @@ -291,7 +291,7 @@ stages: - template: py-win-gpu.yml parameters: - MACHINE_POOL: 'onnxruntime-Win2022-GPU-T4' + MACHINE_POOL: 'onnxruntime-Win2022-GPU-A10' PYTHON_VERSION: '3.9' EP_BUILD_FLAGS: --use_tensorrt --tensorrt_home="$(Agent.TempDirectory)\TensorRT-10.0.1.6.Windows10.x86_64.cuda-11.8" --cuda_home="$(Agent.TempDirectory)\v11.8" --cmake_extra_defines "CMAKE_CUDA_ARCHITECTURES=52;60;61;70;75;80" ENV_SETUP_SCRIPT: setup_env_gpu.bat @@ -299,7 +299,7 @@ stages: - template: py-win-gpu.yml parameters: - MACHINE_POOL: 'onnxruntime-Win2022-GPU-T4' + MACHINE_POOL: 'onnxruntime-Win2022-GPU-A10' PYTHON_VERSION: '3.10' EP_BUILD_FLAGS: --use_tensorrt --tensorrt_home="$(Agent.TempDirectory)\TensorRT-10.0.1.6.Windows10.x86_64.cuda-11.8" --cuda_home="$(Agent.TempDirectory)\v11.8" --cmake_extra_defines "CMAKE_CUDA_ARCHITECTURES=52;60;61;70;75;80" 
ENV_SETUP_SCRIPT: setup_env_gpu.bat @@ -307,7 +307,7 @@ stages: - template: py-win-gpu.yml parameters: - MACHINE_POOL: 'onnxruntime-Win2022-GPU-T4' + MACHINE_POOL: 'onnxruntime-Win2022-GPU-A10' PYTHON_VERSION: '3.11' EP_BUILD_FLAGS: --use_tensorrt --tensorrt_home="$(Agent.TempDirectory)\TensorRT-10.0.1.6.Windows10.x86_64.cuda-11.8" --cuda_home="$(Agent.TempDirectory)\v11.8" --cmake_extra_defines "CMAKE_CUDA_ARCHITECTURES=52;60;61;70;75;80" ENV_SETUP_SCRIPT: setup_env_gpu.bat @@ -315,7 +315,7 @@ stages: - template: py-win-gpu.yml parameters: - MACHINE_POOL: 'onnxruntime-Win2022-GPU-T4' + MACHINE_POOL: 'onnxruntime-Win2022-GPU-A10' PYTHON_VERSION: '3.12' EP_BUILD_FLAGS: --use_tensorrt --tensorrt_home="$(Agent.TempDirectory)\TensorRT-10.0.1.6.Windows10.x86_64.cuda-11.8" --cuda_home="$(Agent.TempDirectory)\v11.8" --cmake_extra_defines "CMAKE_CUDA_ARCHITECTURES=52;60;61;70;75;80" ENV_SETUP_SCRIPT: setup_env_gpu.bat @@ -452,10 +452,6 @@ stages: parameters: arch: 'aarch64' machine_pool: 'onnxruntime-linux-ARM64-CPU-2019' - base_image: 'arm64v8/almalinux:8' - devtoolset_rootpath: /opt/rh/gcc-toolset-12/root - ld_library_path_arg: /opt/rh/gcc-toolset-12/root/usr/lib64:/opt/rh/gcc-toolset-12/root/usr/lib:/opt/rh/gcc-toolset-12/root/usr/lib64/dyninst:/opt/rh/gcc-toolset-12/root/usr/lib/dyninst:/usr/local/lib64 - prepend_path: '/opt/rh/gcc-toolset-12/root/usr/bin:' extra_build_arg: ${{ parameters.build_py_parameters }} cmake_build_type: ${{ parameters.cmake_build_type }} @@ -466,11 +462,7 @@ stages: - template: py-linux.yml parameters: arch: 'x86_64' - machine_pool: 'onnxruntime-Ubuntu2204-AMD-CPU' - base_image: 'registry.access.redhat.com/ubi8/ubi' - devtoolset_rootpath: /opt/rh/gcc-toolset-12/root - ld_library_path_arg: /opt/rh/gcc-toolset-12/root/usr/lib64:/opt/rh/gcc-toolset-12/root/usr/lib:/opt/rh/gcc-toolset-12/root/usr/lib64/dyninst:/opt/rh/gcc-toolset-12/root/usr/lib/dyninst:/usr/local/lib64 - prepend_path: '/opt/rh/gcc-toolset-12/root/usr/bin:' + machine_pool: 
'onnxruntime-Ubuntu2204-AMD-CPU' extra_build_arg: ${{ parameters.build_py_parameters }} cmake_build_type: ${{ parameters.cmake_build_type }} @@ -480,8 +472,11 @@ stages: parameters: arch: 'x86_64' machine_pool: 'onnxruntime-Ubuntu2204-AMD-CPU' + docker_base_image: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1 extra_build_arg: ${{ parameters.build_py_parameters }} cmake_build_type: ${{ parameters.cmake_build_type }} + trt_version: '10.0.1.6-1.cuda11.8' + cuda_version: '11.8' - ${{ if eq(parameters.enable_windows_arm64_qnn, true) }}: - stage: Python_Packaging_Windows_ARM64_QNN diff --git a/tools/ci_build/github/azure-pipelines/templates/py-packaging-training-cuda-stage-steps.yml b/tools/ci_build/github/azure-pipelines/templates/py-packaging-training-cuda-stage-steps.yml index f6b36733ebdd..2b5b11ece417 100644 --- a/tools/ci_build/github/azure-pipelines/templates/py-packaging-training-cuda-stage-steps.yml +++ b/tools/ci_build/github/azure-pipelines/templates/py-packaging-training-cuda-stage-steps.yml @@ -66,11 +66,7 @@ stages: --build-arg OPSET_VERSION=${{ parameters.opset_version }} --build-arg PYTHON_VERSION=${{ parameters.python_version }} --build-arg INSTALL_DEPS_EXTRA_ARGS=-tu - --build-arg BUILD_UID=$(id -u) - --network=host --build-arg POLICY=manylinux_2_28 --build-arg PLATFORM=x86_64 - --build-arg DEVTOOLSET_ROOTPATH=/usr - --build-arg PREPEND_PATH=/usr/local/cuda/bin: - --build-arg LD_LIBRARY_PATH_ARG=/usr/local/lib64 + --build-arg BUILD_UID=$(id -u) Repository: $(Repository) - task: CmdLine@2 @@ -79,7 +75,7 @@ stages: script: | set -e -x mkdir -p $HOME/.onnx - docker run --rm -e CFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -fstack-clash-protection -fcf-protection -O3 -Wl,--strip-all" -e CXXFLAGS="-Wp,-D_FORTIFY_SOURCE=2 -Wp,-D_GLIBCXX_ASSERTIONS -fstack-protector-strong -fstack-clash-protection -fcf-protection -O3 -Wl,--strip-all" \ + docker run --rm \ --volume 
/data/onnx:/data/onnx:ro \ --volume $(Build.SourcesDirectory):/onnxruntime_src \ --volume $(Build.BinariesDirectory):/build \ @@ -153,6 +149,8 @@ stages: clean: true submodules: none + - template: jobs/download_training_test_data.yml + - template: set-python-manylinux-variables-step.yml - template: flex-downloadPipelineArtifact.yml @@ -182,27 +180,9 @@ stages: --build-arg PYTHON_VERSION=${{ parameters.python_version }} --build-arg INSTALL_DEPS_EXTRA_ARGS=-tu --build-arg BUILD_UID=$(id -u) - --network=host --build-arg POLICY=manylinux_2_28 --build-arg PLATFORM=x86_64 - --build-arg DEVTOOLSET_ROOTPATH=/usr - --build-arg PREPEND_PATH=/usr/local/cuda/bin: - --build-arg LD_LIBRARY_PATH_ARG=/usr/local/lib64 + --network=host Repository: $(Repository) - - bash: tools/ci_build/github/linux/docker/scripts/training/azure_scale_set_vm_mount_test_data.sh -p $(orttrainingtestdatascus-storage-key) -s "//orttrainingtestdatascus.file.core.windows.net/mnist" -d "/mnist" - displayName: 'Mount MNIST' - condition: succeededOrFailed() - workingDirectory: $(Build.SourcesDirectory) - - - bash: tools/ci_build/github/linux/docker/scripts/training/azure_scale_set_vm_mount_test_data.sh -p $(orttrainingtestdatascus-storage-key) -s "//orttrainingtestdatascus.file.core.windows.net/bert-data" -d "/bert_data" - displayName: 'Mount bert-data' - condition: succeededOrFailed() - workingDirectory: $(Build.SourcesDirectory) - - - bash: tools/ci_build/github/linux/docker/scripts/training/azure_scale_set_vm_mount_test_data.sh -p $(orttrainingtestdatascus-storage-key) -s "//orttrainingtestdatascus.file.core.windows.net/hf-models-cache" -d "/hf_models_cache" - displayName: 'Mount hf-models-cache' - condition: succeededOrFailed() - workingDirectory: $(Build.SourcesDirectory) - - task: CmdLine@2 displayName: 'test ortmodule' inputs: @@ -215,9 +195,7 @@ stages: --gpus all \ -e NVIDIA_VISIBLE_DEVICES=all \ --volume $(Build.ArtifactStagingDirectory):/build \ - --volume /mnist:/mnist \ - --volume 
/bert_data:/bert_data \ - --volume /hf_models_cache:/hf_models_cache \ + --volume $(Agent.TempDirectory)/MNIST:/mnist \ $(Repository) \ bash -c " $(PythonManylinuxDir)/bin/python3 -m pip install /build/Release/dist/$basefilename && $(PythonManylinuxDir)/bin/python3 -m onnxruntime.training.ortmodule.torch_cpp_extensions.install " ; workingDirectory: $(Build.SourcesDirectory) diff --git a/tools/ci_build/github/azure-pipelines/templates/py-win-gpu.yml b/tools/ci_build/github/azure-pipelines/templates/py-win-gpu.yml index e5e72aacd0b0..97a8d45b2bb1 100644 --- a/tools/ci_build/github/azure-pipelines/templates/py-win-gpu.yml +++ b/tools/ci_build/github/azure-pipelines/templates/py-win-gpu.yml @@ -1,7 +1,7 @@ parameters: - name: MACHINE_POOL type: string - default: 'onnxruntime-Win2022-GPU-T4' + default: 'onnxruntime-Win2022-GPU-A10' - name: EP_NAME type: string diff --git a/tools/ci_build/github/azure-pipelines/templates/rocm.yml b/tools/ci_build/github/azure-pipelines/templates/rocm.yml index 43a80aa4fd4e..a4adfa89775d 100644 --- a/tools/ci_build/github/azure-pipelines/templates/rocm.yml +++ b/tools/ci_build/github/azure-pipelines/templates/rocm.yml @@ -47,6 +47,7 @@ jobs: parameters: Dockerfile: tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_rocm Context: tools/ci_build/github/linux/docker + CheckOutManyLinux: true DockerBuildArgs: >- --build-arg INSTALL_DEPS_EXTRA_ARGS=-tmur --build-arg BUILD_UID=$(id -u) diff --git a/tools/ci_build/github/azure-pipelines/templates/win-ci.yml b/tools/ci_build/github/azure-pipelines/templates/win-ci.yml index d6e8a30b441d..c726054d8eb1 100644 --- a/tools/ci_build/github/azure-pipelines/templates/win-ci.yml +++ b/tools/ci_build/github/azure-pipelines/templates/win-ci.yml @@ -191,19 +191,24 @@ stages: createLogFile: true # For CPU job, tests are run in the same machine as building + - ${{ if eq(parameters.buildJava, 'true') }}: + - template: make_java_win_binaries.yml + parameters: + msbuildPlatform: ${{ 
parameters.msbuildPlatform }} + java_artifact_id: ${{ parameters.java_artifact_id }} + ${{ if contains(parameters.ort_build_pool_name, 'CPU') }}: + buildOnly: false + # When it is a GPU build, we only assemble the java binaries, testing will be done in the later stage with GPU machine + ${{ else }}: + buildOnly: true + + - task: PublishBuildArtifacts@1 + displayName: 'Publish Java temp binaries' + inputs: + pathtoPublish: '$(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}' + artifactName: 'drop-onnxruntime-java-win-${{ parameters.packageName }}${{parameters.artifact_name_suffix}}' + # All GPU builds will be tested in the next stage with GPU machine - ${{ if contains(parameters.ort_build_pool_name, 'CPU') }}: - - ${{ if eq(parameters.buildJava, 'true') }}: - - template: make_java_win_binaries.yml - parameters: - msbuildPlatform: ${{ parameters.msbuildPlatform }} - java_artifact_id: ${{ parameters.java_artifact_id }} - - - task: PublishBuildArtifacts@1 - condition: and(succeeded(), eq('${{ parameters.buildJava}}', true)) - displayName: 'Publish Java temp binaries' - inputs: - pathtoPublish: '$(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}' - artifactName: 'drop-onnxruntime-java-win-${{ parameters.packageName }}${{parameters.artifact_name_suffix}}' - task: PythonScript@0 displayName: 'test' condition: and(succeeded(), eq('${{ parameters.runTests}}', true)) @@ -307,9 +312,11 @@ stages: - template: component-governance-component-detection-steps.yml parameters : condition : 'succeeded' - - ${{ if contains(parameters.ort_build_pool_name, 'GPU') }}: +- ${{ if contains(parameters.ort_build_pool_name, 'GPU') }}: + - stage: Windows_Packaging_${{ parameters.stage_name_suffix }}_Testing + dependsOn: Windows_Packaging_${{ parameters.stage_name_suffix }} + jobs: - job: Windows_Packaging_${{ parameters.stage_name_suffix }}_Testing - dependsOn: Windows_Packaging_${{ parameters.stage_name_suffix }} workspace: clean: 
all pool: ${{ parameters.ort_build_pool_name }} @@ -384,16 +391,10 @@ stages: scriptPath: '$(Build.SourcesDirectory)\tools\ci_build\build.py' arguments: '--config RelWithDebInfo --use_binskim_compliant_compile_flags --enable_lto --disable_rtti --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --test --enable_onnx_tests $(TelemetryOption) ' workingDirectory: '$(Build.BinariesDirectory)' - +# Previous stage only assembles the java binaries, testing will be done in this stage with GPU machine - ${{ if eq(parameters.buildJava, 'true') }}: - template: make_java_win_binaries.yml parameters: msbuildPlatform: ${{ parameters.msbuildPlatform }} java_artifact_id: ${{ parameters.java_artifact_id }} - - - task: PublishBuildArtifacts@1 - condition: and(succeeded(), eq('${{ parameters.buildJava}}', true)) - displayName: 'Publish Java temp binaries' - inputs: - pathtoPublish: '$(Build.BinariesDirectory)\onnxruntime-java-win-${{ parameters.msbuildPlatform }}' - artifactName: 'drop-onnxruntime-java-win-${{ parameters.packageName }}${{parameters.artifact_name_suffix}}' + buildOnly: false \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/templates/win-esrp-dll.yml b/tools/ci_build/github/azure-pipelines/templates/win-esrp-dll.yml index ba4c55f2fde4..933abad11595 100644 --- a/tools/ci_build/github/azure-pipelines/templates/win-esrp-dll.yml +++ b/tools/ci_build/github/azure-pipelines/templates/win-esrp-dll.yml @@ -16,42 +16,19 @@ parameters: default: '*.dll' steps: -- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@2 +- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@5 displayName: ${{ parameters.DisplayName }} condition: and(succeeded(), eq('${{ parameters.DoEsrp }}', true)) inputs: - ConnectedServiceName: 'OnnxRuntime CodeSign 20190817' + ConnectedServiceName: 'OnnxrunTimeCodeSign_20240611' + AppRegistrationClientId: '53d54d02-978d-4305-8572-583cf6711c4f' + AppRegistrationTenantId: 
'72f988bf-86f1-41af-91ab-2d7cd011db47' + AuthAKVName: 'buildkeyvault' + AuthCertName: '53d54d02-SSL-AutoRotate' + AuthSignCertName: '53d54d02-978d-4305-8572-583cf6711c4f' + FolderPath: ${{ parameters.FolderPath }} Pattern: ${{ parameters.Pattern }} - signConfigType: inlineSignParams - inlineOperation: | - [ - { - "keyCode": "CP-230012", - "operationSetCode": "SigntoolSign", - "parameters": [ - { - "parameterName": "OpusName", - "parameterValue": "Microsoft" - }, - { - "parameterName": "OpusInfo", - "parameterValue": "http://www.microsoft.com" - }, - { - "parameterName": "PageHash", - "parameterValue": "/NPH" - }, - { - "parameterName": "FileDigest", - "parameterValue": "/fd sha256" - }, - { - "parameterName": "TimeStamp", - "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256" - } - ], - "toolName": "signtool.exe", - "toolVersion": "6.2.9304.0" - } - ] + SessionTimeout: 90 + ServiceEndpointUrl: 'https://api.esrp.microsoft.com/api/v2' + MaxConcurrency: 25 diff --git a/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml index c333c7ef084d..39e68f5631f0 100644 --- a/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/win-ci-pipeline.yml @@ -234,7 +234,7 @@ stages: - stage: x64_release_azure dependsOn: [] jobs: - - job: + - job: x64_release_azure steps: - powershell: | Write-Host "##vso[task.prependpath]$(Build.BinariesDirectory)\RelWithDebInfo\_deps\vcpkg-src\installed\x86-windows\bin" diff --git a/tools/ci_build/github/azure-pipelines/win-gpu-reduce-op-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/win-gpu-reduce-op-ci-pipeline.yml index 9133db79946b..211541a18546 100644 --- a/tools/ci_build/github/azure-pipelines/win-gpu-reduce-op-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/win-gpu-reduce-op-ci-pipeline.yml @@ -1,6 +1,6 @@ jobs: - job: 'build' - pool: 'onnxruntime-Win2022-GPU-T4' + pool: 
'onnxruntime-Win2022-GPU-A10' strategy: maxParallel: 2 matrix: @@ -22,7 +22,7 @@ jobs: DownloadCUDA: true BuildArch: 'x64' BuildConfig: $(BuildConfig) - MachinePool: 'onnxruntime-Win2022-GPU-T4' + MachinePool: 'onnxruntime-Win2022-GPU-A10' WithCache: true Today: $(Today) diff --git a/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml index 26c7f978456f..1af00da01241 100644 --- a/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml @@ -29,7 +29,7 @@ pr: jobs: - job: 'build' - pool: 'onnxruntime-Win2022-GPU-T4' + pool: 'onnxruntime-Win2022-GPU-A10' variables: MsbuildArguments: '-detailedsummary -maxcpucount -consoleloggerparameters:PerformanceSummary' EnvSetupScript: setup_env_trt.bat @@ -46,7 +46,7 @@ jobs: DownloadTRT: true BuildArch: 'x64' BuildConfig: RelWithDebInfo - MachinePool: 'onnxruntime-Win2022-GPU-T4' + MachinePool: 'onnxruntime-Win2022-GPU-A10' WithCache: true Today: $(Today) @@ -55,7 +55,7 @@ jobs: WithCache: True Today: $(TODAY) AdditionalKey: "gpu-tensorrt | RelWithDebInfo" - BuildPyArguments: '--config RelWithDebInfo --parallel --use_binskim_compliant_compile_flags --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --update --cmake_generator "Visual Studio 17 2022" --build_wheel --enable_onnx_tests --use_tensorrt --tensorrt_home="$(Agent.TempDirectory)\TensorRT-10.0.1.6.Windows10.x86_64.cuda-11.8" --cuda_home="$(Agent.TempDirectory)\v11.8" --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=75' + BuildPyArguments: '--config RelWithDebInfo --parallel --use_binskim_compliant_compile_flags --build_dir $(Build.BinariesDirectory) --skip_submodule_sync --build_shared_lib --update --cmake_generator "Visual Studio 17 2022" --build_wheel --enable_onnx_tests --use_tensorrt 
--tensorrt_home="$(Agent.TempDirectory)\TensorRT-10.0.1.6.Windows10.x86_64.cuda-11.8" --cuda_home="$(Agent.TempDirectory)\v11.8" --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=86' MsbuildArguments: $(MsbuildArguments) BuildArch: 'x64' Platform: 'x64' diff --git a/tools/ci_build/github/linux/build_cuda_c_api_package.sh b/tools/ci_build/github/linux/build_cuda_c_api_package.sh index aec02f76693b..04968aacdb25 100755 --- a/tools/ci_build/github/linux/build_cuda_c_api_package.sh +++ b/tools/ci_build/github/linux/build_cuda_c_api_package.sh @@ -1,9 +1,10 @@ #!/bin/bash set -e -x -docker run --gpus all -e NVIDIA_VISIBLE_DEVICES=all --rm --volume \ +docker run --rm --volume \ $BUILD_SOURCESDIRECTORY:/onnxruntime_src --volume $BUILD_BINARIESDIRECTORY:/build \ --volume /data/models:/build/models:ro --volume /data/onnx:/data/onnx:ro -e NIGHTLY_BUILD onnxruntimecuda${CUDA_VERSION_MAJOR}build \ /usr/bin/python3.9 /onnxruntime_src/tools/ci_build/build.py --enable_lto --build_java --build_nodejs --build_dir /build --config Release \ --skip_submodule_sync --parallel --use_binskim_compliant_compile_flags --build_shared_lib --use_cuda --cuda_version=$CUDA_VERSION \ --cuda_home=/usr/local/cuda-$CUDA_VERSION --cudnn_home=/usr/local/cuda-$CUDA_VERSION \ +--skip_tests \ --cmake_extra_defines 'CMAKE_CUDA_ARCHITECTURES=60;61;70;75;80' diff --git a/tools/ci_build/github/linux/build_cuda_ci.sh b/tools/ci_build/github/linux/build_cuda_ci.sh new file mode 100755 index 000000000000..c8691b3a01e7 --- /dev/null +++ b/tools/ci_build/github/linux/build_cuda_ci.sh @@ -0,0 +1,46 @@ +#!/bin/bash +set -ex +#Every cuda container has this $CUDA_VERSION env var set. 
+SHORT_CUDA_VERSION=$(echo $CUDA_VERSION | sed 's/\([[:digit:]]\+\.[[:digit:]]\+\)\.[[:digit:]]\+/\1/') + +BUILD_ARGS=('--config' 'Release' '--update' '--build' + '--skip_submodule_sync' + '--build_shared_lib' + '--parallel' '--use_binskim_compliant_compile_flags' + '--build_wheel' + '--enable_onnx_tests' + '--use_cuda' + "--cuda_version=$SHORT_CUDA_VERSION" + "--cuda_home=/usr/local/cuda-$SHORT_CUDA_VERSION" + "--cudnn_home=/usr/local/cuda-$SHORT_CUDA_VERSION" + "--enable_cuda_profiling" + "--enable_cuda_nhwc_ops" + "--enable_pybind" + "--build_java" + "--cmake_extra_defines" + "CMAKE_CUDA_ARCHITECTURES=75" + "onnxruntime_BUILD_UNIT_TESTS=ON" + "onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON") +if [ -x "$(command -v ninja)" ]; then + BUILD_ARGS+=('--cmake_generator' 'Ninja') +fi + +if [ -d /build ]; then + BUILD_ARGS+=('--build_dir' '/build') +else + BUILD_ARGS+=('--build_dir' 'build') +fi + +if [ -x "$(command -v ccache)" ]; then + ccache -s; + BUILD_ARGS+=("--use_cache") +fi +if [ -f /opt/python/cp38-cp38/bin/python3 ]; then + /opt/python/cp38-cp38/bin/python3 tools/ci_build/build.py "${BUILD_ARGS[@]}" +else + python3 tools/ci_build/build.py "${BUILD_ARGS[@]}" +fi +if [ -x "$(command -v ccache)" ]; then + ccache -sv + ccache -z +fi diff --git a/tools/ci_build/github/linux/build_linux_python_package.sh b/tools/ci_build/github/linux/build_linux_python_package.sh index bc57cf4120d2..e7909fa4ffcc 100755 --- a/tools/ci_build/github/linux/build_linux_python_package.sh +++ b/tools/ci_build/github/linux/build_linux_python_package.sh @@ -23,7 +23,13 @@ c) BUILD_CONFIG=${OPTARG};; esac done + + BUILD_ARGS=("--build_dir" "/build" "--config" "$BUILD_CONFIG" "--update" "--build" "--skip_submodule_sync" "--parallel" "--use_binskim_compliant_compile_flags" "--build_wheel") +if [[ "$EXTRA_ARG" == *"training"* ]]; then + echo "Skip building unit tests because the container is a manylinux docker" + BUILD_ARGS+=("--cmake_extra_defines" "onnxruntime_BUILD_UNIT_TESTS=OFF") +fi if [ 
"$BUILD_CONFIG" != "Debug" ]; then BUILD_ARGS+=("--enable_lto") diff --git a/tools/ci_build/github/linux/build_tensorrt_c_api_package.sh b/tools/ci_build/github/linux/build_tensorrt_c_api_package.sh index 7d65a6f738a5..cc63b68d441d 100755 --- a/tools/ci_build/github/linux/build_tensorrt_c_api_package.sh +++ b/tools/ci_build/github/linux/build_tensorrt_c_api_package.sh @@ -1,7 +1,8 @@ #!/bin/bash set -e -x mkdir -p $HOME/.onnx -docker run --gpus all -e CFLAGS -e CXXFLAGS -e NVIDIA_VISIBLE_DEVICES=all --rm --volume /data/onnx:/data/onnx:ro --volume $BUILD_SOURCESDIRECTORY:/onnxruntime_src --volume $BUILD_BINARIESDIRECTORY:/build \ +docker run --rm --volume /data/onnx:/data/onnx:ro --volume $BUILD_SOURCESDIRECTORY:/onnxruntime_src --volume $BUILD_BINARIESDIRECTORY:/build \ --volume /data/models:/build/models:ro --volume $HOME/.onnx:/home/onnxruntimedev/.onnx -e NIGHTLY_BUILD onnxruntimecuda${CUDA_VERSION_MAJOR}xtrt86build \ -/opt/python/cp38-cp38/bin/python3 /onnxruntime_src/tools/ci_build/build.py --build_dir /build --config Release \ +/usr/bin/python3.9 /onnxruntime_src/tools/ci_build/build.py --build_dir /build --config Release \ +--skip_tests \ --skip_submodule_sync --parallel --use_binskim_compliant_compile_flags --build_shared_lib --build_java --build_nodejs --use_tensorrt --cuda_version=$CUDA_VERSION --cuda_home=/usr/local/cuda-$CUDA_VERSION --cudnn_home=/usr --tensorrt_home=/usr --cmake_extra_defines 'CMAKE_CUDA_ARCHITECTURES=60;61;70;75;80' diff --git a/tools/ci_build/github/linux/build_tensorrt_ci.sh b/tools/ci_build/github/linux/build_tensorrt_ci.sh new file mode 100755 index 000000000000..3002f2c239f1 --- /dev/null +++ b/tools/ci_build/github/linux/build_tensorrt_ci.sh @@ -0,0 +1,46 @@ +#!/bin/bash +set -ex +#Every cuda container has this $CUDA_VERSION env var set. 
+SHORT_CUDA_VERSION=$(echo $CUDA_VERSION | sed 's/\([[:digit:]]\+\.[[:digit:]]\+\)\.[[:digit:]]\+/\1/') + +#TODO: add --update --build +BUILD_ARGS=('--config' 'Release' + '--skip_submodule_sync' + '--build_shared_lib' + '--parallel' '--use_binskim_compliant_compile_flags' + '--build_wheel' + '--enable_onnx_tests' + '--use_cuda' + "--cuda_version=$SHORT_CUDA_VERSION" + "--cuda_home=/usr/local/cuda-$SHORT_CUDA_VERSION" + "--cudnn_home=/usr/local/cuda-$SHORT_CUDA_VERSION" + "--use_tensorrt" "--tensorrt_home" "/usr" + "--enable_pybind" + "--build_java" + "--cmake_extra_defines" + "CMAKE_CUDA_ARCHITECTURES=75" + "onnxruntime_BUILD_UNIT_TESTS=ON" + "onnxruntime_ENABLE_CUDA_EP_INTERNAL_TESTS=ON") +if [ -x "$(command -v ninja)" ]; then + BUILD_ARGS+=('--cmake_generator' 'Ninja') +fi + +if [ -d /build ]; then + BUILD_ARGS+=('--build_dir' '/build') +else + BUILD_ARGS+=('--build_dir' 'build') +fi + +if [ -x "$(command -v ccache)" ]; then + ccache -s; + BUILD_ARGS+=("--use_cache") +fi +if [ -f /opt/python/cp38-cp38/bin/python3 ]; then + /opt/python/cp38-cp38/bin/python3 tools/ci_build/build.py "${BUILD_ARGS[@]}" +else + python3 tools/ci_build/build.py "${BUILD_ARGS[@]}" +fi +if [ -x "$(command -v ccache)" ]; then + ccache -sv + ccache -z +fi diff --git a/tools/ci_build/github/linux/delete_unused_files_before_upload.sh b/tools/ci_build/github/linux/delete_unused_files_before_upload.sh new file mode 100755 index 000000000000..5d7d01e31ff0 --- /dev/null +++ b/tools/ci_build/github/linux/delete_unused_files_before_upload.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -e -x +rm -rf $BUILD_BINARIESDIRECTORY/Release/onnxruntime $BUILD_BINARIESDIRECTORY/Release/pybind11 +rm -f $BUILD_BINARIESDIRECTORY/Release/models +find $BUILD_BINARIESDIRECTORY/Release/_deps -mindepth 1 ! -regex "^$BUILD_BINARIESDIRECTORY/Release/_deps/onnx-src\(/.*\)?" 
-delete +cd $BUILD_BINARIESDIRECTORY/Release +find -executable -type f > $BUILD_BINARIESDIRECTORY/Release/perms.txt diff --git a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2014_aten_cpu b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2014_aten_cpu index ad3e783040b9..ad1db6a0305e 100644 --- a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2014_aten_cpu +++ b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2014_aten_cpu @@ -1,4 +1,4 @@ -FROM quay.io/pypa/manylinux2014_x86_64:latest +FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_ubi8_gcc12:20240531.1 ADD scripts /tmp/scripts RUN cd /tmp/scripts && /tmp/scripts/manylinux/install_centos.sh && /tmp/scripts/manylinux/install_deps_aten.sh && rm -rf /tmp/scripts @@ -7,4 +7,4 @@ ARG BUILD_UID=1001 ARG BUILD_USER=onnxruntimedev RUN adduser --uid $BUILD_UID $BUILD_USER WORKDIR /home/$BUILD_USER -USER $BUILD_USER +USER $BUILD_USER \ No newline at end of file diff --git a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu index 546fca69201a..9bdc62ace479 100644 --- a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu +++ b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cpu @@ -1,163 +1,5 @@ -ARG BASEIMAGE=registry.access.redhat.com/ubi8/ubi -ARG POLICY=manylinux_2_28 -ARG PLATFORM=x86_64 -ARG DEVTOOLSET_ROOTPATH=/opt/rh/gcc-toolset-12/root -ARG LD_LIBRARY_PATH_ARG=${DEVTOOLSET_ROOTPATH}/usr/lib64:${DEVTOOLSET_ROOTPATH}/usr/lib:${DEVTOOLSET_ROOTPATH}/usr/lib64/dyninst:${DEVTOOLSET_ROOTPATH}/usr/lib/dyninst:/usr/local/lib64 -ARG PREPEND_PATH=/usr/lib/jvm/msopenjdk-11/bin:${DEVTOOLSET_ROOTPATH}/usr/bin: +FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_ubi8_gcc12:20240531.1 -#Build manylinux2014 docker image begin -FROM $BASEIMAGE AS runtime_base -ARG POLICY -ARG PLATFORM -ARG DEVTOOLSET_ROOTPATH -ARG LD_LIBRARY_PATH_ARG -ARG 
PREPEND_PATH -LABEL maintainer="The ManyLinux project" - -ENV AUDITWHEEL_POLICY=${POLICY} AUDITWHEEL_ARCH=${PLATFORM} AUDITWHEEL_PLAT=${POLICY}_${PLATFORM} -ENV LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 LANGUAGE=en_US.UTF-8 -ENV DEVTOOLSET_ROOTPATH=${DEVTOOLSET_ROOTPATH} -ENV LD_LIBRARY_PATH=${LD_LIBRARY_PATH_ARG} -ENV PATH=${PREPEND_PATH}${PATH} -ENV PKG_CONFIG_PATH=/usr/local/lib/pkgconfig - -# first copy the fixup mirrors script, keep the script around -COPY build_scripts/fixup-mirrors.sh /usr/local/sbin/fixup-mirrors - -# setup entrypoint, this will wrap commands with `linux32` with i686 images -COPY build_scripts/install-entrypoint.sh \ - build_scripts/build_utils.sh \ - /build_scripts/ - -RUN /build_scripts/install-entrypoint.sh && rm -rf /build_scripts -COPY manylinux-entrypoint /usr/local/bin/manylinux-entrypoint -ENTRYPOINT ["manylinux-entrypoint"] - -COPY build_scripts/install-runtime-packages.sh \ - build_scripts/build_utils.sh \ - /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-runtime-packages.sh && rm -rf /build_scripts/ - -COPY build_scripts/build_utils.sh /build_scripts/ - -COPY build_scripts/install-autoconf.sh /build_scripts/ -RUN export AUTOCONF_ROOT=autoconf-2.71 && \ - export AUTOCONF_HASH=431075ad0bf529ef13cb41e9042c542381103e80015686222b8a9d4abef42a1c && \ - export AUTOCONF_DOWNLOAD_URL=http://ftp.gnu.org/gnu/autoconf && \ - manylinux-entrypoint /build_scripts/install-autoconf.sh - -COPY build_scripts/install-automake.sh /build_scripts/ -RUN export AUTOMAKE_ROOT=automake-1.16.5 && \ - export AUTOMAKE_HASH=07bd24ad08a64bc17250ce09ec56e921d6343903943e99ccf63bbf0705e34605 && \ - export AUTOMAKE_DOWNLOAD_URL=http://ftp.gnu.org/gnu/automake && \ - manylinux-entrypoint /build_scripts/install-automake.sh - -COPY build_scripts/install-libtool.sh /build_scripts/ -RUN export LIBTOOL_ROOT=libtool-2.4.7 && \ - export LIBTOOL_HASH=04e96c2404ea70c590c546eba4202a4e12722c640016c12b9b2f1ce3d481e9a8 && \ - export 
LIBTOOL_DOWNLOAD_URL=http://ftp.gnu.org/gnu/libtool && \ - manylinux-entrypoint /build_scripts/install-libtool.sh - -COPY build_scripts/install-libxcrypt.sh /build_scripts/ -RUN export LIBXCRYPT_VERSION=4.4.28 && \ - export LIBXCRYPT_HASH=db7e37901969cb1d1e8020cb73a991ef81e48e31ea5b76a101862c806426b457 && \ - export LIBXCRYPT_DOWNLOAD_URL=https://github.com/besser82/libxcrypt/archive && \ - export PERL_ROOT=perl-5.34.0 && \ - export PERL_HASH=551efc818b968b05216024fb0b727ef2ad4c100f8cb6b43fab615fa78ae5be9a && \ - export PERL_DOWNLOAD_URL=https://www.cpan.org/src/5.0 && \ - manylinux-entrypoint /build_scripts/install-libxcrypt.sh - -FROM runtime_base AS build_base -COPY build_scripts/install-build-packages.sh /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-build-packages.sh - - -FROM build_base AS build_git -COPY build_scripts/build-git.sh /build_scripts/ -RUN export GIT_ROOT=git-2.36.2 && \ - export GIT_HASH=6dc2cdea5fb23d823ba4871cc23222c1db31dfbb6d6c6ff74c4128700df57c68 && \ - export GIT_DOWNLOAD_URL=https://www.kernel.org/pub/software/scm/git && \ - manylinux-entrypoint /build_scripts/build-git.sh - - -FROM build_base AS build_cpython -COPY build_scripts/build-sqlite3.sh /build_scripts/ -RUN export SQLITE_AUTOCONF_ROOT=sqlite-autoconf-3390200 && \ - export SQLITE_AUTOCONF_HASH=852be8a6183a17ba47cee0bbff7400b7aa5affd283bf3beefc34fcd088a239de && \ - export SQLITE_AUTOCONF_DOWNLOAD_URL=https://www.sqlite.org/2022 && \ - manylinux-entrypoint /build_scripts/build-sqlite3.sh - -COPY build_scripts/build-openssl.sh /build_scripts/ -RUN export OPENSSL_ROOT=openssl-1.1.1q && \ - export OPENSSL_HASH=d7939ce614029cdff0b6c20f0e2e5703158a489a72b2507b8bd51bf8c8fd10ca && \ - export OPENSSL_DOWNLOAD_URL=https://www.openssl.org/source && \ - manylinux-entrypoint /build_scripts/build-openssl.sh - -COPY build_scripts/build-cpython.sh /build_scripts/ - - - - -FROM build_cpython AS build_cpython38 -COPY build_scripts/ambv-pubkey.txt /build_scripts/cpython-pubkeys.txt 
-RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.8.13 - - -FROM build_cpython AS build_cpython39 -COPY build_scripts/ambv-pubkey.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.9.13 - - -FROM build_cpython AS build_cpython310 -COPY build_scripts/cpython-pubkey-310-311.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.10.5 - -FROM build_cpython AS build_cpython311 -COPY build_scripts/cpython-pubkey-310-311.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.11.2 - -FROM build_cpython AS build_cpython312 -COPY build_scripts/cpython-pubkey-312-313.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.12.1 - -FROM build_cpython AS all_python -COPY build_scripts/install-pypy.sh \ - build_scripts/pypy.sha256 \ - build_scripts/finalize-python.sh \ - /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.8 7.3.9 -RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.9 7.3.9 -COPY --from=build_cpython38 /opt/_internal /opt/_internal/ -COPY --from=build_cpython39 /opt/_internal /opt/_internal/ -COPY --from=build_cpython310 /opt/_internal /opt/_internal/ -COPY --from=build_cpython311 /opt/_internal /opt/_internal/ -COPY --from=build_cpython312 /opt/_internal /opt/_internal/ -RUN manylinux-entrypoint /build_scripts/finalize-python.sh - - -FROM runtime_base -COPY --from=build_git /manylinux-rootfs / -COPY --from=build_cpython /manylinux-rootfs / -COPY --from=all_python /opt/_internal /opt/_internal/ -COPY build_scripts/finalize.sh \ - build_scripts/python-tag-abi-tag.py \ - build_scripts/requirements3.8.txt \ - build_scripts/requirements3.9.txt \ - build_scripts/requirements3.10.txt \ - build_scripts/requirements3.11.txt \ - build_scripts/requirements3.12.txt \ - build_scripts/requirements-base-tools.txt \ - /build_scripts/ -COPY 
build_scripts/requirements-tools/* /build_scripts/requirements-tools/ -RUN manylinux-entrypoint /build_scripts/finalize.sh && rm -rf /build_scripts - -ENV SSL_CERT_FILE=/opt/_internal/certs.pem - -CMD ["/bin/bash"] - -#Build manylinux2014 docker image end - -ENV PATH ${DEVTOOLSET_ROOTPATH}/usr/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin ENV JAVA_HOME=/usr/lib/jvm/msopenjdk-11 ADD scripts /tmp/scripts diff --git a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda index 00b7e9b722fc..d96b34297427 100644 --- a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda +++ b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_cuda @@ -2,163 +2,10 @@ # Please overwrite BASEIMAGE, TRT_VERSION and other arguments with # --docker-build-args ' --build-arg BASEIMAGE=other_base_image --build-arg TRT_VERSION=other_trt_version etc...' # for other cuda version and TRT version -ARG POLICY=manylinux_2_28 -ARG PLATFORM=x86_64 ARG BASEIMAGE=nvidia/cuda:11.8.0-cudnn8-devel-ubi8 -ARG DEVTOOLSET_ROOTPATH=/usr -ARG LD_LIBRARY_PATH_ARG=/usr/local/lib64 -ARG PREPEND_PATH=/usr/local/cuda/bin -ARG TRT_VERSION=10.0.1.6-1.cuda11.8 -#Build manylinux docker image begin -FROM $BASEIMAGE AS runtime_base -ARG POLICY -ARG PLATFORM -ARG DEVTOOLSET_ROOTPATH -ARG LD_LIBRARY_PATH_ARG -ARG PREPEND_PATH +FROM $BASEIMAGE ARG TRT_VERSION -LABEL maintainer="The ManyLinux project" - -ENV AUDITWHEEL_POLICY=${POLICY} AUDITWHEEL_ARCH=${PLATFORM} AUDITWHEEL_PLAT=${POLICY}_${PLATFORM} -ENV LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 LANGUAGE=en_US.UTF-8 -ENV DEVTOOLSET_ROOTPATH=${DEVTOOLSET_ROOTPATH} -ENV LD_LIBRARY_PATH=${LD_LIBRARY_PATH_ARG} -ENV PATH=${PREPEND_PATH}${PATH} -ENV PKG_CONFIG_PATH=/usr/local/lib/pkgconfig - -# first copy the fixup mirrors script, keep the script around -COPY build_scripts/fixup-mirrors.sh /usr/local/sbin/fixup-mirrors - -# setup entrypoint, this will wrap commands with 
`linux32` with i686 images -COPY build_scripts/install-entrypoint.sh \ - build_scripts/build_utils.sh \ - /build_scripts/ - -RUN /build_scripts/install-entrypoint.sh && rm -rf /build_scripts -COPY manylinux-entrypoint /usr/local/bin/manylinux-entrypoint -ENTRYPOINT ["manylinux-entrypoint"] - -COPY build_scripts/install-runtime-packages.sh \ - build_scripts/build_utils.sh \ - /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-runtime-packages.sh && rm -rf /build_scripts/ - -COPY build_scripts/build_utils.sh /build_scripts/ - -COPY build_scripts/install-autoconf.sh /build_scripts/ -RUN export AUTOCONF_ROOT=autoconf-2.71 && \ - export AUTOCONF_HASH=431075ad0bf529ef13cb41e9042c542381103e80015686222b8a9d4abef42a1c && \ - export AUTOCONF_DOWNLOAD_URL=http://ftp.gnu.org/gnu/autoconf && \ - manylinux-entrypoint /build_scripts/install-autoconf.sh - -COPY build_scripts/install-automake.sh /build_scripts/ -RUN export AUTOMAKE_ROOT=automake-1.16.5 && \ - export AUTOMAKE_HASH=07bd24ad08a64bc17250ce09ec56e921d6343903943e99ccf63bbf0705e34605 && \ - export AUTOMAKE_DOWNLOAD_URL=http://ftp.gnu.org/gnu/automake && \ - manylinux-entrypoint /build_scripts/install-automake.sh - -COPY build_scripts/install-libtool.sh /build_scripts/ -RUN export LIBTOOL_ROOT=libtool-2.4.7 && \ - export LIBTOOL_HASH=04e96c2404ea70c590c546eba4202a4e12722c640016c12b9b2f1ce3d481e9a8 && \ - export LIBTOOL_DOWNLOAD_URL=http://ftp.gnu.org/gnu/libtool && \ - manylinux-entrypoint /build_scripts/install-libtool.sh - -COPY build_scripts/install-libxcrypt.sh /build_scripts/ -RUN export LIBXCRYPT_VERSION=4.4.28 && \ - export LIBXCRYPT_HASH=db7e37901969cb1d1e8020cb73a991ef81e48e31ea5b76a101862c806426b457 && \ - export LIBXCRYPT_DOWNLOAD_URL=https://github.com/besser82/libxcrypt/archive && \ - export PERL_ROOT=perl-5.34.0 && \ - export PERL_HASH=551efc818b968b05216024fb0b727ef2ad4c100f8cb6b43fab615fa78ae5be9a && \ - export PERL_DOWNLOAD_URL=https://www.cpan.org/src/5.0 && \ - manylinux-entrypoint 
/build_scripts/install-libxcrypt.sh - -FROM runtime_base AS build_base -COPY build_scripts/install-build-packages.sh /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-build-packages.sh - - -FROM build_base AS build_git -COPY build_scripts/build-git.sh /build_scripts/ -RUN export GIT_ROOT=git-2.36.2 && \ - export GIT_HASH=6dc2cdea5fb23d823ba4871cc23222c1db31dfbb6d6c6ff74c4128700df57c68 && \ - export GIT_DOWNLOAD_URL=https://www.kernel.org/pub/software/scm/git && \ - manylinux-entrypoint /build_scripts/build-git.sh - - -FROM build_base AS build_cpython -COPY build_scripts/build-sqlite3.sh /build_scripts/ -RUN export SQLITE_AUTOCONF_ROOT=sqlite-autoconf-3390200 && \ - export SQLITE_AUTOCONF_HASH=852be8a6183a17ba47cee0bbff7400b7aa5affd283bf3beefc34fcd088a239de && \ - export SQLITE_AUTOCONF_DOWNLOAD_URL=https://www.sqlite.org/2022 && \ - manylinux-entrypoint /build_scripts/build-sqlite3.sh - -COPY build_scripts/build-openssl.sh /build_scripts/ -RUN export OPENSSL_ROOT=openssl-1.1.1q && \ - export OPENSSL_HASH=d7939ce614029cdff0b6c20f0e2e5703158a489a72b2507b8bd51bf8c8fd10ca && \ - export OPENSSL_DOWNLOAD_URL=https://www.openssl.org/source && \ - manylinux-entrypoint /build_scripts/build-openssl.sh - -COPY build_scripts/build-cpython.sh /build_scripts/ - -FROM build_cpython AS build_cpython38 -COPY build_scripts/ambv-pubkey.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.8.13 - - -FROM build_cpython AS build_cpython39 -COPY build_scripts/ambv-pubkey.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.9.13 - - -FROM build_cpython AS build_cpython310 -COPY build_scripts/cpython-pubkey-310-311.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.10.5 - -FROM build_cpython AS build_cpython311 -COPY build_scripts/cpython-pubkey-310-311.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint 
/build_scripts/build-cpython.sh 3.11.2 - -FROM build_cpython AS build_cpython312 -COPY build_scripts/cpython-pubkey-312-313.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.12.1 - -FROM build_cpython AS all_python -COPY build_scripts/install-pypy.sh \ - build_scripts/pypy.sha256 \ - build_scripts/finalize-python.sh \ - /build_scripts/ - -RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.8 7.3.9 -RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.9 7.3.9 -COPY --from=build_cpython38 /opt/_internal /opt/_internal/ -COPY --from=build_cpython39 /opt/_internal /opt/_internal/ -COPY --from=build_cpython310 /opt/_internal /opt/_internal/ -COPY --from=build_cpython311 /opt/_internal /opt/_internal/ -COPY --from=build_cpython312 /opt/_internal /opt/_internal/ -RUN manylinux-entrypoint /build_scripts/finalize-python.sh - -FROM runtime_base -COPY --from=build_git /manylinux-rootfs / -COPY --from=build_cpython /manylinux-rootfs / -COPY --from=all_python /opt/_internal /opt/_internal/ -COPY build_scripts/finalize.sh \ - build_scripts/python-tag-abi-tag.py \ - build_scripts/requirements3.8.txt \ - build_scripts/requirements3.9.txt \ - build_scripts/requirements3.10.txt \ - build_scripts/requirements3.11.txt \ - build_scripts/requirements3.12.txt \ - build_scripts/requirements-base-tools.txt \ - /build_scripts/ -COPY build_scripts/requirements-tools/* /build_scripts/requirements-tools/ -RUN manylinux-entrypoint /build_scripts/finalize.sh && rm -rf /build_scripts - -ENV SSL_CERT_FILE=/opt/_internal/certs.pem - -CMD ["/bin/bash"] - -#Build manylinux docker image end #Install TensorRT only if TRT_VERSION is not empty RUN if [ -n "$TRT_VERSION" ]; then \ diff --git a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_training_cuda11_8 b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_training_cuda11_8 index fed29689fbe5..ed920ea05739 100644 --- 
a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_training_cuda11_8 +++ b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_training_cuda11_8 @@ -1,164 +1,4 @@ -ARG BASEIMAGE=nvidia/cuda:11.8.0-cudnn8-devel-ubi8 -ARG POLICY=manylinux2014 -ARG PLATFORM=x86_64 -ARG DEVTOOLSET_ROOTPATH= -ARG LD_LIBRARY_PATH_ARG= -ARG PREPEND_PATH= - -#We need both CUDA and manylinux. But the CUDA Toolkit End User License Agreement says NVIDIA CUDA Driver Libraries(libcuda.so, libnvidia-ptxjitcompiler.so) are only distributable in applications that meet this criteria: -#1. The application was developed starting from a NVIDIA CUDA container obtained from Docker Hub or the NVIDIA GPU Cloud, and -#2. The resulting application is packaged as a Docker container and distributed to users on Docker Hub or the NVIDIA GPU Cloud only. -#So we use CUDA as the base image then add manylinux on top of it. - -#Build manylinux2014 docker image begin -FROM $BASEIMAGE AS runtime_base -ARG POLICY -ARG PLATFORM -ARG DEVTOOLSET_ROOTPATH -ARG LD_LIBRARY_PATH_ARG -ARG PREPEND_PATH -LABEL maintainer="The ManyLinux project" - -ENV AUDITWHEEL_POLICY=${POLICY} AUDITWHEEL_ARCH=${PLATFORM} AUDITWHEEL_PLAT=${POLICY}_${PLATFORM} -ENV LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 LANGUAGE=en_US.UTF-8 -ENV DEVTOOLSET_ROOTPATH=${DEVTOOLSET_ROOTPATH} -ENV LD_LIBRARY_PATH=${LD_LIBRARY_PATH_ARG} -ENV PATH=${PREPEND_PATH}${PATH} -ENV PKG_CONFIG_PATH=/usr/local/lib/pkgconfig - -# first copy the fixup mirrors script, keep the script around -COPY build_scripts/fixup-mirrors.sh /usr/local/sbin/fixup-mirrors - -# setup entrypoint, this will wrap commands with `linux32` with i686 images -COPY build_scripts/install-entrypoint.sh \ - build_scripts/build_utils.sh \ - /build_scripts/ - -RUN /build_scripts/install-entrypoint.sh && rm -rf /build_scripts -COPY manylinux-entrypoint /usr/local/bin/manylinux-entrypoint -ENTRYPOINT ["manylinux-entrypoint"] - -COPY build_scripts/install-runtime-packages.sh \ - 
build_scripts/build_utils.sh \ - /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-runtime-packages.sh && rm -rf /build_scripts/ - -COPY build_scripts/build_utils.sh /build_scripts/ - -COPY build_scripts/install-autoconf.sh /build_scripts/ -RUN export AUTOCONF_ROOT=autoconf-2.71 && \ - export AUTOCONF_HASH=431075ad0bf529ef13cb41e9042c542381103e80015686222b8a9d4abef42a1c && \ - export AUTOCONF_DOWNLOAD_URL=http://ftp.gnu.org/gnu/autoconf && \ - manylinux-entrypoint /build_scripts/install-autoconf.sh - -COPY build_scripts/install-automake.sh /build_scripts/ -RUN export AUTOMAKE_ROOT=automake-1.16.5 && \ - export AUTOMAKE_HASH=07bd24ad08a64bc17250ce09ec56e921d6343903943e99ccf63bbf0705e34605 && \ - export AUTOMAKE_DOWNLOAD_URL=http://ftp.gnu.org/gnu/automake && \ - manylinux-entrypoint /build_scripts/install-automake.sh - -COPY build_scripts/install-libtool.sh /build_scripts/ -RUN export LIBTOOL_ROOT=libtool-2.4.7 && \ - export LIBTOOL_HASH=04e96c2404ea70c590c546eba4202a4e12722c640016c12b9b2f1ce3d481e9a8 && \ - export LIBTOOL_DOWNLOAD_URL=http://ftp.gnu.org/gnu/libtool && \ - manylinux-entrypoint /build_scripts/install-libtool.sh - -COPY build_scripts/install-libxcrypt.sh /build_scripts/ -RUN export LIBXCRYPT_VERSION=4.4.28 && \ - export LIBXCRYPT_HASH=db7e37901969cb1d1e8020cb73a991ef81e48e31ea5b76a101862c806426b457 && \ - export LIBXCRYPT_DOWNLOAD_URL=https://github.com/besser82/libxcrypt/archive && \ - export PERL_ROOT=perl-5.34.0 && \ - export PERL_HASH=551efc818b968b05216024fb0b727ef2ad4c100f8cb6b43fab615fa78ae5be9a && \ - export PERL_DOWNLOAD_URL=https://www.cpan.org/src/5.0 && \ - manylinux-entrypoint /build_scripts/install-libxcrypt.sh - -FROM runtime_base AS build_base -COPY build_scripts/install-build-packages.sh /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-build-packages.sh - - -FROM build_base AS build_git -COPY build_scripts/build-git.sh /build_scripts/ -RUN export GIT_ROOT=git-2.36.2 && \ - export 
GIT_HASH=6dc2cdea5fb23d823ba4871cc23222c1db31dfbb6d6c6ff74c4128700df57c68 && \ - export GIT_DOWNLOAD_URL=https://www.kernel.org/pub/software/scm/git && \ - manylinux-entrypoint /build_scripts/build-git.sh - - -FROM build_base AS build_cpython -COPY build_scripts/build-sqlite3.sh /build_scripts/ -RUN export SQLITE_AUTOCONF_ROOT=sqlite-autoconf-3390200 && \ - export SQLITE_AUTOCONF_HASH=852be8a6183a17ba47cee0bbff7400b7aa5affd283bf3beefc34fcd088a239de && \ - export SQLITE_AUTOCONF_DOWNLOAD_URL=https://www.sqlite.org/2022 && \ - manylinux-entrypoint /build_scripts/build-sqlite3.sh - -COPY build_scripts/build-openssl.sh /build_scripts/ -RUN export OPENSSL_ROOT=openssl-1.1.1q && \ - export OPENSSL_HASH=d7939ce614029cdff0b6c20f0e2e5703158a489a72b2507b8bd51bf8c8fd10ca && \ - export OPENSSL_DOWNLOAD_URL=https://www.openssl.org/source && \ - manylinux-entrypoint /build_scripts/build-openssl.sh - -COPY build_scripts/build-cpython.sh /build_scripts/ - - -FROM build_cpython AS build_cpython38 -COPY build_scripts/ambv-pubkey.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.8.13 - - -FROM build_cpython AS build_cpython39 -COPY build_scripts/ambv-pubkey.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.9.13 - - -FROM build_cpython AS build_cpython310 -COPY build_scripts/cpython-pubkey-310-311.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.10.5 - -FROM build_cpython AS build_cpython311 -COPY build_scripts/cpython-pubkey-310-311.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.11.2 - -FROM build_cpython AS build_cpython312 -COPY build_scripts/cpython-pubkey-312-313.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.12.1 - -FROM build_cpython AS all_python -COPY build_scripts/install-pypy.sh \ - build_scripts/pypy.sha256 \ - 
build_scripts/finalize-python.sh \ - /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.8 7.3.9 -RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.9 7.3.9 -COPY --from=build_cpython38 /opt/_internal /opt/_internal/ -COPY --from=build_cpython39 /opt/_internal /opt/_internal/ -COPY --from=build_cpython310 /opt/_internal /opt/_internal/ -COPY --from=build_cpython311 /opt/_internal /opt/_internal/ -COPY --from=build_cpython312 /opt/_internal /opt/_internal/ -RUN manylinux-entrypoint /build_scripts/finalize-python.sh - - -FROM runtime_base -COPY --from=build_git /manylinux-rootfs / -COPY --from=build_cpython /manylinux-rootfs / -COPY --from=all_python /opt/_internal /opt/_internal/ -COPY build_scripts/finalize.sh \ - build_scripts/python-tag-abi-tag.py \ - build_scripts/requirements3.8.txt \ - build_scripts/requirements3.9.txt \ - build_scripts/requirements3.10.txt \ - build_scripts/requirements3.11.txt \ - build_scripts/requirements3.12.txt \ - build_scripts/requirements-base-tools.txt \ - /build_scripts/ -COPY build_scripts/requirements-tools/* /build_scripts/requirements-tools/ -RUN manylinux-entrypoint /build_scripts/finalize.sh && rm -rf /build_scripts - -ENV SSL_CERT_FILE=/opt/_internal/certs.pem - -CMD ["/bin/bash"] - -#Build manylinux2014 docker image end +FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1 ARG PYTHON_VERSION=3.9 ARG TORCH_VERSION=2.0.0 ARG OPSET_VERSION=17 diff --git a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_training_cuda12_2 b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_training_cuda12_2 index e1caa141ef31..6886600417c8 100644 --- a/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_training_cuda12_2 +++ b/tools/ci_build/github/linux/docker/Dockerfile.manylinux2_28_training_cuda12_2 @@ -1,164 +1,5 @@ -ARG BASEIMAGE=nvidia/cuda:12.2.2-cudnn8-devel-ubi8 -ARG POLICY=manylinux2014 -ARG PLATFORM=x86_64 -ARG 
DEVTOOLSET_ROOTPATH= -ARG LD_LIBRARY_PATH_ARG= -ARG PREPEND_PATH= +FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12:20240610.1 -#We need both CUDA and manylinux. But the CUDA Toolkit End User License Agreement says NVIDIA CUDA Driver Libraries(libcuda.so, libnvidia-ptxjitcompiler.so) are only distributable in applications that meet this criteria: -#1. The application was developed starting from a NVIDIA CUDA container obtained from Docker Hub or the NVIDIA GPU Cloud, and -#2. The resulting application is packaged as a Docker container and distributed to users on Docker Hub or the NVIDIA GPU Cloud only. -#So we use CUDA as the base image then add manylinux on top of it. - -#Build manylinux2014 docker image begin -FROM $BASEIMAGE AS runtime_base -ARG POLICY -ARG PLATFORM -ARG DEVTOOLSET_ROOTPATH -ARG LD_LIBRARY_PATH_ARG -ARG PREPEND_PATH -LABEL maintainer="The ManyLinux project" - -ENV AUDITWHEEL_POLICY=${POLICY} AUDITWHEEL_ARCH=${PLATFORM} AUDITWHEEL_PLAT=${POLICY}_${PLATFORM} -ENV LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 LANGUAGE=en_US.UTF-8 -ENV DEVTOOLSET_ROOTPATH=${DEVTOOLSET_ROOTPATH} -ENV LD_LIBRARY_PATH=${LD_LIBRARY_PATH_ARG} -ENV PATH=${PREPEND_PATH}${PATH} -ENV PKG_CONFIG_PATH=/usr/local/lib/pkgconfig - -# first copy the fixup mirrors script, keep the script around -COPY build_scripts/fixup-mirrors.sh /usr/local/sbin/fixup-mirrors - -# setup entrypoint, this will wrap commands with `linux32` with i686 images -COPY build_scripts/install-entrypoint.sh \ - build_scripts/build_utils.sh \ - /build_scripts/ - -RUN /build_scripts/install-entrypoint.sh && rm -rf /build_scripts -COPY manylinux-entrypoint /usr/local/bin/manylinux-entrypoint -ENTRYPOINT ["manylinux-entrypoint"] - -COPY build_scripts/install-runtime-packages.sh \ - build_scripts/build_utils.sh \ - /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-runtime-packages.sh && rm -rf /build_scripts/ - -COPY build_scripts/build_utils.sh /build_scripts/ - 
-COPY build_scripts/install-autoconf.sh /build_scripts/ -RUN export AUTOCONF_ROOT=autoconf-2.71 && \ - export AUTOCONF_HASH=431075ad0bf529ef13cb41e9042c542381103e80015686222b8a9d4abef42a1c && \ - export AUTOCONF_DOWNLOAD_URL=http://ftp.gnu.org/gnu/autoconf && \ - manylinux-entrypoint /build_scripts/install-autoconf.sh - -COPY build_scripts/install-automake.sh /build_scripts/ -RUN export AUTOMAKE_ROOT=automake-1.16.5 && \ - export AUTOMAKE_HASH=07bd24ad08a64bc17250ce09ec56e921d6343903943e99ccf63bbf0705e34605 && \ - export AUTOMAKE_DOWNLOAD_URL=http://ftp.gnu.org/gnu/automake && \ - manylinux-entrypoint /build_scripts/install-automake.sh - -COPY build_scripts/install-libtool.sh /build_scripts/ -RUN export LIBTOOL_ROOT=libtool-2.4.7 && \ - export LIBTOOL_HASH=04e96c2404ea70c590c546eba4202a4e12722c640016c12b9b2f1ce3d481e9a8 && \ - export LIBTOOL_DOWNLOAD_URL=http://ftp.gnu.org/gnu/libtool && \ - manylinux-entrypoint /build_scripts/install-libtool.sh - -COPY build_scripts/install-libxcrypt.sh /build_scripts/ -RUN export LIBXCRYPT_VERSION=4.4.28 && \ - export LIBXCRYPT_HASH=db7e37901969cb1d1e8020cb73a991ef81e48e31ea5b76a101862c806426b457 && \ - export LIBXCRYPT_DOWNLOAD_URL=https://github.com/besser82/libxcrypt/archive && \ - export PERL_ROOT=perl-5.34.0 && \ - export PERL_HASH=551efc818b968b05216024fb0b727ef2ad4c100f8cb6b43fab615fa78ae5be9a && \ - export PERL_DOWNLOAD_URL=https://www.cpan.org/src/5.0 && \ - manylinux-entrypoint /build_scripts/install-libxcrypt.sh - -FROM runtime_base AS build_base -COPY build_scripts/install-build-packages.sh /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-build-packages.sh - - -FROM build_base AS build_git -COPY build_scripts/build-git.sh /build_scripts/ -RUN export GIT_ROOT=git-2.36.2 && \ - export GIT_HASH=6dc2cdea5fb23d823ba4871cc23222c1db31dfbb6d6c6ff74c4128700df57c68 && \ - export GIT_DOWNLOAD_URL=https://www.kernel.org/pub/software/scm/git && \ - manylinux-entrypoint /build_scripts/build-git.sh - - -FROM 
build_base AS build_cpython -COPY build_scripts/build-sqlite3.sh /build_scripts/ -RUN export SQLITE_AUTOCONF_ROOT=sqlite-autoconf-3390200 && \ - export SQLITE_AUTOCONF_HASH=852be8a6183a17ba47cee0bbff7400b7aa5affd283bf3beefc34fcd088a239de && \ - export SQLITE_AUTOCONF_DOWNLOAD_URL=https://www.sqlite.org/2022 && \ - manylinux-entrypoint /build_scripts/build-sqlite3.sh - -COPY build_scripts/build-openssl.sh /build_scripts/ -RUN export OPENSSL_ROOT=openssl-1.1.1q && \ - export OPENSSL_HASH=d7939ce614029cdff0b6c20f0e2e5703158a489a72b2507b8bd51bf8c8fd10ca && \ - export OPENSSL_DOWNLOAD_URL=https://www.openssl.org/source && \ - manylinux-entrypoint /build_scripts/build-openssl.sh - -COPY build_scripts/build-cpython.sh /build_scripts/ - - -FROM build_cpython AS build_cpython38 -COPY build_scripts/ambv-pubkey.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.8.13 - - -FROM build_cpython AS build_cpython39 -COPY build_scripts/ambv-pubkey.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.9.13 - - -FROM build_cpython AS build_cpython310 -COPY build_scripts/cpython-pubkey-310-311.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.10.5 - -FROM build_cpython AS build_cpython311 -COPY build_scripts/cpython-pubkey-310-311.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.11.2 - -FROM build_cpython AS build_cpython312 -COPY build_scripts/cpython-pubkey-312-313.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.12.1 - -FROM build_cpython AS all_python -COPY build_scripts/install-pypy.sh \ - build_scripts/pypy.sha256 \ - build_scripts/finalize-python.sh \ - /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.8 7.3.9 -RUN manylinux-entrypoint /build_scripts/install-pypy.sh 3.9 7.3.9 -COPY --from=build_cpython38 
/opt/_internal /opt/_internal/ -COPY --from=build_cpython39 /opt/_internal /opt/_internal/ -COPY --from=build_cpython310 /opt/_internal /opt/_internal/ -COPY --from=build_cpython311 /opt/_internal /opt/_internal/ -COPY --from=build_cpython312 /opt/_internal /opt/_internal/ -RUN manylinux-entrypoint /build_scripts/finalize-python.sh - - -FROM runtime_base -COPY --from=build_git /manylinux-rootfs / -COPY --from=build_cpython /manylinux-rootfs / -COPY --from=all_python /opt/_internal /opt/_internal/ -COPY build_scripts/finalize.sh \ - build_scripts/python-tag-abi-tag.py \ - build_scripts/requirements3.8.txt \ - build_scripts/requirements3.9.txt \ - build_scripts/requirements3.10.txt \ - build_scripts/requirements3.11.txt \ - build_scripts/requirements3.12.txt \ - build_scripts/requirements-base-tools.txt \ - /build_scripts/ -COPY build_scripts/requirements-tools/* /build_scripts/requirements-tools/ -RUN manylinux-entrypoint /build_scripts/finalize.sh && rm -rf /build_scripts - -ENV SSL_CERT_FILE=/opt/_internal/certs.pem - -CMD ["/bin/bash"] - -#Build manylinux2014 docker image end ARG PYTHON_VERSION=3.9 ARG TORCH_VERSION=2.1.0 ARG OPSET_VERSION=17 diff --git a/tools/ci_build/github/linux/docker/Dockerfile.package_ubi8_cuda_tensorrt10_0 b/tools/ci_build/github/linux/docker/Dockerfile.package_ubi8_cuda_tensorrt10_0 index 8ef8bfcb53fc..86c178aae519 100644 --- a/tools/ci_build/github/linux/docker/Dockerfile.package_ubi8_cuda_tensorrt10_0 +++ b/tools/ci_build/github/linux/docker/Dockerfile.package_ubi8_cuda_tensorrt10_0 @@ -9,20 +9,11 @@ ARG BASEIMAGE=nvidia/cuda:11.8.0-cudnn8-devel-ubi8 ARG TRT_VERSION=10.0.1.6-1.cuda11.8 FROM $BASEIMAGE AS base ARG TRT_VERSION -ENV PATH /usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/src/tensorrt/bin:${PATH} +ENV PATH /opt/python/cp38-cp38/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/src/tensorrt/bin:${PATH} RUN dnf install -y bash wget &&\ dnf clean dbcache -# Install python3 -RUN dnf install -y \ - python3.8 \ - python38-pip \ - 
python38-wheel &&\ - cd /usr/local/bin &&\ - ln -s /usr/bin/python3 python3.8 &&\ - ln -s /usr/bin/pip3 pip3.8; - RUN pip3 install --upgrade pip RUN pip3 install setuptools>=68.2.2 diff --git a/tools/ci_build/github/linux/docker/Dockerfile.package_ubuntu_2004_gpu b/tools/ci_build/github/linux/docker/Dockerfile.package_ubuntu_2004_gpu index 95a7a03ee978..5ef56fd885ca 100644 --- a/tools/ci_build/github/linux/docker/Dockerfile.package_ubuntu_2004_gpu +++ b/tools/ci_build/github/linux/docker/Dockerfile.package_ubuntu_2004_gpu @@ -46,7 +46,9 @@ RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/ libnvonnxparsers-dev=${TRT_VERSION} \ libnvonnxparsers10=${TRT_VERSION} \ tensorrt-dev=${TRT_VERSION} \ - libnvinfer-bin=${TRT_VERSION} + libnvinfer-bin=${TRT_VERSION} &&\ + if [ $(echo $CUDA_VERSION | cut -d"." -f1) -ge 12 ]; then apt-get install -y cudnn9-cuda-12 ; fi +# ^^^^^^^^^^^If cuda version is 12 or higher, install cudnn 9 for cuda 12 ADD scripts /tmp/scripts RUN cd /tmp/scripts && /tmp/scripts/install_dotnet.sh && rm -rf /tmp/scripts diff --git a/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/Dockerfile b/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/Dockerfile new file mode 100644 index 000000000000..9a74788300ec --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/Dockerfile @@ -0,0 +1,11 @@ +FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_aarch64_ubi8_gcc12:20240531.1 + +ADD scripts /tmp/scripts +RUN cd /tmp/scripts && /tmp/scripts/install_centos.sh && /tmp/scripts/install_deps.sh && rm -rf /tmp/scripts + +ARG BUILD_UID=1001 +ARG BUILD_USER=onnxruntimedev +RUN adduser --uid $BUILD_UID $BUILD_USER +WORKDIR /home/$BUILD_USER +USER $BUILD_USER + diff --git a/tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts/install_centos.sh b/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/scripts/install_centos.sh similarity 
index 100% rename from tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts/install_centos.sh rename to tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/scripts/install_centos.sh diff --git a/tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts/install_deps.sh b/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/scripts/install_deps.sh similarity index 100% rename from tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts/install_deps.sh rename to tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/scripts/install_deps.sh diff --git a/tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts/install_protobuf.sh b/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/scripts/install_protobuf.sh similarity index 100% rename from tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts/install_protobuf.sh rename to tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/scripts/install_protobuf.sh diff --git a/tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts/requirements.txt b/tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/scripts/requirements.txt similarity index 100% rename from tools/ci_build/github/linux/docker/inference/x64/python/cpu/scripts/requirements.txt rename to tools/ci_build/github/linux/docker/inference/aarch64/python/cpu/scripts/requirements.txt diff --git a/tools/ci_build/github/linux/docker/inference/x64/default/gpu/Dockerfile b/tools/ci_build/github/linux/docker/inference/x64/default/gpu/Dockerfile deleted file mode 100644 index b1ff40e8effe..000000000000 --- a/tools/ci_build/github/linux/docker/inference/x64/default/gpu/Dockerfile +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -# This file is used by Zip-Nuget Packaging NoContribOps Pipeline,Zip-Nuget-Java Packaging Pipeline -ARG BASEIMAGE=nvidia/cuda:11.8.0-cudnn8-devel-ubi8 -FROM $BASEIMAGE -ENV PATH /usr/lib/jvm/msopenjdk-11/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin -ENV LANG=en_US.UTF-8 -ENV LC_ALL=en_US.UTF-8 -ENV JAVA_HOME=/usr/lib/jvm/msopenjdk-11 - -ADD scripts /tmp/scripts -RUN cd /tmp/scripts && /tmp/scripts/install_centos.sh && /tmp/scripts/install_deps.sh && rm -rf /tmp/scripts - -ARG BUILD_UID=1001 -ARG BUILD_USER=onnxruntimedev -RUN adduser --uid $BUILD_UID $BUILD_USER -WORKDIR /home/$BUILD_USER -USER $BUILD_USER diff --git a/tools/ci_build/github/linux/docker/inference/x64/default/gpu/scripts/install_centos.sh b/tools/ci_build/github/linux/docker/inference/x64/default/gpu/scripts/install_centos.sh deleted file mode 100755 index 31e3e40f1b7e..000000000000 --- a/tools/ci_build/github/linux/docker/inference/x64/default/gpu/scripts/install_centos.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -set -e -x - -os_major_version=$(tr -dc '0-9.' < /etc/redhat-release |cut -d \. 
-f1) - -echo "installing for CentOS version : $os_major_version" -rpm -Uvh https://packages.microsoft.com/config/centos/$os_major_version/packages-microsoft-prod.rpm -dnf install -y python39-devel glibc-langpack-\* glibc-locale-source which redhat-lsb-core expat-devel tar unzip zlib-devel make bzip2 bzip2-devel msopenjdk-11 -locale diff --git a/tools/ci_build/github/linux/docker/inference/x64/python/cpu/Dockerfile.manylinux2_28_cpu b/tools/ci_build/github/linux/docker/inference/x64/python/cpu/Dockerfile.manylinux2_28_cpu deleted file mode 100644 index 66fe0cafd945..000000000000 --- a/tools/ci_build/github/linux/docker/inference/x64/python/cpu/Dockerfile.manylinux2_28_cpu +++ /dev/null @@ -1,163 +0,0 @@ -ARG BASEIMAGE=amd64/almalinux:8 -ARG POLICY=manylinux_2_28 -ARG PLATFORM=x86_64 -ARG DEVTOOLSET_ROOTPATH=/opt/rh/gcc-toolset-12/root -ARG LD_LIBRARY_PATH_ARG=/opt/rh/gcc-toolset-12/root/usr/lib64:/opt/rh/gcc-toolset-12/root/usr/lib:/opt/rh/gcc-toolset-12/root/usr/lib64/dyninst:/opt/rh/gcc-toolset-12/root/usr/lib/dyninst:/usr/local/lib64 -ARG PREPEND_PATH=/opt/rh/gcc-toolset-12/root/usr/bin: - -#Build manylinux2014 docker image begin -FROM $BASEIMAGE AS runtime_base -ARG POLICY -ARG PLATFORM -ARG DEVTOOLSET_ROOTPATH -ARG LD_LIBRARY_PATH_ARG -ARG PREPEND_PATH -LABEL maintainer="The ManyLinux project" - -ENV AUDITWHEEL_POLICY=${POLICY} AUDITWHEEL_ARCH=${PLATFORM} AUDITWHEEL_PLAT=${POLICY}_${PLATFORM} -ENV LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 LANGUAGE=en_US.UTF-8 -ENV DEVTOOLSET_ROOTPATH=${DEVTOOLSET_ROOTPATH} -ENV LD_LIBRARY_PATH=${LD_LIBRARY_PATH_ARG} -ENV PATH=${PREPEND_PATH}${PATH} -ENV PKG_CONFIG_PATH=/usr/local/lib/pkgconfig - -# first copy the fixup mirrors script, keep the script around -COPY build_scripts/fixup-mirrors.sh /usr/local/sbin/fixup-mirrors - -# setup entrypoint, this will wrap commands with `linux32` with i686 images -COPY build_scripts/install-entrypoint.sh \ - build_scripts/build_utils.sh \ - /build_scripts/ - -RUN 
/build_scripts/install-entrypoint.sh && rm -rf /build_scripts -COPY manylinux-entrypoint /usr/local/bin/manylinux-entrypoint -ENTRYPOINT ["manylinux-entrypoint"] - -COPY build_scripts/install-runtime-packages.sh \ - build_scripts/build_utils.sh \ - /build_scripts/ -RUN manylinux-entrypoint /build_scripts/install-runtime-packages.sh && rm -rf /build_scripts/ - -COPY build_scripts/build_utils.sh /build_scripts/ - -COPY build_scripts/install-autoconf.sh /build_scripts/ -RUN export AUTOCONF_ROOT=autoconf-2.71 && \ - export AUTOCONF_HASH=431075ad0bf529ef13cb41e9042c542381103e80015686222b8a9d4abef42a1c && \ - export AUTOCONF_DOWNLOAD_URL=http://ftp.gnu.org/gnu/autoconf && \ - manylinux-entrypoint /build_scripts/install-autoconf.sh - -COPY build_scripts/install-automake.sh /build_scripts/ -RUN export AUTOMAKE_ROOT=automake-1.16.5 && \ - export AUTOMAKE_HASH=07bd24ad08a64bc17250ce09ec56e921d6343903943e99ccf63bbf0705e34605 && \ - export AUTOMAKE_DOWNLOAD_URL=http://ftp.gnu.org/gnu/automake && \ - manylinux-entrypoint /build_scripts/install-automake.sh - -COPY build_scripts/install-libtool.sh /build_scripts/ -RUN export LIBTOOL_ROOT=libtool-2.4.7 && \ - export LIBTOOL_HASH=04e96c2404ea70c590c546eba4202a4e12722c640016c12b9b2f1ce3d481e9a8 && \ - export LIBTOOL_DOWNLOAD_URL=http://ftp.gnu.org/gnu/libtool && \ - manylinux-entrypoint /build_scripts/install-libtool.sh - -COPY build_scripts/install-libxcrypt.sh /build_scripts/ -RUN export LIBXCRYPT_VERSION=4.4.28 && \ - export LIBXCRYPT_HASH=db7e37901969cb1d1e8020cb73a991ef81e48e31ea5b76a101862c806426b457 && \ - export LIBXCRYPT_DOWNLOAD_URL=https://github.com/besser82/libxcrypt/archive && \ - export PERL_ROOT=perl-5.34.0 && \ - export PERL_HASH=551efc818b968b05216024fb0b727ef2ad4c100f8cb6b43fab615fa78ae5be9a && \ - export PERL_DOWNLOAD_URL=https://www.cpan.org/src/5.0 && \ - manylinux-entrypoint /build_scripts/install-libxcrypt.sh - -FROM runtime_base AS build_base -COPY build_scripts/install-build-packages.sh /build_scripts/ -RUN 
manylinux-entrypoint /build_scripts/install-build-packages.sh - - -FROM build_base AS build_git -COPY build_scripts/build-git.sh /build_scripts/ -RUN export GIT_ROOT=git-2.36.2 && \ - export GIT_HASH=6dc2cdea5fb23d823ba4871cc23222c1db31dfbb6d6c6ff74c4128700df57c68 && \ - export GIT_DOWNLOAD_URL=https://www.kernel.org/pub/software/scm/git && \ - manylinux-entrypoint /build_scripts/build-git.sh - - -FROM build_base AS build_cpython -COPY build_scripts/build-sqlite3.sh /build_scripts/ -RUN export SQLITE_AUTOCONF_ROOT=sqlite-autoconf-3390200 && \ - export SQLITE_AUTOCONF_HASH=852be8a6183a17ba47cee0bbff7400b7aa5affd283bf3beefc34fcd088a239de && \ - export SQLITE_AUTOCONF_DOWNLOAD_URL=https://www.sqlite.org/2022 && \ - manylinux-entrypoint /build_scripts/build-sqlite3.sh - -COPY build_scripts/build-openssl.sh /build_scripts/ -RUN export OPENSSL_ROOT=openssl-1.1.1q && \ - export OPENSSL_HASH=d7939ce614029cdff0b6c20f0e2e5703158a489a72b2507b8bd51bf8c8fd10ca && \ - export OPENSSL_DOWNLOAD_URL=https://www.openssl.org/source && \ - manylinux-entrypoint /build_scripts/build-openssl.sh - -COPY build_scripts/build-cpython.sh /build_scripts/ - - -FROM build_cpython AS build_cpython38 -COPY build_scripts/ambv-pubkey.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.8.13 - - -FROM build_cpython AS build_cpython39 -COPY build_scripts/ambv-pubkey.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.9.13 - - -FROM build_cpython AS build_cpython310 -COPY build_scripts/cpython-pubkey-310-311.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.10.5 - -FROM build_cpython AS build_cpython311 -COPY build_scripts/cpython-pubkey-310-311.txt /build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.11.2 - -FROM build_cpython AS build_cpython312 -COPY build_scripts/cpython-pubkey-312-313.txt 
/build_scripts/cpython-pubkeys.txt -RUN manylinux-entrypoint /build_scripts/build-cpython.sh 3.12.1 - -FROM build_cpython AS all_python -COPY build_scripts/finalize-python.sh \ - /build_scripts/ - -COPY --from=build_cpython38 /opt/_internal /opt/_internal/ -COPY --from=build_cpython39 /opt/_internal /opt/_internal/ -COPY --from=build_cpython310 /opt/_internal /opt/_internal/ -COPY --from=build_cpython311 /opt/_internal /opt/_internal/ -COPY --from=build_cpython312 /opt/_internal /opt/_internal/ -RUN manylinux-entrypoint /build_scripts/finalize-python.sh - - -FROM runtime_base -COPY --from=build_git /manylinux-rootfs / -COPY --from=build_cpython /manylinux-rootfs / -COPY --from=all_python /opt/_internal /opt/_internal/ -COPY build_scripts/finalize.sh \ - build_scripts/python-tag-abi-tag.py \ - build_scripts/requirements3.8.txt \ - build_scripts/requirements3.9.txt \ - build_scripts/requirements3.10.txt \ - build_scripts/requirements3.11.txt \ - build_scripts/requirements3.12.txt \ - build_scripts/requirements-base-tools.txt \ - /build_scripts/ -COPY build_scripts/requirements-tools/* /build_scripts/requirements-tools/ -RUN manylinux-entrypoint /build_scripts/finalize.sh && rm -rf /build_scripts - -ENV SSL_CERT_FILE=/opt/_internal/certs.pem - -CMD ["/bin/bash"] - -#Build manylinux2014 docker image end - -ADD scripts /tmp/scripts -RUN cd /tmp/scripts && /tmp/scripts/install_centos.sh && /tmp/scripts/install_deps.sh && rm -rf /tmp/scripts - -ARG BUILD_UID=1001 -ARG BUILD_USER=onnxruntimedev -RUN adduser --uid $BUILD_UID $BUILD_USER -WORKDIR /home/$BUILD_USER -USER $BUILD_USER - diff --git a/tools/ci_build/github/linux/docker/inference/x64/default/cpu/Dockerfile b/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile similarity index 100% rename from tools/ci_build/github/linux/docker/inference/x64/default/cpu/Dockerfile rename to tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/Dockerfile diff --git 
a/tools/ci_build/github/linux/docker/inference/x64/default/cpu/scripts/install_centos.sh b/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/scripts/install_centos.sh similarity index 53% rename from tools/ci_build/github/linux/docker/inference/x64/default/cpu/scripts/install_centos.sh rename to tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/scripts/install_centos.sh index dc105805a8a1..17b80150c848 100755 --- a/tools/ci_build/github/linux/docker/inference/x64/default/cpu/scripts/install_centos.sh +++ b/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/scripts/install_centos.sh @@ -1,9 +1,9 @@ !/bin/bash set -e -x - -os_major_version=$(tr -dc '0-9.' < /etc/redhat-release |cut -d \. -f1) - -echo "installing for CentOS version : $os_major_version" -rpm -Uvh https://packages.microsoft.com/config/centos/$os_major_version/packages-microsoft-prod.rpm +if [ ! -f /etc/yum.repos.d/microsoft-prod.repo ]; then + os_major_version=$(tr -dc '0-9.' < /etc/redhat-release |cut -d \. 
-f1) + echo "installing for CentOS version : $os_major_version" + rpm -Uvh https://packages.microsoft.com/config/centos/$os_major_version/packages-microsoft-prod.rpm +fi dnf install -y python39-devel glibc-langpack-\* glibc-locale-source which redhat-lsb-core expat-devel tar unzip zlib-devel make bzip2 bzip2-devel msopenjdk-11 graphviz gcc-toolset-12-binutils gcc-toolset-12-gcc gcc-toolset-12-gcc-c++ gcc-toolset-12-gcc-gfortran gcc-toolset-12-libasan-devel libasan.x86_64 locale diff --git a/tools/ci_build/github/linux/docker/inference/x64/default/cpu/scripts/install_deps.sh b/tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/scripts/install_deps.sh similarity index 100% rename from tools/ci_build/github/linux/docker/inference/x64/default/cpu/scripts/install_deps.sh rename to tools/ci_build/github/linux/docker/inference/x86_64/default/cpu/scripts/install_deps.sh diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda11/Dockerfile b/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda11/Dockerfile new file mode 100644 index 000000000000..051f9cc6a267 --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda11/Dockerfile @@ -0,0 +1,46 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+ +# This file is used by Zip-Nuget Packaging NoContribOps Pipeline,Zip-Nuget-Java Packaging Pipeline +FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11_dotnet:20240531.1 + +ARG TRT_VERSION +RUN rpm -Uvh https://packages.microsoft.com/config/centos/8/packages-microsoft-prod.rpm && dnf install -y msopenjdk-11 +#Install TensorRT only if TRT_VERSION is not empty +RUN if [ -n "$TRT_VERSION" ]; then \ + echo "TRT_VERSION is $TRT_VERSION" && \ + dnf -y install \ + libnvinfer10-${TRT_VERSION} \ + libnvinfer-headers-devel-${TRT_VERSION} \ + libnvinfer-devel-${TRT_VERSION} \ + libnvinfer-lean10-${TRT_VERSION} \ + libnvonnxparsers10-${TRT_VERSION} \ + libnvonnxparsers-devel-${TRT_VERSION} \ + libnvinfer-dispatch10-${TRT_VERSION} \ + libnvinfer-plugin10-${TRT_VERSION} \ + libnvinfer-vc-plugin10-${TRT_VERSION} \ + libnvinfer-bin-${TRT_VERSION} \ + libnvinfer-plugin10-${TRT_VERSION} \ + libnvinfer-plugin-devel-${TRT_VERSION} \ + libnvinfer-vc-plugin-devel-${TRT_VERSION} \ + libnvinfer-lean-devel-${TRT_VERSION} \ + libnvinfer-dispatch-devel-${TRT_VERSION} \ + libnvinfer-headers-plugin-devel-${TRT_VERSION} && \ + dnf clean dbcache ; \ +else \ + echo "TRT_VERSION is none skipping Tensor RT Installation" ; \ +fi + +ENV PATH /usr/lib/jvm/msopenjdk-11/bin:$PATH +ENV LANG=en_US.UTF-8 +ENV LC_ALL=en_US.UTF-8 +ENV JAVA_HOME=/usr/lib/jvm/msopenjdk-11 +ENV CUDAHOSTCXX /opt/rh/gcc-toolset-11/root/usr/bin/g++ +ADD scripts /tmp/scripts +RUN cd /tmp/scripts && /tmp/scripts/install_deps.sh && rm -rf /tmp/scripts + +ARG BUILD_UID=1001 +ARG BUILD_USER=onnxruntimedev +RUN adduser --uid $BUILD_UID $BUILD_USER +WORKDIR /home/$BUILD_USER +USER $BUILD_USER diff --git a/tools/ci_build/github/linux/docker/inference/x64/default/gpu/scripts/install_deps.sh b/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda11/scripts/install_deps.sh similarity index 89% rename from 
tools/ci_build/github/linux/docker/inference/x64/default/gpu/scripts/install_deps.sh rename to tools/ci_build/github/linux/docker/inference/x86_64/default/cuda11/scripts/install_deps.sh index eb6d3315b97e..3c88c516bee4 100755 --- a/tools/ci_build/github/linux/docker/inference/x64/default/gpu/scripts/install_deps.sh +++ b/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda11/scripts/install_deps.sh @@ -39,8 +39,8 @@ mkdir -p /tmp/src cd /tmp/src echo "Installing cmake" -GetFile https://github.com/Kitware/CMake/releases/download/v3.26.3/cmake-3.26.3-linux-`uname -m`.tar.gz /tmp/src/cmake-3.26.3-linux-`uname -m`.tar.gz -tar -zxf /tmp/src/cmake-3.26.3-linux-`uname -m`.tar.gz --strip=1 -C /usr +GetFile https://github.com/Kitware/CMake/releases/download/v3.29.3/cmake-3.29.3-linux-`uname -m`.tar.gz /tmp/src/cmake-3.29.3-linux-`uname -m`.tar.gz +tar -zxf /tmp/src/cmake-3.29.3-linux-`uname -m`.tar.gz --strip=1 -C /usr echo "Installing Ninja" GetFile https://github.com/ninja-build/ninja/archive/v1.10.0.tar.gz /tmp/src/ninja-linux.tar.gz diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/Dockerfile b/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/Dockerfile new file mode 100644 index 000000000000..ef69235ce5c1 --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/Dockerfile @@ -0,0 +1,48 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
+ +# This file is used by Zip-Nuget Packaging NoContribOps Pipeline,Zip-Nuget-Java Packaging Pipeline +FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda12_x64_ubi8_gcc12_dotnet:20240610.1 +ARG TRT_VERSION + +#Install TensorRT only if TRT_VERSION is not empty +RUN if [ -n "$TRT_VERSION" ]; then \ + echo "TRT_VERSION is $TRT_VERSION" && \ + dnf -y install \ + libnvinfer10-${TRT_VERSION} \ + libnvinfer-headers-devel-${TRT_VERSION} \ + libnvinfer-devel-${TRT_VERSION} \ + libnvinfer-lean10-${TRT_VERSION} \ + libnvonnxparsers10-${TRT_VERSION} \ + libnvonnxparsers-devel-${TRT_VERSION} \ + libnvinfer-dispatch10-${TRT_VERSION} \ + libnvinfer-plugin10-${TRT_VERSION} \ + libnvinfer-vc-plugin10-${TRT_VERSION} \ + libnvinfer-bin-${TRT_VERSION} \ + libnvinfer-plugin10-${TRT_VERSION} \ + libnvinfer-plugin-devel-${TRT_VERSION} \ + libnvinfer-vc-plugin-devel-${TRT_VERSION} \ + libnvinfer-lean-devel-${TRT_VERSION} \ + libnvinfer-dispatch-devel-${TRT_VERSION} \ + libnvinfer-headers-plugin-devel-${TRT_VERSION} && \ + dnf clean dbcache ; \ +else \ + echo "TRT_VERSION is none skipping Tensor RT Installation" ; \ +fi + + + +ENV LANG=en_US.UTF-8 +ENV LC_ALL=en_US.UTF-8 + +ENV CUDAHOSTCXX /opt/rh/gcc-toolset-12/root/usr/bin/g++ +ADD scripts /tmp/scripts +RUN sed -i 's/enabled\s*=\s*1/enabled = 1\nexclude=dotnet* aspnet* netstandard*/g' /etc/yum.repos.d/ubi.repo && \ + rpm -Uvh https://packages.microsoft.com/config/centos/8/packages-microsoft-prod.rpm && dnf install -y msopenjdk-11 && cd /tmp/scripts && /tmp/scripts/install_deps.sh && rm -rf /tmp/scripts +ENV PATH /usr/lib/jvm/msopenjdk-11/bin:$PATH +ENV JAVA_HOME=/usr/lib/jvm/msopenjdk-11 +ARG BUILD_UID=1001 +ARG BUILD_USER=onnxruntimedev +RUN adduser --uid $BUILD_UID $BUILD_USER +WORKDIR /home/$BUILD_USER +USER $BUILD_USER diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/scripts/install_deps.sh 
b/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/scripts/install_deps.sh new file mode 100755 index 000000000000..3c88c516bee4 --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/default/cuda12/scripts/install_deps.sh @@ -0,0 +1,68 @@ +#!/bin/bash +set -e -x + +# Download a file from internet +function GetFile { + local uri=$1 + local path=$2 + local force=${3:-false} + local download_retries=${4:-5} + local retry_wait_time_seconds=${5:-30} + + if [[ -f $path ]]; then + if [[ $force = false ]]; then + echo "File '$path' already exists. Skipping download" + return 0 + else + rm -rf $path + fi + fi + + if [[ -f $uri ]]; then + echo "'$uri' is a file path, copying file to '$path'" + cp $uri $path + return $? + fi + + echo "Downloading $uri" + # Use aria2c if available, otherwise use curl + if command -v aria2c > /dev/null; then + aria2c -q -d $(dirname $path) -o $(basename $path) "$uri" + else + curl "$uri" -sSL --retry $download_retries --retry-delay $retry_wait_time_seconds --create-dirs -o "$path" --fail + fi + + return $? +} +mkdir -p /tmp/src + +cd /tmp/src + +echo "Installing cmake" +GetFile https://github.com/Kitware/CMake/releases/download/v3.29.3/cmake-3.29.3-linux-`uname -m`.tar.gz /tmp/src/cmake-3.29.3-linux-`uname -m`.tar.gz +tar -zxf /tmp/src/cmake-3.29.3-linux-`uname -m`.tar.gz --strip=1 -C /usr + +echo "Installing Ninja" +GetFile https://github.com/ninja-build/ninja/archive/v1.10.0.tar.gz /tmp/src/ninja-linux.tar.gz +tar -zxf ninja-linux.tar.gz +pushd ninja-1.10.0 +cmake -Bbuild-cmake -H. 
+cmake --build build-cmake +mv ./build-cmake/ninja /usr/bin +popd + +echo "Installing Node.js" +CPU_ARCH=`uname -m` +if [[ "$CPU_ARCH" = "x86_64" ]]; then + NODEJS_ARCH=x64 +elif [[ "$CPU_ARCH" = "aarch64" ]]; then + NODEJS_ARCH=arm64 +else + NODEJS_ARCH=$CPU_ARCH +fi +# The EOL for nodejs v18.17.1 LTS is April 2025 +GetFile https://nodejs.org/dist/v18.17.1/node-v18.17.1-linux-${NODEJS_ARCH}.tar.gz /tmp/src/node-v18.17.1-linux-${NODEJS_ARCH}.tar.gz +tar --strip 1 -xf /tmp/src/node-v18.17.1-linux-${NODEJS_ARCH}.tar.gz -C /usr + +cd / +rm -rf /tmp/src diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/Dockerfile b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/Dockerfile new file mode 100644 index 000000000000..2f568a78a13d --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/Dockerfile @@ -0,0 +1,11 @@ +FROM onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cpu_x64_ubi8_gcc12:20240531.1 + +ADD scripts /tmp/scripts +RUN cd /tmp/scripts && /tmp/scripts/install_centos.sh && /tmp/scripts/install_deps.sh && rm -rf /tmp/scripts + +ARG BUILD_UID=1001 +ARG BUILD_USER=onnxruntimedev +RUN adduser --uid $BUILD_UID $BUILD_USER +WORKDIR /home/$BUILD_USER +USER $BUILD_USER + diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_centos.sh b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_centos.sh new file mode 100755 index 000000000000..c81e57c60c9d --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_centos.sh @@ -0,0 +1,20 @@ +#!/bin/bash +set -e + +os_major_version=$(tr -dc '0-9.' < /etc/redhat-release |cut -d \. 
-f1) + +echo "installing for os major version : $os_major_version" +dnf install -y glibc-langpack-\* +yum install -y which redhat-lsb-core expat-devel tar unzip zlib-devel make bzip2 bzip2-devel perl-IPC-Cmd openssl-devel wget + +# export PATH=/opt/python/cp38-cp38/bin:$PATH + +echo "installing rapidjson for AzureEP" +wget https://github.com/Tencent/rapidjson/archive/refs/tags/v1.1.0.tar.gz +tar zxvf v1.1.0.tar.gz +cd rapidjson-1.1.0 +mkdir build +cd build +cmake .. +cmake --install . +cd ../.. diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_deps.sh b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_deps.sh new file mode 100755 index 000000000000..f576b867da73 --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_deps.sh @@ -0,0 +1,36 @@ +#!/bin/bash +set -e -x +pushd . +PYTHON_EXES=("/opt/python/cp38-cp38/bin/python3.8" "/opt/python/cp39-cp39/bin/python3.9" "/opt/python/cp310-cp310/bin/python3.10" "/opt/python/cp311-cp311/bin/python3.11" "/opt/python/cp312-cp312/bin/python3.12") +CURRENT_DIR=$(pwd) +if ! [ -x "$(command -v protoc)" ]; then + $CURRENT_DIR/install_protobuf.sh +fi +popd +export ONNX_ML=1 +export CMAKE_ARGS="-DONNX_GEN_PB_TYPE_STUBS=OFF -DONNX_WERROR=OFF" + +for PYTHON_EXE in "${PYTHON_EXES[@]}" +do + ${PYTHON_EXE} -m pip install -r requirements.txt +done + +# No release binary for ccache aarch64, so we need to build it from source. +if ! [ -x "$(command -v ccache)" ]; then + ccache_url="https://github.com/ccache/ccache/archive/refs/tags/v4.8.tar.gz" + pushd . + curl -sSL --retry 5 --retry-delay 10 --create-dirs --fail -L -o ccache_src.tar.gz $ccache_url + mkdir ccache_main + cd ccache_main + tar -zxf ../ccache_src.tar.gz --strip=1 + + mkdir build + cd build + cmake -DCMAKE_INSTALL_PREFIX=/usr/local -DCMAKE_BUILD_TYPE=Release ..
 + make + make install + which ccache + popd + rm -f ccache_src.tar.gz + rm -rf ccache_main +fi diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_protobuf.sh b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_protobuf.sh new file mode 100755 index 000000000000..31b5ca6f9e69 --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/install_protobuf.sh @@ -0,0 +1,108 @@ +#!/bin/bash +set -e -x + +INSTALL_PREFIX='/usr' +DEP_FILE_PATH='/tmp/scripts/deps.txt' +while getopts "p:d:" parameter_Option +do case "${parameter_Option}" +in +p) INSTALL_PREFIX=${OPTARG};; +d) DEP_FILE_PATH=${OPTARG};; +esac +done + + + +EXTRA_CMAKE_ARGS="-DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_CXX_STANDARD=17" + +case "$(uname -s)" in + Darwin*) + echo 'Building ONNX Runtime on Mac OS X' + EXTRA_CMAKE_ARGS="$EXTRA_CMAKE_ARGS -DCMAKE_OSX_ARCHITECTURES=x86_64;arm64" + GCC_PATH=$(which clang) + GPLUSPLUS_PATH=$(which clang++) + ;; + Linux*) + SYS_LONG_BIT=$(getconf LONG_BIT) + DISTRIBUTOR=$(lsb_release -i -s) + + if [[ ("$DISTRIBUTOR" = "CentOS" || "$DISTRIBUTOR" = "RedHatEnterprise") && $SYS_LONG_BIT = "64" ]]; then + LIBDIR="lib64" + else + LIBDIR="lib" + fi + EXTRA_CMAKE_ARGS="$EXTRA_CMAKE_ARGS -DCMAKE_INSTALL_LIBDIR=$LIBDIR" + # Depending on how the compiler has been configured when it was built, sometimes "gcc -dumpversion" shows the full version. + GCC_VERSION=$(gcc -dumpversion | cut -d . -f 1) + #-fstack-clash-protection prevents attacks based on an overlapping heap and stack.
+ if [ "$GCC_VERSION" -ge 8 ]; then + CFLAGS="$CFLAGS -fstack-clash-protection" + CXXFLAGS="$CXXFLAGS -fstack-clash-protection" + fi + ARCH=$(uname -m) + GCC_PATH=$(which gcc) + GPLUSPLUS_PATH=$(which g++) + if [ "$ARCH" == "x86_64" ] && [ "$GCC_VERSION" -ge 9 ]; then + CFLAGS="$CFLAGS -fcf-protection" + CXXFLAGS="$CXXFLAGS -fcf-protection" + fi + export CFLAGS + export CXXFLAGS + ;; + *) + exit 1 +esac +mkdir -p "$INSTALL_PREFIX" + +if [ -x "$(command -v ninja)" ]; then + EXTRA_CMAKE_ARGS="$EXTRA_CMAKE_ARGS -G Ninja" +fi +echo "Installing abseil ..." +pushd . +absl_url=$(grep '^abseil_cpp' "$DEP_FILE_PATH" | cut -d ';' -f 2 ) +if [[ "$absl_url" = https* ]]; then + absl_url=$(echo $absl_url | sed 's/\.zip$/\.tar.gz/') + curl -sSL --retry 5 --retry-delay 10 --create-dirs --fail -L -o absl_src.tar.gz $absl_url + mkdir abseil + cd abseil + tar -zxf ../absl_src.tar.gz --strip=1 +else + cp $absl_url absl_src.zip + unzip absl_src.zip + cd */ +fi + +CC=$GCC_PATH CXX=$GPLUSPLUS_PATH cmake "." "-DABSL_PROPAGATE_CXX_STD=ON" "-DCMAKE_BUILD_TYPE=Release" "-DBUILD_TESTING=OFF" "-DABSL_USE_EXTERNAL_GOOGLETEST=ON" "-DCMAKE_PREFIX_PATH=$INSTALL_PREFIX" "-DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX" $EXTRA_CMAKE_ARGS +if [ -x "$(command -v ninja)" ]; then + ninja + ninja install +else + make -j$(getconf _NPROCESSORS_ONLN) + make install +fi +popd + +pushd . +echo "Installing protobuf ..." +protobuf_url=$(grep '^protobuf' $DEP_FILE_PATH | cut -d ';' -f 2 ) +if [[ "$protobuf_url" = https* ]]; then + protobuf_url=$(echo "$protobuf_url" | sed 's/\.zip$/\.tar.gz/') + curl -sSL --retry 5 --retry-delay 10 --create-dirs --fail -L -o protobuf_src.tar.gz "$protobuf_url" + mkdir protobuf + cd protobuf + tar -zxf ../protobuf_src.tar.gz --strip=1 +else + cp $protobuf_url protobuf_src.zip + unzip protobuf_src.zip + cd protobuf-* +fi + +CC=$GCC_PATH CXX=$GPLUSPLUS_PATH cmake . 
"-DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX" -DCMAKE_POSITION_INDEPENDENT_CODE=ON -Dprotobuf_BUILD_TESTS=OFF -DCMAKE_BUILD_TYPE=Release -Dprotobuf_WITH_ZLIB_DEFAULT=OFF -Dprotobuf_BUILD_SHARED_LIBS=OFF "-DCMAKE_PREFIX_PATH=$INSTALL_PREFIX" $EXTRA_CMAKE_ARGS -Dprotobuf_ABSL_PROVIDER=package +if [ -x "$(command -v ninja)" ]; then + ninja + ninja install +else + make -j$(getconf _NPROCESSORS_ONLN) + make install +fi +popd diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/requirements.txt b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/requirements.txt new file mode 100644 index 000000000000..8f56ee18ccd2 --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cpu/scripts/requirements.txt @@ -0,0 +1,11 @@ +numpy==1.21.6 ; python_version < '3.11' +numpy==1.24.2 ; python_version == '3.11' +numpy==1.26.0 ; python_version >= '3.12' +mypy +pytest +setuptools>=68.2.2 +wheel +onnx==1.16.0 +protobuf==4.21.12 +sympy==1.12 +flatbuffers diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/Dockerfile b/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/Dockerfile new file mode 100644 index 000000000000..3a7f410d3859 --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/Dockerfile @@ -0,0 +1,46 @@ +# The default ARGs are for cuda 11.8 with cudnn8, TensorRT is optional +# Please overwrite BASEIMAGE, TRT_VERSION and other arguments with +# --docker-build-args ' --build-arg BASEIMAGE=other_base_image --build-arg TRT_VERSION=other_trt_version etc...' 
+# for other cuda version and TRT version +ARG BASEIMAGE=nvidia/cuda:11.8.0-cudnn8-devel-ubi8 + +FROM $BASEIMAGE +ARG TRT_VERSION=10.0.1.6-1.cuda11.8 + +#Install TensorRT only if TRT_VERSION is not empty +RUN if [ -n "${TRT_VERSION}" ]; then \ + echo "TRT_VERSION is $TRT_VERSION" && \ + dnf -y install \ + libnvinfer10-${TRT_VERSION} \ + libnvinfer-headers-devel-${TRT_VERSION} \ + libnvinfer-devel-${TRT_VERSION} \ + libnvinfer-lean10-${TRT_VERSION} \ + libnvonnxparsers10-${TRT_VERSION} \ + libnvonnxparsers-devel-${TRT_VERSION} \ + libnvinfer-dispatch10-${TRT_VERSION} \ + libnvinfer-plugin10-${TRT_VERSION} \ + libnvinfer-vc-plugin10-${TRT_VERSION} \ + libnvinfer-bin-${TRT_VERSION} \ + libnvinfer-plugin10-${TRT_VERSION} \ + libnvinfer-plugin-devel-${TRT_VERSION} \ + libnvinfer-vc-plugin-devel-${TRT_VERSION} \ + libnvinfer-lean-devel-${TRT_VERSION} \ + libnvinfer-dispatch-devel-${TRT_VERSION} \ + libnvinfer-headers-plugin-devel-${TRT_VERSION} && \ + dnf clean dbcache ; \ +else \ + echo "TRT_VERSION is x${TRT_VERSION} skipping Tensor RT Installation" ; \ +fi + +ENV PATH /usr/local/cuda/bin:$PATH +ENV CUDA_MODULE_LOADING "LAZY" + +ADD scripts /tmp/scripts +RUN cd /tmp/scripts && /tmp/scripts/install_centos.sh && /tmp/scripts/install_deps.sh && rm -rf /tmp/scripts + +ARG BUILD_UID=1001 +ARG BUILD_USER=onnxruntimedev +RUN adduser --uid $BUILD_UID $BUILD_USER +WORKDIR /home/$BUILD_USER +USER $BUILD_USER + diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_centos.sh b/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_centos.sh new file mode 100755 index 000000000000..c81e57c60c9d --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_centos.sh @@ -0,0 +1,20 @@ +#!/bin/bash +set -e + +os_major_version=$(tr -dc '0-9.' < /etc/redhat-release |cut -d \. 
-f1) + +echo "installing for os major version : $os_major_version" +dnf install -y glibc-langpack-\* +yum install -y which redhat-lsb-core expat-devel tar unzip zlib-devel make bzip2 bzip2-devel perl-IPC-Cmd openssl-devel wget + +# export PATH=/opt/python/cp38-cp38/bin:$PATH + +echo "installing rapidjson for AzureEP" +wget https://github.com/Tencent/rapidjson/archive/refs/tags/v1.1.0.tar.gz +tar zxvf v1.1.0.tar.gz +cd rapidjson-1.1.0 +mkdir build +cd build +cmake .. +cmake --install . +cd ../.. diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_deps.sh b/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_deps.sh new file mode 100755 index 000000000000..f576b867da73 --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_deps.sh @@ -0,0 +1,36 @@ +#!/bin/bash +set -e -x +pushd . +PYTHON_EXES=("/opt/python/cp38-cp38/bin/python3.8" "/opt/python/cp39-cp39/bin/python3.9" "/opt/python/cp310-cp310/bin/python3.10" "/opt/python/cp311-cp311/bin/python3.11" "/opt/python/cp312-cp312/bin/python3.12") +CURRENT_DIR=$(pwd) +if ! [ -x "$(command -v protoc)" ]; then + $CURRENT_DIR/install_protobuf.sh +fi +popd +export ONNX_ML=1 +export CMAKE_ARGS="-DONNX_GEN_PB_TYPE_STUBS=OFF -DONNX_WERROR=OFF" + +for PYTHON_EXE in "${PYTHON_EXES[@]}" +do + ${PYTHON_EXE} -m pip install -r requirements.txt +done + +# No release binary for ccache aarch64, so we need to build it from source. +if ! [ -x "$(command -v ccache)" ]; then + ccache_url="https://github.com/ccache/ccache/archive/refs/tags/v4.8.tar.gz" + pushd . + curl -sSL --retry 5 --retry-delay 10 --create-dirs --fail -L -o ccache_src.tar.gz $ccache_url + mkdir ccache_main + cd ccache_main + tar -zxf ../ccache_src.tar.gz --strip=1 + + mkdir build + cd build + cmake -DCMAKE_INSTALL_PREFIX=/usr/local -DCMAKE_BUILD_TYPE=Release ..
 + make + make install + which ccache + popd + rm -f ccache_src.tar.gz + rm -rf ccache_main +fi diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_protobuf.sh b/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_protobuf.sh new file mode 100755 index 000000000000..31b5ca6f9e69 --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/install_protobuf.sh @@ -0,0 +1,108 @@ +#!/bin/bash +set -e -x + +INSTALL_PREFIX='/usr' +DEP_FILE_PATH='/tmp/scripts/deps.txt' +while getopts "p:d:" parameter_Option +do case "${parameter_Option}" +in +p) INSTALL_PREFIX=${OPTARG};; +d) DEP_FILE_PATH=${OPTARG};; +esac +done + + + +EXTRA_CMAKE_ARGS="-DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_CXX_STANDARD=17" + +case "$(uname -s)" in + Darwin*) + echo 'Building ONNX Runtime on Mac OS X' + EXTRA_CMAKE_ARGS="$EXTRA_CMAKE_ARGS -DCMAKE_OSX_ARCHITECTURES=x86_64;arm64" + GCC_PATH=$(which clang) + GPLUSPLUS_PATH=$(which clang++) + ;; + Linux*) + SYS_LONG_BIT=$(getconf LONG_BIT) + DISTRIBUTOR=$(lsb_release -i -s) + + if [[ ("$DISTRIBUTOR" = "CentOS" || "$DISTRIBUTOR" = "RedHatEnterprise") && $SYS_LONG_BIT = "64" ]]; then + LIBDIR="lib64" + else + LIBDIR="lib" + fi + EXTRA_CMAKE_ARGS="$EXTRA_CMAKE_ARGS -DCMAKE_INSTALL_LIBDIR=$LIBDIR" + # Depending on how the compiler has been configured when it was built, sometimes "gcc -dumpversion" shows the full version. + GCC_VERSION=$(gcc -dumpversion | cut -d . -f 1) + #-fstack-clash-protection prevents attacks based on an overlapping heap and stack.
+ if [ "$GCC_VERSION" -ge 8 ]; then + CFLAGS="$CFLAGS -fstack-clash-protection" + CXXFLAGS="$CXXFLAGS -fstack-clash-protection" + fi + ARCH=$(uname -m) + GCC_PATH=$(which gcc) + GPLUSPLUS_PATH=$(which g++) + if [ "$ARCH" == "x86_64" ] && [ "$GCC_VERSION" -ge 9 ]; then + CFLAGS="$CFLAGS -fcf-protection" + CXXFLAGS="$CXXFLAGS -fcf-protection" + fi + export CFLAGS + export CXXFLAGS + ;; + *) + exit 1 +esac +mkdir -p "$INSTALL_PREFIX" + +if [ -x "$(command -v ninja)" ]; then + EXTRA_CMAKE_ARGS="$EXTRA_CMAKE_ARGS -G Ninja" +fi +echo "Installing abseil ..." +pushd . +absl_url=$(grep '^abseil_cpp' "$DEP_FILE_PATH" | cut -d ';' -f 2 ) +if [[ "$absl_url" = https* ]]; then + absl_url=$(echo $absl_url | sed 's/\.zip$/\.tar.gz/') + curl -sSL --retry 5 --retry-delay 10 --create-dirs --fail -L -o absl_src.tar.gz $absl_url + mkdir abseil + cd abseil + tar -zxf ../absl_src.tar.gz --strip=1 +else + cp $absl_url absl_src.zip + unzip absl_src.zip + cd */ +fi + +CC=$GCC_PATH CXX=$GPLUSPLUS_PATH cmake "." "-DABSL_PROPAGATE_CXX_STD=ON" "-DCMAKE_BUILD_TYPE=Release" "-DBUILD_TESTING=OFF" "-DABSL_USE_EXTERNAL_GOOGLETEST=ON" "-DCMAKE_PREFIX_PATH=$INSTALL_PREFIX" "-DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX" $EXTRA_CMAKE_ARGS +if [ -x "$(command -v ninja)" ]; then + ninja + ninja install +else + make -j$(getconf _NPROCESSORS_ONLN) + make install +fi +popd + +pushd . +echo "Installing protobuf ..." +protobuf_url=$(grep '^protobuf' $DEP_FILE_PATH | cut -d ';' -f 2 ) +if [[ "$protobuf_url" = https* ]]; then + protobuf_url=$(echo "$protobuf_url" | sed 's/\.zip$/\.tar.gz/') + curl -sSL --retry 5 --retry-delay 10 --create-dirs --fail -L -o protobuf_src.tar.gz "$protobuf_url" + mkdir protobuf + cd protobuf + tar -zxf ../protobuf_src.tar.gz --strip=1 +else + cp $protobuf_url protobuf_src.zip + unzip protobuf_src.zip + cd protobuf-* +fi + +CC=$GCC_PATH CXX=$GPLUSPLUS_PATH cmake . 
"-DCMAKE_INSTALL_PREFIX=$INSTALL_PREFIX" -DCMAKE_POSITION_INDEPENDENT_CODE=ON -Dprotobuf_BUILD_TESTS=OFF -DCMAKE_BUILD_TYPE=Release -Dprotobuf_WITH_ZLIB_DEFAULT=OFF -Dprotobuf_BUILD_SHARED_LIBS=OFF "-DCMAKE_PREFIX_PATH=$INSTALL_PREFIX" $EXTRA_CMAKE_ARGS -Dprotobuf_ABSL_PROVIDER=package +if [ -x "$(command -v ninja)" ]; then + ninja + ninja install +else + make -j$(getconf _NPROCESSORS_ONLN) + make install +fi +popd diff --git a/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/requirements.txt b/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/requirements.txt new file mode 100644 index 000000000000..8f56ee18ccd2 --- /dev/null +++ b/tools/ci_build/github/linux/docker/inference/x86_64/python/cuda/scripts/requirements.txt @@ -0,0 +1,11 @@ +numpy==1.21.6 ; python_version < '3.11' +numpy==1.24.2 ; python_version == '3.11' +numpy==1.26.0 ; python_version >= '3.12' +mypy +pytest +setuptools>=68.2.2 +wheel +onnx==1.16.0 +protobuf==4.21.12 +sympy==1.12 +flatbuffers diff --git a/tools/ci_build/github/linux/docker/scripts/manylinux/install_centos.sh b/tools/ci_build/github/linux/docker/scripts/manylinux/install_centos.sh index 63b953a95add..dfda5ec73fdb 100755 --- a/tools/ci_build/github/linux/docker/scripts/manylinux/install_centos.sh +++ b/tools/ci_build/github/linux/docker/scripts/manylinux/install_centos.sh @@ -7,12 +7,10 @@ echo "installing for os major version : $os_major_version" if [ "$os_major_version" -gt 7 ]; then PACKAGE_MANAGER="dnf" $PACKAGE_MANAGER install -y which redhat-lsb-core expat-devel tar unzip zlib-devel make bzip2 bzip2-devel perl-IPC-Cmd openssl-devel wget -else - PACKAGE_MANAGER="yum" - $PACKAGE_MANAGER install -y which redhat-lsb-core expat-devel tar unzip zlib-devel make libunwind bzip2 bzip2-devel perl-IPC-Cmd openssl-devel wget fi -rpm -Uvh https://packages.microsoft.com/config/centos/$os_major_version/packages-microsoft-prod.rpm - +if [ ! 
-f /etc/yum.repos.d/microsoft-prod.repo ]; then + rpm -Uvh https://packages.microsoft.com/config/centos/$os_major_version/packages-microsoft-prod.rpm +fi # Install Java # Install automatic documentation generation dependencies $PACKAGE_MANAGER install -y msopenjdk-11 graphviz diff --git a/tools/ci_build/github/linux/ort_minimal/build_ort_and_check_binary_size.py b/tools/ci_build/github/linux/ort_minimal/build_ort_and_check_binary_size.py index e8d8094c4968..df530a7c5e9a 100644 --- a/tools/ci_build/github/linux/ort_minimal/build_ort_and_check_binary_size.py +++ b/tools/ci_build/github/linux/ort_minimal/build_ort_and_check_binary_size.py @@ -44,7 +44,7 @@ def main(): / "linux" / "docker" / "inference" - / "x64" + / "x86_64" / "python" / "cpu" / "scripts" diff --git a/tools/ci_build/github/linux/run_python_dockerbuild.sh b/tools/ci_build/github/linux/run_python_dockerbuild.sh index ff2ce6f7ff23..eb3a0132f8ab 100755 --- a/tools/ci_build/github/linux/run_python_dockerbuild.sh +++ b/tools/ci_build/github/linux/run_python_dockerbuild.sh @@ -30,6 +30,8 @@ docker run --rm \ -w /onnxruntime_src \ -e NIGHTLY_BUILD \ -e BUILD_BUILDNUMBER \ + -e ORT_DISABLE_PYTHON_PACKAGE_LOCAL_VERSION \ + -e DEFAULT_TRAINING_PACKAGE_DEVICE \ $ADDITIONAL_DOCKER_PARAMETER \ $DOCKER_IMAGE tools/ci_build/github/linux/build_linux_python_package.sh $DOCKER_SCRIPT_OPTIONS diff --git a/tools/ci_build/github/linux/run_python_tests.sh b/tools/ci_build/github/linux/run_python_tests.sh index 082c561dd17b..e8f683efbb24 100755 --- a/tools/ci_build/github/linux/run_python_tests.sh +++ b/tools/ci_build/github/linux/run_python_tests.sh @@ -26,6 +26,10 @@ echo "Package name:$PYTHON_PACKAGE_NAME" BUILD_ARGS="--build_dir /build --config $BUILD_CONFIG --test --skip_submodule_sync --parallel --enable_lto --build_wheel " +if [[ "$PYTHON_PACKAGE_NAME" == *"training"* ]]; then + BUILD_ARGS="$BUILD_ARGS --enable_training" +fi + ARCH=$(uname -m) if [ $ARCH == "x86_64" ]; then diff --git 
a/tools/ci_build/github/windows/extract_nuget_files_gpu.ps1 b/tools/ci_build/github/windows/extract_nuget_files_gpu.ps1 index 244a92f7762b..01a8eebe75df 100644 --- a/tools/ci_build/github/windows/extract_nuget_files_gpu.ps1 +++ b/tools/ci_build/github/windows/extract_nuget_files_gpu.ps1 @@ -29,7 +29,13 @@ Foreach-Object { New-Item -Path $Env:BUILD_BINARIESDIRECTORY\RelWithDebInfo\_deps\protobuf-build\RelWithDebInfo -ItemType directory Copy-Item -Path $Env:BUILD_BINARIESDIRECTORY\RelWithDebInfo\RelWithDebInfo\nuget-artifacts\onnxruntime-win-x64-cuda-*\lib\* -Destination $Env:BUILD_BINARIESDIRECTORY\RelWithDebInfo\RelWithDebInfo -Copy-Item -Path $Env:BUILD_BINARIESDIRECTORY\extra-artifact\protoc.exe $Env:BUILD_BINARIESDIRECTORY\RelWithDebInfo\_deps\protobuf-build\RelWithDebInfo + +$protocInstallDir = "$Env:BUILD_BINARIESDIRECTORY\RelWithDebInfo\_deps\protobuf-build" +dotnet new console +dotnet add package Google.Protobuf.Tools --version 3.21.12 --package-directory $protocInstallDir +$protocDir = Get-ChildItem -Path $protocInstallDir -Recurse -Filter "protoc.exe" | Select-Object -ExpandProperty DirectoryName -First 1 +Write-Output $protocDir +Copy-Item -Path $protocDir -Destination $Env:BUILD_BINARIESDIRECTORY\RelWithDebInfo\_deps\protobuf-build\RelWithDebInfo $ort_dirs = Get-ChildItem -Path $Env:BUILD_BINARIESDIRECTORY\RelWithDebInfo\RelWithDebInfo\nuget-artifacts\onnxruntime-* -Directory foreach ($ort_dir in $ort_dirs) diff --git a/tools/ci_build/github/windows/helpers.ps1 b/tools/ci_build/github/windows/helpers.ps1 index a039a9274b8e..0e7d279c9fa4 100644 --- a/tools/ci_build/github/windows/helpers.ps1 +++ b/tools/ci_build/github/windows/helpers.ps1 @@ -419,7 +419,7 @@ function Install-Abseil { } # Run cmake to generate Visual Studio sln file - [string[]]$cmake_args = ".", "-DABSL_PROPAGATE_CXX_STD=ON", "-DCMAKE_BUILD_TYPE=$build_config", "-DBUILD_TESTING=OFF", "-DABSL_USE_EXTERNAL_GOOGLETEST=ON", "-DCMAKE_PREFIX_PATH=$install_prefix", 
"-DCMAKE_INSTALL_PREFIX=$install_prefix" + [string[]]$cmake_args = ".", "-DABSL_PROPAGATE_CXX_STD=ON", "-DCMAKE_BUILD_TYPE=$build_config", "-DBUILD_TESTING=OFF", "-DABSL_USE_EXTERNAL_GOOGLETEST=ON", "-DCMAKE_INSTALL_PREFIX=$install_prefix" $cmake_args += $cmake_extra_args &$cmake_path $cmake_args @@ -492,7 +492,7 @@ function Install-UTF8-Range { cd * # Run cmake to generate Visual Studio sln file - [string[]]$cmake_args = ".", "-Dutf8_range_ENABLE_TESTS=OFF", "-Dutf8_range_ENABLE_INSTALL=ON", "-DCMAKE_BUILD_TYPE=$build_config", "-DBUILD_TESTING=OFF", "-DCMAKE_PREFIX_PATH=$install_prefix", "-DCMAKE_INSTALL_PREFIX=$install_prefix" + [string[]]$cmake_args = ".", "-Dutf8_range_ENABLE_TESTS=OFF", "-Dutf8_range_ENABLE_INSTALL=ON", "-DCMAKE_BUILD_TYPE=$build_config", "-DBUILD_TESTING=OFF", "-DCMAKE_INSTALL_PREFIX=$install_prefix" $cmake_args += $cmake_extra_args &$cmake_path $cmake_args @@ -580,7 +580,7 @@ function Install-Protobuf { } # Run cmake to generate Visual Studio sln file - [string[]]$cmake_args = ".", "-Dprotobuf_DISABLE_RTTI=ON", "-DCMAKE_BUILD_TYPE=$build_config", "-Dprotobuf_BUILD_TESTS=OFF", "-Dprotobuf_USE_EXTERNAL_GTEST=ON", "-DBUILD_SHARED_LIBS=OFF", "-DCMAKE_PREFIX_PATH=$install_prefix", "-DCMAKE_INSTALL_PREFIX=$install_prefix", "-Dprotobuf_MSVC_STATIC_RUNTIME=OFF", "-Dprotobuf_ABSL_PROVIDER=package" + [string[]]$cmake_args = ".", "-Dprotobuf_DISABLE_RTTI=ON", "-DCMAKE_BUILD_TYPE=$build_config", "-Dprotobuf_BUILD_TESTS=OFF", "-Dprotobuf_USE_EXTERNAL_GTEST=ON", "-DBUILD_SHARED_LIBS=OFF", "-DCMAKE_INSTALL_PREFIX=$install_prefix", "-Dprotobuf_MSVC_STATIC_RUNTIME=OFF", "-Dprotobuf_ABSL_PROVIDER=package" $cmake_args += $cmake_extra_args &$cmake_path $cmake_args @@ -684,7 +684,7 @@ function Install-ONNX { if($build_config -eq 'Debug'){ $Env:DEBUG='1' } - $Env:CMAKE_ARGS="-DONNX_USE_PROTOBUF_SHARED_LIBS=OFF -DProtobuf_USE_STATIC_LIBS=ON -DONNX_USE_LITE_PROTO=OFF -DCMAKE_PREFIX_PATH=$install_prefix" + $Env:CMAKE_ARGS="-DONNX_USE_PROTOBUF_SHARED_LIBS=OFF 
-DProtobuf_USE_STATIC_LIBS=ON -DONNX_USE_LITE_PROTO=OFF" python.exe "setup.py" "bdist_wheel" diff --git a/tools/ci_build/github/windows/install_third_party_deps.ps1 b/tools/ci_build/github/windows/install_third_party_deps.ps1 index 54507cd40cc4..07679006fb34 100644 --- a/tools/ci_build/github/windows/install_third_party_deps.ps1 +++ b/tools/ci_build/github/windows/install_third_party_deps.ps1 @@ -23,7 +23,7 @@ $ErrorActionPreference = "Stop" $Env:Path = "$install_prefix\bin;" + $env:Path $Env:MSBUILDDISABLENODEREUSE=1 - +$Env:CMAKE_PREFIX_PATH = "$install_prefix" New-Item -Path "$install_prefix" -ItemType Directory -Force # Setup compile flags