Added Phi-3.5-vision model validation to the pipeline (#1066)
* Restructured the Phi-3.5-vision C# and Python examples to make them more CI-friendly.
* Added Phi-3.5-vision validation steps to the existing validation pipeline (see the pattern sketch before the per-file diffs below).

Python run:

* https://aiinfra.visualstudio.com/ONNX%20Runtime/_build/results?buildId=609855&view=results

NuGet run:

* https://aiinfra.visualstudio.com/ONNX%20Runtime/_build/results?buildId=609849&view=results

---

Internal work item: https://task.ms/aii/34279

---------

Co-authored-by: kunal-vaishnavi <[email protected]>
skyline75489 and kunal-vaishnavi authored Nov 21, 2024
1 parent ec89e49 commit d4d3a88
Showing 13 changed files with 538 additions and 282 deletions.
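
For orientation, both validation jobs now follow the same two-step pattern for each model: download it from Hugging Face, then hand it to a shared validation step template. Below is a minimal sketch of the Phi-3.5-vision NuGet case, using the parameter names from the diffs that follow; job-level variables such as $(prebuild_phi3_5_vision_model_folder) and $(csproj_configuration) are defined in the job files themselves.

steps:
# Fetch the Phi-3.5-vision ONNX model into the C# example's folder.
- template: steps/utils/download-huggingface-model.yml
  parameters:
    HuggingFaceRepo: 'microsoft/Phi-3.5-vision-instruct-onnx'
    LocalFolder: 'phi3.5-vision'
    RepoFolder: $(prebuild_phi3_5_vision_model_folder)
    WorkingDirectory: '$(Build.Repository.LocalPath)/examples/csharp/HelloPhi3V'
    HuggingFaceToken: $(HF_TOKEN)
    os: ${{ parameters.os }}

# Restore, build, and run the HelloPhi3V example against the downloaded model.
- template: steps/nuget-validation-step.yml
  parameters:
    CsprojFolder: "examples/csharp/HelloPhi3V"
    CsprojName: "HelloPhi3V"
    CsprojConfiguration: $(csproj_configuration)
    LocalFolder: 'phi3.5-vision'
    ModelFolder: $(prebuild_phi3_5_vision_model_folder)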
124 changes: 41 additions & 83 deletions .pipelines/stages/jobs/nuget-validation-job.yml
@@ -88,6 +88,16 @@ jobs:
${{ else }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'

- name: prebuild_phi3_5_vision_model_folder
${{ if eq(parameters.ep, 'cpu') }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'
${{ elseif eq(parameters.ep, 'cuda') }}:
value: 'gpu/gpu-int4-rtn-block-32'
${{ elseif eq(parameters.ep, 'directml')}}:
value: 'gpu/gpu-int4-rtn-block-32'
${{ else }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'

- name: cuda_docker_image
${{ if eq(parameters.cuda_version, '11.8') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1
@@ -120,98 +130,46 @@ jobs:
inputs:
version: '8.x'

- template: steps/utils/flex-download-pipeline-artifact.yml
parameters:
StepName: 'Download NuGet Artifacts'
ArtifactName: $(artifactName)-nuget
TargetPath: '$(Build.BinariesDirectory)/nuget'
SpecificArtifact: ${{ parameters.specificArtifact }}
BuildId: ${{ parameters.BuildId }}

- template: steps/utils/download-huggingface-model.yml
parameters:
StepName: 'Download Model from HuggingFace'
HuggingFaceRepo: 'microsoft/Phi-3-mini-4k-instruct-onnx'
LocalFolder: 'phi3-mini'
RepoFolder: $(prebuild_phi3_mini_model_folder)
LocalFolder: 'models'
WorkingDirectory: '$(Build.Repository.LocalPath)/examples/csharp/HelloPhi'
HuggingFaceToken: $(HF_TOKEN)
os: ${{ parameters.os }}

- template: steps/utils//flex-download-pipeline-artifact.yml
- template: steps/nuget-validation-step.yml
parameters:
StepName: 'Download NuGet Artifacts'
ArtifactName: $(artifactName)-nuget
TargetPath: '$(Build.BinariesDirectory)/nuget'
SpecificArtifact: ${{ parameters.specificArtifact }}
BuildId: ${{ parameters.BuildId }}
CsprojFolder: "examples/csharp/HelloPhi"
CsprojName: "HelloPhi"
CsprojConfiguration: $(csproj_configuration)
LocalFolder: 'phi3-mini'
ModelFolder: $(prebuild_phi3_mini_model_folder)

- task: Docker@2
inputs:
containerRegistry: onnxruntimebuildcache
command: "login"
addPipelineData: false
displayName: "Log in to container registry"

- ${{ if eq(parameters.os, 'win') }}:
- ${{ if eq(parameters.ep, 'cuda') }}:
- powershell: |
$env:AZCOPY_MSI_CLIENT_ID = "63b63039-6328-442f-954b-5a64d124e5b4";
azcopy.exe cp --recursive "https://lotusscus.blob.core.windows.net/models/cuda_sdk/v$(cuda_version)" 'cuda_sdk'
displayName: 'Download CUDA $(cuda_version)'
workingDirectory: '$(Build.Repository.LocalPath)'
- powershell: |
if ("$(ep)" -eq "cuda") {
$env:CUDA_PATH = '$(Build.Repository.LocalPath)\cuda_sdk\v$(cuda_version)'
$env:PATH = "$env:CUDA_PATH\bin;$env:CUDA_PATH\extras\CUPTI\lib64;$env:PATH"
Write-Host $env:PATH
}
dotnet --info
Copy-Item -Force -Recurse -Verbose $(Build.BinariesDirectory)/nuget/* -Destination examples/csharp/HelloPhi/
cd examples/csharp/HelloPhi
Move-Item models\$(prebuild_phi3_mini_model_folder) models\phi-3
dotnet restore -r $(os)-$(arch) /property:Configuration=$(csproj_configuration) --source https://api.nuget.org/v3/index.json --source https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT-Nightly/nuget/v3/index.json --source $PWD --disable-parallel --verbosity detailed
dotnet run -r $(os)-$(arch) --configuration $(csproj_configuration) --no-restore --verbosity normal -- -m ./models/phi-3
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
env:
NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS: 180
NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS: 180
- ${{ elseif or(eq(parameters.os, 'linux'), eq(parameters.os, 'osx')) }}:
- bash: |
dotnet --info
cp $(Build.BinariesDirectory)/nuget/* examples/csharp/HelloPhi/
cd examples/csharp/HelloPhi
mv models/$(prebuild_phi3_mini_model_folder) models/phi-3
dotnet restore -r $(os)-$(arch) /property:Configuration=$(csproj_configuration) --source https://api.nuget.org/v3/index.json --source https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT-Nightly/nuget/v3/index.json --source $PWD --disable-parallel --verbosity detailed
dotnet build ./HelloPhi.csproj -r $(os)-$(arch) /property:Configuration=$(csproj_configuration) --no-restore --self-contained
ls -l ./bin/$(csproj_configuration)/net6.0/$(os)-$(arch)/
displayName: 'Perform dotnet restore & build'
workingDirectory: '$(Build.Repository.LocalPath)'
env:
NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS: 180
NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS: 180
- ${{ if eq(parameters.ep, 'cuda') }}:
- bash: |
set -e -x
docker pull $(cuda_docker_image)
docker run \
--gpus all \
--rm \
--volume $(Build.Repository.LocalPath):/ort_genai_src \
--volume $(Build.BinariesDirectory):/ort_genai_binary \
-e HF_TOKEN=$HF_TOKEN \
-w /ort_genai_src/ $(cuda_docker_image) \
bash -c " \
export ORTGENAI_LOG_ORT_LIB=1 && \
cd /ort_genai_src/examples/csharp/HelloPhi && \
chmod +x ./bin/Release_Cuda/net6.0/linux-x64/HelloPhi && \
./bin/Release_Cuda/net6.0/linux-x64/HelloPhi -m ./models/phi-3"
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
- ${{ elseif eq(parameters.ep, 'cpu') }}:
- bash: |
export ORTGENAI_LOG_ORT_LIB=1
cd examples/csharp/HelloPhi
dotnet run -r $(os)-$(arch) --configuration $(csproj_configuration) --no-build --verbosity normal -- -m ./models/phi-3
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
- template: steps/utils/download-huggingface-model.yml
parameters:
HuggingFaceRepo: 'microsoft/Phi-3.5-vision-instruct-onnx'
LocalFolder: 'phi3.5-vision'
RepoFolder: $(prebuild_phi3_5_vision_model_folder)
WorkingDirectory: '$(Build.Repository.LocalPath)/examples/csharp/HelloPhi3V'
HuggingFaceToken: $(HF_TOKEN)
os: ${{ parameters.os }}

- template: steps/compliant-and-cleanup-step.yml
- template: steps/nuget-validation-step.yml
parameters:
CsprojFolder: "examples/csharp/HelloPhi3V"
CsprojName: "HelloPhi3V"
CsprojConfiguration: $(csproj_configuration)
LocalFolder: 'phi3.5-vision'
ModelFolder: $(prebuild_phi3_5_vision_model_folder)

- template: steps/compliant-and-cleanup-step.yml
121 changes: 33 additions & 88 deletions .pipelines/stages/jobs/py-validation-job.yml
@@ -97,6 +97,16 @@ jobs:
${{ else }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'

- name: prebuild_phi3_5_vision_model_folder
${{ if eq(parameters.ep, 'cpu') }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'
${{ elseif eq(parameters.ep, 'cuda') }}:
value: 'gpu/gpu-int4-rtn-block-32'
${{ elseif eq(parameters.ep, 'directml')}}:
value: 'gpu/gpu-int4-rtn-block-32'
${{ else }}:
value: 'cpu_and_mobile/cpu-int4-rtn-block-32-acc-level-4'

- name: cuda_docker_image
${{ if eq(parameters.cuda_version, '11.8') }}:
value: onnxruntimebuildcache.azurecr.io/internal/azureml/onnxruntime/build/cuda11_x64_almalinux8_gcc11:20240531.1
@@ -147,99 +157,34 @@ jobs:

- template: steps/utils/download-huggingface-model.yml
parameters:
StepName: 'Download Model from HuggingFace'
HuggingFaceRepo: 'microsoft/Phi-3-mini-4k-instruct-onnx'
LocalFolder: 'phi3-mini'
RepoFolder: $(prebuild_phi3_mini_model_folder)
LocalFolder: 'models'
WorkingDirectory: '$(Build.Repository.LocalPath)/examples/python'
HuggingFaceToken: $(HF_TOKEN)
os: ${{ parameters.os }}

- task: Docker@2
inputs:
containerRegistry: onnxruntimebuildcache
command: "login"
addPipelineData: false
displayName: "Log in to container registry"

- ${{ if or(eq(parameters.os, 'linux'), eq(parameters.os, 'osx')) }}:
- ${{ if eq(parameters.ep, 'cuda') }}:
- bash: |
set -e -x
docker pull $(cuda_docker_image)
python_exe=/opt/python/cp310-cp310/bin/python3.10
docker run \
--gpus all \
--rm \
--volume $(Build.Repository.LocalPath):/ort_genai_src \
--volume $(Build.BinariesDirectory):/ort_genai_binary \
-e HF_TOKEN=$HF_TOKEN \
-w /ort_genai_src/ $(cuda_docker_image) \
bash -c " \
export ORTGENAI_LOG_ORT_LIB=1 && \
$python_exe -m pip install -r /ort_genai_src/test/python/requirements.txt && \
$python_exe -m pip install -r /ort_genai_src/test/python/cuda/torch/requirements.txt && \
$python_exe -m pip install -r /ort_genai_src/test/python/cuda/ort/requirements.txt && \
cd /ort_genai_src/examples/python && \
$python_exe -m pip install --no-index --find-links=/ort_genai_binary/wheel $(pip_package_name) && \
$python_exe model-generate.py -m ./models/$(prebuild_phi3_mini_model_folder) --min_length 25 --max_length 50 --verbose"
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
- ${{ elseif eq(parameters.ep, 'cpu') }}:
- bash: |
export ORTGENAI_LOG_ORT_LIB=1
python -m pip install -r test/python/requirements.txt
if [[ "$(os)" == "linux" ]]; then
python -m pip install -r test/python/cpu/torch/requirements.txt
python -m pip install -r test/python/cpu/ort/requirements.txt
fi
if [[ "$(os)" == "osx" ]]; then
python -m pip install -r test/python/macos/torch/requirements.txt
python -m pip install -r test/python/macos/ort/requirements.txt
fi
cd examples/python
python -m pip install --no-index --find-links=$(Build.BinariesDirectory)/wheel $(pip_package_name)
python model-generate.py -m ./models/$(prebuild_phi3_mini_model_folder) --min_length 25 --max_length 50 --verbose
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
- ${{ if eq(parameters.os, 'win') }}:
- ${{ if eq(parameters.ep, 'cuda') }}:
- powershell: |
$env:AZCOPY_MSI_CLIENT_ID = "63b63039-6328-442f-954b-5a64d124e5b4";
azcopy.exe cp --recursive "https://lotusscus.blob.core.windows.net/models/cuda_sdk/v$(cuda_version)" 'cuda_sdk'
displayName: 'Download CUDA $(cuda_version)'
workingDirectory: '$(Build.Repository.LocalPath)'
- powershell: |
if ("$(arch)" -ne "arm64") {
python -m pip install -r test/python/requirements.txt
}
if ("$(ep)" -eq "cuda") {
$env:CUDA_PATH = '$(Build.Repository.LocalPath)\cuda_sdk\v$(cuda_version)'
$env:PATH = "$env:CUDA_PATH\bin;$env:CUDA_PATH\extras\CUPTI\lib64;$env:PATH"
Write-Host $env:PATH
python -m pip install -r test/python/cuda/torch/requirements.txt
python -m pip install -r test/python/cuda/ort/requirements.txt
}
elseif ("$(ep)" -eq "directml") {
python -m pip install -r test/python/directml/torch/requirements.txt
python -m pip install -r test/python/directml/ort/requirements.txt
}
elseif ("$(arch)" -eq "arm64") {
python -m pip install numpy<2
python -m pip install onnxruntime-qnn==1.20.0
}
else {
python -m pip install -r test/python/cpu/torch/requirements.txt
python -m pip install -r test/python/cpu/ort/requirements.txt
}
cd examples\python
python -m pip install --no-index --find-links=$(Build.BinariesDirectory)/wheel $(pip_package_name)
python model-generate.py -m .\models\$(prebuild_phi3_mini_model_folder) --min_length 25 --max_length 50 --batch_size_for_cuda_graph 3 --verbose
displayName: 'Run Example With Artifact'
workingDirectory: '$(Build.Repository.LocalPath)'
- template: steps/python-validation-step.yml
parameters:
PythonScriptFolder: "examples/python"
PythonScriptName: "model-generate.py"
LocalFolder: 'phi3-mini'
ModelFolder: $(prebuild_phi3_mini_model_folder)

- template: steps/utils/download-huggingface-model.yml
parameters:
HuggingFaceRepo: 'microsoft/Phi-3.5-vision-instruct-onnx'
LocalFolder: 'phi3.5-vision'
RepoFolder: $(prebuild_phi3_5_vision_model_folder)
WorkingDirectory: '$(Build.Repository.LocalPath)/examples/python'
HuggingFaceToken: $(HF_TOKEN)
os: ${{ parameters.os }}

- template: steps/python-validation-step.yml
parameters:
PythonScriptFolder: "examples/python"
PythonScriptName: "phi3v.py"
LocalFolder: 'phi3.5-vision'
ModelFolder: $(prebuild_phi3_5_vision_model_folder)

- template: steps/compliant-and-cleanup-step.yml
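
The shared python-validation-step.yml template referenced above is among the 13 changed files but is not rendered in this excerpt. The sketch below is a guess at its shape, assuming its parameter list matches the invocations above and its run steps mirror the inline steps it replaces (including passing the model folder to the script via -m); it is not the actual file contents.

parameters:
- name: PythonScriptFolder
  type: string
- name: PythonScriptName
  type: string
- name: LocalFolder
  type: string
- name: ModelFolder
  type: string

steps:
# Sketch of the Linux/macOS CPU run step only; the real template presumably also
# carries the CUDA, DirectML, and Windows variants that were previously inlined.
- bash: |
    export ORTGENAI_LOG_ORT_LIB=1
    cd ${{ parameters.PythonScriptFolder }}
    python -m pip install --no-index --find-links=$(Build.BinariesDirectory)/wheel $(pip_package_name)
    python ${{ parameters.PythonScriptName }} -m ./${{ parameters.LocalFolder }}/${{ parameters.ModelFolder }}
  displayName: 'Run ${{ parameters.PythonScriptName }} With Artifact on CPU'
  workingDirectory: '$(Build.Repository.LocalPath)'
  condition: and(or(eq(variables['os'], 'linux'), eq(variables['os'], 'osx')), eq(variables['ep'], 'cpu'))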
88 changes: 88 additions & 0 deletions .pipelines/stages/jobs/steps/nuget-validation-step.yml
@@ -0,0 +1,88 @@
parameters:
- name: CsprojFolder
type: string
- name: CsprojName
type: string
- name: CsprojConfiguration
type: string
- name: LocalFolder
type: string
- name: ModelFolder
type: string

steps:
- task: Docker@2
inputs:
containerRegistry: onnxruntimebuildcache
command: "login"
addPipelineData: false
displayName: "Log in to container registry"

- powershell: |
$env:AZCOPY_MSI_CLIENT_ID = "63b63039-6328-442f-954b-5a64d124e5b4";
azcopy.exe cp --recursive "https://lotusscus.blob.core.windows.net/models/cuda_sdk/v$(cuda_version)" 'cuda_sdk'
displayName: 'Download CUDA $(cuda_version)'
workingDirectory: '$(Build.Repository.LocalPath)'
condition: and(eq(variables['os'], 'win'), eq(variables['ep'], 'cuda'))
- powershell: |
if ("$(ep)" -eq "cuda") {
$env:CUDA_PATH = '$(Build.Repository.LocalPath)\cuda_sdk\v$(cuda_version)'
$env:PATH = "$env:CUDA_PATH\bin;$env:CUDA_PATH\extras\CUPTI\lib64;$env:PATH"
Write-Host $env:PATH
}
dotnet --info
Copy-Item -Force -Recurse -Verbose $(Build.BinariesDirectory)/nuget/* -Destination ${{ parameters.CsprojFolder }}
cd ${{ parameters.CsprojFolder }}
dotnet restore -r $(os)-$(arch) /property:Configuration=${{ parameters.CsprojConfiguration }} --source https://api.nuget.org/v3/index.json --source https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT-Nightly/nuget/v3/index.json --source $PWD --disable-parallel --verbosity detailed
dotnet run -r $(os)-$(arch) --configuration ${{ parameters.CsprojConfiguration }} --no-restore --verbosity normal -- -m ./${{ parameters.LocalFolder }}/${{ parameters.ModelFolder }} --non-interactive
displayName: 'Run ${{ parameters.CsprojName }} With Artifact on Windows'
workingDirectory: '$(Build.Repository.LocalPath)'
condition: eq(variables['os'], 'win')
env:
NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS: 180
NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS: 180
- bash: |
set -e -x
dotnet --info
cp $(Build.BinariesDirectory)/nuget/* ${{ parameters.CsprojFolder }}
cd ${{ parameters.CsprojFolder }}
dotnet restore -r $(os)-$(arch) /property:Configuration=${{ parameters.CsprojConfiguration }} --source https://api.nuget.org/v3/index.json --source https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/ORT-Nightly/nuget/v3/index.json --source $PWD --disable-parallel --verbosity detailed
dotnet build ./${{ parameters.CsprojName }}.csproj -r $(os)-$(arch) /property:Configuration=${{ parameters.CsprojConfiguration }} --no-restore --self-contained --verbosity normal
ls -l ./bin/${{ parameters.CsprojConfiguration }}/net6.0/$(os)-$(arch)/
displayName: 'Perform dotnet restore & build'
workingDirectory: '$(Build.Repository.LocalPath)'
condition: or(eq(variables['os'], 'linux'), eq(variables['os'], 'osx'))
env:
NUGET_PLUGIN_HANDSHAKE_TIMEOUT_IN_SECONDS: 180
NUGET_PLUGIN_REQUEST_TIMEOUT_IN_SECONDS: 180
- bash: |
set -e -x
az login --identity --username 63b63039-6328-442f-954b-5a64d124e5b4
az acr login --name onnxruntimebuildcache --subscription 00c06639-6ee4-454e-8058-8d8b1703bd87
docker pull $(cuda_docker_image)
docker run \
--gpus all \
--rm \
--volume $(Build.Repository.LocalPath):/ort_genai_src \
--volume $(Build.BinariesDirectory):/ort_genai_binary \
-e HF_TOKEN=$HF_TOKEN \
-w /ort_genai_src/ $(cuda_docker_image) \
bash -c " \
export ORTGENAI_LOG_ORT_LIB=1 && \
cd /ort_genai_src/${{ parameters.CsprojFolder }} && \
chmod +x ./bin/Release_Cuda/net6.0/linux-x64/${{ parameters.CsprojName }} && \
./bin/Release_Cuda/net6.0/linux-x64/${{ parameters.CsprojName }} -m ./${{ parameters.LocalFolder }}/${{ parameters.ModelFolder }} --non-interactive"
displayName: 'Run ${{ parameters.CsprojName }} With Artifact on Linux CUDA'
workingDirectory: '$(Build.Repository.LocalPath)'
condition: and(eq(variables['os'], 'linux'), eq(variables['ep'], 'cuda'))
- bash: |
export ORTGENAI_LOG_ORT_LIB=1
cd ${{ parameters.CsprojFolder }}
dotnet run -r $(os)-$(arch) --configuration ${{ parameters.CsprojConfiguration }} --no-build --verbosity normal -- -m ./${{ parameters.LocalFolder }}/${{ parameters.ModelFolder }} --non-interactive
displayName: 'Run ${{ parameters.CsprojName }} With Artifact on Linux/macOS CPU'
workingDirectory: '$(Build.Repository.LocalPath)'
condition: and(or(eq(variables['os'], 'linux'), eq(variables['os'], 'osx')), eq(variables['ep'], 'cpu'))
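
As a usage note, reusing this template for any additional C# example would mirror the HelloPhi and HelloPhi3V invocations in nuget-validation-job.yml above. The project and folder names in this sketch are hypothetical placeholders, not files in the repository:

- template: steps/nuget-validation-step.yml
  parameters:
    CsprojFolder: "examples/csharp/HelloNewModel"  # hypothetical example project
    CsprojName: "HelloNewModel"                    # hypothetical
    CsprojConfiguration: $(csproj_configuration)
    LocalFolder: 'new-model'                       # hypothetical folder used by the matching download step
    ModelFolder: $(prebuild_new_model_folder)      # hypothetical job-level variable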
(The remaining changed files are not rendered in this excerpt.)
