Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Perf testing for AzCopy #2006

Merged
merged 9 commits into from
Mar 27, 2023
152 changes: 152 additions & 0 deletions perf-test.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
---
# AzCopy performance-test pipeline (perf-test.yaml).
# Manual runs only: CI and PR triggers are both disabled.
trigger: none
pr: none

stages:
  - stage: Smallfiles
    jobs:
      - job: PerformanceTest
        # Generous timeout: the large-file transfers below can run for hours.
        timeoutInMinutes: 720
        strategy:
          matrix:
            Ubuntu-22:
              imageName: "azcopyPerfTestUbuntu22.04"
              Description: "AzCopy Perf Test"

        pool:
          name: "AzCopyPerfTestUbuntu"
          demands:
            - ImageOverride -equals $(imageName)

        variables:
          # Variable group supplying the Blob2Blob* source/destination URLs
          # and the DestinationAccount name used by the steps below.
          - group: AzCopyPerfTestTargets
          - name: localPath
            value: "/mnt/storage"

        steps:
          - script: |
              echo $(Description)
              hostnamectl
            displayName: 'Print Agent Info'

          - task: GoTool@0
            inputs:
              version: '1.19.3'

          # Build azcopy from the checked-out sources and verify it runs.
          - script: |
              go build -o $GOROOT/bin/azcopy
              azcopy --version
            displayName: 'Build Azcopy'

          # Service-side blob-to-blob copy of large files, capped at 40 Gbps.
          - script: |
              time azcopy copy $(Blob2BlobLargeFilesSrc) $(Blob2BlobLargeFilesDst) --recursive --block-size-mb=128 --log-level=ERROR --cap-mbps=40000
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we parse the output of `time`, put it in a file for all three test cases, and then upload that file to our artifacts?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm looking for a better way to do it.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

You can refer to blobfuse2 for an example of doing this as well.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In Blobfuse2, we also created perf scripts so that we can choose to run them manually — would that be helpful here?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can you link an example?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

displayName: 'Blob2Blob - Large Files'
condition: always()
env:
AZCOPY_AUTO_LOGIN_TYPE: $(AZCOPY_AUTO_LOGIN_TYPE)
AZCOPY_MSI_CLIENT_ID: $(AZCOPY_MSI_CLIENT_ID)
AZCOPY_LOG_LOCATION: $(Build.ArtifactStagingDirectory)/logs
AZCOPY_CONCURRENCY_VALUE: "256"
AZCOPY_SHOW_PERF_STATES: "1"

- script: |
time azcopy copy $(Blob2BlobSmallAndMedFilesSrc) $(Blob2BlobSmallAndMedFilesDst) --recursive --block-size-mb=128 --log-level=ERROR
displayName: 'Blob2Blob - Small to Medium sized files'
condition: always()
env:
AZCOPY_AUTO_LOGIN_TYPE: $(AZCOPY_AUTO_LOGIN_TYPE)
AZCOPY_MSI_CLIENT_ID: $(AZCOPY_MSI_CLIENT_ID)
AZCOPY_LOG_LOCATION: $(Build.ArtifactStagingDirectory)/logs
AZCOPY_CONCURRENCY_VALUE: "256"
AZCOPY_SHOW_PERF_STATES: "1"

- script: |
time azcopy copy $(Blob2BlobSmallFilesSrc) $(Blob2BlobSmallFilesDst) --recursive --check-length=false --log-level=ERROR
displayName: 'Blob2Blob - Small Files'
condition: always()
env:
AZCOPY_AUTO_LOGIN_TYPE: $(AZCOPY_AUTO_LOGIN_TYPE)
AZCOPY_MSI_CLIENT_ID: $(AZCOPY_MSI_CLIENT_ID)
AZCOPY_LOG_LOCATION: $(Build.ArtifactStagingDirectory)/logs
AZCOPY_CONCURRENCY_VALUE: "256"
AZCOPY_SHOW_PERF_STATES: "1"

- script: |
sudo mkdir -m 777 $(localPath)/largeFiles/
time azcopy copy $(Blob2BlobLargeFilesSrc) /dev/null --recursive --log-level=ERROR
displayName: 'Download - Large files'
condition: always()
env:
AZCOPY_AUTO_LOGIN_TYPE: $(AZCOPY_AUTO_LOGIN_TYPE)
AZCOPY_MSI_CLIENT_ID: $(AZCOPY_MSI_CLIENT_ID)
AZCOPY_SHOW_PERF_STATES: "1"
AZCOPY_LOG_LOCATION: $(Build.ArtifactStagingDirectory)/logs

- script: |
time azcopy bench $(Blob2BlobLargeFilesDst) --log-level=ERROR --size-per-file=50G --file-count=50 --put-md5=false --delete-test-data=false
sudo rm -rf $(localPath)/*
displayName: 'Upload - Large files'
condition: always()
env:
AZCOPY_AUTO_LOGIN_TYPE: $(AZCOPY_AUTO_LOGIN_TYPE)
AZCOPY_MSI_CLIENT_ID: $(AZCOPY_MSI_CLIENT_ID)
AZCOPY_SHOW_PERF_STATES: "1"
AZCOPY_LOG_LOCATION: $(Build.ArtifactStagingDirectory)/logs

- script: |
sudo mkdir -m 777 $(localPath)/smallToMediumFiles/
time azcopy copy $(Blob2BlobSmallAndMedFilesSrc) $(localPath)/smallToMediumFiles --recursive --log-level=ERROR
displayName: 'Download - Small to Medium sized files'
condition: always()
env:
AZCOPY_AUTO_LOGIN_TYPE: $(AZCOPY_AUTO_LOGIN_TYPE)
AZCOPY_MSI_CLIENT_ID: $(AZCOPY_MSI_CLIENT_ID)
AZCOPY_SHOW_PERF_STATES: "1"
AZCOPY_LOG_LOCATION: $(Build.ArtifactStagingDirectory)/logs

- script: |
time azcopy copy $(localPath)/smallToMediumFiles/ $(Blob2BlobSmallAndMedFilesDst) --recursive --log-level=ERROR
sudo rm -rf $(localPath)/*
displayName: 'Upload - Small to Medium sized files'
condition: always()
env:
AZCOPY_AUTO_LOGIN_TYPE: $(AZCOPY_AUTO_LOGIN_TYPE)
AZCOPY_MSI_CLIENT_ID: $(AZCOPY_MSI_CLIENT_ID)
AZCOPY_SHOW_PERF_STATES: "1"
AZCOPY_LOG_LOCATION: $(Build.ArtifactStagingDirectory)/logs

- script: |
sudo mkdir -m 777 $(localPath)/smallFiles/
time azcopy copy $(Blob2BlobSmallFilesSrc) /dev/null --recursive --check-length=false --log-level=ERROR
displayName: 'Download - Small Files'
condition: always()
env:
AZCOPY_AUTO_LOGIN_TYPE: $(AZCOPY_AUTO_LOGIN_TYPE)
AZCOPY_MSI_CLIENT_ID: $(AZCOPY_MSI_CLIENT_ID)
AZCOPY_SHOW_PERF_STATES: "1"
AZCOPY_LOG_LOCATION: $(Build.ArtifactStagingDirectory)/logs

- script: |
time azcopy bench $(Blob2BlobSmallFilesDst) --size-per-file=5k --file-count=8000000 --check-length=false --log-level=ERROR --delete-test-data=false
sudo rm -rf $(localPath)/*
displayName: 'Upload - Small Files'
condition: always()
env:
AZCOPY_AUTO_LOGIN_TYPE: $(AZCOPY_AUTO_LOGIN_TYPE)
AZCOPY_MSI_CLIENT_ID: $(AZCOPY_MSI_CLIENT_ID)
AZCOPY_SHOW_PERF_STATES: "1"
AZCOPY_LOG_LOCATION: $(Build.ArtifactStagingDirectory)/logs

- task: PublishBuildArtifacts@1
condition: always()
inputs:
pathToPublish: $(Build.ArtifactStagingDirectory)
artifactName: Logs

- script: |
curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
az login --identity --username $(AZCOPY_MSI_CLIENT_ID)
for container in `az storage container list --account-name $(DestinationAccount) --query "[*].[name]" --output tsv --auth-mode login`; do
az storage container delete --account-name $(DestinationAccount) --name $container --auth-mode login
done
displayName: 'Clean destination storage Account'
condition: always()