Commit 53e61b4

Reenable Perf Correctness smoketest (#17520)
michellemcdaniel committed Apr 12, 2018
1 parent 01690df commit 53e61b4
Showing 1 changed file with 107 additions and 88 deletions.
perf.groovy: 195 changes (107 additions, 88 deletions)
@@ -29,118 +29,137 @@ def static getOSGroup(def os) {
 [true, false].each { isPR ->
     ['Windows_NT'].each { os ->
         ['x64', 'x86'].each { arch ->
-            ['ryujit'].each { jit ->
-                ['full_opt', 'min_opt'].each { opt_level ->
+            [true, false].each { isSmoketest ->
+                ['ryujit'].each { jit ->
+                    ['full_opt', 'min_opt'].each { opt_level ->

-                    def architecture = arch
-                    def jobName = "perf_perflab_${os}_${arch}_${opt_level}_${jit}"
-                    def testEnv = ""
+                        def architecture = arch
+                        def jobName = isSmoketest ? "perf_perflab_${os}_${arch}_${opt_level}_${jit}_smoketest" : "perf_perflab_${os}_${arch}_${opt_level}_${jit}"
+                        def testEnv = ""

-                    def newJob = job(Utilities.getFullJobName(project, jobName, isPR)) {
-                        // Set the label.
-                        label('windows_server_2016_clr_perf')
-                        wrappers {
-                            credentialsBinding {
-                                string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas')
-                            }
-                        }
+                        def newJob = job(Utilities.getFullJobName(project, jobName, isPR)) {
+                            // Set the label.
+                            label('windows_server_2016_clr_perf')
+                            wrappers {
+                                credentialsBinding {
+                                    string('BV_UPLOAD_SAS_TOKEN', 'CoreCLR Perf BenchView Sas')
+                                }
+                            }

-                        if (isPR) {
-                            parameters {
-                                stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form <branch> private BenchviewCommitName')
-                            }
-                        }
+                            if (isPR) {
+                                parameters {
+                                    stringParam('BenchviewCommitName', '\${ghprbPullTitle}', 'The name that you will be used to build the full title of a run in Benchview. The final name will be of the form <branch> private BenchviewCommitName')
+                                }
+                            }

-                        parameters {
-                            stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample')
-                            stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample')
-                        }
+                            if (isSmoketest) {
+                                parameters {
+                                    stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '2', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample')
+                                    stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '2', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample')
+                                }
+                            }
+                            else{
+                                parameters {
+                                    stringParam('XUNIT_PERFORMANCE_MAX_ITERATION', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample')
+                                    stringParam('XUNIT_PERFORMANCE_MAX_ITERATION_INNER_SPECIFIED', '21', 'Sets the number of iterations to twenty one. We are doing this to limit the amount of data that we upload as 20 iterations is enough to get a good sample')
+                                }
+                            }

-                        def configuration = 'Release'
-                        def runType = isPR ? 'private' : 'rolling'
-                        def benchViewName = isPR ? 'coreclr private %BenchviewCommitName%' : 'coreclr rolling %GIT_BRANCH_WITHOUT_ORIGIN% %GIT_COMMIT%'
-                        def uploadString = '-uploadToBenchview'
+                            def configuration = 'Release'
+                            def runType = isPR ? 'private' : 'rolling'
+                            def benchViewName = isPR ? 'coreclr private %BenchviewCommitName%' : 'coreclr rolling %GIT_BRANCH_WITHOUT_ORIGIN% %GIT_COMMIT%'
+                            def uploadString = isSmoketest ? '' : '-uploadToBenchview'

-                        steps {
-                            // Batch
+                            steps {
+                                // Batch

-                            batchFile("powershell -NoProfile wget https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -OutFile \"%WORKSPACE%\\nuget.exe\"")
-                            batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\"")
-                            batchFile("\"%WORKSPACE%\\nuget.exe\" install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion")
-                            //Do this here to remove the origin but at the front of the branch name as this is a problem for BenchView
-                            //we have to do it all as one statement because cmd is called each time and we lose the set environment variable
-                            batchFile("if \"%GIT_BRANCH:~0,7%\" == \"origin/\" (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH:origin/=%\") else (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH%\")\n" +
-                            "set \"BENCHVIEWNAME=${benchViewName}\"\n" +
-                            "set \"BENCHVIEWNAME=%BENCHVIEWNAME:\"=\"\"%\"\n" +
-                            "py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"%BENCHVIEWNAME%\" --user-email \"dotnet-bot@microsoft.com\"\n" +
-                            "py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}")
-                            batchFile("py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"")
-                            batchFile("set __TestIntermediateDir=int&&build.cmd ${configuration} ${architecture}")
+                                batchFile("powershell -NoProfile wget https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -OutFile \"%WORKSPACE%\\nuget.exe\"")
+                                batchFile("if exist \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\" rmdir /s /q \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\"")
+                                batchFile("\"%WORKSPACE%\\nuget.exe\" install Microsoft.BenchView.JSONFormat -Source http://benchviewtestfeed.azurewebsites.net/nuget -OutputDirectory \"%WORKSPACE%\" -Prerelease -ExcludeVersion")
+                                //Do this here to remove the origin but at the front of the branch name as this is a problem for BenchView
+                                //we have to do it all as one statement because cmd is called each time and we lose the set environment variable
+                                batchFile("if \"%GIT_BRANCH:~0,7%\" == \"origin/\" (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH:origin/=%\") else (set \"GIT_BRANCH_WITHOUT_ORIGIN=%GIT_BRANCH%\")\n" +
+                                "set \"BENCHVIEWNAME=${benchViewName}\"\n" +
+                                "set \"BENCHVIEWNAME=%BENCHVIEWNAME:\"=\"\"%\"\n" +
+                                "py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\submission-metadata.py\" --name \"%BENCHVIEWNAME%\" --user-email \"dotnet-bot@microsoft.com\"\n" +
+                                "py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\build.py\" git --branch %GIT_BRANCH_WITHOUT_ORIGIN% --type ${runType}")
+                                batchFile("py \"%WORKSPACE%\\Microsoft.BenchView.JSONFormat\\tools\\machinedata.py\"")
+                                batchFile("set __TestIntermediateDir=int&&build.cmd ${configuration} ${architecture}")

-                            batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly")
+                                batchFile("tests\\runtest.cmd ${configuration} ${architecture} GenerateLayoutOnly")

-                            def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -os ${os} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\" -stabilityPrefix \"START \\\"CORECLR_PERF_RUN\\\" /B /WAIT /HIGH /AFFINITY 0x2\""
+                                def runXUnitPerfCommonArgs = "-arch ${arch} -configuration ${configuration} -os ${os} -generateBenchviewData \"%WORKSPACE%\\Microsoft.Benchview.JSONFormat\\tools\" ${uploadString} -runtype ${runType} ${testEnv} -optLevel ${opt_level} -jitName ${jit} -outputdir \"%WORKSPACE%\\bin\\sandbox_logs\" -stabilityPrefix \"START \\\"CORECLR_PERF_RUN\\\" /B /WAIT /HIGH /AFFINITY 0x2\""

-                            // Run with just stopwatch: Profile=Off
-                            batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library")
-                            batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality")
+                                // Run with just stopwatch: Profile=Off
+                                batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library")
+                                batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality")

-                            // Run with the full set of counters enabled: Profile=On
-                            if (opt_level != 'min_opt') {
-                                batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi")
-                                batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi")
-                            }
-                        }
-                    }
+                                // Run with the full set of counters enabled: Profile=On
+                                if (opt_level != 'min_opt') {
+                                    batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\performance\\perflab\\Perflab -library -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi")
+                                    batchFile("py tests\\scripts\\run-xunit-perf.py ${runXUnitPerfCommonArgs} -testBinLoc bin\\tests\\${os}.${architecture}.${configuration}\\Jit\\Performance\\CodeQuality -collectionFlags default+BranchMispredictions+CacheMisses+InstructionRetired+gcapi")
+                                }
+                            }
+                        }

-                    def archiveSettings = new ArchivalSettings()
-                    archiveSettings.addFiles('bin/sandbox_logs/**')
-                    archiveSettings.addFiles('machinedata.json')
-                    archiveSettings.setAlwaysArchive()
+                        if (isSmoketest) {
+                            Utilities.setMachineAffinity(newJob, "Windows_NT", '20170427-elevated')
+                        }
+                        def archiveSettings = new ArchivalSettings()
+                        archiveSettings.addFiles('bin/sandbox_logs/**')
+                        archiveSettings.addFiles('machinedata.json')
+                        archiveSettings.setAlwaysArchive()

-                    Utilities.addArchival(newJob, archiveSettings)
-                    Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}")
+                        Utilities.addArchival(newJob, archiveSettings)
+                        Utilities.standardJobSetup(newJob, project, isPR, "*/${branch}")

-                    newJob.with {
-                        logRotator {
-                            artifactDaysToKeep(30)
-                            daysToKeep(30)
-                            artifactNumToKeep(200)
-                            numToKeep(200)
-                        }
-                        wrappers {
-                            timeout {
-                                absolute(240)
-                            }
-                        }
-                    }
+                        newJob.with {
+                            logRotator {
+                                artifactDaysToKeep(30)
+                                daysToKeep(30)
+                                artifactNumToKeep(200)
+                                numToKeep(200)
+                            }
+                            wrappers {
+                                timeout {
+                                    absolute(240)
+                                }
+                            }
+                        }

-                    if (isPR) {
-                        TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest()
-                        builder.setGithubContext("${os} ${arch} ${opt_level} ${jit} CoreCLR Perf Tests")
+                        if (isPR) {
+                            TriggerBuilder builder = TriggerBuilder.triggerOnPullRequest()
+                            if (isSmoketest) {
+                                builder.setGithubContext("${os} ${arch} ${opt_level} ${jit} CoreCLR Perf Tests Correctness")
+                            }
+                            else {
+                                builder.setGithubContext("${os} ${arch} ${opt_level} ${jit} CoreCLR Perf Tests")

-                        def opts = ""
-                        if (opt_level == 'min_opt') {
-                            opts = '\\W+min_opts'
-                        }
-                        def jitt = ""
-                        if (jit != 'ryujit') {
-                            jitt = "\\W+${jit}"
-                        }
+                                def opts = ""
+                                if (opt_level == 'min_opt') {
+                                    opts = '\\W+min_opts'
+                                }
+                                def jitt = ""
+                                if (jit != 'ryujit') {
+                                    jitt = "\\W+${jit}"
+                                }

-                        builder.triggerOnlyOnComment()
-                        builder.setCustomTriggerPhrase("(?i).*test\\W+${os}\\W+${arch}${opts}${jitt}\\W+perf.*")
-                        builder.triggerForBranch(branch)
-                        builder.emitTrigger(newJob)
-                    }
-                    else if (opt_level == 'full_opt') {
-                        // Set a push trigger
-                        TriggerBuilder builder = TriggerBuilder.triggerOnCommit()
-                        builder.emitTrigger(newJob)
-                    }
-                    else {
-                        // Set periodic trigger
-                        Utilities.addPeriodicTrigger(newJob, '@daily')
-                    }
-                }
-            }
+                                builder.triggerOnlyOnComment()
+                                builder.setCustomTriggerPhrase("(?i).*test\\W+${os}\\W+${arch}${opts}${jitt}\\W+perf.*")
+                            }
+
+                            builder.triggerForBranch(branch)
+                            builder.emitTrigger(newJob)
+                        }
+                        else if (opt_level == 'full_opt') {
+                            // Set a push trigger
+                            TriggerBuilder builder = TriggerBuilder.triggerOnCommit()
+                            builder.emitTrigger(newJob)
+                        }
+                        else {
+                            // Set periodic trigger
+                            Utilities.addPeriodicTrigger(newJob, '@daily')
+                        }
+                    }
+                }
+            }
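
For orientation, here is a minimal Groovy sketch (not part of this commit or of perf.groovy) of the job matrix the re-enabled isSmoketest axis produces. It simply mirrors the naming and settings visible in the diff above; the println summary and local variable names are illustrative only.

// Illustrative sketch only -- not part of perf.groovy or this commit.
// It mirrors how the re-enabled isSmoketest axis changes each generated job:
// a "_smoketest" suffix on the job name, 2 iterations instead of 21,
// no BenchView upload, and a separate "Correctness" GitHub status context for PR jobs.
def os = 'Windows_NT'
['x64', 'x86'].each { arch ->
    [true, false].each { isSmoketest ->
        ['ryujit'].each { jit ->
            ['full_opt', 'min_opt'].each { opt_level ->
                def jobName = isSmoketest ? "perf_perflab_${os}_${arch}_${opt_level}_${jit}_smoketest" : "perf_perflab_${os}_${arch}_${opt_level}_${jit}"
                def maxIteration = isSmoketest ? '2' : '21'
                def uploadString = isSmoketest ? '' : '-uploadToBenchview'
                def githubContext = isSmoketest ? "${os} ${arch} ${opt_level} ${jit} CoreCLR Perf Tests Correctness" : "${os} ${arch} ${opt_level} ${jit} CoreCLR Perf Tests"
                println "${jobName}: iterations=${maxIteration}, upload='${uploadString}', context='${githubContext}'"
            }
        }
    }
}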
