2016-09-30 19:53:12 +00:00
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package main
/*
Generate the tasks.json file.
*/
import (
"encoding/json"
2017-02-01 20:56:55 +00:00
"flag"
2016-09-30 19:53:12 +00:00
"fmt"
2017-02-01 20:56:55 +00:00
"io/ioutil"
2016-09-30 19:53:12 +00:00
"os"
"path"
2017-06-14 19:25:31 +00:00
"path/filepath"
2017-02-01 20:56:55 +00:00
"regexp"
2017-06-14 19:25:31 +00:00
"runtime"
2016-09-30 19:53:12 +00:00
"sort"
2017-10-17 17:40:52 +00:00
"strconv"
2016-09-30 19:53:12 +00:00
"strings"
2016-11-08 17:55:32 +00:00
"time"
2016-09-30 19:53:12 +00:00
"github.com/skia-dev/glog"
2017-06-14 19:25:31 +00:00
"go.skia.org/infra/go/sklog"
2016-09-30 19:53:12 +00:00
"go.skia.org/infra/go/util"
"go.skia.org/infra/task_scheduler/go/specs"
)
const (
2017-11-29 19:45:14 +00:00
BUNDLE_RECIPES_NAME = "Housekeeper-PerCommit-BundleRecipes"
2018-03-07 19:44:44 +00:00
ISOLATE_GCLOUD_LINUX_NAME = "Housekeeper-PerCommit-IsolateGCloudLinux"
ISOLATE_GO_LINUX_NAME = "Housekeeper-PerCommit-IsolateGoLinux"
2017-11-29 19:45:14 +00:00
ISOLATE_SKIMAGE_NAME = "Housekeeper-PerCommit-IsolateSkImage"
ISOLATE_SKP_NAME = "Housekeeper-PerCommit-IsolateSKP"
ISOLATE_SVG_NAME = "Housekeeper-PerCommit-IsolateSVG"
ISOLATE_NDK_LINUX_NAME = "Housekeeper-PerCommit-IsolateAndroidNDKLinux"
2018-02-20 16:40:25 +00:00
ISOLATE_SDK_LINUX_NAME = "Housekeeper-PerCommit-IsolateAndroidSDKLinux"
2017-11-29 19:45:14 +00:00
ISOLATE_WIN_TOOLCHAIN_NAME = "Housekeeper-PerCommit-IsolateWinToolchain"
ISOLATE_WIN_VULKAN_SDK_NAME = "Housekeeper-PerCommit-IsolateWinVulkanSDK"
2017-04-04 13:06:16 +00:00
2018-04-06 21:39:06 +00:00
DEFAULT_OS_DEBIAN = "Debian-9.4"
DEFAULT_OS_LINUX_GCE = DEFAULT_OS_DEBIAN
2018-02-09 16:21:15 +00:00
DEFAULT_OS_MAC = "Mac-10.13.3"
2017-11-15 16:22:57 +00:00
DEFAULT_OS_UBUNTU = "Ubuntu-14.04"
DEFAULT_OS_WIN = "Windows-2016Server-14393"
2016-09-30 19:53:12 +00:00
2018-04-19 13:36:45 +00:00
DEFAULT_PROJECT = "skia"
2018-04-17 19:55:57 +00:00
// Swarming output dirs.
OUTPUT_NONE = "output_ignored" // This will result in outputs not being isolated.
OUTPUT_BUILD = "build"
OUTPUT_COVERAGE = "coverage"
OUTPUT_TEST = "test"
OUTPUT_PERF = "perf"
2016-09-30 19:53:12 +00:00
// Name prefix for upload jobs.
PREFIX_UPLOAD = "Upload"
2018-04-17 19:55:57 +00:00
SERVICE_ACCOUNT_BOOKMAKER = "skia-bookmaker@skia-swarming-bots.iam.gserviceaccount.com"
SERVICE_ACCOUNT_COMPILE = "skia-external-compile-tasks@skia-swarming-bots.iam.gserviceaccount.com"
SERVICE_ACCOUNT_CT_SKPS = "skia-external-ct-skps@skia-swarming-bots.iam.gserviceaccount.com"
SERVICE_ACCOUNT_HOUSEKEEPER = "skia-external-housekeeper@skia-swarming-bots.iam.gserviceaccount.com"
SERVICE_ACCOUNT_RECREATE_SKPS = "skia-recreate-skps@skia-swarming-bots.iam.gserviceaccount.com"
SERVICE_ACCOUNT_UPDATE_META_CONFIG = "skia-update-meta-config@skia-swarming-bots.iam.gserviceaccount.com"
SERVICE_ACCOUNT_UPLOAD_BINARY = "skia-external-binary-uploader@skia-swarming-bots.iam.gserviceaccount.com"
SERVICE_ACCOUNT_UPLOAD_CALMBENCH = "skia-external-calmbench-upload@skia-swarming-bots.iam.gserviceaccount.com"
SERVICE_ACCOUNT_UPLOAD_COVERAGE = "skia-external-coverage-uploade@skia-swarming-bots.iam.gserviceaccount.com"
SERVICE_ACCOUNT_UPLOAD_GM = "skia-external-gm-uploader@skia-swarming-bots.iam.gserviceaccount.com"
SERVICE_ACCOUNT_UPLOAD_NANO = "skia-external-nano-uploader@skia-swarming-bots.iam.gserviceaccount.com"
2016-09-30 19:53:12 +00:00
)
var (
// "Constants"
2017-02-01 20:56:55 +00:00
// Top-level list of all jobs to run at each commit; loaded from
// jobs.json.
JOBS [ ] string
// General configuration information.
CONFIG struct {
2017-10-09 19:26:19 +00:00
GsBucketCoverage string ` json:"gs_bucket_coverage" `
GsBucketGm string ` json:"gs_bucket_gm" `
GsBucketNano string ` json:"gs_bucket_nano" `
2017-10-24 13:43:21 +00:00
GsBucketCalm string ` json:"gs_bucket_calm" `
2017-10-09 19:26:19 +00:00
NoUpload [ ] string ` json:"no_upload" `
Pool string ` json:"pool" `
2016-09-30 19:53:12 +00:00
}
2018-04-19 13:36:45 +00:00
// alternateProject can be set in an init function to override the default project ID.
alternateProject string
2018-04-17 16:45:29 +00:00
// alternateServiceAccount can be set in an init function to override the normal service accounts.
// Takes one of SERVICE_ACCOUNT_* constants as an argument and returns the service account that
// should be used, or uses sklog.Fatal to indicate a problem.
alternateServiceAccount func ( serviceAccountEnum string ) string
2017-06-13 21:01:16 +00:00
// alternateSwarmDimensions can be set in an init function to override the default swarming bot
// dimensions for the given task.
alternateSwarmDimensions func ( parts map [ string ] string ) [ ] string
2017-09-15 14:35:44 +00:00
// internalHardwareLabelFn can be set in an init function to provide an
// internal_hardware_label variable to the recipe.
2017-09-15 18:09:07 +00:00
internalHardwareLabelFn func ( parts map [ string ] string ) * int
2017-09-15 12:35:31 +00:00
2016-09-30 19:53:12 +00:00
// Defines the structure of job names.
jobNameSchema * JobNameSchema
2017-02-01 20:56:55 +00:00
2018-04-17 19:55:57 +00:00
// TODO(borenet): Roll these versions automatically!
CIPD_PKGS_PYTHON = [ ] * specs . CipdPackage {
& specs . CipdPackage {
Name : "infra/tools/luci/vpython/${platform}" ,
Path : "cipd_bin_packages" ,
Version : "git_revision:d0130097bd6364a8d834cb9efd4554c1f6192c82" ,
} ,
2017-12-07 14:54:05 +00:00
}
2018-04-17 19:55:57 +00:00
CIPD_PKGS_KITCHEN = append ( [ ] * specs . CipdPackage {
& specs . CipdPackage {
Name : "infra/tools/luci/kitchen/${platform}" ,
Path : "." ,
Version : "git_revision:206b4474cb712bdad8b7b3f213880cfbf03f120c" ,
} ,
& specs . CipdPackage {
Name : "infra/tools/authutil/${platform}" ,
Path : "cipd_bin_packages" ,
Version : "git_revision:9c63809842a277ce10a86afd51b61c639a665d11" ,
} ,
} , CIPD_PKGS_PYTHON ... )
CIPD_PKGS_GIT = [ ] * specs . CipdPackage {
& specs . CipdPackage {
Name : "infra/git/${platform}" ,
Path : "cipd_bin_packages" ,
Version : "version:2.15.0.chromium12" ,
} ,
& specs . CipdPackage {
Name : "infra/tools/git/${platform}" ,
Path : "cipd_bin_packages" ,
Version : "git_revision:fa7a52f4741f5e04bba0dfccc9b8456dc572c60b" ,
} ,
& specs . CipdPackage {
Name : "infra/tools/luci/git-credential-luci/${platform}" ,
Path : "cipd_bin_packages" ,
Version : "git_revision:d0130097bd6364a8d834cb9efd4554c1f6192c82" ,
} ,
2017-06-06 12:27:09 +00:00
}
2017-12-07 14:21:07 +00:00
2018-04-17 19:55:57 +00:00
CIPD_PKGS_GSUTIL = [ ] * specs . CipdPackage {
& specs . CipdPackage {
Name : "infra/gsutil" ,
Path : "cipd_bin_packages" ,
Version : "version:4.28" ,
} ,
}
RECIPE_BUNDLE_UNIX = "recipe_bundle/recipes"
RECIPE_BUNDLE_WIN = "recipe_bundle/recipes.bat"
2017-02-01 20:56:55 +00:00
// Flags.
2017-02-07 14:16:30 +00:00
builderNameSchemaFile = flag . String ( "builder_name_schema" , "" , "Path to the builder_name_schema.json file. If not specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json from this repo." )
assetsDir = flag . String ( "assets_dir" , "" , "Directory containing assets." )
cfgFile = flag . String ( "cfg_file" , "" , "JSON file containing general configuration information." )
jobsFile = flag . String ( "jobs" , "" , "JSON file containing jobs to run." )
2016-09-30 19:53:12 +00:00
)
2018-04-19 13:36:45 +00:00
// Build the LogDog annotation URL.
func logdogAnnotationUrl ( ) string {
project := DEFAULT_PROJECT
if alternateProject != "" {
project = alternateProject
}
return fmt . Sprintf ( "logdog://logs.chromium.org/%s/%s/+/annotations" , project , specs . PLACEHOLDER_TASK_ID )
}
2018-04-17 19:55:57 +00:00
// Create a properties JSON string.
func props ( p map [ string ] string ) string {
d := make ( map [ string ] interface { } , len ( p ) + 1 )
for k , v := range p {
d [ k ] = interface { } ( v )
}
d [ "$kitchen" ] = struct {
DevShell bool ` json:"devshell" `
GitAuth bool ` json:"git_auth" `
} {
DevShell : true ,
GitAuth : true ,
}
j , err := json . Marshal ( d )
if err != nil {
sklog . Fatal ( err )
}
return strings . Replace ( string ( j ) , "\\u003c" , "<" , - 1 )
}
// kitchenTask returns a specs.TaskSpec instance which uses Kitchen to run a
// recipe.
func kitchenTask ( name , recipe , isolate , serviceAccount string , dimensions [ ] string , extraProps map [ string ] string , outputDir string ) * specs . TaskSpec {
if serviceAccount != "" && alternateServiceAccount != nil {
serviceAccount = alternateServiceAccount ( serviceAccount )
}
cipd := append ( [ ] * specs . CipdPackage { } , CIPD_PKGS_KITCHEN ... )
properties := map [ string ] string {
"buildbucket_build_id" : specs . PLACEHOLDER_BUILDBUCKET_BUILD_ID ,
"buildername" : name ,
"patch_issue" : specs . PLACEHOLDER_ISSUE ,
"patch_repo" : specs . PLACEHOLDER_PATCH_REPO ,
"patch_set" : specs . PLACEHOLDER_PATCHSET ,
"patch_storage" : specs . PLACEHOLDER_PATCH_STORAGE ,
"repository" : specs . PLACEHOLDER_REPO ,
"revision" : specs . PLACEHOLDER_REVISION ,
"swarm_out_dir" : outputDir ,
}
for k , v := range extraProps {
properties [ k ] = v
}
var outputs [ ] string = nil
if outputDir != OUTPUT_NONE {
outputs = [ ] string { outputDir }
}
return & specs . TaskSpec {
CipdPackages : cipd ,
Command : [ ] string {
"./kitchen${EXECUTABLE_SUFFIX}" , "cook" ,
"-checkout-dir" , "recipe_bundle" ,
"-mode" , "swarming" ,
"-luci-system-account" , "system" ,
"-cache-dir" , "cache" ,
"-temp-dir" , "tmp" ,
"-known-gerrit-host" , "android.googlesource.com" ,
"-known-gerrit-host" , "boringssl.googlesource.com" ,
"-known-gerrit-host" , "chromium.googlesource.com" ,
"-known-gerrit-host" , "dart.googlesource.com" ,
"-known-gerrit-host" , "fuchsia.googlesource.com" ,
"-known-gerrit-host" , "go.googlesource.com" ,
"-known-gerrit-host" , "llvm.googlesource.com" ,
"-known-gerrit-host" , "pdfium.googlesource.com" ,
"-known-gerrit-host" , "skia.googlesource.com" ,
"-known-gerrit-host" , "webrtc.googlesource.com" ,
"-output-result-json" , "${ISOLATED_OUTDIR}/build_result_filename" ,
"-workdir" , "." ,
"-recipe" , recipe ,
"-properties" , props ( properties ) ,
2018-04-19 13:36:45 +00:00
"-logdog-annotation-url" , logdogAnnotationUrl ( ) ,
2018-04-17 19:55:57 +00:00
} ,
Dependencies : [ ] string { BUNDLE_RECIPES_NAME } ,
Dimensions : dimensions ,
EnvPrefixes : map [ string ] [ ] string {
"PATH" : [ ] string { "cipd_bin_packages" , "cipd_bin_packages/bin" } ,
"VPYTHON_VIRTUALENV_ROOT" : [ ] string { "${cache_dir}/vpython" } ,
} ,
ExtraTags : map [ string ] string {
2018-04-19 13:36:45 +00:00
"log_location" : logdogAnnotationUrl ( ) ,
2018-04-17 19:55:57 +00:00
} ,
Isolate : relpath ( isolate ) ,
Outputs : outputs ,
Priority : 0.8 ,
ServiceAccount : serviceAccount ,
}
}
2017-09-15 14:35:44 +00:00
// internalHardwareLabel returns the internal ID for the bot, if any.
2017-09-15 18:09:07 +00:00
func internalHardwareLabel ( parts map [ string ] string ) * int {
2017-09-15 14:35:44 +00:00
if internalHardwareLabelFn != nil {
return internalHardwareLabelFn ( parts )
2017-09-15 12:35:31 +00:00
}
return nil
}
2017-02-01 20:56:55 +00:00
// linuxGceDimensions are the Swarming dimensions for Linux GCE
// instances.
func linuxGceDimensions ( ) [ ] string {
return [ ] string {
2017-09-25 16:56:53 +00:00
// Specify CPU to avoid running builds on bots with a more unique CPU.
"cpu:x86-64-Haswell_GCE" ,
2017-02-01 20:56:55 +00:00
"gpu:none" ,
2018-04-26 22:02:23 +00:00
// Currently all Linux GCE tasks run on 16-CPU machines.
"machine_type:n1-standard-16" ,
2017-11-15 16:22:57 +00:00
fmt . Sprintf ( "os:%s" , DEFAULT_OS_LINUX_GCE ) ,
2017-02-01 20:56:55 +00:00
fmt . Sprintf ( "pool:%s" , CONFIG . Pool ) ,
}
}
2016-09-30 19:53:12 +00:00
// deriveCompileTaskName returns the name of a compile task based on the given
// job name.
func deriveCompileTaskName ( jobName string , parts map [ string ] string ) string {
2018-01-02 19:54:43 +00:00
if strings . Contains ( jobName , "Bookmaker" ) {
2017-12-08 17:58:20 +00:00
return "Build-Debian9-GCC-x86_64-Release"
} else if parts [ "role" ] == "Housekeeper" {
2017-06-28 15:45:54 +00:00
return "Build-Debian9-GCC-x86_64-Release-Shared"
2018-01-05 16:13:43 +00:00
} else if parts [ "role" ] == "Test" || parts [ "role" ] == "Perf" || parts [ "role" ] == "Calmbench" {
2016-09-30 19:53:12 +00:00
task_os := parts [ "os" ]
2017-04-27 17:08:50 +00:00
ec := [ ] string { }
if val := parts [ "extra_config" ] ; val != "" {
ec = strings . Split ( val , "_" )
2018-03-15 17:40:07 +00:00
ignore := [ ] string { "Skpbench" , "AbandonGpuContext" , "PreAbandonGpuContext" , "Valgrind" , "ReleaseAndAbandonGpuContext" , "CCPR" , "FSAA" , "FAAA" , "FDAA" , "NativeFonts" , "GDI" , "NoGPUThreads" , "ProcDump" , "DDL1" , "DDL3" }
2017-04-27 17:08:50 +00:00
keep := make ( [ ] string , 0 , len ( ec ) )
for _ , part := range ec {
if ! util . In ( part , ignore ) {
keep = append ( keep , part )
}
}
ec = keep
2017-04-26 18:25:29 +00:00
}
2016-09-30 19:53:12 +00:00
if task_os == "Android" {
2017-04-27 17:08:50 +00:00
if ! util . In ( "Android" , ec ) {
ec = append ( [ ] string { "Android" } , ec ... )
2016-09-30 19:53:12 +00:00
}
2017-06-28 15:45:54 +00:00
task_os = "Debian9"
2017-03-08 19:01:01 +00:00
} else if task_os == "Chromecast" {
2017-06-28 15:45:54 +00:00
task_os = "Debian9"
2017-04-27 17:08:50 +00:00
ec = append ( [ ] string { "Chromecast" } , ec ... )
2017-04-07 14:04:08 +00:00
} else if strings . Contains ( task_os , "ChromeOS" ) {
2017-11-02 13:34:08 +00:00
ec = append ( [ ] string { "Chromebook" , "GLES" } , ec ... )
2017-06-28 15:45:54 +00:00
task_os = "Debian9"
2016-09-30 19:53:12 +00:00
} else if task_os == "iOS" {
2017-04-27 17:08:50 +00:00
ec = append ( [ ] string { task_os } , ec ... )
2016-09-30 19:53:12 +00:00
task_os = "Mac"
} else if strings . Contains ( task_os , "Win" ) {
task_os = "Win"
2017-06-28 15:45:54 +00:00
} else if strings . Contains ( task_os , "Ubuntu" ) || strings . Contains ( task_os , "Debian" ) {
task_os = "Debian9"
2016-09-30 19:53:12 +00:00
}
2016-11-18 18:10:51 +00:00
jobNameMap := map [ string ] string {
2016-09-30 19:53:12 +00:00
"role" : "Build" ,
"os" : task_os ,
"compiler" : parts [ "compiler" ] ,
"target_arch" : parts [ "arch" ] ,
"configuration" : parts [ "configuration" ] ,
2016-11-18 18:10:51 +00:00
}
2017-04-27 17:08:50 +00:00
if len ( ec ) > 0 {
jobNameMap [ "extra_config" ] = strings . Join ( ec , "_" )
2016-11-18 18:10:51 +00:00
}
name , err := jobNameSchema . MakeJobName ( jobNameMap )
2016-09-30 19:53:12 +00:00
if err != nil {
glog . Fatal ( err )
}
return name
} else {
return jobName
}
}
// swarmDimensions generates swarming bot dimensions for the given task.
func swarmDimensions ( parts map [ string ] string ) [ ] string {
2017-06-13 21:01:16 +00:00
if alternateSwarmDimensions != nil {
return alternateSwarmDimensions ( parts )
}
return defaultSwarmDimensions ( parts )
}
// defaultSwarmDimensions generates default swarming bot dimensions for the given task.
func defaultSwarmDimensions ( parts map [ string ] string ) [ ] string {
2016-09-30 19:53:12 +00:00
d := map [ string ] string {
2017-02-01 20:56:55 +00:00
"pool" : CONFIG . Pool ,
2016-09-30 19:53:12 +00:00
}
if os , ok := parts [ "os" ] ; ok {
2017-06-13 21:01:16 +00:00
d [ "os" ] , ok = map [ string ] string {
2017-03-21 13:25:34 +00:00
"Android" : "Android" ,
"Chromecast" : "Android" ,
2017-04-07 14:04:08 +00:00
"ChromeOS" : "ChromeOS" ,
2017-06-28 15:45:54 +00:00
"Debian9" : DEFAULT_OS_DEBIAN ,
2017-11-14 18:30:04 +00:00
"Mac" : DEFAULT_OS_MAC ,
2017-06-28 15:45:54 +00:00
"Ubuntu14" : DEFAULT_OS_UBUNTU ,
2017-07-11 12:11:15 +00:00
"Ubuntu17" : "Ubuntu-17.04" ,
2017-11-14 18:30:04 +00:00
"Win" : DEFAULT_OS_WIN ,
2018-04-11 18:38:45 +00:00
"Win10" : "Windows-10-16299.371" ,
2017-03-21 13:25:34 +00:00
"Win2k8" : "Windows-2008ServerR2-SP1" ,
2017-11-14 18:30:04 +00:00
"Win2016" : DEFAULT_OS_WIN ,
2017-04-18 19:38:15 +00:00
"Win7" : "Windows-7-SP1" ,
2017-03-21 13:25:34 +00:00
"Win8" : "Windows-8.1-SP0" ,
2017-05-19 17:08:19 +00:00
"iOS" : "iOS-10.3.1" ,
2016-12-02 17:09:10 +00:00
} [ os ]
2017-06-13 21:01:16 +00:00
if ! ok {
glog . Fatalf ( "Entry %q not found in OS mapping." , os )
}
2018-02-28 19:22:27 +00:00
if os == "Win10" && parts [ "model" ] == "Golo" {
2018-03-15 17:53:25 +00:00
// ChOps-owned machines have Windows 10 v1709, but a slightly different version than Skolo.
d [ "os" ] = "Windows-10-16299.309"
2018-02-23 16:17:03 +00:00
}
2018-04-24 20:59:48 +00:00
if d [ "os" ] == DEFAULT_OS_WIN {
// TODO(dogben): Temporarily add image dimension during upgrade.
2018-04-26 18:52:11 +00:00
d [ "image" ] = "windows-server-2016-dc-v20180410"
2018-04-24 20:59:48 +00:00
}
2016-09-30 19:53:12 +00:00
} else {
2017-06-28 15:45:54 +00:00
d [ "os" ] = DEFAULT_OS_DEBIAN
2016-09-30 19:53:12 +00:00
}
2017-11-02 17:48:23 +00:00
if parts [ "role" ] == "Test" || parts [ "role" ] == "Perf" || parts [ "role" ] == "Calmbench" {
2017-03-21 13:25:34 +00:00
if strings . Contains ( parts [ "os" ] , "Android" ) || strings . Contains ( parts [ "os" ] , "Chromecast" ) {
2016-09-30 19:53:12 +00:00
// For Android, the device type is a better dimension
// than CPU or GPU.
2017-06-14 14:01:45 +00:00
deviceInfo , ok := map [ string ] [ ] string {
2017-06-13 21:01:16 +00:00
"AndroidOne" : { "sprout" , "MOB30Q" } ,
2018-01-24 20:48:26 +00:00
"Chorizo" : { "chorizo" , "1.30_109591" } ,
2017-11-28 14:41:48 +00:00
"GalaxyS6" : { "zerofltetmo" , "NRD90M_G920TUVU5FQK1" } ,
2017-06-13 21:01:16 +00:00
"GalaxyS7_G930A" : { "heroqlteatt" , "NRD90M_G930AUCS4BQC2" } ,
"GalaxyS7_G930FD" : { "herolte" , "NRD90M_G930FXXU1DQAS" } ,
"MotoG4" : { "athene" , "NPJ25.93-14" } ,
2017-12-06 21:29:04 +00:00
"NVIDIA_Shield" : { "foster" , "NRD90M_1915764_848" } ,
"Nexus5" : { "hammerhead" , "M4B30Z_3437181" } ,
2017-11-17 13:59:44 +00:00
"Nexus5x" : { "bullhead" , "OPR6.170623.023" } ,
2017-12-06 21:29:04 +00:00
"Nexus7" : { "grouper" , "LMY47V_1836172" } , // 2012 Nexus 7
2018-04-11 16:36:56 +00:00
"NexusPlayer" : { "fugu" , "OPR2.170623.027" } ,
2018-02-20 15:29:45 +00:00
"Pixel" : { "sailfish" , "OPM1.171019.016" } ,
2018-04-02 15:00:52 +00:00
"Pixel2XL" : { "taimen" , "OPM1.171019.021" } ,
2018-04-11 16:36:56 +00:00
"PixelC" : { "dragon" , "OPM1.171019.026" } ,
2017-06-14 14:01:45 +00:00
} [ parts [ "model" ] ]
2017-02-01 20:56:55 +00:00
if ! ok {
2017-06-14 14:01:45 +00:00
glog . Fatalf ( "Entry %q not found in Android mapping." , parts [ "model" ] )
2017-02-01 20:56:55 +00:00
}
2016-11-08 17:55:32 +00:00
d [ "device_type" ] = deviceInfo [ 0 ]
d [ "device_os" ] = deviceInfo [ 1 ]
2016-09-30 19:53:12 +00:00
} else if strings . Contains ( parts [ "os" ] , "iOS" ) {
2017-06-13 21:01:16 +00:00
device , ok := map [ string ] string {
2016-11-09 19:03:20 +00:00
"iPadMini4" : "iPad5,1" ,
2017-04-25 15:38:38 +00:00
"iPhone6" : "iPhone7,2" ,
"iPhone7" : "iPhone9,1" ,
"iPadPro" : "iPad6,3" ,
2016-09-30 19:53:12 +00:00
} [ parts [ "model" ] ]
2017-06-13 21:01:16 +00:00
if ! ok {
glog . Fatalf ( "Entry %q not found in iOS mapping." , parts [ "model" ] )
}
d [ "device" ] = device
2016-09-30 19:53:12 +00:00
} else if parts [ "cpu_or_gpu" ] == "CPU" {
2017-09-25 16:56:53 +00:00
modelMapping , ok := map [ string ] map [ string ] string {
"AVX" : {
"MacMini7.1" : "x86-64-E5-2697_v2" ,
"Golo" : "x86-64-E5-2670" ,
} ,
"AVX2" : {
2017-11-07 21:57:37 +00:00
"GCE" : "x86-64-Haswell_GCE" ,
2017-11-03 16:19:54 +00:00
"NUC5i7RYH" : "x86-64-i7-5557U" ,
2017-09-25 16:56:53 +00:00
} ,
2017-09-28 20:38:34 +00:00
"AVX512" : {
"GCE" : "x86-64-Skylake_GCE" ,
} ,
2016-09-30 19:53:12 +00:00
} [ parts [ "cpu_or_gpu_value" ] ]
2017-06-13 21:01:16 +00:00
if ! ok {
glog . Fatalf ( "Entry %q not found in CPU mapping." , parts [ "cpu_or_gpu_value" ] )
}
2017-09-25 16:56:53 +00:00
cpu , ok := modelMapping [ parts [ "model" ] ]
if ! ok {
glog . Fatalf ( "Entry %q not found in %q model mapping." , parts [ "model" ] , parts [ "cpu_or_gpu_value" ] )
2016-09-30 19:53:12 +00:00
}
2017-09-25 16:56:53 +00:00
d [ "cpu" ] = cpu
2017-11-15 16:22:57 +00:00
if parts [ "model" ] == "GCE" && d [ "os" ] == DEFAULT_OS_DEBIAN {
d [ "os" ] = DEFAULT_OS_LINUX_GCE
2018-04-26 22:02:23 +00:00
// Currently all Linux GCE tasks run on 16-CPU machines.
d [ "machine_type" ] = "n1-standard-16"
2017-11-15 16:22:57 +00:00
}
2017-12-14 18:15:01 +00:00
if parts [ "model" ] == "GCE" && d [ "os" ] == DEFAULT_OS_WIN {
2017-12-15 16:23:40 +00:00
// Use normal-size machines for Test and Perf tasks on Win GCE.
2017-12-14 18:15:01 +00:00
d [ "machine_type" ] = "n1-standard-16"
}
2016-09-30 19:53:12 +00:00
} else {
2017-06-14 14:34:18 +00:00
if strings . Contains ( parts [ "os" ] , "Win" ) {
gpu , ok := map [ string ] string {
2018-03-15 17:53:25 +00:00
"GT610" : "10de:104a-23.21.13.9101" ,
2018-02-28 22:30:19 +00:00
"GTX1070" : "10de:1ba1-23.21.13.9101" ,
"GTX660" : "10de:11c0-23.21.13.9101" ,
"GTX960" : "10de:1401-23.21.13.9101" ,
2018-02-27 04:38:52 +00:00
"IntelHD4400" : "8086:0a16-20.19.15.4835" ,
2018-04-11 15:29:59 +00:00
"IntelIris540" : "8086:1926-23.20.16.4982" ,
2018-02-27 04:38:52 +00:00
"IntelIris6100" : "8086:162b-20.19.15.4835" ,
2018-04-24 22:17:02 +00:00
"RadeonHD7770" : "1002:683d-23.20.15033.5003" ,
"RadeonR9M470X" : "1002:6646-23.20.15033.5003" ,
2018-03-15 17:53:25 +00:00
"QuadroP400" : "10de:1cb3-23.21.13.9103" ,
2017-06-14 14:34:18 +00:00
} [ parts [ "cpu_or_gpu_value" ] ]
if ! ok {
glog . Fatalf ( "Entry %q not found in Win GPU mapping." , parts [ "cpu_or_gpu_value" ] )
}
d [ "gpu" ] = gpu
2017-02-19 04:28:26 +00:00
2017-06-15 16:28:04 +00:00
// Specify cpu dimension for NUCs and ShuttleCs. We temporarily have two
// types of machines with a GTX960.
cpu , ok := map [ string ] string {
"NUC6i7KYK" : "x86-64-i7-6770HQ" ,
"ShuttleC" : "x86-64-i7-6700K" ,
2017-06-14 14:34:18 +00:00
} [ parts [ "model" ] ]
if ok {
2017-06-15 16:28:04 +00:00
d [ "cpu" ] = cpu
2017-06-14 14:34:18 +00:00
}
2017-06-28 15:45:54 +00:00
} else if strings . Contains ( parts [ "os" ] , "Ubuntu" ) || strings . Contains ( parts [ "os" ] , "Debian" ) {
2017-06-14 14:34:18 +00:00
gpu , ok := map [ string ] string {
// Intel drivers come from CIPD, so no need to specify the version here.
"IntelBayTrail" : "8086:0f31" ,
"IntelHD2000" : "8086:0102" ,
"IntelHD405" : "8086:22b1" ,
2017-12-19 20:14:12 +00:00
"IntelIris640" : "8086:5926" ,
2017-08-04 02:29:22 +00:00
"QuadroP400" : "10de:1cb3-384.59" ,
2017-06-14 14:34:18 +00:00
} [ parts [ "cpu_or_gpu_value" ] ]
if ! ok {
glog . Fatalf ( "Entry %q not found in Ubuntu GPU mapping." , parts [ "cpu_or_gpu_value" ] )
}
d [ "gpu" ] = gpu
} else if strings . Contains ( parts [ "os" ] , "Mac" ) {
gpu , ok := map [ string ] string {
2018-02-02 19:47:31 +00:00
"IntelHD6000" : "8086:1626" ,
2018-02-08 21:57:04 +00:00
"IntelHD615" : "8086:591e" ,
2017-08-17 21:29:04 +00:00
"IntelIris5100" : "8086:0a2e" ,
2017-06-14 14:34:18 +00:00
} [ parts [ "cpu_or_gpu_value" ] ]
if ! ok {
glog . Fatalf ( "Entry %q not found in Mac GPU mapping." , parts [ "cpu_or_gpu_value" ] )
}
d [ "gpu" ] = gpu
2018-02-09 15:29:09 +00:00
// Yuck. We have two different types of MacMini7,1 with the same GPU but different CPUs.
if parts [ "cpu_or_gpu_value" ] == "IntelIris5100" {
// Run all tasks on Golo machines for now.
d [ "cpu" ] = "x86-64-i7-4578U"
}
2017-06-14 14:34:18 +00:00
} else if strings . Contains ( parts [ "os" ] , "ChromeOS" ) {
2017-12-04 20:43:31 +00:00
version , ok := map [ string ] string {
"MaliT604" : "9901.12.0" ,
"MaliT764" : "10172.0.0" ,
"MaliT860" : "10172.0.0" ,
2017-12-08 19:14:38 +00:00
"PowerVRGX6250" : "10176.5.0" ,
2017-12-04 20:43:31 +00:00
"TegraK1" : "10172.0.0" ,
"IntelHDGraphics615" : "10032.17.0" ,
2017-06-14 14:34:18 +00:00
} [ parts [ "cpu_or_gpu_value" ] ]
if ! ok {
glog . Fatalf ( "Entry %q not found in ChromeOS GPU mapping." , parts [ "cpu_or_gpu_value" ] )
}
2017-12-04 20:43:31 +00:00
d [ "gpu" ] = parts [ "cpu_or_gpu_value" ]
d [ "release_version" ] = version
2017-06-14 14:34:18 +00:00
} else {
glog . Fatalf ( "Unknown GPU mapping for OS %q." , parts [ "os" ] )
2017-02-19 04:28:26 +00:00
}
2016-09-30 19:53:12 +00:00
}
} else {
d [ "gpu" ] = "none"
2017-06-28 15:45:54 +00:00
if d [ "os" ] == DEFAULT_OS_DEBIAN {
2017-03-22 19:54:54 +00:00
return linuxGceDimensions ( )
2017-11-14 18:30:04 +00:00
} else if d [ "os" ] == DEFAULT_OS_WIN {
// Windows CPU bots.
2017-09-26 22:19:56 +00:00
d [ "cpu" ] = "x86-64-Haswell_GCE"
2018-03-14 15:23:47 +00:00
// Use many-core machines for Build tasks on Win GCE.
d [ "machine_type" ] = "n1-highcpu-64"
2017-11-14 18:30:04 +00:00
} else if d [ "os" ] == DEFAULT_OS_MAC {
// Mac CPU bots.
2017-09-26 22:19:56 +00:00
d [ "cpu" ] = "x86-64-E5-2697_v2"
2017-03-22 19:54:54 +00:00
}
2016-09-30 19:53:12 +00:00
}
2017-03-22 19:54:54 +00:00
2016-09-30 19:53:12 +00:00
rv := make ( [ ] string , 0 , len ( d ) )
for k , v := range d {
rv = append ( rv , fmt . Sprintf ( "%s:%s" , k , v ) )
}
sort . Strings ( rv )
return rv
}
2017-06-14 19:25:31 +00:00
// relpath returns the relative path to the given file from the config file.
func relpath ( f string ) string {
_ , filename , _ , _ := runtime . Caller ( 0 )
dir := path . Dir ( filename )
rel := dir
if * cfgFile != "" {
rel = path . Dir ( * cfgFile )
}
rv , err := filepath . Rel ( rel , path . Join ( dir , f ) )
if err != nil {
sklog . Fatal ( err )
}
return rv
}
2017-04-04 13:06:16 +00:00
// bundleRecipes generates the task to bundle and isolate the recipes.
func bundleRecipes ( b * specs . TasksCfgBuilder ) string {
b . MustAddTask ( BUNDLE_RECIPES_NAME , & specs . TaskSpec {
2018-04-17 19:55:57 +00:00
CipdPackages : CIPD_PKGS_GIT ,
Command : [ ] string {
"/bin/bash" , "skia/infra/bots/bundle_recipes.sh" , specs . PLACEHOLDER_ISOLATED_OUTDIR ,
} ,
Dimensions : linuxGceDimensions ( ) ,
EnvPrefixes : map [ string ] [ ] string {
"PATH" : [ ] string { "cipd_bin_packages" , "cipd_bin_packages/bin" } ,
2017-04-04 13:06:16 +00:00
} ,
2018-04-17 19:55:57 +00:00
Isolate : relpath ( "swarm_recipe.isolate" ) ,
2017-05-11 17:35:23 +00:00
Priority : 0.7 ,
2017-04-04 13:06:16 +00:00
} )
return BUNDLE_RECIPES_NAME
}
2017-05-11 17:35:23 +00:00
type isolateAssetCfg struct {
2018-04-17 19:55:57 +00:00
cipdPkg string
path string
2017-05-11 17:35:23 +00:00
}
var ISOLATE_ASSET_MAPPING = map [ string ] isolateAssetCfg {
2018-03-07 19:44:44 +00:00
ISOLATE_GCLOUD_LINUX_NAME : {
2018-04-17 19:55:57 +00:00
cipdPkg : "gcloud_linux" ,
path : "gcloud_linux" ,
2018-03-07 19:44:44 +00:00
} ,
ISOLATE_GO_LINUX_NAME : {
2018-04-17 19:55:57 +00:00
cipdPkg : "go" ,
path : "go" ,
2018-03-07 19:44:44 +00:00
} ,
2017-05-11 17:35:23 +00:00
ISOLATE_SKIMAGE_NAME : {
2018-04-17 19:55:57 +00:00
cipdPkg : "skimage" ,
path : "skimage" ,
2017-05-11 17:35:23 +00:00
} ,
ISOLATE_SKP_NAME : {
2018-04-17 19:55:57 +00:00
cipdPkg : "skp" ,
path : "skp" ,
2017-05-11 17:35:23 +00:00
} ,
ISOLATE_SVG_NAME : {
2018-04-17 19:55:57 +00:00
cipdPkg : "svg" ,
path : "svg" ,
2017-05-11 17:35:23 +00:00
} ,
2017-11-29 19:45:14 +00:00
ISOLATE_NDK_LINUX_NAME : {
2018-04-17 19:55:57 +00:00
cipdPkg : "android_ndk_linux" ,
path : "android_ndk_linux" ,
2017-11-29 19:45:14 +00:00
} ,
2018-02-20 16:40:25 +00:00
ISOLATE_SDK_LINUX_NAME : {
2018-04-17 19:55:57 +00:00
cipdPkg : "android_sdk_linux" ,
path : "android_sdk_linux" ,
2018-02-20 16:40:25 +00:00
} ,
2017-11-29 19:45:14 +00:00
ISOLATE_WIN_TOOLCHAIN_NAME : {
2018-04-17 19:55:57 +00:00
cipdPkg : "win_toolchain" ,
path : "t" ,
2017-11-29 19:45:14 +00:00
} ,
ISOLATE_WIN_VULKAN_SDK_NAME : {
2018-04-17 19:55:57 +00:00
cipdPkg : "win_vulkan_sdk" ,
path : "win_vulkan_sdk" ,
2017-11-29 19:45:14 +00:00
} ,
2017-05-11 17:35:23 +00:00
}
2018-04-17 19:55:57 +00:00
// isolateCIPDAsset generates a task to isolate the given CIPD asset.
2017-05-11 17:35:23 +00:00
func isolateCIPDAsset ( b * specs . TasksCfgBuilder , name string ) string {
2018-04-17 19:55:57 +00:00
asset := ISOLATE_ASSET_MAPPING [ name ]
2017-05-11 17:35:23 +00:00
b . MustAddTask ( name , & specs . TaskSpec {
CipdPackages : [ ] * specs . CipdPackage {
2018-04-17 19:55:57 +00:00
b . MustGetCipdPackageFromAsset ( asset . cipdPkg ) ,
2017-05-11 17:35:23 +00:00
} ,
2018-04-17 19:55:57 +00:00
Command : [ ] string { "/bin/cp" , "-rL" , asset . path , "${ISOLATED_OUTDIR}" } ,
2017-05-11 17:35:23 +00:00
Dimensions : linuxGceDimensions ( ) ,
2018-04-17 19:55:57 +00:00
Isolate : relpath ( "empty.isolate" ) ,
2017-05-11 17:35:23 +00:00
Priority : 0.7 ,
} )
return name
}
2017-05-15 12:30:27 +00:00
// getIsolatedCIPDDeps returns the slice of Isolate_* tasks a given task needs.
// This allows us to save time on I/O bound bots, like the RPIs.
func getIsolatedCIPDDeps ( parts map [ string ] string ) [ ] string {
deps := [ ] string { }
2017-05-11 17:35:23 +00:00
// Only do this on the RPIs for now. Other, faster machines shouldn't see much
// benefit and we don't need the extra complexity, for now
2017-05-15 12:30:27 +00:00
rpiOS := [ ] string { "Android" , "ChromeOS" , "iOS" }
if o := parts [ "os" ] ; strings . Contains ( o , "Chromecast" ) {
// Chromecasts don't have enough disk space to fit all of the content,
// so we do a subset of the skps.
deps = append ( deps , ISOLATE_SKP_NAME )
} else if e := parts [ "extra_config" ] ; strings . Contains ( e , "Skpbench" ) {
// Skpbench only needs skps
deps = append ( deps , ISOLATE_SKP_NAME )
} else if util . In ( o , rpiOS ) {
deps = append ( deps , ISOLATE_SKP_NAME )
deps = append ( deps , ISOLATE_SVG_NAME )
deps = append ( deps , ISOLATE_SKIMAGE_NAME )
}
return deps
2017-05-11 17:35:23 +00:00
}
2016-09-30 19:53:12 +00:00
// compile generates a compile task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func compile(b *specs.TasksCfgBuilder, name string, parts map[string]string) string {
	task := kitchenTask(name, "compile", "swarm_recipe.isolate", SERVICE_ACCOUNT_COMPILE, swarmDimensions(parts), nil, OUTPUT_BUILD)
	task.CipdPackages = append(task.CipdPackages, CIPD_PKGS_GIT...)

	// Android bots require a toolchain.
	if strings.Contains(name, "Android") {
		if parts["extra_config"] == "Android_Framework" {
			// Do not need a toolchain when building the
			// Android Framework.
		} else if strings.Contains(name, "Mac") {
			// "Mac" here refers to the build host OS, not the target.
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("android_ndk_darwin"))
		} else if strings.Contains(name, "Win") {
			pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows")
			// Install the NDK under a one-character directory; presumably to
			// keep paths short on Windows — TODO confirm.
			pkg.Path = "n"
			task.CipdPackages = append(task.CipdPackages, pkg)
		} else {
			// Linux hosts get the NDK via an isolated task rather than CIPD.
			task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_NDK_LINUX_NAME))
			if strings.Contains(name, "SKQP") {
				// SKQP builds additionally need the Android SDK and Go.
				task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_SDK_LINUX_NAME), isolateCIPDAsset(b, ISOLATE_GO_LINUX_NAME))
			}
		}
	} else if strings.Contains(name, "Chromecast") {
		task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("cast_toolchain"))
		task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("chromebook_arm_gles"))
	} else if strings.Contains(name, "Chromebook") {
		task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("clang_linux"))
		// Pick the GLES libraries matching the target architecture.
		if parts["target_arch"] == "x86_64" {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("chromebook_x86_64_gles"))
		} else if parts["target_arch"] == "arm" {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("armhf_sysroot"))
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("chromebook_arm_gles"))
		}
	} else if strings.Contains(name, "Debian") {
		if strings.Contains(name, "Clang") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("clang_linux"))
		}
		if strings.Contains(name, "Vulkan") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("linux_vulkan_sdk"))
		}
		if strings.Contains(name, "EMCC") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("emscripten_sdk"))
		}
		if parts["target_arch"] == "mips64el" || parts["target_arch"] == "loongson3a" {
			// The only mips64el toolchain we have is GCC-based; fail fast on a
			// job name that claims otherwise.
			if parts["compiler"] != "GCC" {
				glog.Fatalf("mips64el toolchain is GCC, but compiler is %q in %q", parts["compiler"], name)
			}
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("mips64el_toolchain_linux"))
		}
		if strings.Contains(name, "SwiftShader") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("cmake_linux"))
		}
	} else if strings.Contains(name, "Win") {
		task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_WIN_TOOLCHAIN_NAME))
		if strings.Contains(name, "Clang") {
			task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("clang_win"))
		}
		if strings.Contains(name, "Vulkan") {
			task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_WIN_VULKAN_SDK_NAME))
		}
	}

	// No automatic retries, except on Windows which gets one retry —
	// presumably due to flakiness; TODO confirm.
	task.MaxAttempts = 1
	if strings.Contains(name, "Win") {
		task.MaxAttempts = 2
	}

	// Add the task.
	b.MustAddTask(name, task)

	// All compile tasks are runnable as their own Job. Assert that the Job
	// is listed in JOBS.
	if !util.In(name, JOBS) {
		glog.Fatalf("Job %q is missing from the JOBS list!", name)
	}

	// Upload the skiaserve binary only for Linux Android compile bots.
	// See skbug.com/7399 for context.
	if parts["configuration"] == "Release" &&
		parts["extra_config"] == "Android" &&
		!strings.Contains(parts["os"], "Win") &&
		!strings.Contains(parts["os"], "Mac") {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
		task := kitchenTask(uploadName, "upload_skiaserve", "swarm_recipe.isolate", SERVICE_ACCOUNT_UPLOAD_BINARY, linuxGceDimensions(), nil, OUTPUT_NONE)
		task.Dependencies = append(task.Dependencies, name)
		b.MustAddTask(uploadName, task)
		// The upload becomes the last task in the chain.
		return uploadName
	}

	return name
}
// recreateSKPs generates a RecreateSKPs task. Returns the name of the last
// task in the generated chain of tasks, which the Job should add as a
// dependency.
2018-04-16 13:21:01 +00:00
func recreateSKPs ( b * specs . TasksCfgBuilder , name string ) string {
2018-04-17 19:55:57 +00:00
task := kitchenTask ( name , "recreate_skps" , "swarm_recipe.isolate" , SERVICE_ACCOUNT_RECREATE_SKPS , linuxGceDimensions ( ) , nil , OUTPUT_NONE )
2018-04-26 12:49:38 +00:00
task . CipdPackages = append ( task . CipdPackages , CIPD_PKGS_GIT ... )
2018-04-17 19:55:57 +00:00
task . CipdPackages = append ( task . CipdPackages , b . MustGetCipdPackageFromAsset ( "go" ) )
task . ExecutionTimeout = 4 * time . Hour
task . IoTimeout = 4 * time . Hour // With kitchen, step logs don't count toward IoTimeout.
b . MustAddTask ( name , task )
2016-09-30 19:53:12 +00:00
return name
}
// ctSKPs generates a CT SKPs task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
2018-04-16 13:21:01 +00:00
func ctSKPs ( b * specs . TasksCfgBuilder , name string ) string {
2018-04-17 19:55:57 +00:00
dims := [ ] string {
"pool:SkiaCT" ,
fmt . Sprintf ( "os:%s" , DEFAULT_OS_LINUX_GCE ) ,
}
task := kitchenTask ( name , "ct_skps" , "skia_repo.isolate" , SERVICE_ACCOUNT_CT_SKPS , dims , nil , OUTPUT_NONE )
2018-04-26 22:19:25 +00:00
task . CipdPackages = append ( task . CipdPackages , CIPD_PKGS_GIT ... )
2018-04-17 19:55:57 +00:00
task . CipdPackages = append ( task . CipdPackages , b . MustGetCipdPackageFromAsset ( "clang_linux" ) )
task . ExecutionTimeout = 24 * time . Hour
task . IoTimeout = 24 * time . Hour
2018-04-20 01:48:55 +00:00
task . MaxAttempts = 1
2018-04-17 19:55:57 +00:00
b . MustAddTask ( name , task )
2016-09-30 19:53:12 +00:00
return name
}
2017-07-28 11:35:28 +00:00
// checkGeneratedFiles verifies that no generated SKSL files have been edited
// by hand.
2018-04-16 13:21:01 +00:00
func checkGeneratedFiles ( b * specs . TasksCfgBuilder , name string ) string {
2018-04-17 19:55:57 +00:00
task := kitchenTask ( name , "check_generated_files" , "swarm_recipe.isolate" , SERVICE_ACCOUNT_COMPILE , linuxGceDimensions ( ) , nil , OUTPUT_NONE )
b . MustAddTask ( name , task )
2017-07-28 11:35:28 +00:00
return name
}
2016-09-30 19:53:12 +00:00
// housekeeper generates a Housekeeper task. Returns the name of the last task
// in the generated chain of tasks, which the Job should add as a dependency.
2018-04-16 13:21:01 +00:00
func housekeeper ( b * specs . TasksCfgBuilder , name , compileTaskName string ) string {
2018-04-17 19:55:57 +00:00
task := kitchenTask ( name , "housekeeper" , "swarm_recipe.isolate" , SERVICE_ACCOUNT_HOUSEKEEPER , linuxGceDimensions ( ) , nil , OUTPUT_NONE )
2018-04-26 22:19:25 +00:00
task . CipdPackages = append ( task . CipdPackages , CIPD_PKGS_GIT ... )
2018-04-17 19:55:57 +00:00
task . CipdPackages = append ( task . CipdPackages , b . MustGetCipdPackageFromAsset ( "go" ) )
task . Dependencies = append ( task . Dependencies , compileTaskName )
b . MustAddTask ( name , task )
2016-09-30 19:53:12 +00:00
return name
}
2017-12-08 17:58:20 +00:00
// bookmaker generates a Bookmaker task. Returns the name of the last task
// in the generated chain of tasks, which the Job should add as a dependency.
func bookmaker ( b * specs . TasksCfgBuilder , name , compileTaskName string ) string {
2018-04-17 19:55:57 +00:00
task := kitchenTask ( name , "bookmaker" , "swarm_recipe.isolate" , SERVICE_ACCOUNT_BOOKMAKER , linuxGceDimensions ( ) , nil , OUTPUT_NONE )
2018-04-26 12:49:38 +00:00
task . CipdPackages = append ( task . CipdPackages , CIPD_PKGS_GIT ... )
2018-04-17 19:55:57 +00:00
task . CipdPackages = append ( task . CipdPackages , b . MustGetCipdPackageFromAsset ( "go" ) )
task . Dependencies = append ( task . Dependencies , compileTaskName )
task . ExecutionTimeout = 2 * time . Hour
task . IoTimeout = 2 * time . Hour
b . MustAddTask ( name , task )
2017-12-08 17:58:20 +00:00
return name
}
2018-02-01 18:38:13 +00:00
// androidFrameworkCompile generates an Android Framework Compile task. Returns
// the name of the last task in the generated chain of tasks, which the Job
// should add as a dependency.
func androidFrameworkCompile ( b * specs . TasksCfgBuilder , name string ) string {
2018-04-17 19:55:57 +00:00
task := kitchenTask ( name , "android_compile" , "swarm_recipe.isolate" , SERVICE_ACCOUNT_COMPILE , linuxGceDimensions ( ) , nil , OUTPUT_NONE )
2018-04-20 01:48:55 +00:00
task . ExecutionTimeout = 1 * time . Hour
task . IoTimeout = 1 * time . Hour
task . MaxAttempts = 1
2018-04-17 19:55:57 +00:00
b . MustAddTask ( name , task )
2018-02-01 18:38:13 +00:00
return name
}
2016-10-14 13:32:09 +00:00
// infra generates an infra_tests task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
2018-04-16 13:21:01 +00:00
func infra ( b * specs . TasksCfgBuilder , name string ) string {
2018-04-17 19:55:57 +00:00
task := kitchenTask ( name , "infra" , "swarm_recipe.isolate" , SERVICE_ACCOUNT_COMPILE , linuxGceDimensions ( ) , nil , OUTPUT_NONE )
2018-04-26 22:19:25 +00:00
task . CipdPackages = append ( task . CipdPackages , CIPD_PKGS_GIT ... )
2018-04-17 19:55:57 +00:00
task . CipdPackages = append ( task . CipdPackages , b . MustGetCipdPackageFromAsset ( "go" ) )
b . MustAddTask ( name , task )
2016-10-14 13:32:09 +00:00
return name
}
2018-01-05 16:13:43 +00:00
// getParentRevisionName returns the name of the compile task which builds the
// parent revision, derived from compileTaskName. The suffix separator depends
// on whether the job name already contains an extra_config component: "-" when
// extra_config is empty (ParentRevision becomes the extra_config), "_" when it
// is not (ParentRevision is appended to the existing extra_config).
func getParentRevisionName(compileTaskName string, parts map[string]string) string {
	// Early return instead of if/else: the happy path stays left-aligned.
	if parts["extra_config"] == "" {
		return compileTaskName + "-ParentRevision"
	}
	return compileTaskName + "_ParentRevision"
}
2017-10-16 16:24:43 +00:00
// calmbench generates a calmbench task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
2018-04-17 19:55:57 +00:00
func calmbench ( b * specs . TasksCfgBuilder , name string , parts map [ string ] string , compileTaskName , compileParentName string ) string {
task := kitchenTask ( name , "calmbench" , "calmbench.isolate" , "" , swarmDimensions ( parts ) , nil , OUTPUT_PERF )
2018-04-26 22:19:25 +00:00
task . CipdPackages = append ( task . CipdPackages , CIPD_PKGS_GIT ... )
2018-04-17 19:55:57 +00:00
task . CipdPackages = append ( task . CipdPackages , b . MustGetCipdPackageFromAsset ( "go" ) )
task . Dependencies = append ( task . Dependencies , compileTaskName , compileParentName , ISOLATE_SKP_NAME , ISOLATE_SVG_NAME )
2018-04-20 01:48:55 +00:00
task . MaxAttempts = 1
2018-04-17 19:55:57 +00:00
b . MustAddTask ( name , task )
2017-10-16 16:24:43 +00:00
2017-10-24 13:43:21 +00:00
// Upload results if necessary.
if strings . Contains ( name , "Release" ) && doUpload ( name ) {
uploadName := fmt . Sprintf ( "%s%s%s" , PREFIX_UPLOAD , jobNameSchema . Sep , name )
2018-04-17 19:55:57 +00:00
extraProps := map [ string ] string {
"gs_bucket" : CONFIG . GsBucketCalm ,
}
uploadTask := kitchenTask ( name , "upload_calmbench_results" , "swarm_recipe.isolate" , SERVICE_ACCOUNT_UPLOAD_CALMBENCH , linuxGceDimensions ( ) , extraProps , OUTPUT_NONE )
uploadTask . CipdPackages = append ( uploadTask . CipdPackages , CIPD_PKGS_GSUTIL ... )
uploadTask . Dependencies = append ( uploadTask . Dependencies , name )
b . MustAddTask ( uploadName , uploadTask )
2017-10-24 13:43:21 +00:00
return uploadName
}
2017-10-16 16:24:43 +00:00
return name
}
2016-09-30 19:53:12 +00:00
// doUpload indicates whether the given Job should upload its results.
func doUpload ( name string ) bool {
2017-02-01 20:56:55 +00:00
for _ , s := range CONFIG . NoUpload {
m , err := regexp . MatchString ( s , name )
if err != nil {
glog . Fatal ( err )
}
if m {
2016-09-30 19:53:12 +00:00
return false
}
}
return true
}
// test generates a Test task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func test(b *specs.TasksCfgBuilder, name string, parts map[string]string, compileTaskName string, pkgs []*specs.CipdPackage) string {
	// SKQP bots use a dedicated recipe.
	recipe := "test"
	if strings.Contains(name, "SKQP") {
		recipe = "skqp_test"
	}
	// Pass the internal hardware label (if any) to the recipe as a property.
	extraProps := map[string]string{}
	iid := internalHardwareLabel(parts)
	if iid != nil {
		extraProps["internal_hardware_label"] = strconv.Itoa(*iid)
	}
	task := kitchenTask(name, recipe, "test_skia_bundled.isolate", "", swarmDimensions(parts), extraProps, OUTPUT_TEST)
	task.CipdPackages = append(task.CipdPackages, pkgs...)
	task.Dependencies = append(task.Dependencies, compileTaskName)
	if strings.Contains(name, "Android_ASAN") {
		// ASAN on Android needs the NDK, delivered via an isolated task.
		task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_NDK_LINUX_NAME))
	}
	if strings.Contains(name, "SKQP") {
		// SKQP tests additionally need gcloud.
		task.Dependencies = append(task.Dependencies, isolateCIPDAsset(b, ISOLATE_GCLOUD_LINUX_NAME))
	}
	if deps := getIsolatedCIPDDeps(parts); len(deps) > 0 {
		task.Dependencies = append(task.Dependencies, deps...)
	}
	// Default timeouts; slower configurations override them below.
	task.ExecutionTimeout = 4 * time.Hour
	task.Expiration = 20 * time.Hour
	task.IoTimeout = 4 * time.Hour
	task.MaxAttempts = 1
	if strings.Contains(parts["extra_config"], "Valgrind") {
		task.ExecutionTimeout = 9 * time.Hour
		task.Expiration = 48 * time.Hour
		task.IoTimeout = 9 * time.Hour
		task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("valgrind"))
		// Only run on bots carrying the valgrind dimension.
		task.Dimensions = append(task.Dimensions, "valgrind:1")
	} else if strings.Contains(parts["extra_config"], "MSAN") {
		task.ExecutionTimeout = 9 * time.Hour
		task.IoTimeout = 9 * time.Hour
	} else if parts["arch"] == "x86" && parts["configuration"] == "Debug" {
		// skia:6737
		task.ExecutionTimeout = 6 * time.Hour
		task.IoTimeout = 6 * time.Hour
	}
	b.MustAddTask(name, task)

	// Upload results if necessary. TODO(kjlubick): If we do coverage analysis at the same
	// time as normal tests (which would be nice), cfg.json needs to have Coverage removed.
	if doUpload(name) {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
		// This extraProps intentionally shadows the one declared above.
		extraProps := map[string]string{
			"gs_bucket": CONFIG.GsBucketGm,
		}
		// NOTE(review): the upload task is constructed with the Test task's
		// name (not uploadName) but registered under uploadName — presumably
		// the recipe needs the original buildername; confirm before changing.
		uploadTask := kitchenTask(name, "upload_dm_results", "swarm_recipe.isolate", SERVICE_ACCOUNT_UPLOAD_GM, linuxGceDimensions(), extraProps, OUTPUT_NONE)
		uploadTask.CipdPackages = append(uploadTask.CipdPackages, CIPD_PKGS_GSUTIL...)
		uploadTask.Dependencies = append(uploadTask.Dependencies, name)
		b.MustAddTask(uploadName, uploadTask)
		return uploadName
	}
	return name
}
// coverage generates one Test-style task per shard (the shard count is parsed
// from a "Shard_NN" test_filter part, defaulting to a single shard), plus an
// upload task which depends on all shards and the compile task. Returns the
// name of the upload task, which the Job should add as a dependency.
func coverage(b *specs.TasksCfgBuilder, name string, parts map[string]string, compileTaskName string, pkgs []*specs.CipdPackage) string {
	shards := 1
	deps := []string{}

	tf := parts["test_filter"]
	if strings.Contains(tf, "Shard") {
		// Expected Shard_NN
		shardstr := strings.Split(tf, "_")[1]
		var err error
		shards, err = strconv.Atoi(shardstr)
		if err != nil {
			glog.Fatalf("Expected int for number of shards %q in %s: %s", shardstr, name, err)
		}
	}
	for i := 0; i < shards; i++ {
		// Each shard's task name replaces the test_filter part with
		// "shard_<i>_<total>".
		n := strings.Replace(name, tf, fmt.Sprintf("shard_%02d_%02d", i, shards), 1)
		task := kitchenTask(n, "test", "test_skia_bundled.isolate", "", swarmDimensions(parts), nil, OUTPUT_COVERAGE)
		task.CipdPackages = append(task.CipdPackages, pkgs...)
		task.Dependencies = append(task.Dependencies, compileTaskName)
		task.ExecutionTimeout = 4 * time.Hour
		task.Expiration = 20 * time.Hour
		task.IoTimeout = 4 * time.Hour
		task.MaxAttempts = 1
		// This "deps" intentionally shadows the outer slice of shard names.
		if deps := getIsolatedCIPDDeps(parts); len(deps) > 0 {
			task.Dependencies = append(task.Dependencies, deps...)
		}
		b.MustAddTask(n, task)
		// Collect shard names so the upload task can depend on all of them.
		deps = append(deps, n)
	}

	uploadName := fmt.Sprintf("%s%s%s", "Upload", jobNameSchema.Sep, name)
	extraProps := map[string]string{
		"gs_bucket": CONFIG.GsBucketCoverage,
	}
	uploadTask := kitchenTask(uploadName, "upload_coverage_results", "swarm_recipe.isolate", SERVICE_ACCOUNT_UPLOAD_COVERAGE, linuxGceDimensions(), extraProps, OUTPUT_NONE)
	uploadTask.CipdPackages = append(uploadTask.CipdPackages, CIPD_PKGS_GIT...)
	uploadTask.CipdPackages = append(uploadTask.CipdPackages, CIPD_PKGS_GSUTIL...)
	// We need clang_linux to get access to the llvm-profdata and llvm-cov binaries
	// which are used to deal with the raw coverage data output by the Test step.
	uploadTask.CipdPackages = append(uploadTask.CipdPackages, b.MustGetCipdPackageFromAsset("clang_linux"))
	uploadTask.CipdPackages = append(uploadTask.CipdPackages, pkgs...)
	// A dependency on compileTaskName makes the TaskScheduler link the
	// isolated output of the compile step to the input of the upload step,
	// which gives us access to the instrumented binary. The binary is
	// needed to figure out symbol names and line numbers.
	uploadTask.Dependencies = append(uploadTask.Dependencies, compileTaskName)
	uploadTask.Dependencies = append(uploadTask.Dependencies, deps...)
	b.MustAddTask(uploadName, uploadTask)
	return uploadName
}
// perf generates a Perf task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func perf(b *specs.TasksCfgBuilder, name string, parts map[string]string, compileTaskName string, pkgs []*specs.CipdPackage) string {
	// Skpbench bots use their own recipe and isolate.
	recipe := "perf"
	isolate := relpath("perf_skia_bundled.isolate")
	if strings.Contains(parts["extra_config"], "Skpbench") {
		recipe = "skpbench"
		isolate = relpath("skpbench_skia_bundled.isolate")
	}
	task := kitchenTask(name, recipe, isolate, "", swarmDimensions(parts), nil, OUTPUT_PERF)
	task.CipdPackages = append(task.CipdPackages, pkgs...)
	task.Dependencies = append(task.Dependencies, compileTaskName)
	// Default timeouts; slower configurations override them below.
	task.ExecutionTimeout = 4 * time.Hour
	task.Expiration = 20 * time.Hour
	task.IoTimeout = 4 * time.Hour
	task.MaxAttempts = 1
	if deps := getIsolatedCIPDDeps(parts); len(deps) > 0 {
		task.Dependencies = append(task.Dependencies, deps...)
	}
	if strings.Contains(parts["extra_config"], "Valgrind") {
		task.ExecutionTimeout = 9 * time.Hour
		task.Expiration = 48 * time.Hour
		task.IoTimeout = 9 * time.Hour
		task.CipdPackages = append(task.CipdPackages, b.MustGetCipdPackageFromAsset("valgrind"))
		// Only run on bots carrying the valgrind dimension.
		task.Dimensions = append(task.Dimensions, "valgrind:1")
	} else if strings.Contains(parts["extra_config"], "MSAN") {
		task.ExecutionTimeout = 9 * time.Hour
		task.IoTimeout = 9 * time.Hour
	} else if parts["arch"] == "x86" && parts["configuration"] == "Debug" {
		// skia:6737
		task.ExecutionTimeout = 6 * time.Hour
		task.IoTimeout = 6 * time.Hour
	}
	// Pass the internal hardware label (if any) on the command line.
	iid := internalHardwareLabel(parts)
	if iid != nil {
		task.Command = append(task.Command, fmt.Sprintf("internal_hardware_label=%d", *iid))
	}
	b.MustAddTask(name, task)

	// Upload results if necessary.
	if strings.Contains(name, "Release") && doUpload(name) {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
		extraProps := map[string]string{
			"gs_bucket": CONFIG.GsBucketNano,
		}
		// NOTE(review): the upload task is constructed with the Perf task's
		// name (not uploadName) but registered under uploadName — presumably
		// the recipe needs the original buildername; confirm before changing.
		uploadTask := kitchenTask(name, "upload_nano_results", "swarm_recipe.isolate", SERVICE_ACCOUNT_UPLOAD_NANO, linuxGceDimensions(), extraProps, OUTPUT_NONE)
		uploadTask.CipdPackages = append(uploadTask.CipdPackages, CIPD_PKGS_GSUTIL...)
		uploadTask.Dependencies = append(uploadTask.Dependencies, name)
		b.MustAddTask(uploadName, uploadTask)
		return uploadName
	}
	return name
}
2018-04-25 19:09:22 +00:00
// presubmit generates a task which runs the presubmit for this repo. Returns
// the name of the task, which the Job should add as a dependency.
func presubmit(b *specs.TasksCfgBuilder, name string) string {
	// Properties consumed by the run_presubmit recipe; the PLACEHOLDER_*
	// values in patch_ref are substituted later — presumably by the task
	// scheduler at trigger time; confirm against the specs package.
	extraProps := map[string]string{
		"category":         "cq",
		"patch_gerrit_url": "https://skia-review.googlesource.com",
		"patch_project":    "skia",
		"patch_ref":        fmt.Sprintf("refs/changes/%s/%s/%s", specs.PLACEHOLDER_ISSUE_SHORT, specs.PLACEHOLDER_ISSUE, specs.PLACEHOLDER_PATCHSET),
		"reason":           "CQ",
		"repo_name":        "skia",
	}
	task := kitchenTask(name, "run_presubmit", "empty.isolate", SERVICE_ACCOUNT_COMPILE, linuxGceDimensions(), extraProps, OUTPUT_NONE)
	// replaceArg rewrites the value following the given flag everywhere it
	// appears in task.Command, appending flag+value if the flag is absent.
	replaceArg := func(key, value string) {
		found := false
		for idx, arg := range task.Command {
			if arg == key {
				task.Command[idx+1] = value
				found = true
			}
		}
		if !found {
			task.Command = append(task.Command, key, value)
		}
	}
	// The presubmit recipe comes from the chromium/tools/build repo rather
	// than from Skia's bundled recipes.
	replaceArg("-repository", "https://chromium.googlesource.com/chromium/tools/build")
	replaceArg("-revision", "HEAD")
	task.CipdPackages = append(task.CipdPackages, CIPD_PKGS_GIT...)
	task.Dependencies = []string{} // No bundled recipes for this one.
	b.MustAddTask(name, task)
	return name
}
2016-09-30 19:53:12 +00:00
// process generates tasks and jobs for the given job name: it creates every
// task the Job needs (compile, test/perf/etc., uploads) and registers a
// JobSpec whose TaskSpecs are the collected top-level dependencies.
func process(b *specs.TasksCfgBuilder, name string) {
	// Tasks the Job will depend on directly.
	deps := []string{}

	// Bundle Recipes.
	if name == BUNDLE_RECIPES_NAME {
		deps = append(deps, bundleRecipes(b))
	}

	// Isolate CIPD assets.
	if _, ok := ISOLATE_ASSET_MAPPING[name]; ok {
		deps = append(deps, isolateCIPDAsset(b, name))
	}

	parts, err := jobNameSchema.ParseJobName(name)
	if err != nil {
		glog.Fatal(err)
	}

	// RecreateSKPs.
	if strings.Contains(name, "RecreateSKPs") {
		deps = append(deps, recreateSKPs(b, name))
	}

	// CT bots.
	if strings.Contains(name, "-CT_") {
		deps = append(deps, ctSKPs(b, name))
	}

	// Infra tests.
	if name == "Housekeeper-PerCommit-InfraTests" {
		deps = append(deps, infra(b, name))
	}

	// Compile bots.
	if parts["role"] == "Build" {
		if parts["extra_config"] == "Android_Framework" {
			// Android Framework compile tasks use a different recipe.
			deps = append(deps, androidFrameworkCompile(b, name))
		} else {
			deps = append(deps, compile(b, name, parts))
		}
	}

	// Most remaining bots need a compile task.
	compileTaskName := deriveCompileTaskName(name, parts)
	compileTaskParts, err := jobNameSchema.ParseJobName(compileTaskName)
	if err != nil {
		glog.Fatal(err)
	}
	compileParentName := getParentRevisionName(compileTaskName, compileTaskParts)
	compileParentParts, err := jobNameSchema.ParseJobName(compileParentName)
	if err != nil {
		glog.Fatal(err)
	}

	// These bots do not need a compile task.
	if parts["role"] != "Build" &&
		name != "Housekeeper-PerCommit-BundleRecipes" &&
		name != "Housekeeper-PerCommit-InfraTests" &&
		name != "Housekeeper-PerCommit-CheckGeneratedFiles" &&
		name != "Housekeeper-OnDemand-Presubmit" &&
		!strings.Contains(name, "Android_Framework") &&
		!strings.Contains(name, "RecreateSKPs") &&
		!strings.Contains(name, "-CT_") &&
		!strings.Contains(name, "Housekeeper-PerCommit-Isolate") {
		compile(b, compileTaskName, compileTaskParts)
		if parts["role"] == "Calmbench" {
			// Calmbench also needs the parent revision's build to compare against.
			compile(b, compileParentName, compileParentParts)
		}
	}

	// Housekeepers.
	if name == "Housekeeper-PerCommit" {
		deps = append(deps, housekeeper(b, name, compileTaskName))
	}
	if name == "Housekeeper-PerCommit-CheckGeneratedFiles" {
		deps = append(deps, checkGeneratedFiles(b, name))
	}
	if name == "Housekeeper-OnDemand-Presubmit" {
		deps = append(deps, presubmit(b, name))
	}
	if strings.Contains(name, "Bookmaker") {
		deps = append(deps, bookmaker(b, name, compileTaskName))
	}

	// Common assets needed by the remaining bots.
	pkgs := []*specs.CipdPackage{}
	// Only attach the direct CIPD packages when the bot does not already get
	// these inputs via isolated tasks (see getIsolatedCIPDDeps). Note this
	// "deps" shadows the outer slice on purpose.
	if deps := getIsolatedCIPDDeps(parts); len(deps) == 0 {
		pkgs = []*specs.CipdPackage{
			b.MustGetCipdPackageFromAsset("skimage"),
			b.MustGetCipdPackageFromAsset("skp"),
			b.MustGetCipdPackageFromAsset("svg"),
		}
	}

	if strings.Contains(name, "Ubuntu") || strings.Contains(name, "Debian") {
		if strings.Contains(name, "SAN") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("clang_linux"))
		}
		if strings.Contains(name, "Vulkan") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("linux_vulkan_sdk"))
		}
		// Intel GPU bots need the matching Vulkan driver build.
		if strings.Contains(name, "Intel") && strings.Contains(name, "GPU") {
			if strings.Contains(name, "Release") {
				pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("linux_vulkan_intel_driver_release"))
			} else {
				pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("linux_vulkan_intel_driver_debug"))
			}
		}
	}

	if strings.Contains(name, "ProcDump") {
		pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("procdump_win"))
	}

	// Test bots.
	if parts["role"] == "Test" {
		if strings.Contains(parts["extra_config"], "Coverage") {
			deps = append(deps, coverage(b, name, parts, compileTaskName, pkgs))
		} else if !strings.Contains(name, "-CT_") {
			deps = append(deps, test(b, name, parts, compileTaskName, pkgs))
		}
	}

	// Perf bots.
	if parts["role"] == "Perf" && !strings.Contains(name, "-CT_") {
		deps = append(deps, perf(b, name, parts, compileTaskName, pkgs))
	}

	// Calmbench bots.
	if parts["role"] == "Calmbench" {
		deps = append(deps, calmbench(b, name, parts, compileTaskName, compileParentName))
	}

	// Add the Job spec. Trigger defaults to any-branch and is narrowed by the
	// name-based rules below.
	j := &specs.JobSpec{
		Priority:  0.8,
		TaskSpecs: deps,
		Trigger:   specs.TRIGGER_ANY_BRANCH,
	}
	if strings.Contains(name, "-Nightly-") {
		j.Trigger = specs.TRIGGER_NIGHTLY
	} else if strings.Contains(name, "-Weekly-") || strings.Contains(name, "CT_DM_1m_SKPs") {
		j.Trigger = specs.TRIGGER_WEEKLY
	} else if strings.Contains(name, "Flutter") || strings.Contains(name, "PDFium") || strings.Contains(name, "CommandBuffer") {
		j.Trigger = specs.TRIGGER_MASTER_ONLY
	} else if strings.Contains(name, "-OnDemand-") || strings.Contains(name, "Android_Framework") {
		j.Trigger = specs.TRIGGER_ON_DEMAND
	}
	b.MustAddJob(name, j)
}
2017-02-01 20:56:55 +00:00
// loadJson reads the JSON file named by *flagVal — falling back to defaultFlag
// (and writing it back through the pointer) when the flag was not set — and
// unmarshals its contents into val. Exits fatally on any read or parse error.
//
// The parameter was renamed from "flag" to avoid shadowing the standard
// library "flag" package, which this file imports.
func loadJson(flagVal *string, defaultFlag string, val interface{}) {
	if *flagVal == "" {
		*flagVal = defaultFlag
	}
	b, err := ioutil.ReadFile(*flagVal)
	if err != nil {
		glog.Fatal(err)
	}
	if err := json.Unmarshal(b, val); err != nil {
		glog.Fatal(err)
	}
}
2016-09-30 19:53:12 +00:00
// Regenerate the tasks.json file.
func main() {
	b := specs.MustNewTasksCfgBuilder()
	b.SetAssetsDir(*assetsDir)
	infraBots := path.Join(b.CheckoutRoot(), "infra", "bots")

	// Load the jobs from a JSON file.
	loadJson(jobsFile, path.Join(infraBots, "jobs.json"), &JOBS)

	// Load general config information from a JSON file.
	loadJson(cfgFile, path.Join(infraBots, "cfg.json"), &CONFIG)

	// Create the JobNameSchema, falling back to the checked-in
	// builder_name_schema.json when no flag was given.
	if *builderNameSchemaFile == "" {
		*builderNameSchemaFile = path.Join(b.CheckoutRoot(), "infra", "bots", "recipe_modules", "builder_name_schema", "builder_name_schema.json")
	}
	schema, err := NewJobNameSchema(*builderNameSchemaFile)
	if err != nil {
		glog.Fatal(err)
	}
	// Store globally; process() and the task generators read it.
	jobNameSchema = schema

	// Create Tasks and Jobs.
	for _, name := range JOBS {
		process(b, name)
	}

	b.MustFinish()
}
// TODO(borenet): The below really belongs in its own file, probably next to the
// builder_name_schema.json file.

// schema is a sub-struct of JobNameSchema describing the name parts expected
// for a single role: the required keys, the optional keys, and the sub-roles
// into which parsing/assembly may recurse.
type schema struct {
	// Keys are the required name parts, consumed in order.
	Keys []string `json:"keys"`
	// OptionalKeys are consumed, in order, only when parts remain.
	OptionalKeys []string `json:"optional_keys"`
	// RecurseRoles are the sub-role names that may follow this role's keys.
	RecurseRoles []string `json:"recurse_roles"`
}
2016-09-30 19:53:12 +00:00
// JobNameSchema is a struct used for (de)constructing Job names in a
// predictable format.
type JobNameSchema struct {
	// Schema maps a role name to the schema describing that role's parts.
	Schema map[string]*schema `json:"builder_name_schema"`
	// Sep is the separator placed between the parts of a job name.
	Sep string `json:"builder_name_sep"`
}
// NewJobNameSchema returns a JobNameSchema instance based on the given JSON
// file.
func NewJobNameSchema(jsonFile string) (*JobNameSchema, error) {
	f, err := os.Open(jsonFile)
	if err != nil {
		return nil, err
	}
	defer util.Close(f)
	rv := new(JobNameSchema)
	if err := json.NewDecoder(f).Decode(rv); err != nil {
		return nil, err
	}
	return rv, nil
}
// ParseJobName splits the given Job name into its component parts, according
// to the schema.
func (s *JobNameSchema) ParseJobName(n string) (map[string]string, error) {
	// popFront removes and returns the first element of items; it fails when
	// no items remain, ie. the name has fewer parts than the schema needs.
	popFront := func(items []string) (string, []string, error) {
		if len(items) == 0 {
			return "", nil, fmt.Errorf("Invalid job name: %s (not enough parts)", n)
		}
		return items[0], items[1:], nil
	}
	result := map[string]string{}
	// parse consumes the parts described by the schema for the given role
	// (required keys, then any matching sub-role, then optional keys) from
	// the front of parts, recording them into result, and returns whatever
	// remains unconsumed.
	var parse func(int, string, []string) ([]string, error)
	parse = func(depth int, role string, parts []string) ([]string, error) {
		// NOTE: this local "s" (the schema entry for role) shadows the
		// method receiver.
		s, ok := s.Schema[role]
		if !ok {
			return nil, fmt.Errorf("Invalid job name; %q is not a valid role.", role)
		}
		// The top-level role is stored under "role"; nested roles under
		// "sub-role-1", "sub-role-2", etc.
		if depth == 0 {
			result["role"] = role
		} else {
			result[fmt.Sprintf("sub-role-%d", depth)] = role
		}
		var err error
		// Required keys: exactly one name part each, in schema order.
		for _, key := range s.Keys {
			var value string
			value, parts, err = popFront(parts)
			if err != nil {
				return nil, err
			}
			result[key] = value
		}
		// Recurse into a sub-role when the next part names one.
		for _, subRole := range s.RecurseRoles {
			if len(parts) > 0 && parts[0] == subRole {
				parts, err = parse(depth+1, parts[0], parts[1:])
				if err != nil {
					return nil, err
				}
			}
		}
		// Optional keys consume remaining parts, if any, in schema order.
		for _, key := range s.OptionalKeys {
			if len(parts) > 0 {
				var value string
				value, parts, err = popFront(parts)
				if err != nil {
					return nil, err
				}
				result[key] = value
			}
		}
		// Leftover parts mean the name does not fit the schema.
		if len(parts) > 0 {
			return nil, fmt.Errorf("Invalid job name: %s (too many parts)", n)
		}
		return parts, nil
	}

	split := strings.Split(n, s.Sep)
	// A valid name has at least a role plus one more part.
	if len(split) < 2 {
		return nil, fmt.Errorf("Invalid job name: %s (not enough parts)", n)
	}
	role := split[0]
	split = split[1:]
	_, err := parse(0, role, split)
	return result, err
}
// MakeJobName assembles the given parts of a Job name, according to the schema.
func (s *JobNameSchema) MakeJobName(parts map[string]string) (string, error) {
	rvParts := make([]string, 0, len(parts))
	// process appends the name parts for the role found at the given depth to
	// rvParts (deleting each consumed entry from parts) and recurses into the
	// sub-role, if any. It returns the shrunken parts map, which must be
	// empty when it finishes.
	var process func(int, map[string]string) (map[string]string, error)
	process = func(depth int, parts map[string]string) (map[string]string, error) {
		// The role is keyed "role" at the top level, "sub-role-N" below it.
		roleKey := "role"
		if depth != 0 {
			roleKey = fmt.Sprintf("sub-role-%d", depth)
		}
		role, ok := parts[roleKey]
		if !ok {
			return nil, fmt.Errorf("Invalid job parts; missing key %q", roleKey)
		}
		// NOTE: this local "s" (the schema entry for role) shadows the
		// method receiver.
		s, ok := s.Schema[role]
		if !ok {
			return nil, fmt.Errorf("Invalid job parts; unknown role %q", role)
		}
		rvParts = append(rvParts, role)
		delete(parts, roleKey)
		// Required keys, in schema order.
		for _, key := range s.Keys {
			value, ok := parts[key]
			if !ok {
				return nil, fmt.Errorf("Invalid job parts; missing %q", key)
			}
			rvParts = append(rvParts, value)
			delete(parts, key)
		}
		// A role with recurse_roles requires a matching sub-role part, which
		// must be one of the listed recurse_roles.
		if len(s.RecurseRoles) > 0 {
			subRoleKey := fmt.Sprintf("sub-role-%d", depth+1)
			subRole, ok := parts[subRoleKey]
			if !ok {
				return nil, fmt.Errorf("Invalid job parts; missing %q", subRoleKey)
			}
			rvParts = append(rvParts, subRole)
			delete(parts, subRoleKey)
			found := false
			for _, recurseRole := range s.RecurseRoles {
				if recurseRole == subRole {
					found = true
					var err error
					parts, err = process(depth+1, parts)
					if err != nil {
						return nil, err
					}
					break
				}
			}
			if !found {
				return nil, fmt.Errorf("Invalid job parts; unknown sub-role %q", subRole)
			}
		}
		// Optional keys are emitted only when present.
		for _, key := range s.OptionalKeys {
			if value, ok := parts[key]; ok {
				rvParts = append(rvParts, value)
				delete(parts, key)
			}
		}
		// Anything left over means the caller supplied keys unknown to the
		// schema.
		if len(parts) > 0 {
			return nil, fmt.Errorf("Invalid job parts: too many parts: %v", parts)
		}
		return parts, nil
	}
	// Copy the parts map, so that we can modify at will.
	partsCpy := make(map[string]string, len(parts))
	for k, v := range parts {
		partsCpy[k] = v
	}
	if _, err := process(0, partsCpy); err != nil {
		return "", err
	}
	return strings.Join(rvParts, s.Sep), nil
}