// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

/*
	Generate the tasks.json file.
*/

package main

import (
	"encoding/json"
	"flag"
	"fmt"
	"io/ioutil"
	"os"
	"path"
	"path/filepath"
	"regexp"
	"runtime"
	"sort"
	"strings"
	"time"

	"github.com/skia-dev/glog"

	"go.skia.org/infra/go/sklog"
	"go.skia.org/infra/go/util"
	"go.skia.org/infra/task_scheduler/go/specs"
)

const (
	BUNDLE_RECIPES_NAME  = "Housekeeper-PerCommit-BundleRecipes"
	ISOLATE_SKIMAGE_NAME = "Housekeeper-PerCommit-IsolateSkImage"
	ISOLATE_SKP_NAME     = "Housekeeper-PerCommit-IsolateSKP"
	ISOLATE_SVG_NAME     = "Housekeeper-PerCommit-IsolateSVG"

	DEFAULT_OS_DEBIAN = "Debian-9.0"
	DEFAULT_OS_UBUNTU = "Ubuntu-14.04"

	// Name prefix for upload jobs.
	PREFIX_UPLOAD = "Upload"
)

var (
	// "Constants"

	// Top-level list of all jobs to run at each commit; loaded from
	// jobs.json.
	JOBS []string

	// General configuration information.
	CONFIG struct {
		GsBucketGm   string   `json:"gs_bucket_gm"`
		GsBucketNano string   `json:"gs_bucket_nano"`
		NoUpload     []string `json:"no_upload"`
		Pool         string   `json:"pool"`
	}

	// alternateSwarmDimensions can be set in an init function to override the
	// default swarming bot dimensions for the given task.
	alternateSwarmDimensions func(parts map[string]string) []string

	// Defines the structure of job names.
	jobNameSchema *JobNameSchema

	// Git 2.13.
	cipdGit1 = &specs.CipdPackage{
		Name:    fmt.Sprintf("infra/git/${platform}"),
		Path:    "git",
		Version: fmt.Sprintf("version:2.13.0.chromium9"),
	}
	cipdGit2 = &specs.CipdPackage{
		Name:    fmt.Sprintf("infra/tools/git/${platform}"),
		Path:    "git",
		Version: fmt.Sprintf("git_revision:a78b5f3658c0578a017db48df97d20ac09822bcd"),
	}

	// Flags.
	builderNameSchemaFile = flag.String("builder_name_schema", "", "Path to the builder_name_schema.json file. If not specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json from this repo.")
	assetsDir             = flag.String("assets_dir", "", "Directory containing assets.")
	cfgFile               = flag.String("cfg_file", "", "JSON file containing general configuration information.")
	jobsFile              = flag.String("jobs", "", "JSON file containing jobs to run.")
)

// linuxGceDimensions are the Swarming dimensions for Linux GCE
// instances.
func linuxGceDimensions() []string {
	return []string{
		"cpu:x86-64-avx2",
		"gpu:none",
		fmt.Sprintf("os:%s", DEFAULT_OS_DEBIAN),
		fmt.Sprintf("pool:%s", CONFIG.Pool),
	}
}
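
// For illustration only: with the defaults above, and with the pool read from
// cfg.json (its value is not defined in this file), these dimensions expand to
// something like:
//
//	cpu:x86-64-avx2
//	gpu:none
//	os:Debian-9.0
//	pool:<CONFIG.Pool>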

// deriveCompileTaskName returns the name of a compile task based on the given
// job name.
func deriveCompileTaskName(jobName string, parts map[string]string) string {
	if parts["role"] == "Housekeeper" {
		return "Build-Debian9-GCC-x86_64-Release-Shared"
	} else if parts["role"] == "Test" || parts["role"] == "Perf" {
		task_os := parts["os"]
		ec := []string{}
		if val := parts["extra_config"]; val != "" {
			ec = strings.Split(val, "_")
			ignore := []string{"Skpbench", "AbandonGpuContext", "PreAbandonGpuContext", "Valgrind", "ReleaseAndAbandonGpuContext"}
			keep := make([]string, 0, len(ec))
			for _, part := range ec {
				if !util.In(part, ignore) {
					keep = append(keep, part)
				}
			}
			ec = keep
		}
		if task_os == "Android" {
			if !util.In("Android", ec) {
				ec = append([]string{"Android"}, ec...)
			}
			task_os = "Debian9"
		} else if task_os == "Chromecast" {
			task_os = "Debian9"
			ec = append([]string{"Chromecast"}, ec...)
		} else if strings.Contains(task_os, "ChromeOS") {
			ec = append([]string{"Chromebook", "ARM", "GLES"}, ec...)
			task_os = "Debian9"
		} else if task_os == "iOS" {
			ec = append([]string{task_os}, ec...)
			task_os = "Mac"
		} else if strings.Contains(task_os, "Win") {
			task_os = "Win"
		} else if strings.Contains(task_os, "Ubuntu") || strings.Contains(task_os, "Debian") {
			task_os = "Debian9"
		}
		jobNameMap := map[string]string{
			"role":          "Build",
			"os":            task_os,
			"compiler":      parts["compiler"],
			"target_arch":   parts["arch"],
			"configuration": parts["configuration"],
		}
		if len(ec) > 0 {
			jobNameMap["extra_config"] = strings.Join(ec, "_")
		}
		name, err := jobNameSchema.MakeJobName(jobNameMap)
		if err != nil {
			glog.Fatal(err)
		}
		return name
	} else {
		return jobName
	}
}
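
// As a rough illustration (the exact ordering depends on the Build schema in
// builder_name_schema.json, so treat this as an assumption): a job named
// "Test-Android-Clang-Pixel-GPU-Adreno530-arm64-Debug" would map to the
// compile task "Build-Debian9-Clang-arm64-Debug-Android".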

// swarmDimensions generates swarming bot dimensions for the given task.
func swarmDimensions(parts map[string]string) []string {
	if alternateSwarmDimensions != nil {
		return alternateSwarmDimensions(parts)
	}
	return defaultSwarmDimensions(parts)
}

// defaultSwarmDimensions generates default swarming bot dimensions for the given task.
func defaultSwarmDimensions(parts map[string]string) []string {
	d := map[string]string{
		"pool": CONFIG.Pool,
	}
	if os, ok := parts["os"]; ok {
		d["os"], ok = map[string]string{
			"Android":    "Android",
			"Chromecast": "Android",
			"ChromeOS":   "ChromeOS",
			"Debian9":    DEFAULT_OS_DEBIAN,
			"Mac":        "Mac-10.11",
			"Ubuntu14":   DEFAULT_OS_UBUNTU,
			"Ubuntu16":   "Ubuntu-16.10",
			"Win":        "Windows-2008ServerR2-SP1",
			"Win10":      "Windows-10-15063",
			"Win2k8":     "Windows-2008ServerR2-SP1",
			"Win7":       "Windows-7-SP1",
			"Win8":       "Windows-8.1-SP0",
			"iOS":        "iOS-10.3.1",
		}[os]
		if !ok {
			glog.Fatalf("Entry %q not found in OS mapping.", os)
		}
		// Chrome Golo has a different Windows image.
		if parts["model"] == "Golo" && os == "Win10" {
			d["os"] = "Windows-10-10586"
		}
	} else {
		d["os"] = DEFAULT_OS_DEBIAN
	}
	if parts["role"] == "Test" || parts["role"] == "Perf" {
		if strings.Contains(parts["os"], "Android") || strings.Contains(parts["os"], "Chromecast") {
			// For Android, the device type is a better dimension
			// than CPU or GPU.
			deviceInfo, ok := map[string][]string{
				"AndroidOne":      {"sprout", "MOB30Q"},
				"Chorizo":         {"chorizo", "1.24_82923"},
				"Ci20":            {"ci20", "NRD90M"},
				"GalaxyJ5":        {"j5xnlte", "MMB29M"},
				"GalaxyS6":        {"zerofltetmo", "MMB29K"},
				"GalaxyS7_G930A":  {"heroqlteatt", "NRD90M_G930AUCS4BQC2"},
				"GalaxyS7_G930FD": {"herolte", "NRD90M_G930FXXU1DQAS"},
				"GalaxyTab3":      {"goyawifi", "JDQ39"},
				"MotoG4":          {"athene", "NPJ25.93-14"},
				"NVIDIA_Shield":   {"foster", "NRD90M"},
				"Nexus10":         {"manta", "LMY49J"},
				"Nexus5":          {"hammerhead", "M4B30Z"},
				"Nexus6":          {"shamu", "M"},
				"Nexus6p":         {"angler", "OPP1.170223.012"},
				"Nexus7":          {"grouper", "LMY47V"},
				"Nexus7v2":        {"flo", "M"},
				"NexusPlayer":     {"fugu", "OPP2.170420.017"},
				"Pixel":           {"sailfish", "NMF26Q"},
				"PixelC":          {"dragon", "N2G47D"},
				"PixelXL":         {"marlin", "OPP3.170518.006"},
			}[parts["model"]]
			if !ok {
				glog.Fatalf("Entry %q not found in Android mapping.", parts["model"])
			}
			d["device_type"] = deviceInfo[0]
			d["device_os"] = deviceInfo[1]
		} else if strings.Contains(parts["os"], "iOS") {
			device, ok := map[string]string{
				"iPadMini4": "iPad5,1",
				"iPhone6":   "iPhone7,2",
				"iPhone7":   "iPhone9,1",
				"iPadPro":   "iPad6,3",
			}[parts["model"]]
			if !ok {
				glog.Fatalf("Entry %q not found in iOS mapping.", parts["model"])
			}
			d["device"] = device
		} else if parts["cpu_or_gpu"] == "CPU" {
			d["gpu"] = "none"
			cpu, ok := map[string]string{
				"AVX":  "x86-64",
				"AVX2": "x86-64-avx2",
				"SSE4": "x86-64",
			}[parts["cpu_or_gpu_value"]]
			if !ok {
				glog.Fatalf("Entry %q not found in CPU mapping.", parts["cpu_or_gpu_value"])
			}
			d["cpu"] = cpu
			if strings.Contains(parts["os"], "Win") && parts["cpu_or_gpu_value"] == "AVX2" {
				// AVX2 is not correctly detected on Windows. Fall back on other
				// dimensions to ensure that we correctly target machines which we know
				// have AVX2 support.
				d["cpu"] = "x86-64"
				if parts["model"] != "GCE" {
					glog.Fatalf("Please double-check that %q supports AVX2 and update this assertion.", parts["model"])
				}
			}
		} else {
			if strings.Contains(parts["os"], "Win") {
				gpu, ok := map[string]string{
					"AMDHD7770":     "1002:683d-22.19.165.512",
					"GT610":         "10de:104a-21.21.13.7619",
					"GTX1070":       "10de:1ba1-22.21.13.8205",
					"GTX660":        "10de:11c0-22.21.13.8205",
					"GTX960":        "10de:1401-22.21.13.8205",
					"IntelHD530":    "8086:1912-21.20.16.4590",
					"IntelHD4400":   "8086:0a16-20.19.15.4624",
					"IntelHD4600":   "8086:0412-20.19.15.4624",
					"IntelIris540":  "8086:1926-21.20.16.4590",
					"IntelIris6100": "8086:162b-20.19.15.4624",
					"RadeonR9M470X": "1002:6646-22.19.165.512",
				}[parts["cpu_or_gpu_value"]]
				if !ok {
					glog.Fatalf("Entry %q not found in Win GPU mapping.", parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu

				// Specify cpu dimension for NUCs and ShuttleCs. We temporarily have two
				// types of machines with a GTX960.
				cpu, ok := map[string]string{
					"NUC6i7KYK": "x86-64-i7-6770HQ",
					"ShuttleC":  "x86-64-i7-6700K",
				}[parts["model"]]
				if ok {
					d["cpu"] = cpu
				}
			} else if strings.Contains(parts["os"], "Ubuntu") || strings.Contains(parts["os"], "Debian") {
				gpu, ok := map[string]string{
					"GT610":    "10de:104a-340.96",
					"GTX550Ti": "10de:1244-375.66",
					"GTX660":   "10de:11c0-375.66",
					"GTX960":   "10de:1401-375.66",
					// Intel drivers come from CIPD, so no need to specify the version here.
					"IntelBayTrail": "8086:0f31",
					"IntelHD2000":   "8086:0102",
					"IntelHD405":    "8086:22b1",
					"IntelIris540":  "8086:1926",
				}[parts["cpu_or_gpu_value"]]
				if !ok {
					glog.Fatalf("Entry %q not found in Ubuntu GPU mapping.", parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu
			} else if strings.Contains(parts["os"], "Mac") {
				gpu, ok := map[string]string{
					// TODO(benjaminwagner): GPU name doesn't match device ID.
					"IntelHD4000": "8086:0a2e",
				}[parts["cpu_or_gpu_value"]]
				if !ok {
					glog.Fatalf("Entry %q not found in Mac GPU mapping.", parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu
			} else if strings.Contains(parts["os"], "ChromeOS") {
				gpu, ok := map[string]string{
					"MaliT604": "MaliT604",
					"MaliT764": "MaliT764",
					"MaliT860": "MaliT860",
					"TegraK1":  "TegraK1",
				}[parts["cpu_or_gpu_value"]]
				if !ok {
					glog.Fatalf("Entry %q not found in ChromeOS GPU mapping.", parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu
			} else {
				glog.Fatalf("Unknown GPU mapping for OS %q.", parts["os"])
			}
		}
	} else {
		d["gpu"] = "none"
		if d["os"] == DEFAULT_OS_DEBIAN {
			return linuxGceDimensions()
		}
	}

	rv := make([]string, 0, len(d))
	for k, v := range d {
		rv = append(rv, fmt.Sprintf("%s:%s", k, v))
	}
	sort.Strings(rv)
	return rv
}
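
// For illustration only (values taken from the mappings above; the pool comes
// from cfg.json): a "Test-Android-...-Pixel-GPU-..." job would get dimensions
// along the lines of device_os:NMF26Q, device_type:sailfish, os:Android and
// pool:<CONFIG.Pool>.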

// relpath returns the relative path to the given file from the config file.
func relpath(f string) string {
	_, filename, _, _ := runtime.Caller(0)
	dir := path.Dir(filename)
	rel := dir
	if *cfgFile != "" {
		rel = path.Dir(*cfgFile)
	}
	rv, err := filepath.Rel(rel, path.Join(dir, f))
	if err != nil {
		sklog.Fatal(err)
	}
	return rv
}

// bundleRecipes generates the task to bundle and isolate the recipes.
func bundleRecipes(b *specs.TasksCfgBuilder) string {
	b.MustAddTask(BUNDLE_RECIPES_NAME, &specs.TaskSpec{
		CipdPackages: []*specs.CipdPackage{cipdGit1, cipdGit2},
		Dimensions:   linuxGceDimensions(),
		ExtraArgs: []string{
			"--workdir", "../../..", "bundle_recipes",
			fmt.Sprintf("buildername=%s", BUNDLE_RECIPES_NAME),
			fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
		},
		Isolate:  relpath("bundle_recipes.isolate"),
		Priority: 0.7,
	})
	return BUNDLE_RECIPES_NAME
}

// useBundledRecipes returns true iff the given bot should use bundled recipes
// instead of syncing recipe DEPS itself.
func useBundledRecipes(parts map[string]string) bool {
	// Use bundled recipes for all test/perf tasks.
	return true
}

type isolateAssetCfg struct {
	isolateFile string
	cipdPkg     string
}
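
// ISOLATE_ASSET_MAPPING maps the name of an isolate-CIPD-asset task to the
// isolate file and CIPD package it uses.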
var ISOLATE_ASSET_MAPPING = map[string]isolateAssetCfg{
	ISOLATE_SKIMAGE_NAME: {
		isolateFile: "isolate_skimage.isolate",
		cipdPkg:     "skimage",
	},
	ISOLATE_SKP_NAME: {
		isolateFile: "isolate_skp.isolate",
		cipdPkg:     "skp",
	},
	ISOLATE_SVG_NAME: {
		isolateFile: "isolate_svg.isolate",
		cipdPkg:     "svg",
	},
}

// isolateCIPDAsset generates a task to isolate the given CIPD asset.
func isolateCIPDAsset(b *specs.TasksCfgBuilder, name string) string {
	b.MustAddTask(name, &specs.TaskSpec{
		CipdPackages: []*specs.CipdPackage{
			b.MustGetCipdPackageFromAsset(ISOLATE_ASSET_MAPPING[name].cipdPkg),
		},
		Dimensions: linuxGceDimensions(),
		Isolate:    relpath(ISOLATE_ASSET_MAPPING[name].isolateFile),
		Priority:   0.7,
	})
	return name
}

// getIsolatedCIPDDeps returns the slice of Isolate_* tasks a given task needs.
// This allows us to save time on I/O-bound bots, like the RPIs.
func getIsolatedCIPDDeps(parts map[string]string) []string {
	deps := []string{}
	// Only do this on the RPIs for now. Other, faster machines shouldn't see
	// much benefit and we don't need the extra complexity, for now.
	rpiOS := []string{"Android", "ChromeOS", "iOS"}
	if o := parts["os"]; strings.Contains(o, "Chromecast") {
		// Chromecasts don't have enough disk space to fit all of the content,
		// so we do a subset of the skps.
		deps = append(deps, ISOLATE_SKP_NAME)
	} else if e := parts["extra_config"]; strings.Contains(e, "Skpbench") {
		// Skpbench only needs skps.
		deps = append(deps, ISOLATE_SKP_NAME)
	} else if util.In(o, rpiOS) {
		deps = append(deps, ISOLATE_SKP_NAME)
		deps = append(deps, ISOLATE_SVG_NAME)
		deps = append(deps, ISOLATE_SKIMAGE_NAME)
	}
	return deps
}

// compile generates a compile task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func compile(b *specs.TasksCfgBuilder, name string, parts map[string]string) string {
	// Collect the necessary CIPD packages.
	pkgs := []*specs.CipdPackage{}

	// Android bots require a toolchain.
	if strings.Contains(name, "Android") {
		if strings.Contains(name, "Mac") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("android_ndk_darwin"))
		} else if strings.Contains(name, "Win") {
			pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows")
			pkg.Path = "n"
			pkgs = append(pkgs, pkg)
		} else {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("android_ndk_linux"))
		}
	} else if strings.Contains(name, "Chromecast") {
		pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("cast_toolchain"))
		pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("chromebook_arm_gles"))
	} else if strings.Contains(name, "Chromebook") {
		pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("clang_linux"))
		pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("armhf_sysroot"))
		pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("chromebook_arm_gles"))
	} else if strings.Contains(name, "Debian") {
		if strings.Contains(name, "Clang") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("clang_linux"))
		}
		if strings.Contains(name, "Vulkan") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("linux_vulkan_sdk"))
		}
	} else if strings.Contains(name, "Win") {
		pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("win_toolchain"))
		if strings.Contains(name, "Vulkan") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("win_vulkan_sdk"))
		}
	}

	// TODO(stephana): Remove this once all Mac machines are on the same
	// OS version again. Move the call to swarmDimensions back to the
	// creation of the TaskSpec struct below.
	dimensions := swarmDimensions(parts)
	if strings.Contains(name, "Mac") {
		for idx, dim := range dimensions {
			if strings.HasPrefix(dim, "os") {
				dimensions[idx] = "os:Mac-10.12"
				break
			}
		}
	}

	// Add the task.
	b.MustAddTask(name, &specs.TaskSpec{
		CipdPackages: pkgs,
		Dimensions:   dimensions,
		ExtraArgs: []string{
			"--workdir", "../../..", "compile",
			fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
			fmt.Sprintf("buildername=%s", name),
			fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
			fmt.Sprintf("revision=%s", specs.PLACEHOLDER_REVISION),
			fmt.Sprintf("patch_repo=%s", specs.PLACEHOLDER_PATCH_REPO),
			fmt.Sprintf("patch_storage=%s", specs.PLACEHOLDER_PATCH_STORAGE),
			fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
			fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
		},
		Isolate:  relpath("compile_skia.isolate"),
		Priority: 0.8,
	})

	// All compile tasks are runnable as their own Job. Assert that the Job
	// is listed in JOBS.
	if !util.In(name, JOBS) {
		glog.Fatalf("Job %q is missing from the JOBS list!", name)
	}

	return name
}

// recreateSKPs generates a RecreateSKPs task. Returns the name of the last
// task in the generated chain of tasks, which the Job should add as a
// dependency.
func recreateSKPs(b *specs.TasksCfgBuilder, name string) string {
	b.MustAddTask(name, &specs.TaskSpec{
		CipdPackages:     []*specs.CipdPackage{b.MustGetCipdPackageFromAsset("go")},
		Dimensions:       linuxGceDimensions(),
		ExecutionTimeout: 4 * time.Hour,
		ExtraArgs: []string{
			"--workdir", "../../..", "recreate_skps",
			fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
			fmt.Sprintf("buildername=%s", name),
			fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
			fmt.Sprintf("revision=%s", specs.PLACEHOLDER_REVISION),
			fmt.Sprintf("patch_repo=%s", specs.PLACEHOLDER_PATCH_REPO),
			fmt.Sprintf("patch_storage=%s", specs.PLACEHOLDER_PATCH_STORAGE),
			fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
			fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
		},
		IoTimeout: 40 * time.Minute,
		Isolate:   relpath("compile_skia.isolate"),
		Priority:  0.8,
	})
	return name
}

// updateMetaConfig generates an UpdateMetaConfig task. Returns the name of the
// last task in the generated chain of tasks, which the Job should add as a
// dependency.
func updateMetaConfig(b *specs.TasksCfgBuilder, name string) string {
	b.MustAddTask(name, &specs.TaskSpec{
		CipdPackages: []*specs.CipdPackage{},
		Dimensions:   linuxGceDimensions(),
		ExtraArgs: []string{
			"--workdir", "../../..", "update_meta_config",
			fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
			fmt.Sprintf("buildername=%s", name),
			fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
			fmt.Sprintf("revision=%s", specs.PLACEHOLDER_REVISION),
			fmt.Sprintf("patch_repo=%s", specs.PLACEHOLDER_PATCH_REPO),
			fmt.Sprintf("patch_storage=%s", specs.PLACEHOLDER_PATCH_STORAGE),
			fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
			fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
		},
		Isolate:  relpath("meta_config.isolate"),
		Priority: 0.8,
	})
	return name
}

// ctSKPs generates a CT SKPs task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func ctSKPs(b *specs.TasksCfgBuilder, name string) string {
	b.MustAddTask(name, &specs.TaskSpec{
		CipdPackages:     []*specs.CipdPackage{},
		Dimensions:       []string{"pool:SkiaCT"},
		ExecutionTimeout: 24 * time.Hour,
		ExtraArgs: []string{
			"--workdir", "../../..", "ct_skps",
			fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
			fmt.Sprintf("buildername=%s", name),
			fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
			fmt.Sprintf("revision=%s", specs.PLACEHOLDER_REVISION),
			fmt.Sprintf("patch_repo=%s", specs.PLACEHOLDER_PATCH_REPO),
			fmt.Sprintf("patch_storage=%s", specs.PLACEHOLDER_PATCH_STORAGE),
			fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
			fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
		},
		IoTimeout: time.Hour,
		Isolate:   relpath("ct_skps_skia.isolate"),
		Priority:  0.8,
	})
	return name
}

// housekeeper generates a Housekeeper task. Returns the name of the last task
// in the generated chain of tasks, which the Job should add as a dependency.
func housekeeper(b *specs.TasksCfgBuilder, name, compileTaskName string) string {
	b.MustAddTask(name, &specs.TaskSpec{
		CipdPackages: []*specs.CipdPackage{b.MustGetCipdPackageFromAsset("go")},
		Dependencies: []string{compileTaskName},
		Dimensions:   linuxGceDimensions(),
		ExtraArgs: []string{
			"--workdir", "../../..", "housekeeper",
			fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
			fmt.Sprintf("buildername=%s", name),
			fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
			fmt.Sprintf("revision=%s", specs.PLACEHOLDER_REVISION),
			fmt.Sprintf("patch_repo=%s", specs.PLACEHOLDER_PATCH_REPO),
			fmt.Sprintf("patch_storage=%s", specs.PLACEHOLDER_PATCH_STORAGE),
			fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
			fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
		},
		Isolate:  relpath("housekeeper_skia.isolate"),
		Priority: 0.8,
	})
	return name
}

// infra generates an infra_tests task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func infra(b *specs.TasksCfgBuilder, name string) string {
	b.MustAddTask(name, &specs.TaskSpec{
		CipdPackages: []*specs.CipdPackage{b.MustGetCipdPackageFromAsset("go")},
		Dimensions:   linuxGceDimensions(),
		ExtraArgs: []string{
			"--workdir", "../../..", "infra",
			fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
			fmt.Sprintf("buildername=%s", name),
			fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
			fmt.Sprintf("revision=%s", specs.PLACEHOLDER_REVISION),
			fmt.Sprintf("patch_repo=%s", specs.PLACEHOLDER_PATCH_REPO),
			fmt.Sprintf("patch_storage=%s", specs.PLACEHOLDER_PATCH_STORAGE),
			fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
			fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
		},
		Isolate:  relpath("infra_skia.isolate"),
		Priority: 0.8,
	})
	return name
}

// doUpload indicates whether the given Job should upload its results.
func doUpload(name string) bool {
	for _, s := range CONFIG.NoUpload {
		m, err := regexp.MatchString(s, name)
		if err != nil {
			glog.Fatal(err)
		}
		if m {
			return false
		}
	}
	return true
}
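
// CONFIG.NoUpload comes from cfg.json; each entry is treated as a regexp
// against the job name. As a hypothetical example, an entry of "ASAN" would
// disable uploads for any job whose name contains "ASAN".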

// test generates a Test task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func test(b *specs.TasksCfgBuilder, name string, parts map[string]string, compileTaskName string, pkgs []*specs.CipdPackage) string {
	s := &specs.TaskSpec{
		CipdPackages:     pkgs,
		Dependencies:     []string{compileTaskName},
		Dimensions:       swarmDimensions(parts),
		ExecutionTimeout: 4 * time.Hour,
		Expiration:       20 * time.Hour,
		ExtraArgs: []string{
			"--workdir", "../../..", "test",
			fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
			fmt.Sprintf("buildername=%s", name),
			fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
			fmt.Sprintf("revision=%s", specs.PLACEHOLDER_REVISION),
			fmt.Sprintf("patch_repo=%s", specs.PLACEHOLDER_PATCH_REPO),
			fmt.Sprintf("patch_storage=%s", specs.PLACEHOLDER_PATCH_STORAGE),
			fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
			fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
		},
		IoTimeout:   40 * time.Minute,
		Isolate:     relpath("test_skia.isolate"),
		MaxAttempts: 1,
		Priority:    0.8,
	}
	if useBundledRecipes(parts) {
		s.Dependencies = append(s.Dependencies, BUNDLE_RECIPES_NAME)
		if strings.Contains(parts["os"], "Win") {
			s.Isolate = relpath("test_skia_bundled_win.isolate")
		} else {
			s.Isolate = relpath("test_skia_bundled_unix.isolate")
		}
	}
	if deps := getIsolatedCIPDDeps(parts); len(deps) > 0 {
		s.Dependencies = append(s.Dependencies, deps...)
	}
	if strings.Contains(parts["extra_config"], "Valgrind") {
		s.ExecutionTimeout = 9 * time.Hour
		s.Expiration = 48 * time.Hour
		s.IoTimeout = time.Hour
		s.CipdPackages = append(s.CipdPackages, b.MustGetCipdPackageFromAsset("valgrind"))
	} else if strings.Contains(parts["extra_config"], "MSAN") {
		s.ExecutionTimeout = 9 * time.Hour
	} else if parts["arch"] == "x86" && parts["configuration"] == "Debug" {
		// skia:6737
		s.ExecutionTimeout = 6 * time.Hour
	}
	b.MustAddTask(name, s)

	// Upload results if necessary.
	if doUpload(name) {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
		b.MustAddTask(uploadName, &specs.TaskSpec{
			Dependencies: []string{name},
			Dimensions:   linuxGceDimensions(),
			ExtraArgs: []string{
				"--workdir", "../../..", "upload_dm_results",
				fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
				fmt.Sprintf("buildername=%s", name),
				fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
				fmt.Sprintf("revision=%s", specs.PLACEHOLDER_REVISION),
				fmt.Sprintf("patch_repo=%s", specs.PLACEHOLDER_PATCH_REPO),
				fmt.Sprintf("patch_storage=%s", specs.PLACEHOLDER_PATCH_STORAGE),
				fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
				fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
				fmt.Sprintf("gs_bucket=%s", CONFIG.GsBucketGm),
			},
			Isolate:  relpath("upload_dm_results.isolate"),
			Priority: 0.8,
		})
		return uploadName
	}
	return name
}

// perf generates a Perf task. Returns the name of the last task in the
// generated chain of tasks, which the Job should add as a dependency.
func perf(b *specs.TasksCfgBuilder, name string, parts map[string]string, compileTaskName string, pkgs []*specs.CipdPackage) string {
	recipe := "perf"
	isolate := relpath("perf_skia.isolate")
	if strings.Contains(parts["extra_config"], "Skpbench") {
		recipe = "skpbench"
		isolate = relpath("skpbench_skia.isolate")
		if useBundledRecipes(parts) {
			if strings.Contains(parts["os"], "Win") {
				isolate = relpath("skpbench_skia_bundled_win.isolate")
			} else {
				isolate = relpath("skpbench_skia_bundled_unix.isolate")
			}
		}
	} else if useBundledRecipes(parts) {
		if strings.Contains(parts["os"], "Win") {
			isolate = relpath("perf_skia_bundled_win.isolate")
		} else {
			isolate = relpath("perf_skia_bundled_unix.isolate")
		}
	}
	s := &specs.TaskSpec{
		CipdPackages:     pkgs,
		Dependencies:     []string{compileTaskName},
		Dimensions:       swarmDimensions(parts),
		ExecutionTimeout: 4 * time.Hour,
		Expiration:       20 * time.Hour,
		ExtraArgs: []string{
			"--workdir", "../../..", recipe,
			fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
			fmt.Sprintf("buildername=%s", name),
			fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
			fmt.Sprintf("revision=%s", specs.PLACEHOLDER_REVISION),
			fmt.Sprintf("patch_repo=%s", specs.PLACEHOLDER_PATCH_REPO),
			fmt.Sprintf("patch_storage=%s", specs.PLACEHOLDER_PATCH_STORAGE),
			fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
			fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
		},
		IoTimeout:   40 * time.Minute,
		Isolate:     isolate,
		MaxAttempts: 1,
		Priority:    0.8,
	}
	if useBundledRecipes(parts) {
		s.Dependencies = append(s.Dependencies, BUNDLE_RECIPES_NAME)
	}
	if deps := getIsolatedCIPDDeps(parts); len(deps) > 0 {
		s.Dependencies = append(s.Dependencies, deps...)
	}
	if strings.Contains(parts["extra_config"], "Valgrind") {
		s.ExecutionTimeout = 9 * time.Hour
		s.Expiration = 48 * time.Hour
		s.IoTimeout = time.Hour
		s.CipdPackages = append(s.CipdPackages, b.MustGetCipdPackageFromAsset("valgrind"))
	} else if strings.Contains(parts["extra_config"], "MSAN") {
		s.ExecutionTimeout = 9 * time.Hour
	} else if parts["arch"] == "x86" && parts["configuration"] == "Debug" {
		// skia:6737
		s.ExecutionTimeout = 6 * time.Hour
	}
	b.MustAddTask(name, s)

	// Upload results if necessary.
	if strings.Contains(name, "Release") && doUpload(name) {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, jobNameSchema.Sep, name)
		b.MustAddTask(uploadName, &specs.TaskSpec{
			Dependencies: []string{name},
			Dimensions:   linuxGceDimensions(),
			ExtraArgs: []string{
				"--workdir", "../../..", "upload_nano_results",
				fmt.Sprintf("repository=%s", specs.PLACEHOLDER_REPO),
				fmt.Sprintf("buildername=%s", name),
				fmt.Sprintf("swarm_out_dir=%s", specs.PLACEHOLDER_ISOLATED_OUTDIR),
				fmt.Sprintf("revision=%s", specs.PLACEHOLDER_REVISION),
				fmt.Sprintf("patch_repo=%s", specs.PLACEHOLDER_PATCH_REPO),
				fmt.Sprintf("patch_storage=%s", specs.PLACEHOLDER_PATCH_STORAGE),
				fmt.Sprintf("patch_issue=%s", specs.PLACEHOLDER_ISSUE),
				fmt.Sprintf("patch_set=%s", specs.PLACEHOLDER_PATCHSET),
				fmt.Sprintf("gs_bucket=%s", CONFIG.GsBucketNano),
			},
			Isolate:  relpath("upload_nano_results.isolate"),
			Priority: 0.8,
		})
		return uploadName
	}
	return name
}

// process generates tasks and jobs for the given job name.
func process(b *specs.TasksCfgBuilder, name string) {
	deps := []string{}

	// Bundle Recipes.
	if name == BUNDLE_RECIPES_NAME {
		deps = append(deps, bundleRecipes(b))
	}

	// Isolate CIPD assets.
	if _, ok := ISOLATE_ASSET_MAPPING[name]; ok {
		deps = append(deps, isolateCIPDAsset(b, name))
	}

	parts, err := jobNameSchema.ParseJobName(name)
	if err != nil {
		glog.Fatal(err)
	}

	// RecreateSKPs.
	if strings.Contains(name, "RecreateSKPs") {
		deps = append(deps, recreateSKPs(b, name))
	}

	// UpdateMetaConfig bot.
	if strings.Contains(name, "UpdateMetaConfig") {
		deps = append(deps, updateMetaConfig(b, name))
	}

	// CT bots.
	if strings.Contains(name, "-CT_") {
		deps = append(deps, ctSKPs(b, name))
	}

	// Infra tests.
	if name == "Housekeeper-PerCommit-InfraTests" {
		deps = append(deps, infra(b, name))
	}

	// Compile bots.
	if parts["role"] == "Build" {
		deps = append(deps, compile(b, name, parts))
	}

	// Most remaining bots need a compile task.
	compileTaskName := deriveCompileTaskName(name, parts)
	compileTaskParts, err := jobNameSchema.ParseJobName(compileTaskName)
	if err != nil {
		glog.Fatal(err)
	}
	// These bots do not need a compile task.
	if parts["role"] != "Build" &&
		name != "Housekeeper-PerCommit-BundleRecipes" &&
		name != "Housekeeper-PerCommit-InfraTests" &&
		!strings.Contains(name, "RecreateSKPs") &&
		!strings.Contains(name, "UpdateMetaConfig") &&
		!strings.Contains(name, "-CT_") &&
		!strings.Contains(name, "Housekeeper-PerCommit-Isolate") {
		compile(b, compileTaskName, compileTaskParts)
	}

	// Housekeeper.
	if name == "Housekeeper-PerCommit" {
		deps = append(deps, housekeeper(b, name, compileTaskName))
	}

	// Common assets needed by the remaining bots.
	pkgs := []*specs.CipdPackage{}
	if deps := getIsolatedCIPDDeps(parts); len(deps) == 0 {
		pkgs = []*specs.CipdPackage{
			b.MustGetCipdPackageFromAsset("skimage"),
			b.MustGetCipdPackageFromAsset("skp"),
			b.MustGetCipdPackageFromAsset("svg"),
		}
	}

	if (strings.Contains(name, "Ubuntu") || strings.Contains(name, "Debian")) && strings.Contains(name, "SAN") {
		pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("clang_linux"))
	}
	if strings.Contains(name, "Ubuntu16") {
		if strings.Contains(name, "Vulkan") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("linux_vulkan_sdk"))
		}
		if strings.Contains(name, "Release") {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("linux_vulkan_intel_driver_release"))
		} else {
			pkgs = append(pkgs, b.MustGetCipdPackageFromAsset("linux_vulkan_intel_driver_debug"))
		}
	}

	// Test bots.
	if parts["role"] == "Test" && !strings.Contains(name, "-CT_") {
		deps = append(deps, test(b, name, parts, compileTaskName, pkgs))
	}

	// Perf bots.
	if parts["role"] == "Perf" && !strings.Contains(name, "-CT_") {
		deps = append(deps, perf(b, name, parts, compileTaskName, pkgs))
	}

	// Add the Job spec.
	j := &specs.JobSpec{
		Priority:  0.8,
		TaskSpecs: deps,
	}
	if name == "Housekeeper-Nightly-RecreateSKPs_Canary" {
		j.Trigger = "nightly"
	}
	if name == "Housekeeper-Nightly-UpdateMetaConfig" {
		j.Trigger = "nightly"
	}
	if name == "Housekeeper-Weekly-RecreateSKPs" {
		j.Trigger = "weekly"
	}
	if name == "Test-Ubuntu14-GCC-GCE-CPU-AVX2-x86_64-Debug-CT_DM_1m_SKPs" {
		j.Trigger = "weekly"
	}
	b.MustAddJob(name, j)
}
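
// loadJson reads the JSON file pointed to by the given flag, falling back to
// defaultFlag when the flag is empty, and unmarshals it into val.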
func loadJson(flag *string, defaultFlag string, val interface{}) {
	if *flag == "" {
		*flag = defaultFlag
	}
	b, err := ioutil.ReadFile(*flag)
	if err != nil {
		glog.Fatal(err)
	}
	if err := json.Unmarshal(b, val); err != nil {
		glog.Fatal(err)
	}
}

// Regenerate the tasks.json file.
func main() {
	b := specs.MustNewTasksCfgBuilder()
	b.SetAssetsDir(*assetsDir)
	infraBots := path.Join(b.CheckoutRoot(), "infra", "bots")

	// Load the jobs from a JSON file.
	loadJson(jobsFile, path.Join(infraBots, "jobs.json"), &JOBS)

	// Load general config information from a JSON file.
	loadJson(cfgFile, path.Join(infraBots, "cfg.json"), &CONFIG)

	// Create the JobNameSchema.
	if *builderNameSchemaFile == "" {
		*builderNameSchemaFile = path.Join(b.CheckoutRoot(), "infra", "bots", "recipe_modules", "builder_name_schema", "builder_name_schema.json")
	}
	schema, err := NewJobNameSchema(*builderNameSchemaFile)
	if err != nil {
		glog.Fatal(err)
	}
	jobNameSchema = schema

	// Create Tasks and Jobs.
	for _, name := range JOBS {
		process(b, name)
	}

	b.MustFinish()
}

// TODO(borenet): The below really belongs in its own file, probably next to the
// builder_name_schema.json file.

// JobNameSchema is a struct used for (de)constructing Job names in a
// predictable format.
type JobNameSchema struct {
	Schema map[string][]string `json:"builder_name_schema"`
	Sep    string              `json:"builder_name_sep"`
}

// NewJobNameSchema returns a JobNameSchema instance based on the given JSON
// file.
func NewJobNameSchema(jsonFile string) (*JobNameSchema, error) {
	var rv JobNameSchema
	f, err := os.Open(jsonFile)
	if err != nil {
		return nil, err
	}
	defer util.Close(f)
	if err := json.NewDecoder(f).Decode(&rv); err != nil {
		return nil, err
	}
	return &rv, nil
}

// ParseJobName splits the given Job name into its component parts, according
// to the schema.
func (s *JobNameSchema) ParseJobName(n string) (map[string]string, error) {
	split := strings.Split(n, s.Sep)
	if len(split) < 2 {
		return nil, fmt.Errorf("Invalid job name: %q", n)
	}
	role := split[0]
	split = split[1:]
	keys, ok := s.Schema[role]
	if !ok {
		return nil, fmt.Errorf("Invalid job name; %q is not a valid role.", role)
	}
	extraConfig := ""
	if len(split) == len(keys)+1 {
		extraConfig = split[len(split)-1]
		split = split[:len(split)-1]
	}
	if len(split) != len(keys) {
		return nil, fmt.Errorf("Invalid job name; %q has incorrect number of parts.", n)
	}
	rv := make(map[string]string, len(keys)+2)
	rv["role"] = role
	if extraConfig != "" {
		rv["extra_config"] = extraConfig
	}
	for i, k := range keys {
		rv[k] = split[i]
	}
	return rv, nil
}

// MakeJobName assembles the given parts of a Job name, according to the schema.
func (s *JobNameSchema) MakeJobName(parts map[string]string) (string, error) {
	role, ok := parts["role"]
	if !ok {
		return "", fmt.Errorf("Invalid job parts; jobs must have a role.")
	}
	keys, ok := s.Schema[role]
	if !ok {
		return "", fmt.Errorf("Invalid job parts; unknown role %q", role)
	}
	rvParts := make([]string, 0, len(parts))
	rvParts = append(rvParts, role)
	for _, k := range keys {
		v, ok := parts[k]
		if !ok {
			return "", fmt.Errorf("Invalid job parts; missing %q", k)
		}
		rvParts = append(rvParts, v)
	}
	if _, ok := parts["extra_config"]; ok {
		rvParts = append(rvParts, parts["extra_config"])
	}
	return strings.Join(rvParts, s.Sep), nil
}
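
// Illustrative round trip, assuming the usual Test schema key order from
// builder_name_schema.json (an assumption; this file does not define it):
// ParseJobName("Test-Android-Clang-Pixel-GPU-Adreno530-arm64-Debug") yields
// parts such as role=Test, os=Android, compiler=Clang, model=Pixel,
// cpu_or_gpu=GPU, cpu_or_gpu_value=Adreno530, arch=arm64 and
// configuration=Debug, and MakeJobName on those parts reconstructs the
// original name.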