// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package gen_tasks_logic
/*
	Generate the tasks.json file.
*/
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"path"
"path/filepath"
"regexp"
"runtime"
"sort"
"strconv"
"strings"
"time"
"go.skia.org/infra/task_scheduler/go/specs"
)
const (
BUILD_TASK_DRIVERS_NAME = "Housekeeper-PerCommit-BuildTaskDrivers"
BUNDLE_RECIPES_NAME = "Housekeeper-PerCommit-BundleRecipes"
ISOLATE_GCLOUD_LINUX_NAME = "Housekeeper-PerCommit-IsolateGCloudLinux"
ISOLATE_SKIMAGE_NAME = "Housekeeper-PerCommit-IsolateSkImage"
ISOLATE_SKP_NAME = "Housekeeper-PerCommit-IsolateSKP"
ISOLATE_MSKP_NAME = "Housekeeper-PerCommit-IsolateMSKP"
ISOLATE_SVG_NAME = "Housekeeper-PerCommit-IsolateSVG"
ISOLATE_NDK_LINUX_NAME = "Housekeeper-PerCommit-IsolateAndroidNDKLinux"
ISOLATE_SDK_LINUX_NAME = "Housekeeper-PerCommit-IsolateAndroidSDKLinux"
ISOLATE_WIN_TOOLCHAIN_NAME = "Housekeeper-PerCommit-IsolateWinToolchain"
DEFAULT_OS_DEBIAN = "Debian-10.3"
DEFAULT_OS_LINUX_GCE = "Debian-10.3"
OLD_OS_LINUX_GCE = "Debian-9.8"
COMPILE_TASK_NAME_OS_LINUX = "Debian10"
COMPILE_TASK_NAME_OS_LINUX_OLD = "Debian9"
DEFAULT_OS_MAC = "Mac-10.14.6"
DEFAULT_OS_WIN = "Windows-Server-17763"
// Small is a 2-core machine.
// TODO(dogben): Would n1-standard-1 or n1-standard-2 be sufficient?
MACHINE_TYPE_SMALL = "n1-highmem-2"
	// Medium is a 16-core machine.
MACHINE_TYPE_MEDIUM = "n1-standard-16"
// Large is a 64-core machine. (We use "highcpu" because we don't need more than 57GB memory for
// any of our tasks.)
MACHINE_TYPE_LARGE = "n1-highcpu-64"
// Swarming output dirs.
OUTPUT_NONE = "output_ignored" // This will result in outputs not being isolated.
OUTPUT_BUILD = "build"
OUTPUT_TEST = "test"
OUTPUT_PERF = "perf"
// Name prefix for upload jobs.
PREFIX_UPLOAD = "Upload"
)
var (
// "Constants"
// Named caches used by tasks.
	CACHES_GIT = []*specs.Cache{
		&specs.Cache{
			Name: "git",
			Path: "cache/git",
		},
		&specs.Cache{
			Name: "git_cache",
			Path: "cache/git_cache",
		},
	}
	CACHES_GO = []*specs.Cache{
		&specs.Cache{
			Name: "go_cache",
			Path: "cache/go_cache",
		},
		&specs.Cache{
			Name: "gopath",
			Path: "cache/gopath",
		},
	}
	CACHES_WORKDIR = []*specs.Cache{
		&specs.Cache{
			Name: "work",
			Path: "cache/work",
		},
	}
	CACHES_CCACHE = []*specs.Cache{
		&specs.Cache{
			Name: "ccache",
			Path: "cache/ccache",
		},
	}
	// The "docker" cache is used as a persistent working directory for
	// tasks which use Docker. It is not to be confused with Docker's own
	// cache, which stores images. We do not currently use a named Swarming
	// cache for the latter.
	// TODO(borenet): We should ensure that any task which uses Docker does
	// not also use the normal "work" cache, to prevent issues like
	// https://bugs.chromium.org/p/skia/issues/detail?id=9749.
	CACHES_DOCKER = []*specs.Cache{
		&specs.Cache{
			Name: "docker",
			Path: "cache/docker",
		},
	}
	// TODO(borenet): This is hacky and bad.
	CIPD_PKG_LUCI_AUTH = specs.CIPD_PKGS_KITCHEN[1]
	CIPD_PKGS_KITCHEN  = append(specs.CIPD_PKGS_KITCHEN[:2], specs.CIPD_PKGS_PYTHON[1])
	CIPD_PKG_CPYTHON   = specs.CIPD_PKGS_PYTHON[0]
	CIPD_PKGS_XCODE = []*specs.CipdPackage{
		// https://chromium.googlesource.com/chromium/tools/build/+/e19b7d9390e2bb438b566515b141ed2b9ed2c7c2/scripts/slave/recipe_modules/ios/api.py#317
		// This package is really just an installer for XCode.
		&specs.CipdPackage{
			Name: "infra/tools/mac_toolchain/${platform}",
			Path: "mac_toolchain",
			// When this is updated, also update
			// https://skia.googlesource.com/skcms.git/+/f1e2b45d18facbae2dece3aca673fe1603077846/infra/bots/gen_tasks.go#56
			Version: "git_revision:796d2b92cff93fc2059623ce0a66284373ceea0a",
		},
	}
// These properties are required by some tasks, eg. for running
// bot_update, but they prevent de-duplication, so they should only be
// used where necessary.
	EXTRA_PROPS = map[string]string{
		"buildbucket_build_id": specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
		"patch_issue":          specs.PLACEHOLDER_ISSUE_INT,
		"patch_ref":            specs.PLACEHOLDER_PATCH_REF,
		"patch_repo":           specs.PLACEHOLDER_PATCH_REPO,
		"patch_set":            specs.PLACEHOLDER_PATCHSET_INT,
		"patch_storage":        specs.PLACEHOLDER_PATCH_STORAGE,
		"repository":           specs.PLACEHOLDER_REPO,
		"revision":             specs.PLACEHOLDER_REVISION,
		"task_id":              specs.PLACEHOLDER_TASK_ID,
	}
// ISOLATE_ASSET_MAPPING maps the name of an asset to the configuration
// for how the CIPD package should be installed for a given task.
	ISOLATE_ASSET_MAPPING = map[string]isolateAssetCfg{
		"gcloud_linux": {
			isolateTaskName: ISOLATE_GCLOUD_LINUX_NAME,
			path:            "gcloud_linux",
		},
		"skimage": {
			isolateTaskName: ISOLATE_SKIMAGE_NAME,
			path:            "skimage",
		},
		"skp": {
			isolateTaskName: ISOLATE_SKP_NAME,
			path:            "skp",
		},
		"svg": {
			isolateTaskName: ISOLATE_SVG_NAME,
			path:            "svg",
		},
		"mskp": {
			isolateTaskName: ISOLATE_MSKP_NAME,
			path:            "mskp",
		},
		"android_ndk_linux": {
			isolateTaskName: ISOLATE_NDK_LINUX_NAME,
			path:            "android_ndk_linux",
		},
		"android_sdk_linux": {
			isolateTaskName: ISOLATE_SDK_LINUX_NAME,
			path:            "android_sdk_linux",
		},
		"win_toolchain": {
			alwaysIsolate:   true,
			isolateTaskName: ISOLATE_WIN_TOOLCHAIN_NAME,
			path:            "win_toolchain",
		},
	}
)
// Config contains general configuration information.
type Config struct {
// Directory containing assets. Assumed to be relative to the directory
	// which contains the calling gen_tasks.go file. If not specified, uses
	// the infra/bots/assets from this repo.
	AssetsDir string `json:"assets_dir"`
	// Path to the builder name schema JSON file. Assumed to be relative to
	// the directory which contains the calling gen_tasks.go file. If not
	// specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json
	// from this repo.
	BuilderNameSchemaFile string `json:"builder_name_schema"`
	// URL of the Skia Gold known hashes endpoint.
	GoldHashesURL string `json:"gold_hashes_url"`
	// GCS bucket used for GM results.
	GsBucketGm string `json:"gs_bucket_gm"`
	// GCS bucket used for Nanobench results.
	GsBucketNano string `json:"gs_bucket_nano"`
	// Optional function which returns a bot ID for internal devices.
	InternalHardwareLabel func(parts map[string]string) *int `json:"-"`
	// List of task names for which we'll never upload results.
	NoUpload []string `json:"no_upload"`
	// Swarming pool used for triggering tasks.
	Pool string `json:"pool"`
	// LUCI project associated with this repo.
	Project string `json:"project"`
	// Service accounts.
	ServiceAccountCanary       string `json:"service_account_canary"`
	ServiceAccountCompile      string `json:"service_account_compile"`
	ServiceAccountHousekeeper  string `json:"service_account_housekeeper"`
	ServiceAccountRecreateSKPs string `json:"service_account_recreate_skps"`
	ServiceAccountUploadBinary string `json:"service_account_upload_binary"`
	ServiceAccountUploadGM     string `json:"service_account_upload_gm"`
	ServiceAccountUploadNano   string `json:"service_account_upload_nano"`
	// Optional override function which derives Swarming bot dimensions
	// from parts of task names.
	SwarmDimensions func(parts map[string]string) []string `json:"-"`
}
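
// For reference, a cfg.json consumed by LoadConfig/GenTasks might look roughly
// like the sketch below. All values are illustrative only; the actual buckets,
// pool, project, and service accounts depend on the calling repo's own
// infrastructure.
//
//	{
//	  "gold_hashes_url": "https://example-gold.skia.org/json/hashes",
//	  "gs_bucket_gm": "example-skia-gm",
//	  "gs_bucket_nano": "example-skia-perf",
//	  "no_upload": ["^Housekeeper-.*"],
//	  "pool": "Skia",
//	  "project": "skia"
//	}
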
// LoadConfig loads the Config from a cfg.json file which is the sibling of the
// calling gen_tasks.go file.
func LoadConfig() *Config {
	cfgDir := getCallingDirName()
	var cfg Config
	LoadJson(filepath.Join(cfgDir, "cfg.json"), &cfg)
	return &cfg
}
// CheckoutRoot is a wrapper around specs.GetCheckoutRoot which prevents the
// caller from needing a dependency on the specs package.
func CheckoutRoot() string {
	root, err := specs.GetCheckoutRoot()
	if err != nil {
		log.Fatal(err)
	}
return root
}
// LoadJson loads JSON from the given file and unmarshals it into the given
// destination.
func LoadJson(filename string, dest interface{}) {
	b, err := ioutil.ReadFile(filename)
	if err != nil {
		log.Fatalf("Unable to read %q: %s", filename, err)
	}
	if err := json.Unmarshal(b, dest); err != nil {
		log.Fatalf("Unable to parse %q: %s", filename, err)
	}
}
// In returns true if |s| is *in* |a| slice.
// TODO(borenet): This is copied from go.skia.org/infra/go/util to avoid the
// huge set of additional dependencies added by that package.
func In(s string, a []string) bool {
	for _, x := range a {
if x == s {
return true
}
}
return false
}
// GenTasks regenerates the tasks.json file. Loads the job list from a jobs.json
// file which is the sibling of the calling gen_tasks.go file. If cfg is nil, it
// is similarly loaded from a cfg.json file which is the sibling of the calling
// gen_tasks.go file.
func GenTasks(cfg *Config) {
	b := specs.MustNewTasksCfgBuilder()

	// Find the paths to the infra/bots directories in this repo and the
	// repo of the calling file.
	relpathTargetDir := getThisDirName()
	relpathBaseDir := getCallingDirName()

	var jobs []string
	LoadJson(filepath.Join(relpathBaseDir, "jobs.json"), &jobs)
	if cfg == nil {
		cfg = new(Config)
		LoadJson(filepath.Join(relpathBaseDir, "cfg.json"), cfg)
	}

	// Create the JobNameSchema.
	builderNameSchemaFile := filepath.Join(relpathTargetDir, "recipe_modules", "builder_name_schema", "builder_name_schema.json")
	if cfg.BuilderNameSchemaFile != "" {
		builderNameSchemaFile = filepath.Join(relpathBaseDir, cfg.BuilderNameSchemaFile)
	}
	schema, err := NewJobNameSchema(builderNameSchemaFile)
	if err != nil {
		log.Fatal(err)
	}

	// Set the assets dir.
	assetsDir := filepath.Join(relpathTargetDir, "assets")
	if cfg.AssetsDir != "" {
		assetsDir = filepath.Join(relpathBaseDir, cfg.AssetsDir)
	}
	b.SetAssetsDir(assetsDir)

	// Create Tasks and Jobs.
	builder := &builder{
		TasksCfgBuilder:  b,
		cfg:              cfg,
		jobNameSchema:    schema,
		jobs:             jobs,
		relpathBaseDir:   relpathBaseDir,
		relpathTargetDir: relpathTargetDir,
	}
	for _, name := range jobs {
		jb := newJobBuilder(builder, name)
		jb.genTasksForJob()
		jb.finish()
	}
	builder.MustFinish()
}
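
// For reference, a repo's gen_tasks.go entry point is typically a thin wrapper
// around this package, along these lines (sketch only; the import path and the
// decision to pass nil vs. a custom *Config depend on the calling repo):
//
//	package main
//
//	// Import path is illustrative; use the path at which this package lives.
//	import "example.com/your_repo/infra/bots/gen_tasks_logic"
//
//	func main() {
//		// Reads jobs.json and cfg.json next to this file and writes tasks.json.
//		gen_tasks_logic.GenTasks(nil)
//	}
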
// getThisDirName returns the infra/bots directory which is an ancestor of this
// file.
func getThisDirName() string {
	_, thisFileName, _, ok := runtime.Caller(0)
	if !ok {
		log.Fatal("Unable to find path to current file.")
	}
	return filepath.Dir(filepath.Dir(thisFileName))
}
// getCallingDirName returns the infra/bots directory which is an ancestor of
// the calling gen_tasks.go file. WARNING: assumes that the calling gen_tasks.go
// file appears two steps up the stack; do not call from a function which is not
// directly called by gen_tasks.go.
func getCallingDirName() string {
	_, callingFileName, _, ok := runtime.Caller(2)
	if !ok {
		log.Fatal("Unable to find path to calling file.")
	}
	return filepath.Dir(callingFileName)
}
// builder is a wrapper for specs.TasksCfgBuilder.
type builder struct {
	*specs.TasksCfgBuilder
	cfg              *Config
	jobNameSchema    *JobNameSchema
	jobs             []string
	relpathBaseDir   string
	relpathTargetDir string
}
// marshalJson encodes the given data as JSON and fixes escaping of '<' which Go
// does by default.
func marshalJson(data interface{}) string {
	j, err := json.Marshal(data)
	if err != nil {
		log.Fatal(err)
	}
	return strings.Replace(string(j), "\\u003c", "<", -1)
}
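
// For example, encoding/json escapes '<' for HTML safety by default, so
// json.Marshal on map[string]string{"cmd": "a<b"} produces {"cmd":"a\u003cb"},
// whereas marshalJson returns {"cmd":"a<b"}.
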
// kitchenTaskNoBundle sets up the task to run a recipe via Kitchen, without the
// recipe bundle.
func (b *taskBuilder) kitchenTaskNoBundle(recipe string, outputDir string) {
	b.cipd(CIPD_PKGS_KITCHEN...)
	b.usesPython()
	b.recipeProp("swarm_out_dir", outputDir)
	if outputDir != OUTPUT_NONE {
		b.output(outputDir)
	}
	python := "cipd_bin_packages/vpython${EXECUTABLE_SUFFIX}"
	b.cmd(python, "-u", "skia/infra/bots/run_recipe.py", "${ISOLATED_OUTDIR}", recipe, b.getRecipeProps(), b.cfg.Project)
	// Most recipes want this isolate; they can override if necessary.
	b.isolate("swarm_recipe.isolate")
	b.timeout(time.Hour)
	b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
	b.Spec.ExtraTags = map[string]string{
		"log_location": fmt.Sprintf("logdog://logs.chromium.org/%s/${SWARMING_TASK_ID}/+/annotations", b.cfg.Project),
	}

	// Attempts.
	if !b.role("Build", "Upload") && b.extraConfig("ASAN", "MSAN", "TSAN", "Valgrind") {
		// Sanitizers often find non-deterministic issues that retries would hide.
		b.attempts(1)
	} else {
		// Retry by default to hide random bot/hardware failures.
		b.attempts(2)
	}
}
// kitchenTask sets up the task to run a recipe via Kitchen.
func (b *taskBuilder) kitchenTask(recipe string, outputDir string) {
	b.kitchenTaskNoBundle(recipe, outputDir)
	b.dep(b.bundleRecipes())
}
// internalHardwareLabel returns the internal ID for the bot, if any.
func (b *taskBuilder) internalHardwareLabel() *int {
	if b.cfg.InternalHardwareLabel != nil {
		return b.cfg.InternalHardwareLabel(b.parts)
	}
return nil
}
// linuxGceDimensions adds the Swarming bot dimensions for Linux GCE instances.
func (b *taskBuilder) linuxGceDimensions(machineType string) {
	b.dimension(
		// Specify CPU to avoid running builds on bots with a more unique CPU.
		"cpu:x86-64-Haswell_GCE",
		"gpu:none",
		// Currently all Linux GCE tasks run on 16-CPU machines.
		fmt.Sprintf("machine_type:%s", machineType),
		fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
		fmt.Sprintf("pool:%s", b.cfg.Pool),
	)
}
// deriveCompileTaskName returns the name of a compile task based on the given
// job name.
func (b *jobBuilder) deriveCompileTaskName() string {
	if b.role("Test", "Perf", "FM") {
		task_os := b.parts["os"]
		ec := []string{}
		if val := b.parts["extra_config"]; val != "" {
			ec = strings.Split(val, "_")
			ignore := []string{
				"Skpbench", "AbandonGpuContext", "PreAbandonGpuContext", "Valgrind",
				"ReleaseAndAbandonGpuContext", "CCPR", "FSAA", "FAAA", "FDAA", "NativeFonts", "GDI",
				"NoGPUThreads", "ProcDump", "DDL1", "DDL3", "OOPRDDL", "T8888",
				"DDLTotal", "DDLRecord", "9x9", "BonusConfigs", "SkottieTracing", "SkottieWASM",
				"GpuTess", "NonNVPR", "Mskp", "Docker", "PDF", "SkVM", "Puppeteer",
				"SkottieFrames", "RenderSKP", "CanvasPerf", "AllPathsVolatile"}
			keep := make([]string, 0, len(ec))
			for _, part := range ec {
				if !In(part, ignore) {
					keep = append(keep, part)
				}
			}
			ec = keep
		}
		if b.os("Android") {
			if !In("Android", ec) {
				ec = append([]string{"Android"}, ec...)
			}
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.os("ChromeOS") {
			ec = append([]string{"Chromebook", "GLES"}, ec...)
			task_os = COMPILE_TASK_NAME_OS_LINUX
			if b.model("Pixelbook") {
				task_os = COMPILE_TASK_NAME_OS_LINUX_OLD
				ec = append(ec, "Docker")
			}
		} else if b.os("iOS") {
			ec = append([]string{task_os}, ec...)
			task_os = "Mac"
			// iPhone11 requires xcode 11.4.1 which requires >10.15.2.
			if b.parts["model"] == "iPhone11" {
				task_os = "Mac10.15.5"
			}
		} else if b.matchOs("Win") {
			task_os = "Win"
		} else if b.compiler("GCC") {
			// GCC compiles are now on a Docker container. We use the same OS and
			// version to compile as to test.
			ec = append(ec, "Docker")
		} else if b.matchOs("Ubuntu", "Debian") {
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.matchOs("Mac") {
			task_os = "Mac"
		}
		jobNameMap := map[string]string{
			"role":          "Build",
			"os":            task_os,
			"compiler":      b.parts["compiler"],
			"target_arch":   b.parts["arch"],
			"configuration": b.parts["configuration"],
		}
		if b.extraConfig("PathKit") {
			ec = []string{"PathKit"}
		}
		if b.extraConfig("CanvasKit", "SkottieWASM", "Puppeteer") {
			if b.cpu() {
				ec = []string{"CanvasKit_CPU"}
			} else {
				ec = []string{"CanvasKit"}
			}
		}
		if len(ec) > 0 {
			jobNameMap["extra_config"] = strings.Join(ec, "_")
		}
		name, err := b.jobNameSchema.MakeJobName(jobNameMap)
		if err != nil {
			log.Fatal(err)
		}
		return name
	} else if b.parts["role"] == "BuildStats" {
		return strings.Replace(b.Name, "BuildStats", "Build", 1)
	} else {
		return b.Name
	}
}
// swarmDimensions generates swarming bot dimensions for the given task.
func (b *taskBuilder) swarmDimensions() {
	if b.cfg.SwarmDimensions != nil {
		dims := b.cfg.SwarmDimensions(b.parts)
		if dims != nil {
			b.dimension(dims...)
			return
		}
	}
	b.defaultSwarmDimensions()
}
// defaultSwarmDimensions generates default swarming bot dimensions for the given task.
func (b *taskBuilder) defaultSwarmDimensions() {
	d := map[string]string{
		"pool": b.cfg.Pool,
	}
	if os, ok := b.parts["os"]; ok {
		d["os"], ok = map[string]string{
			"Android":    "Android",
			"ChromeOS":   "ChromeOS",
			"Debian9":    DEFAULT_OS_LINUX_GCE, // Runs in Deb9 Docker.
			"Debian10":   DEFAULT_OS_LINUX_GCE,
			"Mac":        DEFAULT_OS_MAC,
			"Mac10.13":   "Mac-10.13.6",
			"Mac10.14":   "Mac-10.14.3",
			"Mac10.15":   "Mac-10.15.1",
			"Mac10.15.5": "Mac-10.15.5", // We have some builders at 10.15.5 to run Xcode 11.4.1.
			"Ubuntu18":   "Ubuntu-18.04",
			"Win":        DEFAULT_OS_WIN,
			"Win10":      "Windows-10-18363",
			"Win2019":    DEFAULT_OS_WIN,
			"Win7":       "Windows-7-SP1",
			"Win8":       "Windows-8.1-SP0",
			"iOS":        "iOS-13.3.1",
		}[os]
		if !ok {
			log.Fatalf("Entry %q not found in OS mapping.", os)
		}
		if os == "Win10" && b.parts["model"] == "Golo" {
			// ChOps-owned machines have Windows 10 v1709.
			d["os"] = "Windows-10-16299"
		}
		if os == "Mac10.14" && b.parts["model"] == "VMware7.1" {
			// ChOps VMs are at a newer version of MacOS.
			d["os"] = "Mac-10.14.6"
		}
		if b.parts["model"] == "LenovoYogaC630" {
			// This is currently a unique snowflake.
			d["os"] = "Windows-10"
		}
		if b.parts["model"] == "iPhone6" {
			// This is the latest iOS that supports iPhone6.
			d["os"] = "iOS-12.4.5"
		}
		if b.parts["model"] == "iPhone11" {
			d["os"] = "iOS-13.6"
		}
		if b.parts["model"] == "iPadPro" {
			d["os"] = "iOS-13.6"
		}
	} else {
		d["os"] = DEFAULT_OS_DEBIAN
	}
	if b.role("Test", "Perf") {
		if b.os("Android") {
			// For Android, the device type is a better dimension
			// than CPU or GPU.
			deviceInfo, ok := map[string][]string{
				"AndroidOne":      {"sprout", "MOB30Q"},
				"GalaxyS6":        {"zerofltetmo", "NRD90M_G920TUVS6FRC1"},
				"GalaxyS7_G930FD": {"herolte", "R16NW_G930FXXS2ERH6"}, // This is Oreo.
				"GalaxyS9":        {"starlte", "QP1A.190711.020"},     // This is Android10.
				"GalaxyS20":       {"exynos990", "QP1A.190711.020"},
				"MotoG4":          {"athene", "NPJS25.93-14.7-8"},
				"NVIDIA_Shield":   {"foster", "OPR6.170623.010_3507953_1441.7411"},
				"Nexus5":          {"hammerhead", "M4B30Z_3437181"},
				"Nexus5x":         {"bullhead", "OPR6.170623.023"},
				"Nexus7":          {"grouper", "LMY47V_1836172"}, // 2012 Nexus 7
				"P30":             {"HWELE", "HUAWEIELE-L29"},
				"Pixel":           {"sailfish", "PPR1.180610.009"},
				"Pixel2XL":        {"taimen", "PPR1.180610.009"},
				"Pixel3":          {"blueline", "PQ1A.190105.004"},
				"Pixel3a":         {"sargo", "QP1A.190711.020"},
				"Pixel4":          {"flame", "RPB2.200611.009"}, // R Preview
				"Pixel4XL":        {"coral", "QD1A.190821.011.C4"},
				"TecnoSpark3Pro":  {"TECNO-KB8", "PPR1.180610.011"},
			}[b.parts["model"]]
			if !ok {
				log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
			}
			d["device_type"] = deviceInfo[0]
			d["device_os"] = deviceInfo[1]
		} else if b.os("iOS") {
			device, ok := map[string]string{
				"iPadMini4": "iPad5,1",
				"iPhone6":   "iPhone7,2",
				"iPhone7":   "iPhone9,1",
				"iPhone8":   "iPhone10,1",
				"iPhone11":  "iPhone12,1",
				"iPadPro":   "iPad6,3",
			}[b.parts["model"]]
			if !ok {
				log.Fatalf("Entry %q not found in iOS mapping.", b.parts["model"])
			}
			d["device_type"] = device
			// Temporarily use this dimension to ensure we only use the new libimobiledevice, since the
			// old version won't work with current recipes.
			d["libimobiledevice"] = "1582155448"
		} else if b.extraConfig("SKQP") && b.cpu("Emulator") {
			if !b.model("NUC7i5BNK") || d["os"] != DEFAULT_OS_DEBIAN {
				log.Fatalf("Please update defaultSwarmDimensions for SKQP::Emulator %s %s.", b.parts["os"], b.parts["model"])
			}
			d["cpu"] = "x86-64-i5-7260U"
			d["os"] = DEFAULT_OS_DEBIAN
			// KVM means Kernel-based Virtual Machine, that is, can this vm virtualize commands
			// For us, this means, can we run an x86 android emulator on it.
			// kjlubick tried running this on GCE, but it was a bit too slow on the large install.
			// So, we run on bare metal machines in the Skolo (that should also have KVM).
			d["kvm"] = "1"
			d["docker_installed"] = "true"
		} else if b.cpu() || b.extraConfig("CanvasKit", "Docker", "SwiftShader") {
			modelMapping, ok := map[string]map[string]string{
				"AVX": {
					"VMware7.1": "x86-64-E5-2697_v2",
				},
				"AVX2": {
					"GCE":            "x86-64-Haswell_GCE",
					"MacBookAir7.2":  "x86-64-i5-5350U",
					"MacBookPro11.5": "x86-64-i7-4870HQ",
					"NUC5i7RYH":      "x86-64-i7-5557U",
				},
				"AVX512": {
					"GCE":  "x86-64-Skylake_GCE",
					"Golo": "Intel64_Family_6_Model_85_Stepping_7__GenuineIntel",
				},
				"Rome": {
					"GCE": "x86-64-AMD_Rome_GCE",
				},
				"Snapdragon850": {
					"LenovoYogaC630": "arm64-64-Snapdragon850",
				},
				"SwiftShader": {
					"GCE": "x86-64-Haswell_GCE",
				},
			}[b.parts["cpu_or_gpu_value"]]
			if !ok {
				log.Fatalf("Entry %q not found in CPU mapping.", b.parts["cpu_or_gpu_value"])
			}
			cpu, ok := modelMapping[b.parts["model"]]
			if !ok {
				log.Fatalf("Entry %q not found in %q model mapping.", b.parts["model"], b.parts["cpu_or_gpu_value"])
			}
			d["cpu"] = cpu
			if b.model("GCE") && b.matchOs("Debian") {
				d["os"] = DEFAULT_OS_LINUX_GCE
			}
			if b.model("GCE") && d["cpu"] == "x86-64-Haswell_GCE" {
				d["machine_type"] = MACHINE_TYPE_MEDIUM
			}
		} else {
			if b.matchOs("Win") {
				gpu, ok := map[string]string{
					// At some point this might use the device ID, but for now it's like Chromebooks.
					"Adreno630":     "Adreno630",
					"GT610":         "10de:104a-23.21.13.9101",
					"GTX660":        "10de:11c0-26.21.14.4120",
					"GTX960":        "10de:1401-26.21.14.4120",
					"IntelHD4400":   "8086:0a16-20.19.15.4963",
					"IntelIris540":  "8086:1926-26.20.100.7463",
					"IntelIris6100": "8086:162b-20.19.15.4963",
					"IntelIris655":  "8086:3ea5-26.20.100.7463",
					"RadeonHD7770":  "1002:683d-26.20.13031.18002",
					"RadeonR9M470X": "1002:6646-26.20.13031.18002",
					"QuadroP400":    "10de:1cb3-25.21.14.1678",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in Win GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu
			} else if b.isLinux() {
				gpu, ok := map[string]string{
					// Intel drivers come from CIPD, so no need to specify the version here.
					"IntelBayTrail": "8086:0f31",
					"IntelHD2000":   "8086:0102",
					"IntelHD405":    "8086:22b1",
					"IntelIris640":  "8086:5926",
					"QuadroP400":    "10de:1cb3-430.14",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in Ubuntu GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu
			} else if b.matchOs("Mac") {
				gpu, ok := map[string]string{
					"IntelHD6000":   "8086:1626",
					"IntelHD615":    "8086:591e",
					"IntelIris5100": "8086:0a2e",
					"RadeonHD8870M": "1002:6821-4.0.20-3.2.8",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in Mac GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu
				// Yuck. We have two different types of MacMini7,1 with the same GPU but different CPUs.
				if b.gpu("IntelIris5100") {
					// Run all tasks on Golo machines for now.
					d["cpu"] = "x86-64-i7-4578U"
				}
			} else if b.os("ChromeOS") {
				version, ok := map[string]string{
					"MaliT604":           "10575.22.0",
					"MaliT764":           "10575.22.0",
					"MaliT860":           "10575.22.0",
					"PowerVRGX6250":      "10575.22.0",
					"TegraK1":            "10575.22.0",
					"IntelHDGraphics615": "10575.22.0",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in ChromeOS GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				d["gpu"] = b.parts["cpu_or_gpu_value"]
				d["release_version"] = version
			} else {
				log.Fatalf("Unknown GPU mapping for OS %q.", b.parts["os"])
			}
		}
	} else {
		d["gpu"] = "none"
		if d["os"] == DEFAULT_OS_LINUX_GCE {
			if b.extraConfig("CanvasKit", "CMake", "Docker", "PathKit") || b.role("BuildStats") {
				b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
				return
			}
			// Use many-core machines for Build tasks.
			b.linuxGceDimensions(MACHINE_TYPE_LARGE)
			return
		} else if d["os"] == DEFAULT_OS_WIN {
			// Windows CPU bots.
			d["cpu"] = "x86-64-Haswell_GCE"
			// Use many-core machines for Build tasks.
			d["machine_type"] = MACHINE_TYPE_LARGE
		} else if d["os"] == DEFAULT_OS_MAC {
			// Mac CPU bots.
			d["cpu"] = "x86-64-E5-2697_v2"
		}
	}

	dims := make([]string, 0, len(d))
	for k, v := range d {
		dims = append(dims, fmt.Sprintf("%s:%s", k, v))
	}
	sort.Strings(dims)
	b.dimension(dims...)
}
// relpath returns the relative path to the given file from the config file.
func (b *builder) relpath(f string) string {
	target := filepath.Join(b.relpathTargetDir, f)
	rv, err := filepath.Rel(b.relpathBaseDir, target)
	if err != nil {
		log.Fatal(err)
	}
return rv
}
// bundleRecipes generates the task to bundle and isolate the recipes. Returns
// the name of the task, which may be added as a dependency.
func (b *jobBuilder) bundleRecipes() string {
	b.addTask(BUNDLE_RECIPES_NAME, func(b *taskBuilder) {
		b.cipd(specs.CIPD_PKGS_GIT_LINUX_AMD64...)
		b.cipd(specs.CIPD_PKGS_PYTHON...)
		b.cmd("/bin/bash", "skia/infra/bots/bundle_recipes.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
		b.idempotent()
		b.isolate("recipes.isolate")
	})
return BUNDLE_RECIPES_NAME
}
// buildTaskDrivers generates the task to compile the task driver code to run on
// all platforms. Returns the name of the task, which may be added as a
// dependency.
func (b *jobBuilder) buildTaskDrivers() string {
	b.addTask(BUILD_TASK_DRIVERS_NAME, func(b *taskBuilder) {
		b.usesGo()
		b.cmd("/bin/bash", "skia/infra/bots/build_task_drivers.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
		b.idempotent()
		b.isolate("task_drivers.isolate")
	})
return BUILD_TASK_DRIVERS_NAME
}
// updateGoDeps generates the task to update Go dependencies.
func (b *jobBuilder) updateGoDeps() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.usesGo()
		b.asset("protoc")
		b.cmd(
			"./update_go_deps",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--gerrit_project", "skia",
			"--gerrit_url", "https://skia-review.googlesource.com",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
			"--alsologtostderr",
		)
		b.dep(b.buildTaskDrivers())
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
		b.isolate("empty.isolate")
		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
	})
}
// createDockerImage creates the specified docker image. Returns the name of the
// generated task.
func (b *jobBuilder) createDockerImage(wasm bool) string {
	// First, derive the name of the task.
	imageName := "skia-release"
	taskName := "Housekeeper-PerCommit-CreateDockerImage_Skia_Release"
	if wasm {
		imageName = "skia-wasm-release"
		taskName = "Housekeeper-PerCommit-CreateDockerImage_Skia_WASM_Release"
	}
	imageDir := path.Join("docker", imageName)

	// Add the task.
	b.addTask(taskName, func(b *taskBuilder) {
		// TODO(borenet): Make this task not use Git.
		b.usesGit()
		b.cmd(
			"./build_push_docker_image",
			"--image_name", fmt.Sprintf("gcr.io/skia-public/%s", imageName),
			"--dockerfile_dir", imageDir,
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--gerrit_project", "skia",
			"--gerrit_url", "https://skia-review.googlesource.com",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
			"--swarm_out_dir", specs.PLACEHOLDER_ISOLATED_OUTDIR,
			"--alsologtostderr",
		)
		b.dep(b.buildTaskDrivers())
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
		b.isolate("empty.isolate")
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.cache(CACHES_DOCKER...)
	})
	return taskName
}
// createPushAppsFromSkiaDockerImage creates and pushes docker images of some apps
// (eg: fiddler, debugger, api) using the skia-release docker image.
func (b *jobBuilder) createPushAppsFromSkiaDockerImage() {
	b.addTask(b.Name, func(b *taskBuilder) {
		// TODO(borenet): Make this task not use Git.
		b.usesGit()
		b.cmd(
			"./push_apps_from_skia_image",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--gerrit_project", "buildbot",
			"--gerrit_url", "https://skia-review.googlesource.com",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
			"--alsologtostderr",
		)
		b.dep(b.buildTaskDrivers())
		b.dep(b.createDockerImage(false))
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
		b.isolate("empty.isolate")
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.cache(CACHES_DOCKER...)
	})
}
// createPushAppsFromWASMDockerImage creates and pushes docker images of some apps
// (eg: jsfiddle, skottie, particles) using the skia-wasm-release docker image.
func (b *jobBuilder) createPushAppsFromWASMDockerImage() {
	b.addTask(b.Name, func(b *taskBuilder) {
		// TODO(borenet): Make this task not use Git.
		b.usesGit()
		b.cmd(
			"./push_apps_from_wasm_image",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--gerrit_project", "buildbot",
			"--gerrit_url", "https://skia-review.googlesource.com",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
			"--alsologtostderr",
		)
		b.dep(b.buildTaskDrivers())
		b.dep(b.createDockerImage(true))
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
		b.isolate("empty.isolate")
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.cache(CACHES_DOCKER...)
	})
}
// createPushAppsFromSkiaWASMDockerImages creates and pushes docker images of some apps
// (eg: debugger-assets) using the skia-release and skia-wasm-release
// docker images.
func (b *jobBuilder) createPushAppsFromSkiaWASMDockerImages() {
	b.addTask(b.Name, func(b *taskBuilder) {
		// TODO(borenet): Make this task not use Git.
		b.usesGit()
		b.cmd(
			"./push_apps_from_skia_wasm_images",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--gerrit_project", "buildbot",
			"--gerrit_url", "https://skia-review.googlesource.com",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
			"--alsologtostderr",
		)
		b.dep(b.buildTaskDrivers())
		b.dep(b.createDockerImage(false))
		b.dep(b.createDockerImage(true))
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
		b.isolate("empty.isolate")
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.cache(CACHES_DOCKER...)
	})
}

var iosRegex = regexp.MustCompile(`os:iOS-(.*)`)

func (b *taskBuilder) maybeAddIosDevImage() {
	for _, dim := range b.Spec.Dimensions {
		if m := iosRegex.FindStringSubmatch(dim); len(m) >= 2 {
			var asset string
			switch m[1] {
			// Other patch versions can be added to the same case.
			case "11.4.1":
				asset = "ios-dev-image-11.4"
			case "12.4.5":
				asset = "ios-dev-image-12.4"
			case "13.3.1":
				asset = "ios-dev-image-13.3"
			case "13.4.1":
				asset = "ios-dev-image-13.4"
			case "13.5.1":
				asset = "ios-dev-image-13.5"
			case "13.6":
				asset = "ios-dev-image-13.6"
			default:
				log.Fatalf("Unable to determine correct ios-dev-image asset for %s. If %s is a new iOS release, you must add a CIPD package containing the corresponding iOS dev image; see ios-dev-image-11.4 for an example.", b.Name, m[1])
			}
			b.asset(asset)
			break
		} else if strings.Contains(dim, "iOS") {
			log.Fatalf("Must specify iOS version for %s to obtain correct dev image; os dimension is missing version: %s", b.Name, dim)
}
}
}
// compile generates a compile task. Returns the name of the compile task.
func (b *jobBuilder) compile() string {
	name := b.deriveCompileTaskName()
	b.addTask(name, func(b *taskBuilder) {
		recipe := "compile"
		isolate := "compile.isolate"
		if b.extraConfig("NoDEPS", "CMake", "CommandBuffer", "Flutter", "SKQP") {
			recipe = "sync_and_compile"
			isolate = "swarm_recipe.isolate"
			b.recipeProps(EXTRA_PROPS)
			b.usesGit()
			if !b.extraConfig("NoDEPS") {
				b.cache(CACHES_WORKDIR...)
			}
		} else {
			b.idempotent()
		}
		b.kitchenTask(recipe, OUTPUT_BUILD)
		b.isolate(isolate)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.swarmDimensions()
		if b.extraConfig("Docker", "LottieWeb", "SKQP", "CMake") || b.compiler("EMCC") {
			b.usesDocker()
			b.cache(CACHES_DOCKER...)
		}

		// Android bots require a toolchain.
		if b.extraConfig("Android") {
			if b.matchOs("Mac") {
				b.asset("android_ndk_darwin")
			} else if b.matchOs("Win") {
				pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows")
				pkg.Path = "n"
				b.cipd(pkg)
			} else if !b.extraConfig("SKQP") {
				b.asset("android_ndk_linux")
			}
		} else if b.extraConfig("Chromebook") {
			b.asset("clang_linux")
			if b.arch("x86_64") {
				b.asset("chromebook_x86_64_gles")
			} else if b.arch("arm") {
				b.asset("armhf_sysroot")
				b.asset("chromebook_arm_gles")
			}
		} else if b.isLinux() {
			if b.compiler("Clang") {
				b.asset("clang_linux")
			}
			if b.extraConfig("SwiftShader") {
				b.asset("cmake_linux")
			}
			if b.extraConfig("OpenCL") {
				b.asset("opencl_headers", "opencl_ocl_icd_linux")
			}
			b.asset("ccache_linux")
			b.usesCCache()
		} else if b.matchOs("Win") {
			b.asset("win_toolchain")
			if b.compiler("Clang") {
				b.asset("clang_win")
			}
			if b.extraConfig("OpenCL") {
				b.asset("opencl_headers")
			}
		} else if b.matchOs("Mac") {
			b.cipd(CIPD_PKGS_XCODE...)
			b.Spec.Caches = append(b.Spec.Caches, &specs.Cache{
				Name: "xcode",
				Path: "cache/Xcode.app",
			})
			b.asset("ccache_mac")
			b.usesCCache()
			if b.extraConfig("CommandBuffer") {
				b.timeout(2 * time.Hour)
			}
			if b.extraConfig("iOS") {
				b.asset("provisioning_profile_ios")
			}
		}
	})

	// All compile tasks are runnable as their own Job. Assert that the Job
	// is listed in jobs.
	if !In(name, b.jobs) {
		log.Fatalf("Job %q is missing from the jobs list! Derived from: %q", name, b.Name)
	}
	return name
}
// recreateSKPs generates a RecreateSKPs task.
func (b *jobBuilder) recreateSKPs() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTask("recreate_skps", OUTPUT_NONE)
		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
		b.dimension(
			"pool:SkiaCT",
			fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
		)
		b.usesGo()
		b.cache(CACHES_WORKDIR...)
		b.timeout(4 * time.Hour)
	})
}
// checkGeneratedFiles verifies that no generated SKSL files have been edited
// by hand.
func (b *jobBuilder) checkGeneratedFiles() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTask("check_generated_files", OUTPUT_NONE)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
		b.usesGo()
		b.asset("clang_linux")
		b.asset("ccache_linux")
		b.usesCCache()
		b.cache(CACHES_WORKDIR...)
	})
}
// checkGnToBp verifies that the gn_to_bp.py script continues to work.
func (b *jobBuilder) checkGnToBp() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.isolate("compile.isolate")
		b.dep(b.buildTaskDrivers())
		b.cmd("./run_gn_to_bp",
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--alsologtostderr")
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.usesPython()
		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
	})
}
// housekeeper generates a Housekeeper task.
func (b *jobBuilder) housekeeper() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTask("housekeeper", OUTPUT_NONE)
		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.usesGit()
		b.cache(CACHES_WORKDIR...)
	})
}
// g3FrameworkCanary generates a G3 Framework Canary task.
func (b *jobBuilder) g3FrameworkCanary() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.isolate("empty.isolate")
		b.dep(b.buildTaskDrivers())
		b.cmd("./g3_canary",
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
			"--alsologtostderr")
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.serviceAccount("skia-g3-framework-compile@skia-swarming-bots.iam.gserviceaccount.com")
		b.timeout(3 * time.Hour)
		b.attempts(1)
	})
}
// infra generates an infra_tests task.
func (b *jobBuilder) infra() {
	b.addTask(b.Name, func(b *taskBuilder) {
		if b.matchOs("Win") || b.matchExtraConfig("Win") {
			b.dimension(
				// Specify CPU to avoid running builds on bots with a more unique CPU.
				"cpu:x86-64-Haswell_GCE",
				"gpu:none",
				fmt.Sprintf("machine_type:%s", MACHINE_TYPE_MEDIUM), // We don't have any small Windows instances.
				fmt.Sprintf("os:%s", DEFAULT_OS_WIN),
				fmt.Sprintf("pool:%s", b.cfg.Pool),
			)
		} else {
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		}
		b.recipeProp("repository", specs.PLACEHOLDER_REPO)
		b.kitchenTask("infra", OUTPUT_NONE)
		b.isolate("infra_tests.isolate")
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.cipd(specs.CIPD_PKGS_GSUTIL...)
		b.idempotent()
		// Repos which call into Skia's gen_tasks.go should define their own
		// infra_tests.isolate and therefore should not use relpath().
		b.Spec.Isolate = "infra_tests.isolate"
		b.usesGo()
	})
}
// buildstats generates a buildstats task, which compiles code and generates
// statistics about the build.
func (b *jobBuilder) buildstats() {
	compileTaskName := b.compile()
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTask("compute_buildstats", OUTPUT_PERF)
		b.dep(compileTaskName)
		b.asset("bloaty")
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.usesGit()
		b.cache(CACHES_WORKDIR...)
	})

	// Upload release results (for tracking in perf)
	// We have some jobs that are FYI (e.g. Debug-CanvasKit, tree-map generator)
	if b.release() && !b.arch("x86_64") {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
			b.recipeProps(EXTRA_PROPS)
			// TODO(borenet): I'm not sure why the upload task is
			// using the BuildStats task name, but I've done this
			// to maintain existing behavior.
			b.Name = depName
			b.kitchenTask("upload_buildstats_results", OUTPUT_NONE)
			b.Name = uploadName
			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.cipd(specs.CIPD_PKGS_GSUTIL...)
			b.dep(depName)
		})
	}
}
// doUpload indicates whether the given Job should upload its results.
func (b *jobBuilder) doUpload() bool {
	for _, s := range b.cfg.NoUpload {
		m, err := regexp.MatchString(s, b.Name)
		if err != nil {
			log.Fatal(err)
}
if m {
return false
}
}
return true
}
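
// Note that NoUpload entries are regular expressions matched anywhere in the
// job name (regexp.MatchString is unanchored), so a hypothetical entry such as
// "ASAN" would suppress the upload task for every job whose name contains
// "ASAN"; anchor the pattern with ^ and $ for an exact match.
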
// commonTestPerfAssets adds the assets needed by Test and Perf tasks.
func (b *taskBuilder) commonTestPerfAssets() {
	// Docker-based tests don't need the standard CIPD assets
	if b.extraConfig("CanvasKit", "PathKit") || (b.role("Test") && b.extraConfig("LottieWeb")) {
		return
	}
	if b.extraConfig("Skpbench") {
		// Skpbench only needs skps
		b.asset("skp", "mskp")
	} else if b.os("Android", "ChromeOS", "iOS") {
		b.asset("skp", "svg", "skimage")
	} else {
		// for desktop machines
		b.asset("skimage", "skp", "svg")
	}

	if b.isLinux() && b.matchExtraConfig("SAN") {
		b.asset("clang_linux")
	}

	if b.isLinux() {
		if b.extraConfig("Vulkan") {
			b.asset("linux_vulkan_sdk")
		}
		if b.matchGpu("Intel") {
			b.asset("mesa_intel_driver_linux")
		}
		if b.extraConfig("OpenCL") {
			b.asset("opencl_ocl_icd_linux", "opencl_intel_neo_linux")
		}
	}

	if b.matchOs("Win") && b.extraConfig("ProcDump") {
		b.asset("procdump_win")
	}
}
// dm generates a Test task using dm.
func ( b * jobBuilder ) dm ( ) {
2020-03-09 12:43:45 +00:00
compileTaskName := ""
// LottieWeb doesn't require anything in Skia to be compiled.
2020-04-28 11:14:22 +00:00
if ! b . extraConfig ( "LottieWeb" ) {
2020-03-09 12:43:45 +00:00
compileTaskName = b . compile ( )
}
b . addTask ( b . Name , func ( b * taskBuilder ) {
isolate := "test_skia_bundled.isolate"
recipe := "test"
if b . extraConfig ( "SKQP" ) {
isolate = "skqp.isolate"
recipe = "skqp_test"
if b . cpu ( "Emulator" ) {
recipe = "test_skqp_emulator"
}
} else if b . extraConfig ( "OpenCL" ) {
// TODO(dogben): Longer term we may not want this to be called a "Test" task, but until we start
// running hs_bench or kx, it will be easier to fit into the current job name schema.
recipe = "compute_test"
} else if b . extraConfig ( "PathKit" ) {
isolate = "pathkit.isolate"
recipe = "test_pathkit"
} else if b . extraConfig ( "CanvasKit" ) {
isolate = "canvaskit.isolate"
recipe = "test_canvaskit"
} else if b . extraConfig ( "LottieWeb" ) {
2020-07-06 15:07:34 +00:00
// lottie_ci.isolate differs from lottie_web.isolate in that it includes more of the files,
// especially those brought in via DEPS in the lottie-ci repo. The main difference between
// Perf.+LottieWeb and Test.+LottieWeb is that the former pulls in the lottie build via
// npm and the latter always tests at lottie's ToT.
isolate = "lottie_ci.isolate"
2020-03-09 12:43:45 +00:00
recipe = "test_lottie_web"
}
2020-03-16 17:49:33 +00:00
b . recipeProp ( "gold_hashes_url" , b . cfg . GoldHashesURL )
b . recipeProps ( EXTRA_PROPS )
2020-03-09 12:43:45 +00:00
iid := b . internalHardwareLabel ( )
iidStr := ""
if iid != nil {
iidStr = strconv . Itoa ( * iid )
2019-12-17 19:54:42 +00:00
}
2020-03-09 12:43:45 +00:00
if recipe == "test" {
2020-03-16 17:49:33 +00:00
b . dmFlags ( iidStr )
2019-07-16 13:19:10 +00:00
}
		b.kitchenTask(recipe, OUTPUT_TEST)
		b.isolate(isolate)
		b.swarmDimensions()
		if b.extraConfig("CanvasKit", "Docker", "LottieWeb", "PathKit", "SKQP") {
			b.usesDocker()
		}
		if compileTaskName != "" {
			b.dep(compileTaskName)
		}
		if b.os("Android") && b.extraConfig("ASAN") {
			b.asset("android_ndk_linux")
		}
		b.commonTestPerfAssets()
		if b.matchExtraConfig("Lottie") {
			b.asset("lottie-samples")
		}
		if b.extraConfig("SKQP") {
			if !b.cpu("Emulator") {
				b.asset("gcloud_linux")
			}
		}
		b.expiration(20 * time.Hour)
		b.timeout(4 * time.Hour)
		if b.extraConfig("Valgrind") {
			b.timeout(9 * time.Hour)
			b.expiration(48 * time.Hour)
			b.asset("valgrind")
			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
			// to ensure there are always bots free for CQ tasks.
			b.dimension("valgrind:1")
		} else if b.extraConfig("MSAN") {
			b.timeout(9 * time.Hour)
		} else if b.arch("x86") && b.debug() {
			// skia:6737
			b.timeout(6 * time.Hour)
		}
		b.maybeAddIosDevImage()
	})

	// Upload results if necessary. TODO(kjlubick): If we do coverage analysis at the same
	// time as normal tests (which would be nice), cfg.json needs to have Coverage removed.
	if b.doUpload() {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketGm)
			b.recipeProps(EXTRA_PROPS)
			// TODO(borenet): I'm not sure why the upload task is
			// using the Test task name, but I've done this
			// to maintain existing behavior.
			b.Name = depName
			b.kitchenTask("upload_dm_results", OUTPUT_NONE)
			b.Name = uploadName
			b.serviceAccount(b.cfg.ServiceAccountUploadGM)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.cipd(specs.CIPD_PKGS_GSUTIL...)
			b.dep(depName)
		})
	}
}

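// fm generates a Test task which runs the fm binary (built by the compile
// task) via the fm_driver task driver.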
func (b *jobBuilder) fm() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.isolate("test_skia_bundled.isolate")
		b.dep(b.buildTaskDrivers(), b.compile())
		b.cmd("./fm_driver",
			"--local=false",
			"--resources=skia/resources",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"build/fm")
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.swarmDimensions()
		b.expiration(15 * time.Minute)
		b.attempts(1)
	})
}

// canary generates a task that uses TaskDrivers to trigger canary manual rolls on autorollers.
// Canary-G3 does not use this path because it is very different from other autorollers.
func (b *jobBuilder) canary(rollerName string) {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.isolate("empty.isolate")
		b.dep(b.buildTaskDrivers())
		b.cmd("./canary",
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--roller_name", rollerName,
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
			"--alsologtostderr")
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.serviceAccount(b.cfg.ServiceAccountCanary)
		b.timeout(3 * time.Hour)
		b.attempts(1)
	})
}
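
// A sketch of how canary is expected to be invoked: the caller maps a Canary
// job name to its autoroller and passes the roller name in. The prefix check
// and roller name below are hypothetical; the real mapping lives in the job
// generation code, not here.
//
//	if strings.HasPrefix(b.Name, "Canary-") {
//		b.canary("example-autoroll")
//	}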

// puppeteer generates a task that uses TaskDrivers combined with a node script and puppeteer to
// benchmark something using Chromium (e.g. CanvasKit, LottieWeb).
func (b *jobBuilder) puppeteer() {
	compileTaskName := b.compile()
	b.addTask(b.Name, func(b *taskBuilder) {
		b.defaultSwarmDimensions()
		b.usesNode()
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.dep(b.buildTaskDrivers(), compileTaskName)
		b.output(OUTPUT_PERF)
		b.timeout(20 * time.Minute)
		b.isolate("perf_puppeteer.isolate")
		b.serviceAccount(b.cfg.ServiceAccountCompile)

		// Benchmark against WebGL 2 by default; jobs with the WebGL1 extra
		// config exercise the WebGL 1 path instead.
		webglversion := "2"
		if b.extraConfig("WebGL1") {
			webglversion = "1"
		}
if b . extraConfig ( "SkottieFrames" ) {
b . cmd (
"./perf_puppeteer_skottie_frames" ,
"--project_id" , "skia-swarming-bots" ,
"--git_hash" , specs . PLACEHOLDER_REVISION ,
"--task_id" , specs . PLACEHOLDER_TASK_ID ,
"--task_name" , b . Name ,
"--canvaskit_bin_path" , "./build" ,
"--lotties_path" , "./lotties_with_assets" ,
"--node_bin_path" , "./node/node/bin" ,
"--benchmark_path" , "./tools/perf-canvaskit-puppeteer" ,
"--output_path" , OUTPUT_PERF ,
"--os_trace" , b . parts [ "os" ] ,
"--model_trace" , b . parts [ "model" ] ,
"--cpu_or_gpu_trace" , b . parts [ "cpu_or_gpu" ] ,
"--cpu_or_gpu_value_trace" , b . parts [ "cpu_or_gpu_value" ] ,
2020-07-17 19:20:44 +00:00
"--webgl_version" , webglversion , // ignore when running with cpu backend
2020-06-10 19:10:16 +00:00
"--alsologtostderr" ,
)
// This CIPD package was made by hand with the following invocation:
// cipd create -name skia/internal/lotties_with_assets -in ./lotties/ -tag version:0
// cipd acl-edit skia/internal/lotties_with_assets -reader group:project-skia-external-task-accounts
// cipd acl-edit skia/internal/lotties_with_assets -reader user:pool-skia@chromium-swarm.iam.gserviceaccount.com
// Where lotties is a hand-selected set of lottie animations and (optionally) assets used in
// them (e.g. fonts, images).
b . cipd ( & specs . CipdPackage {
Name : "skia/internal/lotties_with_assets" ,
Path : "lotties_with_assets" ,
Version : "version:0" ,
} )
} else if b . extraConfig ( "RenderSKP" ) {
b . cmd (
"./perf_puppeteer_render_skps" ,
"--project_id" , "skia-swarming-bots" ,
"--git_hash" , specs . PLACEHOLDER_REVISION ,
"--task_id" , specs . PLACEHOLDER_TASK_ID ,
"--task_name" , b . Name ,
"--canvaskit_bin_path" , "./build" ,
"--skps_path" , "./skp" ,
"--node_bin_path" , "./node/node/bin" ,
"--benchmark_path" , "./tools/perf-canvaskit-puppeteer" ,
"--output_path" , OUTPUT_PERF ,
"--os_trace" , b . parts [ "os" ] ,
"--model_trace" , b . parts [ "model" ] ,
"--cpu_or_gpu_trace" , b . parts [ "cpu_or_gpu" ] ,
"--cpu_or_gpu_value_trace" , b . parts [ "cpu_or_gpu_value" ] ,
2020-07-17 19:20:44 +00:00
"--webgl_version" , webglversion ,
2020-06-10 19:10:16 +00:00
"--alsologtostderr" ,
)
b . asset ( "skp" )
2020-07-29 15:01:55 +00:00
} else if b . extraConfig ( "CanvasPerf" ) { // refers to the canvas_perf.js test suite
b . cmd (
"./perf_puppeteer_canvas" ,
"--project_id" , "skia-swarming-bots" ,
"--git_hash" , specs . PLACEHOLDER_REVISION ,
"--task_id" , specs . PLACEHOLDER_TASK_ID ,
"--task_name" , b . Name ,
"--canvaskit_bin_path" , "./build" ,
"--node_bin_path" , "./node/node/bin" ,
"--benchmark_path" , "./tools/perf-canvaskit-puppeteer" ,
"--output_path" , OUTPUT_PERF ,
"--os_trace" , b . parts [ "os" ] ,
"--model_trace" , b . parts [ "model" ] ,
"--cpu_or_gpu_trace" , b . parts [ "cpu_or_gpu" ] ,
"--cpu_or_gpu_value_trace" , b . parts [ "cpu_or_gpu_value" ] ,
"--webgl_version" , webglversion ,
"--alsologtostderr" ,
)
b . asset ( "skp" )
2020-06-10 19:10:16 +00:00
}
2020-05-01 18:16:27 +00:00
	})

	// Upload results to Perf after.
	// TODO(kjlubick,borenet) deduplicate this with the logic in perf().
	uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
	depName := b.Name
	b.addTask(uploadName, func(b *taskBuilder) {
		b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
		b.recipeProps(EXTRA_PROPS)
		// TODO(borenet): I'm not sure why the upload task is
		// using the Perf task name, but I've done this to
		// maintain existing behavior.
		b.Name = depName
		b.kitchenTask("upload_nano_results", OUTPUT_NONE)
		b.Name = uploadName
		b.serviceAccount(b.cfg.ServiceAccountUploadNano)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cipd(specs.CIPD_PKGS_GSUTIL...)
		b.dep(depName)
	})
}

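// cifuzz generates a task which runs the cifuzz task driver against a full
// checkout of the repo inside Docker, writing fuzzer output to cifuzz_out.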
func (b *jobBuilder) cifuzz() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.attempts(1)
		b.usesDocker()
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.cipd(specs.CIPD_PKGS_GIT_LINUX_AMD64...)
		b.dep(b.buildTaskDrivers())
		b.output("cifuzz_out")
		b.timeout(60 * time.Minute)
		b.isolate("whole_repo.isolate")
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.cmd(
			"./cifuzz",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--git_exe_path", "./cipd_bin_packages/git",
			"--out_path", "./cifuzz_out",
			"--skia_path", "./skia",
			"--work_path", "./cifuzz_work",
			"--alsologtostderr",
		)
	})
}

// perf generates a Perf task.
func (b *jobBuilder) perf() {
	compileTaskName := ""
	// LottieWeb doesn't require anything in Skia to be compiled.
	if !b.extraConfig("LottieWeb") {
		compileTaskName = b.compile()
	}
	doUpload := b.release() && b.doUpload()
	b.addTask(b.Name, func(b *taskBuilder) {
		recipe := "perf"
		isolate := "perf_skia_bundled.isolate"
		if b.extraConfig("Skpbench") {
			recipe = "skpbench"
			isolate = "skpbench_skia_bundled.isolate"
		} else if b.extraConfig("PathKit") {
			isolate = "pathkit.isolate"
			recipe = "perf_pathkit"
		} else if b.extraConfig("CanvasKit") {
			isolate = "canvaskit.isolate"
			recipe = "perf_canvaskit"
		} else if b.extraConfig("SkottieTracing") {
			recipe = "perf_skottietrace"
		} else if b.extraConfig("SkottieWASM") {
			recipe = "perf_skottiewasm_lottieweb"
			isolate = "skottie_wasm.isolate"
		} else if b.extraConfig("LottieWeb") {
			recipe = "perf_skottiewasm_lottieweb"
			isolate = "lottie_web.isolate"
		}
		b.recipeProps(EXTRA_PROPS)
		if recipe == "perf" {
			b.nanobenchFlags(doUpload)
		}
		b.kitchenTask(recipe, OUTPUT_PERF)
		b.isolate(isolate)
		b.swarmDimensions()
		if b.extraConfig("CanvasKit", "Docker", "PathKit") {
			b.usesDocker()
		}
		if compileTaskName != "" {
			b.dep(compileTaskName)
		}
		b.commonTestPerfAssets()
		b.expiration(20 * time.Hour)
		b.timeout(4 * time.Hour)

if b . extraConfig ( "Valgrind" ) {
b . timeout ( 9 * time . Hour )
b . expiration ( 48 * time . Hour )
b . asset ( "valgrind" )
// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
// to ensure there are always bots free for CQ tasks.
b . dimension ( "valgrind:1" )
} else if b . extraConfig ( "MSAN" ) {
b . timeout ( 9 * time . Hour )
} else if b . parts [ "arch" ] == "x86" && b . parts [ "configuration" ] == "Debug" {
// skia:6737
b . timeout ( 6 * time . Hour )
} else if b . extraConfig ( "LottieWeb" , "SkottieWASM" ) {
b . asset ( "node" , "lottie-samples" )
} else if b . matchExtraConfig ( "Skottie" ) {
b . asset ( "lottie-samples" )
}
2019-07-16 13:19:10 +00:00
2020-05-01 18:16:27 +00:00
if b . os ( "Android" ) && b . cpu ( ) {
b . asset ( "text_blob_traces" )
}
b . maybeAddIosDevImage ( )
iid := b . internalHardwareLabel ( )
if iid != nil {
b . Spec . Command = append ( b . Spec . Command , fmt . Sprintf ( "internal_hardware_label=%d" , * iid ) )
}
} )
2019-07-16 13:19:10 +00:00
2020-03-09 12:43:45 +00:00
	// Upload results if necessary.
	if doUpload {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
			b.recipeProps(EXTRA_PROPS)
			// TODO(borenet): I'm not sure why the upload task is
			// using the Perf task name, but I've done this to
			// maintain existing behavior.
			b.Name = depName
			b.kitchenTask("upload_nano_results", OUTPUT_NONE)
			b.Name = uploadName
			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.cipd(specs.CIPD_PKGS_GSUTIL...)
			b.dep(depName)
		})
	}
}

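// The upload blocks in dm(), puppeteer(), and perf() above are nearly
// identical, and the TODO in puppeteer() asks for them to be deduplicated.
// uploadResults below is an illustrative sketch of what a shared helper could
// look like; it is not called anywhere, and its name and parameter list are
// hypothetical. It assumes the taskBuilder methods behave exactly as used in
// the copies above. A caller such as perf() might then invoke
// b.uploadResults("upload_nano_results", b.cfg.GsBucketNano, b.cfg.ServiceAccountUploadNano).
func (b *jobBuilder) uploadResults(recipe, gsBucket, serviceAccount string) {
	uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
	depName := b.Name
	b.addTask(uploadName, func(b *taskBuilder) {
		b.recipeProp("gs_bucket", gsBucket)
		b.recipeProps(EXTRA_PROPS)
		// Mirror the existing copies: use the dependency's name while setting
		// up the kitchen task, then switch back to the upload name (see the
		// TODO in those copies for why).
		b.Name = depName
		b.kitchenTask(recipe, OUTPUT_NONE)
		b.Name = uploadName
		b.serviceAccount(serviceAccount)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cipd(specs.CIPD_PKGS_GSUTIL...)
		b.dep(depName)
	})
}
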
// presubmit generates a task which runs the presubmit for this repo.
func (b *jobBuilder) presubmit() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(map[string]string{
			"category":         "cq",
			"patch_gerrit_url": "https://skia-review.googlesource.com",
			"patch_project":    "skia",
			"patch_ref":        specs.PLACEHOLDER_PATCH_REF,
			"reason":           "CQ",
			"repo_name":        "skia",
		})
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTaskNoBundle("run_presubmit", OUTPUT_NONE)
		b.isolate("run_recipe.isolate")
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		// Use MACHINE_TYPE_LARGE because it seems to save time versus
		// MEDIUM and we want presubmit to be fast.
		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
		b.usesGit()
		b.cipd(&specs.CipdPackage{
			Name:    "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build",
			Path:    "recipe_bundle",
			Version: "git_revision:a8bcedad6768e206c4d2bd1718caa849f29cd42d",
		})
	})
}