#!/usr/bin/python

"""
Copyright 2013 Google Inc.

Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.

HTTP server for our HTML rebaseline viewer.
"""

# System-level imports
import argparse
import BaseHTTPServer
import json
import logging
import os
import posixpath
import re
import shutil
import socket
import subprocess
import thread
import threading
import time
import urllib
import urlparse

# Must fix up PYTHONPATH before importing from within Skia
import fix_pythonpath  # pylint: disable=W0611

# Imports from within Skia
from py.utils import gs_utils
import buildbot_globals
import gm_json

# Imports from local dir
#
# pylint: disable=C0301
# Note: we import results under a different name, to avoid confusion with the
# Server.results() property. See discussion at
# https://codereview.chromium.org/195943004/diff/1/gm/rebaseline_server/server.py#newcode44
# pylint: enable=C0301
import compare_configs
import compare_rendered_pictures
import compare_to_expectations
import download_actuals
import imagediffdb
import imagepairset
import results as results_mod

PATHSPLIT_RE = re.compile('/([^/]+)/(.+)')

# A simple dictionary of file name extensions to MIME types. The empty string
# entry is used as the default when no extension was given or if the extension
# has no entry in this dictionary.
MIME_TYPE_MAP = {'': 'application/octet-stream',
                 'html': 'text/html',
                 'css': 'text/css',
                 'png': 'image/png',
                 'js': 'application/javascript',
                 'json': 'application/json'
                 }

# Keys that server.py uses to create the toplevel content header.
# NOTE: Keep these in sync with static/constants.js
KEY__EDITS__MODIFICATIONS = 'modifications'
KEY__EDITS__OLD_RESULTS_HASH = 'oldResultsHash'
KEY__EDITS__OLD_RESULTS_TYPE = 'oldResultsType'

DEFAULT_ACTUALS_DIR = results_mod.DEFAULT_ACTUALS_DIR
DEFAULT_GM_SUMMARIES_BUCKET = download_actuals.GM_SUMMARIES_BUCKET
DEFAULT_JSON_FILENAME = download_actuals.DEFAULT_JSON_FILENAME
DEFAULT_PORT = 8888

PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
TRUNK_DIRECTORY = os.path.dirname(os.path.dirname(PARENT_DIRECTORY))

# Directory, relative to PARENT_DIRECTORY, within which the server will serve
# out static files.
STATIC_CONTENTS_SUBDIR = 'static'

# All of the GENERATED_*_SUBDIRS are relative to STATIC_CONTENTS_SUBDIR
GENERATED_HTML_SUBDIR = 'generated-html'
GENERATED_IMAGES_SUBDIR = 'generated-images'
GENERATED_JSON_SUBDIR = 'generated-json'

# Directives associated with various HTTP GET requests.
GET__LIVE_RESULTS = 'live-results'
GET__PRECOMPUTED_RESULTS = 'results'
GET__PREFETCH_RESULTS = 'prefetch'
GET__STATIC_CONTENTS = 'static'

# Parameters we use within do_GET_live_results() and do_GET_prefetch_results()
LIVE_PARAM__DOWNLOAD_ONLY_DIFFERING = 'downloadOnlyDifferingImages'
LIVE_PARAM__SET_A_DIR = 'setADir'
LIVE_PARAM__SET_A_SECTION = 'setASection'
LIVE_PARAM__SET_B_DIR = 'setBDir'
LIVE_PARAM__SET_B_SECTION = 'setBSection'

# How often (in seconds) clients should reload while waiting for initial
# results to load.
RELOAD_INTERVAL_UNTIL_READY = 10

_GM_SUMMARY_TYPES = [
    results_mod.KEY__HEADER__RESULTS_FAILURES,
    results_mod.KEY__HEADER__RESULTS_ALL,
]

# If --compare-configs is specified, compare these configs.
CONFIG_PAIRS_TO_COMPARE = [('8888', 'gpu')]

# SKP results that are available to compare.
#
# TODO(stephana): We don't actually want to maintain this list of platforms.
# We are just putting them in here for now, as "convenience" links for testing
# SKP diffs.
# Ultimately, we will depend on buildbot steps linking to their own diffs on
# the shared rebaseline_server instance.
_SKP_BASE_GS_URL = 'gs://' + buildbot_globals.Get('skp_summaries_bucket')
_SKP_BASE_REPO_URL = (
    compare_rendered_pictures.REPO_URL_PREFIX + posixpath.join(
        'expectations', 'skp'))
_SKP_PLATFORMS = [
    'Test-Mac10.8-MacMini4.1-GeForce320M-x86_64-Debug',
    'Test-Ubuntu12-ShuttleA-GTX660-x86-Release',
]

_HTTP_HEADER_CONTENT_LENGTH = 'Content-Length'
_HTTP_HEADER_CONTENT_TYPE = 'Content-Type'

_SERVER = None   # This gets filled in by main()


def _run_command(args, directory):
  """Runs a command and returns stdout as a single string.

  Args:
    args: the command to run, as a list of arguments
    directory: directory within which to run the command

  Returns: stdout, as a string

  Raises an Exception if the command failed (exited with nonzero return code).
  """
  logging.debug('_run_command: %s in directory %s' % (args, directory))
  proc = subprocess.Popen(args, cwd=directory,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE)
  (stdout, stderr) = proc.communicate()
  # Use != rather than "is not": identity comparison against an int literal
  # relies on CPython's small-int interning and is a common source of bugs.
  if proc.returncode != 0:
    raise Exception('command "%s" failed in dir "%s": %s' % (
        args, directory, stderr))
  return stdout


def _get_routable_ip_address():
  """Returns routable IP address of this host (the IP address of its network
  interface that would be used for most traffic, not its localhost
  interface).  See http://stackoverflow.com/a/166589
  """
  sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
  sock.connect(('8.8.8.8', 80))
  host = sock.getsockname()[0]
  sock.close()
  return host


def _create_index(file_path, config_pairs):
  """Creates an index file linking to all results available from this server.

  Prior to https://codereview.chromium.org/215503002 , we had a static
  index.html within our repo.  But now that the results may or may not include
  config comparisons, index.html needs to be generated differently depending
  on which results are included.

  TODO(epoger): Instead of including raw HTML within the Python code, consider
  restoring the index.html file as a template and using django (or similar)
  to fill in dynamic content.

  Args:
    file_path: path on local disk to write index to; any directory components
        of this path that do not already exist will be created
    config_pairs: what pairs of configs (if any) we compare actual results of
  """
  dir_path = os.path.dirname(file_path)
  if not os.path.isdir(dir_path):
    os.makedirs(dir_path)
  with open(file_path, 'w') as file_handle:
    file_handle.write(
        '<!DOCTYPE html><html>'
        '<head><title>rebaseline_server</title></head>'
        '<body><ul>')
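
    # Illustrative usage sketch of the helpers above, kept commented out so it
    # never runs at import time. It assumes git is installed and that
    # TRUNK_DIRECTORY is a git checkout; the 'index.html' filename below is
    # hypothetical and chosen only for illustration.
    #
    #   git_hash = _run_command(['git', 'rev-parse', 'HEAD'], TRUNK_DIRECTORY)
    #   index_path = os.path.join(
    #       PARENT_DIRECTORY, STATIC_CONTENTS_SUBDIR, GENERATED_HTML_SUBDIR,
    #       'index.html')
    #   _create_index(file_path=index_path,
    #                 config_pairs=CONFIG_PAIRS_TO_COMPARE)
    #   logging.info('index for revision %s served from http://%s:%d/' % (
    #       git_hash.strip(), _get_routable_ip_address(), DEFAULT_PORT))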