2013-09-27 15:02:44 +00:00
|
|
|
#!/usr/bin/python
|
|
|
|
|
2013-10-09 18:05:58 +00:00
|
|
|
"""
|
2013-09-27 15:02:44 +00:00
|
|
|
Copyright 2013 Google Inc.
|
|
|
|
|
|
|
|
Use of this source code is governed by a BSD-style license that can be
|
|
|
|
found in the LICENSE file.
|
|
|
|
|
|
|
|
HTTP server for our HTML rebaseline viewer.
|
2013-10-09 18:05:58 +00:00
|
|
|
"""
|
2013-09-27 15:02:44 +00:00
|
|
|
|
|
|
|
# System-level imports
|
|
|
|
import argparse
|
|
|
|
import BaseHTTPServer
|
|
|
|
import json
|
2013-10-11 18:45:33 +00:00
|
|
|
import logging
|
2013-09-27 15:02:44 +00:00
|
|
|
import os
|
|
|
|
import posixpath
|
|
|
|
import re
|
|
|
|
import shutil
|
2013-10-30 14:09:04 +00:00
|
|
|
import socket
|
2013-12-02 13:50:38 +00:00
|
|
|
import subprocess
|
2013-09-27 15:02:44 +00:00
|
|
|
import sys
|
2013-10-15 20:10:33 +00:00
|
|
|
import thread
|
2013-12-02 13:50:38 +00:00
|
|
|
import threading
|
2013-10-15 20:10:33 +00:00
|
|
|
import time
|
2013-10-11 18:45:33 +00:00
|
|
|
import urlparse
|
2013-09-27 15:02:44 +00:00
|
|
|
|
|
|
|
# Imports from within Skia
#
# We need to add the 'tools' directory, so that we can import svn.py within
# that directory.
# Make sure that the 'tools' dir is in the PYTHONPATH, but add it at the *end*
# so any dirs that are already in the PYTHONPATH will be preferred.
PARENT_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
TRUNK_DIRECTORY = os.path.dirname(os.path.dirname(PARENT_DIRECTORY))
TOOLS_DIRECTORY = os.path.join(TRUNK_DIRECTORY, 'tools')
if TOOLS_DIRECTORY not in sys.path:
  sys.path.append(TOOLS_DIRECTORY)
import svn

# Imports from local dir
import results

# SVN repository from which we check out the latest actual GM results.
ACTUALS_SVN_REPO = 'http://skia-autogen.googlecode.com/svn/gm-actual'
# Splits a request path of the form '/dispatcher/remainder' into its
# (dispatcher, remainder) components.
PATHSPLIT_RE = re.compile('/([^/]+)/(.+)')
# Directory holding the expected GM results within the Skia checkout.
EXPECTATIONS_DIR = os.path.join(TRUNK_DIRECTORY, 'expectations', 'gm')
GENERATED_IMAGES_ROOT = os.path.join(PARENT_DIRECTORY, 'static',
                                     'generated-images')

# A simple dictionary of file name extensions to MIME types. The empty string
# entry is used as the default when no extension was given or if the extension
# has no entry in this dictionary.
MIME_TYPE_MAP = {'': 'application/octet-stream',
                 'html': 'text/html',
                 'css': 'text/css',
                 'png': 'image/png',
                 'js': 'application/javascript',
                 'json': 'application/json'
                 }

DEFAULT_ACTUALS_DIR = '.gm-actuals'
DEFAULT_PORT = 8888

# How often (in seconds) clients should reload while waiting for initial
# results to load.
RELOAD_INTERVAL_UNTIL_READY = 10

_HTTP_HEADER_CONTENT_LENGTH = 'Content-Length'
_HTTP_HEADER_CONTENT_TYPE = 'Content-Type'

_SERVER = None   # This gets filled in by main()
|
|
|
|
|
2013-12-02 13:50:38 +00:00
|
|
|
|
|
|
|
def _run_command(args, directory):
|
|
|
|
"""Runs a command and returns stdout as a single string.
|
|
|
|
|
|
|
|
Args:
|
|
|
|
args: the command to run, as a list of arguments
|
|
|
|
directory: directory within which to run the command
|
|
|
|
|
|
|
|
Returns: stdout, as a string
|
|
|
|
|
|
|
|
Raises an Exception if the command failed (exited with nonzero return code).
|
|
|
|
"""
|
|
|
|
logging.debug('_run_command: %s in directory %s' % (args, directory))
|
|
|
|
proc = subprocess.Popen(args, cwd=directory,
|
|
|
|
stdout=subprocess.PIPE,
|
|
|
|
stderr=subprocess.PIPE)
|
|
|
|
(stdout, stderr) = proc.communicate()
|
|
|
|
if proc.returncode is not 0:
|
|
|
|
raise Exception('command "%s" failed in dir "%s": %s' %
|
|
|
|
(args, directory, stderr))
|
|
|
|
return stdout
|
|
|
|
|
|
|
|
|
2013-11-20 19:58:06 +00:00
|
|
|
def _get_routable_ip_address():
  """Returns routable IP address of this host (the IP address of its network
  interface that would be used for most traffic, not its localhost
  interface). See http://stackoverflow.com/a/166589

  Raises whatever socket.error connect() raises (e.g. if there is no route);
  the socket is closed even in that case.
  """
  sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
  try:
    # Connecting a UDP socket sends no packets; it only asks the OS which
    # local address would be used to reach this peer.
    sock.connect(('8.8.8.8', 80))
    host = sock.getsockname()[0]
  finally:
    # Fix: the original leaked the socket if connect() raised.
    sock.close()
  return host
|
|
|
|
|
2013-12-02 13:50:38 +00:00
|
|
|
|
2013-11-20 19:58:06 +00:00
|
|
|
def _create_svn_checkout(dir_path, repo_url):
  """Creates local checkout of an SVN repository at the specified directory
  path, returning an svn.Svn object referring to the local checkout.

  Args:
    dir_path: path to the local checkout; if this directory does not yet exist,
        it will be created and the repo will be checked out into it
    repo_url: URL of SVN repo to check out into dir_path (unless the local
        checkout already exists)

  Returns: an svn.Svn object referring to the local checkout.
  """
  checkout = svn.Svn(dir_path)
  already_checked_out = os.path.isdir(dir_path)
  if not already_checked_out:
    # First run: create the directory and populate it from the repo.
    os.makedirs(dir_path)
    checkout.Checkout(repo_url, '.')
  return checkout
|
|
|
|
|
2013-10-30 14:09:04 +00:00
|
|
|
|
2013-09-27 15:02:44 +00:00
|
|
|
class Server(object):
  """ HTTP server for our HTML rebaseline viewer.

  Holds the latest GM comparison results (self._results) and keeps them up to
  date on a background thread; HTTPRequestHandler reads them via the module
  global _SERVER.
  """

  def __init__(self,
               actuals_dir=DEFAULT_ACTUALS_DIR,
               port=DEFAULT_PORT, export=False, editable=True,
               reload_seconds=0):
    """
    Args:
      actuals_dir: directory under which we will check out the latest actual
          GM results
      port: which TCP port to listen on for HTTP requests
      export: whether to allow HTTP clients on other hosts to access this
          server
      editable: whether HTTP clients are allowed to submit new baselines
      reload_seconds: polling interval with which to check for new results;
          if 0, don't check for new results at all
    """
    self._actuals_dir = actuals_dir
    self._port = port
    self._export = export
    self._editable = editable
    self._reload_seconds = reload_seconds
    # Check out (or attach to an existing checkout of) the actual GM results.
    self._actuals_repo = _create_svn_checkout(
        dir_path=actuals_dir, repo_url=ACTUALS_SVN_REPO)

    # Reentrant lock that must be held whenever updating EITHER of:
    # 1. self._results
    # 2. the expected or actual results on local disk
    self.results_rlock = threading.RLock()
    # self._results will be filled in by calls to update_results()
    self._results = None

  @property
  def results(self):
    """ Returns the most recently generated results, or None if we don't have
    any valid results (update_results() has not completed yet). """
    return self._results

  @property
  def is_exported(self):
    """ Returns true iff HTTP clients on other hosts are allowed to access
    this server. """
    return self._export

  @property
  def is_editable(self):
    """ Returns true iff HTTP clients are allowed to submit new baselines. """
    return self._editable

  @property
  def reload_seconds(self):
    """ Returns the result reload period in seconds, or 0 if we don't reload
    results. """
    return self._reload_seconds

  def update_results(self, invalidate=False):
    """ Create or update self._results, based on the expectations in
    EXPECTATIONS_DIR and the latest actuals from skia-autogen.

    We hold self.results_rlock while we do this, to guarantee that no other
    thread attempts to update either self._results or the underlying files at
    the same time.

    Args:
      invalidate: if True, invalidate self._results immediately upon entry;
          otherwise, we will let readers see those results until we
          replace them
    """
    with self.results_rlock:
      if invalidate:
        self._results = None
      logging.info('Updating actual GM results in %s from SVN repo %s ...' % (
          self._actuals_dir, ACTUALS_SVN_REPO))
      self._actuals_repo.Update('.')

      # We only update the expectations dir if the server was run with a
      # nonzero --reload argument; otherwise, we expect the user to maintain
      # her own expectations as she sees fit.
      #
      # Because the Skia repo is moving from SVN to git, and git does not
      # support updating a single directory tree, we have to update the entire
      # repo checkout.
      #
      # Because Skia uses depot_tools, we have to update using "gclient sync"
      # instead of raw git (or SVN) update. Happily, this will work whether
      # the checkout was created using git or SVN.
      if self._reload_seconds:
        logging.info(
            'Updating expected GM results in %s by syncing Skia repo ...' %
            EXPECTATIONS_DIR)
        _run_command(['gclient', 'sync'], TRUNK_DIRECTORY)

      self._results = results.Results(
          actuals_root=self._actuals_dir,
          expected_root=EXPECTATIONS_DIR,
          generated_images_root=GENERATED_IMAGES_ROOT)

  def _result_loader(self, reload_seconds=0):
    """ Call self.update_results(), either once or periodically.

    Runs on a background thread started by run().

    Params:
      reload_seconds: integer; if nonzero, reload results at this interval
          (in which case, this method will never return!)
    """
    self.update_results()
    # NOTE(review): self._url is assigned by run() after it starts this
    # thread; update_results() above takes long enough (SVN update) that
    # _url is set in practice, but there is no explicit synchronization
    # guaranteeing it -- confirm.
    logging.info('Initial results loaded. Ready for requests on %s' % self._url)
    if reload_seconds:
      while True:
        time.sleep(reload_seconds)
        self.update_results()

  def run(self):
    """ Load results on a background thread, then serve HTTP requests forever
    on the configured port (binding to all interfaces iff --export). """
    arg_tuple = (self._reload_seconds,)  # start_new_thread needs a tuple,
                                         # even though it holds just one param
    thread.start_new_thread(self._result_loader, arg_tuple)

    if self._export:
      server_address = ('', self._port)
      host = _get_routable_ip_address()
      if self._editable:
        logging.warning('Running with combination of "export" and "editable" '
                        'flags. Users on other machines will '
                        'be able to modify your GM expectations!')
    else:
      host = '127.0.0.1'
      server_address = (host, self._port)
    http_server = BaseHTTPServer.HTTPServer(server_address, HTTPRequestHandler)
    # server_port (rather than self._port) reflects the actual bound port.
    self._url = 'http://%s:%d' % (host, http_server.server_port)
    logging.info('Listening for requests on %s' % self._url)
    http_server.serve_forever()
|
|
|
|
|
|
|
|
|
|
|
|
class HTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """ HTTP request handlers for various types of queries this server knows
  how to handle (static HTML and Javascript, expected/actual results, etc.)

  Reads server state through the module-level _SERVER global, which main()
  fills in before serving begins.
  """
  def do_GET(self):
    """
    Handles all GET requests, forwarding them to the appropriate
    do_GET_* dispatcher.

    If we see any Exceptions, return a 404. This fixes http://skbug.com/2147
    """
    try:
      logging.debug('do_GET: path="%s"' % self.path)
      if self.path == '' or self.path == '/' or self.path == '/index.html' :
        self.redirect_to('/static/index.html')
        return
      if self.path == '/favicon.ico' :
        self.redirect_to('/static/favicon.ico')
        return

      # All requests must be of this form:
      #   /dispatcher/remainder
      # where 'dispatcher' indicates which do_GET_* dispatcher to run
      # and 'remainder' is the remaining path sent to that dispatcher.
      normpath = posixpath.normpath(self.path)
      (dispatcher_name, remainder) = PATHSPLIT_RE.match(normpath).groups()
      dispatchers = {
        'results': self.do_GET_results,
        'static': self.do_GET_static,
      }
      dispatcher = dispatchers[dispatcher_name]
      dispatcher(remainder)
    except:
      self.send_error(404)
      # Re-raise so the stack trace still appears in the server log.
      raise

  def do_GET_results(self, type):
    """ Handle a GET request for GM results.

    Args:
      type: string indicating which set of results to return;
          must be one of the results.RESULTS_* constants
    """
    logging.debug('do_GET_results: sending results of type "%s"' % type)
    # Since we must make multiple calls to the Results object, grab a
    # reference to it in case it is updated to point at a new Results
    # object within another thread.
    #
    # TODO(epoger): Rather than using a global variable for the handler
    # to refer to the Server object, make Server a subclass of
    # HTTPServer, and then it could be available to the handler via
    # the handler's .server instance variable.
    results_obj = _SERVER.results
    if results_obj:
      response_dict = self.package_results(results_obj, type)
    else:
      # Results are still loading; tell the client when to check back.
      now = int(time.time())
      response_dict = {
          'header': {
            'resultsStillLoading': True,
            'timeUpdated': now,
            'timeNextUpdateAvailable': now + RELOAD_INTERVAL_UNTIL_READY,
          },
      }
    self.send_json_dict(response_dict)

  def package_results(self, results_obj, type):
    """ Given a nonempty "results" object, package it as a response_dict
    as needed within do_GET_results.

    Args:
      results_obj: nonempty "results" object
      type: string indicating which set of results to return;
          must be one of the results.RESULTS_* constants

    Returns: the response_dict (results plus a 'header' sub-dict).
    """
    response_dict = results_obj.get_results_of_type(type)
    time_updated = results_obj.get_timestamp()
    response_dict['header'] = {
      # Timestamps:
      # 1. when this data was last updated
      # 2. when the caller should check back for new data (if ever)
      #
      # We only return these timestamps if the --reload argument was passed;
      # otherwise, we have no idea when the expectations were last updated
      # (we allow the user to maintain her own expectations as she sees fit).
      'timeUpdated': time_updated if _SERVER.reload_seconds else None,
      'timeNextUpdateAvailable': (
          (time_updated+_SERVER.reload_seconds) if _SERVER.reload_seconds
          else None),

      # The type we passed to get_results_of_type()
      'type': type,

      # Hash of testData, which the client must return with any edits--
      # this ensures that the edits were made to a particular dataset.
      'dataHash': str(hash(repr(response_dict['testData']))),

      # Whether the server will accept edits back.
      'isEditable': _SERVER.is_editable,

      # Whether the service is accessible from other hosts.
      'isExported': _SERVER.is_exported,
    }
    return response_dict

  def do_GET_static(self, path):
    """ Handle a GET request for a file under the 'static' directory.
    Only allow serving of files within the 'static' directory that is a
    filesystem sibling of this script.

    Args:
      path: path to file (under static directory) to retrieve
    """
    # Strip arguments ('?resultsToLoad=all') from the path
    path = urlparse.urlparse(path).path

    logging.debug('do_GET_static: sending file "%s"' % path)
    static_dir = os.path.realpath(os.path.join(PARENT_DIRECTORY, 'static'))
    full_path = os.path.realpath(os.path.join(static_dir, path))
    # realpath() has resolved '..' components, so this prefix check blocks
    # path-traversal escapes out of static_dir.
    if full_path.startswith(static_dir):
      self.send_file(full_path)
    else:
      logging.error(
          'Attempted do_GET_static() of path [%s] outside of static dir [%s]'
          % (full_path, static_dir))
      self.send_error(404)

  def do_POST(self):
    """ Handles all POST requests, forwarding them to the appropriate
    do_POST_* dispatcher. """
    # All requests must be of this form:
    #   /dispatcher
    # where 'dispatcher' indicates which do_POST_* dispatcher to run.
    logging.debug('do_POST: path="%s"' % self.path)
    normpath = posixpath.normpath(self.path)
    dispatchers = {
      '/edits': self.do_POST_edits,
    }
    try:
      dispatcher = dispatchers[normpath]
      dispatcher()
      self.send_response(200)
    except:
      self.send_error(404)
      # Re-raise so the stack trace still appears in the server log.
      raise

  def do_POST_edits(self):
    """ Handle a POST request with modifications to GM expectations, in this
    format:

    {
      'oldResultsType': 'all',  # type of results that the client loaded
                                # and then made modifications to
      'oldResultsHash': 39850913, # hash of results when the client loaded them
                                  # (ensures that the client and server apply
                                  # modifications to the same base)
      'modifications': [
        {
          'builder': 'Test-Android-Nexus10-MaliT604-Arm7-Debug',
          'test': 'strokerect',
          'config': 'gpu',
          'expectedHashType': 'bitmap-64bitMD5',
          'expectedHashDigest': '1707359671708613629',
        },
        ...
      ],
    }

    Raises an Exception if there were any problems.
    """
    if not _SERVER.is_editable:
      raise Exception('this server is not running in --editable mode')

    content_type = self.headers[_HTTP_HEADER_CONTENT_TYPE]
    if content_type != 'application/json;charset=UTF-8':
      raise Exception('unsupported %s [%s]' % (
          _HTTP_HEADER_CONTENT_TYPE, content_type))

    content_length = int(self.headers[_HTTP_HEADER_CONTENT_LENGTH])
    json_data = self.rfile.read(content_length)
    data = json.loads(json_data)
    logging.debug('do_POST_edits: received new GM expectations data [%s]' %
                  data)

    # Update the results on disk with the information we received from the
    # client.
    # We must hold _SERVER.results_rlock while we do this, to guarantee that
    # no other thread updates expectations (from the Skia repo) while we are
    # updating them (using the info we received from the client).
    with _SERVER.results_rlock:
      oldResultsType = data['oldResultsType']
      oldResults = _SERVER.results.get_results_of_type(oldResultsType)
      # Must match the 'dataHash' computation in package_results().
      oldResultsHash = str(hash(repr(oldResults['testData'])))
      if oldResultsHash != data['oldResultsHash']:
        raise Exception('results of type "%s" changed while the client was '
                        'making modifications. The client should reload the '
                        'results and submit the modifications again.' %
                        oldResultsType)
      _SERVER.results.edit_expectations(data['modifications'])

    # Read the updated results back from disk.
    # We can do this in a separate thread; we should return our success message
    # to the UI as soon as possible.
    thread.start_new_thread(_SERVER.update_results, (True,))

  def redirect_to(self, url):
    """ Redirect the HTTP client to a different url.

    Args:
      url: URL to redirect the HTTP client to
    """
    self.send_response(301)
    self.send_header('Location', url)
    self.end_headers()

  def send_file(self, path):
    """ Send the contents of the file at this path, with a mimetype based
    on the filename extension.

    Args:
      path: path of file whose contents to send to the HTTP client
    """
    # Grab the extension if there is one
    extension = os.path.splitext(path)[1]
    if len(extension) >= 1:
      # Drop the leading '.' that splitext() leaves on the extension.
      extension = extension[1:]

    # Determine the MIME type of the file from its extension
    mime_type = MIME_TYPE_MAP.get(extension, MIME_TYPE_MAP[''])

    # Open the file and send it over HTTP
    if os.path.isfile(path):
      with open(path, 'rb') as sending_file:
        self.send_response(200)
        self.send_header('Content-type', mime_type)
        self.end_headers()
        self.wfile.write(sending_file.read())
    else:
      self.send_error(404)

  def send_json_dict(self, json_dict):
    """ Send the contents of this dictionary in JSON format, with a JSON
    mimetype.

    Args:
      json_dict: dictionary to send
    """
    self.send_response(200)
    self.send_header('Content-type', 'application/json')
    self.end_headers()
    json.dump(json_dict, self.wfile)
|
|
|
|
|
|
|
|
|
|
|
|
def main():
  """Parse command-line flags, configure logging, build the global Server
  instance, and run it (never returns)."""
  logging.basicConfig(level=logging.INFO,
                      format='%(asctime)s %(levelname)s %(message)s',
                      datefmt='%m/%d/%Y %H:%M:%S')

  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument(
      '--actuals-dir', default=DEFAULT_ACTUALS_DIR,
      help=('Directory into which we will check out the latest actual GM '
            'results. If this directory does not exist, it will be created. '
            'Defaults to %(default)s'))
  arg_parser.add_argument(
      '--editable', action='store_true',
      help=('Allow HTTP clients to submit new baselines.'))
  arg_parser.add_argument(
      '--export', action='store_true',
      help=('Instead of only allowing access from HTTP clients on localhost, '
            'allow HTTP clients on other hosts to access this server. '
            'WARNING: doing so will allow users on other hosts to modify your '
            'GM expectations, if combined with --editable.'))
  arg_parser.add_argument(
      '--port', type=int, default=DEFAULT_PORT,
      help=('Which TCP port to listen on for HTTP requests; '
            'defaults to %(default)s'))
  arg_parser.add_argument(
      '--reload', type=int, default=0,
      help=('How often (a period in seconds) to update the results. '
            'If specified, both expected and actual results will be updated '
            'by running "gclient sync" on your Skia checkout as a whole. '
            'By default, we do not reload at all, and you '
            'must restart the server to pick up new data.'))
  args = arg_parser.parse_args()

  # Request handlers find the server through this module-level global.
  global _SERVER
  _SERVER = Server(actuals_dir=args.actuals_dir,
                   port=args.port,
                   export=args.export,
                   editable=args.editable,
                   reload_seconds=args.reload)
  _SERVER.run()
|
|
|
|
|
2013-12-02 13:50:38 +00:00
|
|
|
|
2013-09-27 15:02:44 +00:00
|
|
|
# Only start the server when run as a script, not when imported as a module.
if __name__ == '__main__':
  main()
|