Format scripts
Commit 41356aa00a (parent e9a0d3141b)

@@ -10,22 +10,23 @@ platform = os.environ.get('PLATFORM')
path = os.environ['PATH']
cmake_command = ['cmake', '-DFMT_PEDANTIC=ON', '-DCMAKE_BUILD_TYPE=' + config]
if build == 'mingw':
  cmake_command.append('-GMinGW Makefiles')
  build_command = ['mingw32-make', '-j4']
  test_command = ['mingw32-make', 'test']
  # Remove the path to Git bin directory from $PATH because it breaks
  # MinGW config.
  path = path.replace(r'C:\Program Files (x86)\Git\bin', '')
  os.environ['PATH'] = r'C:\MinGW\bin;' + path
else:
  # Add MSBuild 14.0 to PATH as described in
  # http://help.appveyor.com/discussions/problems/2229-v140-not-found-on-vs2105rc.
  os.environ['PATH'] = r'C:\Program Files (x86)\MSBuild\14.0\Bin;' + path
  generator = 'Visual Studio 14 2015'
  if platform == 'x64':
    generator += ' Win64'
  cmake_command.append('-G' + generator)
  build_command = ['cmake', '--build', '.', '--config', config, '--', '/m:4']
  test_command = ['ctest', '-C', config]

check_call(cmake_command)
check_call(build_command)

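For reference, a sketch of the command lists the two branches above produce; the 'Debug' config value here is hypothetical:

# Illustration only: 'Debug' stands in for the config value read earlier in the script.
config = 'Debug'
mingw_cmake_command = ['cmake', '-DFMT_PEDANTIC=ON',
                       '-DCMAKE_BUILD_TYPE=' + config, '-GMinGW Makefiles']
msvc_x64_cmake_command = ['cmake', '-DFMT_PEDANTIC=ON',
                          '-DCMAKE_BUILD_TYPE=' + config,
                          '-GVisual Studio 14 2015 Win64']
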
@@ -3,44 +3,44 @@
import contextlib, os, tempfile, timer, urllib2, urlparse

class Downloader:
  def __init__(self, dir=None):
    self.dir = dir

  # Downloads a file and removes it when exiting a block.
  # Usage:
  #   d = Downloader()
  #   with d.download(url) as f:
  #     use_file(f)
  def download(self, url, cookie=None):
    suffix = os.path.splitext(urlparse.urlsplit(url)[2])[1]
    fd, filename = tempfile.mkstemp(suffix=suffix, dir=self.dir)
    os.close(fd)
    with timer.print_time('Downloading', url, 'to', filename):
      opener = urllib2.build_opener()
      if cookie:
        opener.addheaders.append(('Cookie', cookie))
      num_tries = 2
      for i in range(num_tries):
        try:
          f = opener.open(url)
        except urllib2.URLError, e:
          print('Failed to open url', url)
          continue
        length = f.headers.get('content-length')
        if not length:
          print('Failed to get content-length')
          continue
        length = int(length)
        with open(filename, 'wb') as out:
          count = 0
          while count < length:
            data = f.read(1024 * 1024)
            count += len(data)
            out.write(data)
    @contextlib.contextmanager
    def remove(filename):
      try:
        yield filename
      finally:
        os.remove(filename)
    return remove(filename)

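A minimal usage sketch for the Downloader class above; the URL and cookie values are made up, and the downloaded file is removed automatically when the with block exits:

d = Downloader()
with d.download('http://example.com/archive.zip', cookie='session=abc') as f:
  print('Downloaded to', f)
# At this point the remove() context manager has already deleted the file.
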
@@ -139,7 +139,7 @@ if __name__ == '__main__':
  fmt_dir = os.path.join(workdir, 'fmt')
  branch = args.get('<branch>')
  if branch is None:
    branch = 'master'
  run('git', 'clone', '-b', branch, 'git@github.com:fmtlib/fmt.git', fmt_dir)

  # Convert changelog from RST to GitHub-flavored Markdown and get the version.

@@ -5,31 +5,31 @@ from contextlib import contextmanager
import timeit

class Timer:
  """
  A with statement based timer.
  Usage:
    t = Timer()
    with t:
      do_something()
    time = t.time
  """

  def __enter__(self):
    self.start = timeit.default_timer()

  def __exit__(self, type, value, traceback):
    finish = timeit.default_timer()
    self.time = finish - self.start


@contextmanager
def print_time(*args):
  """
  Measures and prints the time taken to execute nested code.
  args: Additional arguments to print.
  """
  t = Timer()
  print(*args)
  with t:
    yield
  print(*args, end=' ')
  print('finished in {0:.2f} second(s)'.format(t.time))

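A small illustration of how print_time is used by the download helper above; do_work is a placeholder:

import timer

def do_work():
  sum(range(10 ** 6))

# Prints 'Building docs', runs the block, then prints something like
# 'Building docs finished in 0.04 second(s)'.
with timer.print_time('Building', 'docs'):
  do_work()
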
@@ -6,34 +6,28 @@ import errno, os, re, shutil, sys, tempfile, urllib
from subprocess import call, check_call, check_output, CalledProcessError, \
  Popen, PIPE, STDOUT

def rmtree_if_exists(dir):
  try:
    shutil.rmtree(dir)
  except OSError as e:
    if e.errno == errno.ENOENT:
      pass

def makedirs_if_not_exist(dir):
  try:
    os.makedirs(dir)
  except OSError as e:
    if e.errno != errno.EEXIST:
      raise

def install_dependencies():
  branch = os.environ['TRAVIS_BRANCH']
  if branch != 'master':
    print('Branch: ' + branch)
    exit(0)  # Ignore non-master branches
  check_call('curl -s https://deb.nodesource.com/gpgkey/nodesource.gpg.key ' +
             '| sudo apt-key add -', shell=True)
  check_call('echo "deb https://deb.nodesource.com/node_0.10 precise main" ' +
             '| sudo tee /etc/apt/sources.list.d/nodesource.list', shell=True)
  check_call(['sudo', 'apt-get', 'update'])
  check_call(['sudo', 'apt-get', 'install', 'python-virtualenv', 'nodejs'])
  check_call(['npm', 'install', '-g', 'less', 'less-plugin-clean-css'])

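The two directory helpers above keep repeated builds from failing on pre-existing or missing paths; a brief sketch with hypothetical paths:

rmtree_if_exists('_build_old')        # no-op if the directory is missing
makedirs_if_not_exist('_build/test')  # no-op if it already exists
makedirs_if_not_exist('_build/test')  # calling it again is also fine
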
@@ -41,40 +35,48 @@ if build == 'Doc':
  urllib.urlretrieve('http://mirrors.kernel.org/ubuntu/pool/main/d/doxygen/' +
                     deb_file, deb_file)
  check_call(['sudo', 'dpkg', '-i', deb_file])

fmt_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

build = os.environ['BUILD']
if build == 'Doc':
  travis = 'TRAVIS' in os.environ
  if travis:
    install_dependencies()
  sys.path.insert(0, os.path.join(fmt_dir, 'doc'))
  import build
  build.create_build_env()
  html_dir = build.build_docs()
  repo = 'fmtlib.github.io'
  if travis and 'KEY' not in os.environ:
    # Don't update the repo if building on Travis from an account that
    # doesn't have push access.
    print('Skipping update of ' + repo)
    exit(0)
  # Clone the fmtlib.github.io repo.
  rmtree_if_exists(repo)
  git_url = 'https://github.com/' if travis else 'git@github.com:'
  check_call(['git', 'clone', git_url + 'fmtlib/{}.git'.format(repo)])
  # Copy docs to the repo.
  target_dir = os.path.join(repo, 'dev')
  rmtree_if_exists(target_dir)
  shutil.copytree(html_dir, target_dir, ignore=shutil.ignore_patterns('.*'))
  if travis:
    check_call(['git', 'config', '--global', 'user.name', 'amplbot'])
    check_call(['git', 'config', '--global', 'user.email', 'viz@ampl.com'])
  # Push docs to GitHub pages.
  check_call(['git', 'add', '--all'], cwd=repo)
  if call(['git', 'diff-index', '--quiet', 'HEAD'], cwd=repo):
    check_call(['git', 'commit', '-m', 'Update documentation'], cwd=repo)
  cmd = 'git push'
  if travis:
    cmd += ' https://$KEY@github.com/fmtlib/fmtlib.github.io.git master'
  p = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT, cwd=repo)
  # Print the output without the key.
  print(p.communicate()[0].replace(os.environ['KEY'], '$KEY'))
  if p.returncode != 0:
    raise CalledProcessError(p.returncode, cmd)
  exit(0)

standard = os.environ['STANDARD']
install_dir = os.path.join(fmt_dir, "_install")

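A standalone sketch of the token-masking step used when printing the push output above; the output text and key value are fabricated:

key = 'abc123'  # stands in for os.environ['KEY']
output = 'Pushing to https://' + key + '@github.com/fmtlib/fmtlib.github.io.git'
print(output.replace(key, '$KEY'))  # the token never appears in the build log
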
@@ -84,11 +86,13 @@ test_build_dir = os.path.join(fmt_dir, "_build_test")
# Configure library.
makedirs_if_not_exist(build_dir)
common_cmake_flags = [
  '-DCMAKE_INSTALL_PREFIX=' + install_dir, '-DCMAKE_BUILD_TYPE=' + build
]
extra_cmake_flags = []
if standard != '0x':
  extra_cmake_flags = [
    '-DCMAKE_CXX_FLAGS=-std=c++' + standard, '-DFMT_USE_CPP11=OFF'
  ]
check_call(['cmake', '-DFMT_DOC=OFF', '-DFMT_PEDANTIC=ON', fmt_dir] +
           common_cmake_flags + extra_cmake_flags, cwd=build_dir)

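Concretely, assuming hypothetical values BUILD='Debug' and STANDARD='98', the list passed to check_call above expands to roughly the following (with fmt_dir and install_dir as defined earlier):

expected_cmake_command = ['cmake', '-DFMT_DOC=OFF', '-DFMT_PEDANTIC=ON', fmt_dir,
                          '-DCMAKE_INSTALL_PREFIX=' + install_dir,
                          '-DCMAKE_BUILD_TYPE=Debug',
                          '-DCMAKE_CXX_FLAGS=-std=c++98', '-DFMT_USE_CPP11=OFF']
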
@@ -99,9 +103,9 @@ check_call(['make', '-j4'], cwd=build_dir)
env = os.environ.copy()
env['CTEST_OUTPUT_ON_FAILURE'] = '1'
if call(['make', 'test'], env=env, cwd=build_dir):
  with open('Testing/Temporary/LastTest.log', 'r') as f:
    print(f.read())
  sys.exit(-1)

# Install library.
check_call(['make', 'install'], cwd=build_dir)

@@ -8,23 +8,23 @@ import shutil, tempfile
from subprocess import check_output, STDOUT

class Git:
  def __init__(self, dir):
    self.dir = dir

  def __call__(self, *args):
    output = check_output(['git'] + list(args), cwd=self.dir, stderr=STDOUT)
    print(output)
    return output

dir = tempfile.mkdtemp()
try:
  git = Git(dir)
  git('clone', '-b', 'coverity', 'git@github.com:fmtlib/fmt.git', dir)
  output = git('merge', '-X', 'theirs', '--no-commit', 'origin/master')
  if 'Fast-forward' not in output:
    git('reset', 'HEAD', '.travis.yml')
    git('checkout', '--', '.travis.yml')
    git('commit', '-m', 'Update coverity branch')
  git('push')
finally:
  shutil.rmtree(dir)

@@ -5,30 +5,30 @@ from distutils.version import LooseVersion
from subprocess import check_call

class Git:
  def __init__(self, dir):
    self.dir = dir

  def call(self, method, args, **kwargs):
    return check_call(['git', method] + list(args), **kwargs)

  def clone(self, *args):
    return self.call('clone', list(args) + [self.dir])

  def checkout(self, *args):
    return self.call('checkout', args, cwd=self.dir)

  def clean(self, *args):
    return self.call('clean', args, cwd=self.dir)

  def reset(self, *args):
    return self.call('reset', args, cwd=self.dir)

  def pull(self, *args):
    return self.call('pull', args, cwd=self.dir)

  def update(self, *args):
    if not os.path.exists(self.dir):
      self.clone(*args)

# Import the documentation build module.
fmt_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

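A short usage sketch of this Git wrapper; the directory and URL below are placeholders:

repo = Git('_build/fmt')
repo.update('git@github.com:fmtlib/fmt.git')  # clones only if _build/fmt is missing
repo.checkout('3.0.0')
repo.clean('-f', '-d')
repo.reset('--hard')
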
@@ -47,58 +47,58 @@ doc_repo = Git(os.path.join(build_dir, 'fmtlib.github.io'))
doc_repo.update('git@github.com:fmtlib/fmtlib.github.io')

for version in ['1.0.0', '1.1.0', '2.0.0', '3.0.0']:
  fmt_repo.clean('-f', '-d')
  fmt_repo.reset('--hard')
  fmt_repo.checkout(version)
  target_doc_dir = os.path.join(fmt_repo.dir, 'doc')
  # Remove the old theme.
  for entry in os.listdir(target_doc_dir):
    path = os.path.join(target_doc_dir, entry)
    if os.path.isdir(path):
      shutil.rmtree(path)
  # Copy the new theme.
  for entry in ['_static', '_templates', 'basic-bootstrap', 'bootstrap',
                'conf.py', 'fmt.less']:
    src = os.path.join(fmt_dir, 'doc', entry)
    dst = os.path.join(target_doc_dir, entry)
    copy = shutil.copytree if os.path.isdir(src) else shutil.copyfile
    copy(src, dst)
  # Rename index to contents.
  contents = os.path.join(target_doc_dir, 'contents.rst')
  if not os.path.exists(contents):
    os.rename(os.path.join(target_doc_dir, 'index.rst'), contents)
  # Fix issues in reference.rst/api.rst.
  for filename in ['reference.rst', 'api.rst']:
    reference = os.path.join(target_doc_dir, filename)
    if not os.path.exists(reference):
      continue
    with open(reference) as f:
      data = f.read()
    data = data.replace('std::ostream &', 'std::ostream&')
    pat = re.compile('doxygenfunction.. (bin|oct|hexu|hex)$', re.MULTILINE)
    data = re.sub(pat, r'doxygenfunction:: \1(int)', data)
    data = data.replace('std::FILE*', 'std::FILE *')
    data = data.replace('unsigned int', 'unsigned')
    with open(reference, 'w') as f:
      f.write(data)
  # Build the docs.
  html_dir = os.path.join(build_dir, 'html')
  if os.path.exists(html_dir):
    shutil.rmtree(html_dir)
  include_dir = fmt_repo.dir
  if LooseVersion(version) >= LooseVersion('3.0.0'):
    include_dir = os.path.join(include_dir, 'fmt')
  build.build_docs(version, doc_dir=target_doc_dir,
                   include_dir=include_dir, work_dir=build_dir)
  shutil.rmtree(os.path.join(html_dir, '.doctrees'))
  # Create symlinks for older versions.
  for link, target in {'index': 'contents', 'api': 'reference'}.items():
    link = os.path.join(html_dir, link) + '.html'
    target += '.html'
    if os.path.exists(os.path.join(html_dir, target)) and \
       not os.path.exists(link):
      os.symlink(target, link)
  # Copy docs to the website.
  version_doc_dir = os.path.join(doc_repo.dir, version)
  shutil.rmtree(version_doc_dir)
  shutil.move(html_dir, version_doc_dir)

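To make the regular-expression fix above concrete, here is its effect on one sample directive line; the sample text is invented:

import re

sample = 'doxygenfunction:: hex\n'
pat = re.compile('doxygenfunction.. (bin|oct|hexu|hex)$', re.MULTILINE)
print(re.sub(pat, r'doxygenfunction:: \1(int)', sample))
# prints: doxygenfunction:: hex(int)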