style: format python files with isort and double-quote-string-fixer

Author: Fu Hanxi
Date: 2021-01-26 10:49:01 +08:00
parent dc8402ea61
commit 0146f258d7
276 changed files with 8241 additions and 8162 deletions
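
The two formatters named in the subject line make purely mechanical rewrites: isort groups and alphabetizes imports (standard library first, third-party packages after), and double-quote-string-fixer (a pre-commit-hooks hook) rewrites double-quoted string literals as single-quoted ones wherever no extra escaping is needed. A minimal before/after sketch of the effect, reusing names that appear in the hunks below (illustrative only, not taken verbatim from this commit):

    # Before: imports in arbitrary order, strings double-quoted
    import sys
    import re
    from packaging import version
    import os

    SPHINX_WARN_LOG = "sphinx-warning-log.txt"

    # After isort (stdlib imports grouped and alphabetized, third-party
    # imports in their own group) and double-quote-string-fixer (single
    # quotes wherever no escaping is required):
    import os
    import re
    import sys

    from packaging import version

    SPHINX_WARN_LOG = 'sphinx-warning-log.txt'

The per-file hunks that follow show these two kinds of change applied across the docs tooling.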


@@ -24,31 +24,33 @@
# limitations under the License.
#
from __future__ import print_function
import argparse
import locale
import math
import multiprocessing
import os
import os.path
import re
import subprocess
import sys
import re
from packaging import version
from collections import namedtuple
LANGUAGES = ["en", "zh_CN"]
TARGETS = ["esp32", "esp32s2"]
from packaging import version
SPHINX_WARN_LOG = "sphinx-warning-log.txt"
SPHINX_SANITIZED_LOG = "sphinx-warning-log-sanitized.txt"
SPHINX_KNOWN_WARNINGS = os.path.join(os.environ["IDF_PATH"], "docs", "sphinx-known-warnings.txt")
LANGUAGES = ['en', 'zh_CN']
TARGETS = ['esp32', 'esp32s2']
DXG_WARN_LOG = "doxygen-warning-log.txt"
DXG_SANITIZED_LOG = "doxygen-warning-log-sanitized.txt"
DXG_KNOWN_WARNINGS = os.path.join(os.environ["IDF_PATH"], "docs", "doxygen-known-warnings.txt")
SPHINX_WARN_LOG = 'sphinx-warning-log.txt'
SPHINX_SANITIZED_LOG = 'sphinx-warning-log-sanitized.txt'
SPHINX_KNOWN_WARNINGS = os.path.join(os.environ['IDF_PATH'], 'docs', 'sphinx-known-warnings.txt')
DXG_WARN_LOG = 'doxygen-warning-log.txt'
DXG_SANITIZED_LOG = 'doxygen-warning-log-sanitized.txt'
DXG_KNOWN_WARNINGS = os.path.join(os.environ['IDF_PATH'], 'docs', 'doxygen-known-warnings.txt')
DXG_CI_VERSION = version.parse('1.8.11')
LogMessage = namedtuple("LogMessage", "original_text sanitized_text")
LogMessage = namedtuple('LogMessage', 'original_text sanitized_text')
languages = LANGUAGES
targets = TARGETS
@@ -58,11 +60,11 @@ def main():
# check Python dependencies for docs
try:
subprocess.check_call([sys.executable,
os.path.join(os.environ["IDF_PATH"],
"tools",
"check_python_dependencies.py"),
"-r",
"{}/docs/requirements.txt".format(os.environ["IDF_PATH"])
os.path.join(os.environ['IDF_PATH'],
'tools',
'check_python_dependencies.py'),
'-r',
'{}/docs/requirements.txt'.format(os.environ['IDF_PATH'])
])
except subprocess.CalledProcessError:
raise SystemExit(2) # stdout will already have these errors
@@ -73,31 +75,31 @@ def main():
# type not the str type.
if ('UTF-8' not in locale.getlocale()) and ('utf8' not in locale.getlocale()):
raise RuntimeError("build_docs.py requires the default locale's encoding to be UTF-8.\n" +
" - Linux. Setting environment variable LC_ALL=C.UTF-8 when running build_docs.py may be " +
"enough to fix this.\n"
" - Windows. Possible solution for the Windows 10 starting version 1803. Go to " +
"Control Panel->Clock and Region->Region->Administrative->Change system locale...; " +
"Check `Beta: Use Unicode UTF-8 for worldwide language support` and reboot")
' - Linux. Setting environment variable LC_ALL=C.UTF-8 when running build_docs.py may be ' +
'enough to fix this.\n'
' - Windows. Possible solution for the Windows 10 starting version 1803. Go to ' +
'Control Panel->Clock and Region->Region->Administrative->Change system locale...; ' +
'Check `Beta: Use Unicode UTF-8 for worldwide language support` and reboot')
parser = argparse.ArgumentParser(description='build_docs.py: Build IDF docs', prog='build_docs.py')
parser.add_argument("--language", "-l", choices=LANGUAGES, required=False)
parser.add_argument("--target", "-t", choices=TARGETS, required=False)
parser.add_argument("--build-dir", "-b", type=str, default="_build")
parser.add_argument("--source-dir", "-s", type=str, default="")
parser.add_argument("--builders", "-bs", nargs='+', type=str, default=["html"],
help="List of builders for Sphinx, e.g. html or latex, for latex a PDF is also generated")
parser.add_argument("--sphinx-parallel-builds", "-p", choices=["auto"] + [str(x) for x in range(8)],
help="Parallel Sphinx builds - number of independent Sphinx builds to run", default="auto")
parser.add_argument("--sphinx-parallel-jobs", "-j", choices=["auto"] + [str(x) for x in range(8)],
help="Sphinx parallel jobs argument - number of threads for each Sphinx build to use", default="1")
parser.add_argument("--input-docs", "-i", nargs='+', default=[""],
help="List of documents to build relative to the doc base folder, i.e. the language folder. Defaults to all documents")
parser.add_argument('--language', '-l', choices=LANGUAGES, required=False)
parser.add_argument('--target', '-t', choices=TARGETS, required=False)
parser.add_argument('--build-dir', '-b', type=str, default='_build')
parser.add_argument('--source-dir', '-s', type=str, default='')
parser.add_argument('--builders', '-bs', nargs='+', type=str, default=['html'],
help='List of builders for Sphinx, e.g. html or latex, for latex a PDF is also generated')
parser.add_argument('--sphinx-parallel-builds', '-p', choices=['auto'] + [str(x) for x in range(8)],
help='Parallel Sphinx builds - number of independent Sphinx builds to run', default='auto')
parser.add_argument('--sphinx-parallel-jobs', '-j', choices=['auto'] + [str(x) for x in range(8)],
help='Sphinx parallel jobs argument - number of threads for each Sphinx build to use', default='1')
parser.add_argument('--input-docs', '-i', nargs='+', default=[''],
help='List of documents to build relative to the doc base folder, i.e. the language folder. Defaults to all documents')
action_parsers = parser.add_subparsers(dest='action')
build_parser = action_parsers.add_parser('build', help='Build documentation')
build_parser.add_argument("--check-warnings-only", "-w", action='store_true')
build_parser.add_argument('--check-warnings-only', '-w', action='store_true')
action_parsers.add_parser('linkcheck', help='Check links (a current IDF revision should be uploaded to GitHub)')
@@ -107,27 +109,27 @@ def main():
global languages
if args.language is None:
print("Building all languages")
print('Building all languages')
languages = LANGUAGES
else:
languages = [args.language]
global targets
if args.target is None:
print("Building all targets")
print('Building all targets')
targets = TARGETS
else:
targets = [args.target]
if args.action == "build" or args.action is None:
if args.action == 'build' or args.action is None:
if args.action is None:
args.check_warnings_only = False
sys.exit(action_build(args))
if args.action == "linkcheck":
if args.action == 'linkcheck':
sys.exit(action_linkcheck(args))
if args.action == "gh-linkcheck":
if args.action == 'gh-linkcheck':
sys.exit(action_gh_linkcheck(args))
@@ -135,7 +137,7 @@ def parallel_call(args, callback):
num_sphinx_builds = len(languages) * len(targets)
num_cpus = multiprocessing.cpu_count()
if args.sphinx_parallel_builds == "auto":
if args.sphinx_parallel_builds == 'auto':
# at most one sphinx build per CPU, up to the number of CPUs
args.sphinx_parallel_builds = min(num_sphinx_builds, num_cpus)
else:
@@ -143,17 +145,17 @@ def parallel_call(args, callback):
# Force -j1 because sphinx works incorrectly
args.sphinx_parallel_jobs = 1
if args.sphinx_parallel_jobs == "auto":
if args.sphinx_parallel_jobs == 'auto':
# N CPUs per build job, rounded up - (maybe smarter to round down to avoid contention, idk)
args.sphinx_parallel_jobs = int(math.ceil(num_cpus / args.sphinx_parallel_builds))
else:
args.sphinx_parallel_jobs = int(args.sphinx_parallel_jobs)
print("Will use %d parallel builds and %d jobs per build" % (args.sphinx_parallel_builds, args.sphinx_parallel_jobs))
print('Will use %d parallel builds and %d jobs per build' % (args.sphinx_parallel_builds, args.sphinx_parallel_jobs))
pool = multiprocessing.Pool(args.sphinx_parallel_builds)
if args.sphinx_parallel_jobs > 1:
print("WARNING: Sphinx parallel jobs currently produce incorrect docs output with Sphinx 1.8.5")
print('WARNING: Sphinx parallel jobs currently produce incorrect docs output with Sphinx 1.8.5')
# make a list of all combinations of build_docs() args as tuples
#
@@ -173,13 +175,13 @@ def parallel_call(args, callback):
is_error = False
for ret in errcodes:
if ret != 0:
print("\nThe following language/target combinations failed to build:")
print('\nThe following language/target combinations failed to build:')
is_error = True
break
if is_error:
for ret, entry in zip(errcodes, entries):
if ret != 0:
print("language: %s, target: %s, errcode: %d" % (entry[0], entry[1], ret))
print('language: %s, target: %s, errcode: %d' % (entry[0], entry[1], ret))
# Don't re-throw real error code from each parallel process
return 1
else:
@@ -193,9 +195,9 @@ def sphinx_call(language, target, build_dir, src_dir, sphinx_parallel_jobs, buil
# wrap stdout & stderr in a way that lets us see which build_docs instance they come from
#
# this doesn't apply to subprocesses, they write to OS stdout & stderr so no prefix appears
prefix = "%s/%s: " % (language, target)
prefix = '%s/%s: ' % (language, target)
print("Building in build_dir: %s" % (build_dir))
print('Building in build_dir: %s' % (build_dir))
try:
os.makedirs(build_dir)
except OSError:
@@ -205,21 +207,21 @@ def sphinx_call(language, target, build_dir, src_dir, sphinx_parallel_jobs, buil
environ.update(os.environ)
environ['BUILDDIR'] = build_dir
args = [sys.executable, "-u", "-m", "sphinx.cmd.build",
"-j", str(sphinx_parallel_jobs),
"-b", buildername,
"-d", os.path.join(build_dir, "doctrees"),
"-w", SPHINX_WARN_LOG,
"-t", target,
"-D", "idf_target={}".format(target),
"-D", "docs_to_build={}".format(",". join(input_docs)),
args = [sys.executable, '-u', '-m', 'sphinx.cmd.build',
'-j', str(sphinx_parallel_jobs),
'-b', buildername,
'-d', os.path.join(build_dir, 'doctrees'),
'-w', SPHINX_WARN_LOG,
'-t', target,
'-D', 'idf_target={}'.format(target),
'-D', 'docs_to_build={}'.format(','. join(input_docs)),
src_dir,
os.path.join(build_dir, buildername) # build directory
]
saved_cwd = os.getcwd()
os.chdir(build_dir) # also run sphinx in the build directory
print("Running '%s'" % (" ".join(args)))
print("Running '%s'" % (' '.join(args)))
ret = 1
try:
@@ -282,7 +284,7 @@ def call_build_docs(entry):
# Build PDF from tex
if 'latex' in builders:
latex_dir = os.path.join(build_dir, "latex")
latex_dir = os.path.join(build_dir, 'latex')
ret = build_pdf(language, target, latex_dir)
return ret
@@ -294,9 +296,9 @@ def build_pdf(language, target, latex_dir):
# wrap stdout & stderr in a way that lets us see which build_docs instance they come from
#
# this doesn't apply to subprocesses, they write to OS stdout & stderr so no prefix appears
prefix = "%s/%s: " % (language, target)
prefix = '%s/%s: ' % (language, target)
print("Building PDF in latex_dir: %s" % (latex_dir))
print('Building PDF in latex_dir: %s' % (latex_dir))
saved_cwd = os.getcwd()
os.chdir(latex_dir)
@@ -337,8 +339,8 @@ def build_pdf(language, target, latex_dir):
return ret
SANITIZE_FILENAME_REGEX = re.compile("[^:]*/([^/:]*)(:.*)")
SANITIZE_LINENUM_REGEX = re.compile("([^:]*)(:[0-9]+:)(.*)")
SANITIZE_FILENAME_REGEX = re.compile('[^:]*/([^/:]*)(:.*)')
SANITIZE_LINENUM_REGEX = re.compile('([^:]*)(:[0-9]+:)(.*)')
def sanitize_line(line):
@@ -376,12 +378,12 @@ def check_docs(language, target, log_file, known_warnings_file, out_sanitized_lo
for known_line in k:
known_messages.append(known_line)
if "doxygen" in known_warnings_file:
if 'doxygen' in known_warnings_file:
# Clean a known Doxygen limitation: it's expected to always document anonymous
# structs/unions but we don't do this in our docs, so filter these all out with a regex
# (this won't match any named field, only anonymous members -
# ie the last part of the field is is just <something>::@NUM not <something>::name)
RE_ANONYMOUS_FIELD = re.compile(r".+:line: warning: parameters of member [^:\s]+(::[^:\s]+)*(::@\d+)+ are not \(all\) documented")
RE_ANONYMOUS_FIELD = re.compile(r'.+:line: warning: parameters of member [^:\s]+(::[^:\s]+)*(::@\d+)+ are not \(all\) documented')
all_messages = [msg for msg in all_messages if not re.match(RE_ANONYMOUS_FIELD, msg.sanitized_text)]
# Collect all new messages that are not match with the known messages.
@@ -395,17 +397,17 @@ def check_docs(language, target, log_file, known_warnings_file, out_sanitized_lo
new_messages.append(msg)
if new_messages:
print("\n%s/%s: Build failed due to new/different warnings (%s):\n" % (language, target, log_file))
print('\n%s/%s: Build failed due to new/different warnings (%s):\n' % (language, target, log_file))
for msg in new_messages:
print("%s/%s: %s" % (language, target, msg.original_text), end='')
print("\n%s/%s: (Check files %s and %s for full details.)" % (language, target, known_warnings_file, log_file))
print('%s/%s: %s' % (language, target, msg.original_text), end='')
print('\n%s/%s: (Check files %s and %s for full details.)' % (language, target, known_warnings_file, log_file))
return 1
return 0
def action_linkcheck(args):
args.builders = "linkcheck"
args.builders = 'linkcheck'
return parallel_call(args, call_linkcheck)
@@ -416,49 +418,49 @@ def call_linkcheck(entry):
# https://github.com/espressif/esp-idf/tree/
# https://github.com/espressif/esp-idf/blob/
# https://github.com/espressif/esp-idf/raw/
GH_LINK_RE = r"https://github.com/espressif/esp-idf/(?:tree|blob|raw)/[^\s]+"
GH_LINK_RE = r'https://github.com/espressif/esp-idf/(?:tree|blob|raw)/[^\s]+'
# we allow this one doc, because we always want users to see the latest support policy
GH_LINK_ALLOWED = ["https://github.com/espressif/esp-idf/blob/master/SUPPORT_POLICY.md",
"https://github.com/espressif/esp-idf/blob/master/SUPPORT_POLICY_CN.md"]
GH_LINK_ALLOWED = ['https://github.com/espressif/esp-idf/blob/master/SUPPORT_POLICY.md',
'https://github.com/espressif/esp-idf/blob/master/SUPPORT_POLICY_CN.md']
def action_gh_linkcheck(args):
print("Checking for hardcoded GitHub links\n")
print('Checking for hardcoded GitHub links\n')
github_links = []
docs_dir = os.path.relpath(os.path.dirname(__file__))
for root, _, files in os.walk(docs_dir):
if "_build" in root:
if '_build' in root:
continue
files = [os.path.join(root, f) for f in files if f.endswith(".rst")]
files = [os.path.join(root, f) for f in files if f.endswith('.rst')]
for path in files:
with open(path, "r") as f:
with open(path, 'r') as f:
for link in re.findall(GH_LINK_RE, f.read()):
if link not in GH_LINK_ALLOWED:
github_links.append((path, link))
if github_links:
for path, link in github_links:
print("%s: %s" % (path, link))
print("WARNING: Some .rst files contain hardcoded Github links.")
print("Please check above output and replace links with one of the following:")
print("- :idf:`dir` - points to directory inside ESP-IDF")
print("- :idf_file:`file` - points to file inside ESP-IDF")
print("- :idf_raw:`file` - points to raw view of the file inside ESP-IDF")
print("- :component:`dir` - points to directory inside ESP-IDF components dir")
print("- :component_file:`file` - points to file inside ESP-IDF components dir")
print("- :component_raw:`file` - points to raw view of the file inside ESP-IDF components dir")
print("- :example:`dir` - points to directory inside ESP-IDF examples dir")
print("- :example_file:`file` - points to file inside ESP-IDF examples dir")
print("- :example_raw:`file` - points to raw view of the file inside ESP-IDF examples dir")
print("These link types will point to the correct GitHub version automatically")
print('%s: %s' % (path, link))
print('WARNING: Some .rst files contain hardcoded Github links.')
print('Please check above output and replace links with one of the following:')
print('- :idf:`dir` - points to directory inside ESP-IDF')
print('- :idf_file:`file` - points to file inside ESP-IDF')
print('- :idf_raw:`file` - points to raw view of the file inside ESP-IDF')
print('- :component:`dir` - points to directory inside ESP-IDF components dir')
print('- :component_file:`file` - points to file inside ESP-IDF components dir')
print('- :component_raw:`file` - points to raw view of the file inside ESP-IDF components dir')
print('- :example:`dir` - points to directory inside ESP-IDF examples dir')
print('- :example_file:`file` - points to file inside ESP-IDF examples dir')
print('- :example_raw:`file` - points to raw view of the file inside ESP-IDF examples dir')
print('These link types will point to the correct GitHub version automatically')
return 1
else:
print("No hardcoded links found")
print('No hardcoded links found')
return 0
if __name__ == "__main__":
if __name__ == '__main__':
main()


@@ -14,17 +14,17 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import print_function
from __future__ import unicode_literals
import sys
from __future__ import print_function, unicode_literals
import os
import os.path
import re
import subprocess
from sanitize_version import sanitize_version
from idf_extensions.util import download_file_if_missing
from get_github_rev import get_github_rev
import sys
from get_github_rev import get_github_rev
from idf_extensions.util import download_file_if_missing
from sanitize_version import sanitize_version
# build_docs on the CI server sometimes fails under Python3. This is a workaround:
sys.setrecursionlimit(3500)
@@ -242,7 +242,7 @@ versions_url = 'https://dl.espressif.com/dl/esp-idf/idf_versions.js'
idf_targets = ['esp32', 'esp32s2']
languages = ['en', 'zh_CN']
project_homepage = "https://github.com/espressif/esp-idf"
project_homepage = 'https://github.com/espressif/esp-idf'
# -- Options for HTML output ----------------------------------------------
@@ -250,11 +250,11 @@ project_homepage = "https://github.com/espressif/esp-idf"
#
# Redirects should be listed in page_redirects.xt
#
with open("../page_redirects.txt") as f:
lines = [re.sub(" +", " ", line.strip()) for line in f.readlines() if line.strip() != "" and not line.startswith("#")]
with open('../page_redirects.txt') as f:
lines = [re.sub(' +', ' ', line.strip()) for line in f.readlines() if line.strip() != '' and not line.startswith('#')]
for line in lines: # check for well-formed entries
if len(line.split(' ')) != 2:
raise RuntimeError("Invalid line in page_redirects.txt: %s" % line)
raise RuntimeError('Invalid line in page_redirects.txt: %s' % line)
html_redirect_pages = [tuple(line.split(' ')) for line in lines]
# The theme to use for HTML and HTML Help pages. See the documentation for
@@ -264,10 +264,10 @@ html_theme = 'sphinx_idf_theme'
# context used by sphinx_idf_theme
html_context = {
"display_github": True, # Add 'Edit on Github' link instead of 'View page source'
"github_user": "espressif",
"github_repo": "esp-idf",
"github_version": get_github_rev(),
'display_github': True, # Add 'Edit on Github' link instead of 'View page source'
'github_user': 'espressif',
'github_repo': 'esp-idf',
'github_version': get_github_rev(),
}
# Theme options are theme-specific and customize the look and feel of a theme
@@ -287,7 +287,7 @@ html_context = {
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "../_static/espressif-logo.svg"
html_logo = '../_static/espressif-logo.svg'
# The name of an image file (within the static path) to use as favicon of the
@@ -380,7 +380,7 @@ latex_elements = {
# The name of an image file (relative to this directory) to place at the bottom of
# the title page.
latex_logo = "../_static/espressif2.pdf"
latex_logo = '../_static/espressif2.pdf'
latex_engine = 'xelatex'
latex_use_xindy = False
@@ -427,7 +427,7 @@ def setup(app):
app.add_stylesheet('theme_overrides.css')
# these two must be pushed in by build_docs.py
if "idf_target" not in app.config:
if 'idf_target' not in app.config:
app.add_config_value('idf_target', None, 'env')
app.add_config_value('idf_targets', None, 'env')
@@ -436,8 +436,8 @@ def setup(app):
# Breathe extension variables (depend on build_dir)
# note: we generate into xml_in and then copy_if_modified to xml dir
app.config.breathe_projects = {"esp32-idf": os.path.join(app.config.build_dir, "xml_in/")}
app.config.breathe_default_project = "esp32-idf"
app.config.breathe_projects = {'esp32-idf': os.path.join(app.config.build_dir, 'xml_in/')}
app.config.breathe_default_project = 'esp32-idf'
setup_diag_font(app)
@@ -455,13 +455,13 @@ def setup_config_values(app, config):
app.add_config_value('idf_target_title_dict', idf_target_title_dict, 'env')
pdf_name = "esp-idf-{}-{}-{}".format(app.config.language, app.config.version, app.config.idf_target)
pdf_name = 'esp-idf-{}-{}-{}'.format(app.config.language, app.config.version, app.config.idf_target)
app.add_config_value('pdf_file', pdf_name, 'env')
def setup_html_context(app, config):
# Setup path for 'edit on github'-link
config.html_context['conf_py_path'] = "/docs/{}/".format(app.config.language)
config.html_context['conf_py_path'] = '/docs/{}/'.format(app.config.language)
def setup_diag_font(app):
@@ -476,7 +476,7 @@ def setup_diag_font(app):
font_dir = os.path.join(config_dir, '_static')
assert os.path.exists(font_dir)
print("Downloading font file %s for %s" % (font_name, app.config.language))
print('Downloading font file %s for %s' % (font_name, app.config.language))
download_file_if_missing('https://dl.espressif.com/dl/esp-idf/docs/_static/{}'.format(font_name), font_dir)
font_path = os.path.abspath(os.path.join(font_dir, font_name))


@@ -9,8 +9,8 @@
try:
from conf_common import * # noqa: F403,F401
except ImportError:
import sys
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
from conf_common import * # noqa: F403,F401


@@ -53,22 +53,22 @@ def create_redirect_pages(app):
return # only relevant for standalone HTML output
for (old_url, new_url) in app.config.html_redirect_pages:
print("Creating redirect %s to %s..." % (old_url, new_url))
print('Creating redirect %s to %s...' % (old_url, new_url))
if old_url.startswith('/'):
print("Stripping leading / from URL in config file...")
print('Stripping leading / from URL in config file...')
old_url = old_url[1:]
new_url = app.builder.get_relative_uri(old_url, new_url)
out_file = app.builder.get_outfilename(old_url)
print("HTML file %s redirects to relative URL %s" % (out_file, new_url))
print('HTML file %s redirects to relative URL %s' % (out_file, new_url))
out_dir = os.path.dirname(out_file)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
content = REDIRECT_TEMPLATE.replace("$NEWURL", new_url)
content = REDIRECT_TEMPLATE.replace('$NEWURL', new_url)
with open(out_file, "w") as rp:
with open(out_file, 'w') as rp:
rp.write(content)
return []


@@ -1,4 +1,5 @@
import re
from docutils import nodes
from docutils.parsers.rst import Directive


@@ -1,5 +1,6 @@
# Based on https://stackoverflow.com/a/46600038 with some modifications
import re
from sphinx.directives.other import TocTree


@@ -18,14 +18,14 @@ import argparse
import datetime as dt
import json
import numpy as np
import requests
import matplotlib.dates
import matplotlib.patches as mpatches
import matplotlib.pyplot as plt
from matplotlib.dates import MONTHLY, DateFormatter, RRuleLocator, rrulewrapper
import numpy as np
import requests
from dateutil import parser
from dateutil.relativedelta import relativedelta
from matplotlib.dates import MONTHLY, DateFormatter, RRuleLocator, rrulewrapper
class Version(object):
@@ -68,18 +68,18 @@ class ChartVersions(object):
def get_releases_as_json(self):
return {
x.version_name: {
"start_date": x.get_start_date().strftime("%Y-%m-%d"),
"end_service": x.get_end_service_date().strftime("%Y-%m-%d"),
"end_date": x.get_end_of_life_date().strftime("%Y-%m-%d")
'start_date': x.get_start_date().strftime('%Y-%m-%d'),
'end_service': x.get_end_service_date().strftime('%Y-%m-%d'),
'end_date': x.get_end_of_life_date().strftime('%Y-%m-%d')
} for x in self.sorted_releases_supported
}
@staticmethod
def parse_chart_releases_from_js(js_as_string):
return json.loads(js_as_string[js_as_string.find("RELEASES: ") + len("RELEASES: "):js_as_string.rfind("};")])
return json.loads(js_as_string[js_as_string.find('RELEASES: ') + len('RELEASES: '):js_as_string.rfind('};')])
def _get_all_version_from_url(self, url=None, filename=None):
releases_file = requests.get(url).text if url is not None else "".join(open(filename).readlines())
releases_file = requests.get(url).text if url is not None else ''.join(open(filename).readlines())
return self.parse_chart_releases_from_js(releases_file)
def _get_releases_from_url(self, url=None, filename=None):
@@ -178,7 +178,7 @@ class ChartVersions(object):
rule = rrulewrapper(MONTHLY, interval=x_ax_interval)
loc = RRuleLocator(rule)
formatter = DateFormatter("%b %Y")
formatter = DateFormatter('%b %Y')
ax.xaxis.set_major_locator(loc)
ax.xaxis.set_major_formatter(formatter)
@@ -198,19 +198,19 @@ class ChartVersions(object):
bbox_to_anchor=(1.01, 1.165), loc='upper right')
fig.set_size_inches(11, 5, forward=True)
plt.savefig(output_chart_name + output_chart_extension, bbox_inches='tight')
print("Saved into " + output_chart_name + output_chart_extension)
print('Saved into ' + output_chart_name + output_chart_extension)
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser(
description="Create chart of version support. Set the url or filename with versions."
"If you set both filename and url the script will prefer filename.")
arg_parser.add_argument("--url", metavar="URL", default="https://dl.espressif.com/dl/esp-idf/idf_versions.js")
arg_parser.add_argument("--filename",
help="Set the name of the source file, if is set, the script ignores the url.")
arg_parser.add_argument("--output-format", help="Set the output format of the image.", default="svg")
arg_parser.add_argument("--output-file", help="Set the name of the output file.", default="docs/chart")
description='Create chart of version support. Set the url or filename with versions.'
'If you set both filename and url the script will prefer filename.')
arg_parser.add_argument('--url', metavar='URL', default='https://dl.espressif.com/dl/esp-idf/idf_versions.js')
arg_parser.add_argument('--filename',
help='Set the name of the source file, if is set, the script ignores the url.')
arg_parser.add_argument('--output-format', help='Set the output format of the image.', default='svg')
arg_parser.add_argument('--output-file', help='Set the name of the output file.', default='docs/chart')
args = arg_parser.parse_args()
ChartVersions(url=args.url if args.filename is None else None, filename=args.filename).create_chart(
output_chart_extension="." + args.output_format.lower()[-3:], output_chart_name=args.output_file)
output_chart_extension='.' + args.output_format.lower()[-3:], output_chart_name=args.output_file)


@@ -6,11 +6,11 @@
#
# Then emits the new 'idf-info' event which has information read from IDF
# build system, that other extensions can use to generate relevant data.
import json
import os.path
import shutil
import sys
import subprocess
import json
import sys
# this directory also contains the dummy IDF project
project_path = os.path.abspath(os.path.dirname(__file__))
@@ -23,7 +23,7 @@ def setup(app):
# Setup some common paths
try:
build_dir = os.environ["BUILDDIR"] # TODO see if we can remove this
build_dir = os.environ['BUILDDIR'] # TODO see if we can remove this
except KeyError:
build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
@@ -43,7 +43,7 @@ def setup(app):
except KeyError:
idf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..'))
app.add_config_value('docs_root', os.path.join(idf_path, "docs"), 'env')
app.add_config_value('docs_root', os.path.join(idf_path, 'docs'), 'env')
app.add_config_value('idf_path', idf_path, 'env')
app.add_config_value('build_dir', build_dir, 'env') # not actually an IDF thing
app.add_event('idf-info')
@@ -55,43 +55,43 @@ def setup(app):
def generate_idf_info(app, config):
print("Running CMake on dummy project to get build info...")
print('Running CMake on dummy project to get build info...')
build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
cmake_build_dir = os.path.join(build_dir, "build_dummy_project")
idf_py_path = os.path.join(app.config.idf_path, "tools", "idf.py")
print("Running idf.py...")
cmake_build_dir = os.path.join(build_dir, 'build_dummy_project')
idf_py_path = os.path.join(app.config.idf_path, 'tools', 'idf.py')
print('Running idf.py...')
idf_py = [sys.executable,
idf_py_path,
"-B",
'-B',
cmake_build_dir,
"-C",
'-C',
project_path,
"-D",
"SDKCONFIG={}".format(os.path.join(build_dir, "dummy_project_sdkconfig"))
'-D',
'SDKCONFIG={}'.format(os.path.join(build_dir, 'dummy_project_sdkconfig'))
]
# force a clean idf.py build w/ new sdkconfig each time
# (not much slower than 'reconfigure', avoids any potential config & build versioning problems
shutil.rmtree(cmake_build_dir, ignore_errors=True)
print("Starting new dummy IDF project... ")
print('Starting new dummy IDF project... ')
if (app.config.idf_target in PREVIEW_TARGETS):
subprocess.check_call(idf_py + ["--preview", "set-target", app.config.idf_target])
subprocess.check_call(idf_py + ['--preview', 'set-target', app.config.idf_target])
else:
subprocess.check_call(idf_py + ["set-target", app.config.idf_target])
subprocess.check_call(idf_py + ['set-target', app.config.idf_target])
print("Running CMake on dummy project...")
subprocess.check_call(idf_py + ["reconfigure"])
print('Running CMake on dummy project...')
subprocess.check_call(idf_py + ['reconfigure'])
with open(os.path.join(cmake_build_dir, "project_description.json")) as f:
with open(os.path.join(cmake_build_dir, 'project_description.json')) as f:
project_description = json.load(f)
if project_description["target"] != app.config.idf_target:
if project_description['target'] != app.config.idf_target:
# this shouldn't really happen unless someone has been moving around directories inside _build, as
# the cmake_build_dir path should be target-specific
raise RuntimeError(("Error configuring the dummy IDF project for {}. " +
"Target in project description is {}. " +
"Is build directory contents corrupt?")
.format(app.config.idf_target, project_description["target"]))
raise RuntimeError(('Error configuring the dummy IDF project for {}. ' +
'Target in project description is {}. ' +
'Is build directory contents corrupt?')
.format(app.config.idf_target, project_description['target']))
app.emit('idf-info', project_description)
return []


@@ -1,5 +1,5 @@
# Extension to generate esp_err definition as .rst
from .util import copy_if_modified, call_with_python
from .util import call_with_python, copy_if_modified
def setup(app):


@@ -35,8 +35,8 @@ def build_subset(app, config):
# Get all docs that will be built
docs = [filename for filename in get_matching_files(app.srcdir, compile_matchers(exclude_docs))]
if not docs:
raise ValueError("No documents to build")
print("Building a subset of the documents: {}".format(docs))
raise ValueError('No documents to build')
print('Building a subset of the documents: {}'.format(docs))
# Sphinx requires a master document, if there is a document name 'index' then we pick that
index_docs = [doc for doc in docs if 'index' in doc]


@@ -1,9 +1,10 @@
import re
import os
import os.path
import re
from docutils import io, nodes, statemachine, utils
from docutils.utils.error_reporting import SafeString, ErrorString
from docutils.parsers.rst import directives
from docutils.utils.error_reporting import ErrorString, SafeString
from sphinx.directives.other import Include as BaseInclude
from sphinx.util import logging
@@ -73,26 +74,26 @@ class StringSubstituter:
def init_sub_strings(self, config):
self.target_name = config.idf_target
self.add_pair("{IDF_TARGET_NAME}", self.TARGET_NAMES[config.idf_target])
self.add_pair("{IDF_TARGET_PATH_NAME}", config.idf_target)
self.add_pair("{IDF_TARGET_TOOLCHAIN_NAME}", self.TOOLCHAIN_NAMES[config.idf_target])
self.add_pair("{IDF_TARGET_CFG_PREFIX}", self.CONFIG_PREFIX[config.idf_target])
self.add_pair("{IDF_TARGET_TRM_EN_URL}", self.TRM_EN_URL[config.idf_target])
self.add_pair("{IDF_TARGET_TRM_CN_URL}", self.TRM_CN_URL[config.idf_target])
self.add_pair('{IDF_TARGET_NAME}', self.TARGET_NAMES[config.idf_target])
self.add_pair('{IDF_TARGET_PATH_NAME}', config.idf_target)
self.add_pair('{IDF_TARGET_TOOLCHAIN_NAME}', self.TOOLCHAIN_NAMES[config.idf_target])
self.add_pair('{IDF_TARGET_CFG_PREFIX}', self.CONFIG_PREFIX[config.idf_target])
self.add_pair('{IDF_TARGET_TRM_EN_URL}', self.TRM_EN_URL[config.idf_target])
self.add_pair('{IDF_TARGET_TRM_CN_URL}', self.TRM_CN_URL[config.idf_target])
def add_local_subs(self, matches):
for sub_def in matches:
if len(sub_def) != 2:
raise ValueError("IDF_TARGET_X substitution define invalid, val={}".format(sub_def))
raise ValueError('IDF_TARGET_X substitution define invalid, val={}'.format(sub_def))
tag = "{" + "IDF_TARGET_{}".format(sub_def[0]) + "}"
tag = '{' + 'IDF_TARGET_{}'.format(sub_def[0]) + '}'
match_default = re.match(r'^\s*default(\s*)=(\s*)\"(.*?)\"', sub_def[1])
if match_default is None:
# There should always be a default value
raise ValueError("No default value in IDF_TARGET_X substitution define, val={}".format(sub_def))
raise ValueError('No default value in IDF_TARGET_X substitution define, val={}'.format(sub_def))
match_target = re.match(r'^.*{}(\s*)=(\s*)\"(.*?)\"'.format(self.target_name), sub_def[1])


@@ -8,35 +8,35 @@
import glob
import os
import pprint
import subprocess
import re
import subprocess
def generate_defines(app, project_description):
sdk_config_path = os.path.join(project_description["build_dir"], "config")
sdk_config_path = os.path.join(project_description['build_dir'], 'config')
# Parse kconfig macros to pass into doxygen
#
# TODO: this should use the set of "config which can't be changed" eventually,
# not the header
defines = get_defines(os.path.join(project_description["build_dir"],
"config", "sdkconfig.h"), sdk_config_path)
defines = get_defines(os.path.join(project_description['build_dir'],
'config', 'sdkconfig.h'), sdk_config_path)
# Add all SOC _caps.h headers and kconfig macros to the defines
#
# kind of a hack, be nicer to add a component info dict in project_description.json
soc_path = [p for p in project_description["build_component_paths"] if p.endswith("/soc")][0]
soc_headers = glob.glob(os.path.join(soc_path, project_description["target"],
"include", "soc", "*_caps.h"))
soc_path = [p for p in project_description['build_component_paths'] if p.endswith('/soc')][0]
soc_headers = glob.glob(os.path.join(soc_path, project_description['target'],
'include', 'soc', '*_caps.h'))
assert len(soc_headers) > 0
for soc_header in soc_headers:
defines.update(get_defines(soc_header, sdk_config_path))
# write a list of definitions to make debugging easier
with open(os.path.join(app.config.build_dir, "macro-definitions.txt"), "w") as f:
with open(os.path.join(app.config.build_dir, 'macro-definitions.txt'), 'w') as f:
pprint.pprint(defines, f)
print("Saved macro list to %s" % f.name)
print('Saved macro list to %s' % f.name)
add_tags(app, defines)
@@ -48,19 +48,19 @@ def get_defines(header_path, sdk_config_path):
# Note: we run C preprocessor here without any -I arguments (except "sdkconfig.h"), so assumption is
# that these headers are all self-contained and don't include any other headers
# not in the same directory
print("Reading macros from %s..." % (header_path))
processed_output = subprocess.check_output(["xtensa-esp32-elf-gcc", "-I", sdk_config_path,
"-dM", "-E", header_path]).decode()
for line in processed_output.split("\n"):
print('Reading macros from %s...' % (header_path))
processed_output = subprocess.check_output(['xtensa-esp32-elf-gcc', '-I', sdk_config_path,
'-dM', '-E', header_path]).decode()
for line in processed_output.split('\n'):
line = line.strip()
m = re.search("#define ([^ ]+) ?(.*)", line)
m = re.search('#define ([^ ]+) ?(.*)', line)
if m:
name = m.group(1)
value = m.group(2)
if name.startswith("_"):
if name.startswith('_'):
continue # toolchain macro
if (" " in value) or ("=" in value):
value = "" # macros that expand to multiple tokens (ie function macros) cause doxygen errors, so just mark as 'defined'
if (' ' in value) or ('=' in value):
value = '' # macros that expand to multiple tokens (ie function macros) cause doxygen errors, so just mark as 'defined'
defines[name] = value
return defines
@@ -70,7 +70,7 @@ def add_tags(app, defines):
# try to parse define values as ints and add to tags
for name, value in defines.items():
try:
define_value = int(value.strip("()"))
define_value = int(value.strip('()'))
if define_value > 0:
app.tags.add(name)
except ValueError:


@@ -1,7 +1,9 @@
# Generate toolchain download links from toolchain info makefile
from __future__ import print_function
import os.path
from .util import copy_if_modified, call_with_python
from .util import call_with_python, copy_if_modified
def setup(app):
@@ -12,9 +14,9 @@ def setup(app):
def generate_idf_tools_links(app, project_description):
print("Generating IDF Tools list")
os.environ["IDF_MAINTAINER"] = "1"
print('Generating IDF Tools list')
os.environ['IDF_MAINTAINER'] = '1'
tools_rst = os.path.join(app.config.build_dir, 'inc', 'idf-tools-inc.rst')
tools_rst_tmp = os.path.join(app.config.build_dir, 'idf-tools-inc.rst')
call_with_python("{}/tools/idf_tools.py gen-doc --output {}".format(app.config.idf_path, tools_rst_tmp))
call_with_python('{}/tools/idf_tools.py gen-doc --output {}'.format(app.config.idf_path, tools_rst_tmp))
copy_if_modified(tools_rst_tmp, tools_rst)


@@ -1,17 +1,19 @@
# Generate toolchain download links from toolchain info makefile
from __future__ import print_function
import os.path
from collections import namedtuple
from .util import copy_if_modified
BASE_URL = 'https://dl.espressif.com/dl/'
PlatformInfo = namedtuple("PlatformInfo", [
"platform_name",
"platform_archive_suffix",
"extension",
"unpack_cmd",
"unpack_code"
PlatformInfo = namedtuple('PlatformInfo', [
'platform_name',
'platform_archive_suffix',
'extension',
'unpack_cmd',
'unpack_code'
])
@@ -23,9 +25,9 @@ def setup(app):
def generate_toolchain_download_links(app, project_description):
print("Generating toolchain download links")
print('Generating toolchain download links')
toolchain_tmpdir = '{}/toolchain_inc'.format(app.config.build_dir)
toolchain_versions = os.path.join(app.config.idf_path, "tools/toolchain_versions.mk")
toolchain_versions = os.path.join(app.config.idf_path, 'tools/toolchain_versions.mk')
gen_toolchain_links(toolchain_versions, toolchain_tmpdir)
copy_if_modified(toolchain_tmpdir, '{}/inc'.format(app.config.build_dir))
@@ -34,11 +36,11 @@ def gen_toolchain_links(versions_file, out_dir):
version_vars = {}
with open(versions_file) as f:
for line in f:
name, var = line.partition("=")[::2]
name, var = line.partition('=')[::2]
version_vars[name.strip()] = var.strip()
gcc_version = version_vars["CURRENT_TOOLCHAIN_GCC_VERSION"]
toolchain_desc = version_vars["CURRENT_TOOLCHAIN_COMMIT_DESC_SHORT"]
gcc_version = version_vars['CURRENT_TOOLCHAIN_GCC_VERSION']
toolchain_desc = version_vars['CURRENT_TOOLCHAIN_COMMIT_DESC_SHORT']
unpack_code_linux_macos = """
::
@@ -59,10 +61,10 @@ def gen_toolchain_links(versions_file, out_dir):
"""
platform_info = [
PlatformInfo("linux64", "linux-amd64", "tar.gz", "z", unpack_code_linux_macos),
PlatformInfo("linux32", "linux-i686","tar.gz", "z", unpack_code_linux_macos),
PlatformInfo("osx", "macos", "tar.gz", "z", unpack_code_linux_macos),
PlatformInfo("win32", "win32", "zip", None, None)
PlatformInfo('linux64', 'linux-amd64', 'tar.gz', 'z', unpack_code_linux_macos),
PlatformInfo('linux32', 'linux-i686','tar.gz', 'z', unpack_code_linux_macos),
PlatformInfo('osx', 'macos', 'tar.gz', 'z', unpack_code_linux_macos),
PlatformInfo('win32', 'win32', 'zip', None, None)
]
try:
@@ -70,7 +72,7 @@ def gen_toolchain_links(versions_file, out_dir):
except OSError:
pass
with open(os.path.join(out_dir, 'download-links.inc'), "w") as links_file:
with open(os.path.join(out_dir, 'download-links.inc'), 'w') as links_file:
for p in platform_info:
archive_name = 'xtensa-esp32-elf-gcc{}-{}-{}.{}'.format(
gcc_version.replace('.', '_'), toolchain_desc, p.platform_archive_suffix, p.extension)
@@ -79,8 +81,8 @@ def gen_toolchain_links(versions_file, out_dir):
p.platform_name, BASE_URL, archive_name), file=links_file)
if p.unpack_code is not None:
with open(os.path.join(out_dir, 'unpack-code-%s.inc' % p.platform_name), "w") as f:
with open(os.path.join(out_dir, 'unpack-code-%s.inc' % p.platform_name), 'w') as f:
print(p.unpack_code.format(p.unpack_cmd, archive_name), file=f)
with open(os.path.join(out_dir, 'scratch-build-code.inc'), "w") as code_file:
with open(os.path.join(out_dir, 'scratch-build-code.inc'), 'w') as code_file:
print(scratch_build_code_linux_macos.format(toolchain_desc), file=code_file)


@@ -4,17 +4,18 @@
# Sphinx extension to generate ReSTructured Text .inc snippets
# with version-based content for this IDF version
from __future__ import print_function
from __future__ import unicode_literals
from io import open
from .util import copy_if_modified
import subprocess
from __future__ import print_function, unicode_literals
import os
import re
import subprocess
from io import open
from .util import copy_if_modified
TEMPLATES = {
"en": {
"git-clone-bash": """
'en': {
'git-clone-bash': """
.. code-block:: bash
mkdir -p ~/esp
@@ -22,7 +23,7 @@ TEMPLATES = {
git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git
""",
"git-clone-windows": """
'git-clone-windows': """
.. code-block:: batch
mkdir %%userprofile%%\\esp
@@ -30,8 +31,8 @@ TEMPLATES = {
git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git
""",
"git-clone-notes": {
"template": """
'git-clone-notes': {
'template': """
.. note::
%(extra_note)s
@@ -40,35 +41,35 @@ TEMPLATES = {
%(zipfile_note)s
""",
"master": 'This command will clone the master branch, which has the latest development ("bleeding edge") '
'master': 'This command will clone the master branch, which has the latest development ("bleeding edge") '
'version of ESP-IDF. It is fully functional and updated on weekly basis with the most recent features and bugfixes.',
"branch": 'The ``git clone`` option ``-b %(clone_arg)s`` tells git to clone the %(ver_type)s in the ESP-IDF repository ``git clone`` '
'branch': 'The ``git clone`` option ``-b %(clone_arg)s`` tells git to clone the %(ver_type)s in the ESP-IDF repository ``git clone`` '
'corresponding to this version of the documentation.',
"zipfile": {
"stable": 'As a fallback, it is also possible to download a zip file of this stable release from the `Releases page`_. '
'zipfile': {
'stable': 'As a fallback, it is also possible to download a zip file of this stable release from the `Releases page`_. '
'Do not download the "Source code" zip file(s) generated automatically by GitHub, they do not work with ESP-IDF.',
"unstable": 'GitHub\'s "Download zip file" feature does not work with ESP-IDF, a ``git clone`` is required. As a fallback, '
'unstable': 'GitHub\'s "Download zip file" feature does not work with ESP-IDF, a ``git clone`` is required. As a fallback, '
'`Stable version`_ can be installed without Git.'
}, # zipfile
}, # git-clone-notes
"version-note": {
"master": """
'version-note': {
'master': """
.. note::
This is documentation for the master branch (latest version) of ESP-IDF. This version is under continual development.
`Stable version`_ documentation is available, as well as other :doc:`/versions`.
""",
"stable": """
'stable': """
.. note::
This is documentation for stable version %s of ESP-IDF. Other :doc:`/versions` are also available.
""",
"branch": """
'branch': """
.. note::
This is documentation for %s ``%s`` of ESP-IDF. Other :doc:`/versions` are also available.
"""
}, # version-note
}, # en
"zh_CN": {
"git-clone-bash": """
'zh_CN': {
'git-clone-bash': """
.. code-block:: bash
mkdir -p ~/esp
@@ -76,7 +77,7 @@ TEMPLATES = {
git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git
""",
"git-clone-windows": """
'git-clone-windows': """
.. code-block:: batch
mkdir %%userprofile%%\\esp
@@ -84,8 +85,8 @@ TEMPLATES = {
git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git
""",
"git-clone-notes": {
"template": """
'git-clone-notes': {
'template': """
.. note::
%(extra_note)s
@@ -94,24 +95,24 @@ TEMPLATES = {
%(zipfile_note)s
""",
"master": '此命令将克隆 master 分支,该分支保存着 ESP-IDF 的最新版本,它功能齐全,每周都会更新一些新功能并修正一些错误。',
"branch": '``git clone`` 命令的 ``-b %(clone_arg)s`` 选项告诉 git 从 ESP-IDF 仓库中克隆与此版本的文档对应的分支。',
"zipfile": {
"stable": '作为备份,还可以从 `Releases page`_ 下载此稳定版本的 zip 文件。不要下载由 GitHub 自动生成的"源代码"的 zip 文件,它们不适用于 ESP-IDF。',
"unstable": 'GitHub 中"下载 zip 文档"的功能不适用于 ESP-IDF所以需要使用 ``git clone`` 命令。作为备份,可以在没有安装 Git 的环境中下载 '
'master': '此命令将克隆 master 分支,该分支保存着 ESP-IDF 的最新版本,它功能齐全,每周都会更新一些新功能并修正一些错误。',
'branch': '``git clone`` 命令的 ``-b %(clone_arg)s`` 选项告诉 git 从 ESP-IDF 仓库中克隆与此版本的文档对应的分支。',
'zipfile': {
'stable': '作为备份,还可以从 `Releases page`_ 下载此稳定版本的 zip 文件。不要下载由 GitHub 自动生成的"源代码"的 zip 文件,它们不适用于 ESP-IDF。',
'unstable': 'GitHub 中"下载 zip 文档"的功能不适用于 ESP-IDF,所以需要使用 ``git clone`` 命令。作为备份,可以在没有安装 Git 的环境中下载 '
'`Stable version`_ 的 zip 归档文件。'
}, # zipfile
}, # git-clone
"version-note": {
"master": """
'version-note': {
'master': """
.. note::
这是ESP-IDF master 分支(最新版本)的文档,该版本在持续开发中。还有 `Stable version`_ 的文档,以及其他版本的文档 :doc:`/versions` 供参考。
""",
"stable": """
'stable': """
.. note::
这是ESP-IDF 稳定版本 %s 的文档,还有其他版本的文档 :doc:`/versions` 供参考。
""",
"branch": """
'branch': """
.. note::
这是ESP-IDF %s ``%s`` 版本的文档,还有其他版本的文档 :doc:`/versions` 供参考。
"""
@@ -128,9 +129,9 @@ def setup(app):
def generate_version_specific_includes(app, project_description):
language = app.config.language
tmp_out_dir = os.path.join(app.config.build_dir, "version_inc")
tmp_out_dir = os.path.join(app.config.build_dir, 'version_inc')
if not os.path.exists(tmp_out_dir):
print("Creating directory %s" % tmp_out_dir)
print('Creating directory %s' % tmp_out_dir)
os.mkdir(tmp_out_dir)
template = TEMPLATES[language]
@@ -138,56 +139,56 @@ def generate_version_specific_includes(app, project_description):
version, ver_type, is_stable = get_version()
write_git_clone_inc_files(template, tmp_out_dir, version, ver_type, is_stable)
write_version_note(template["version-note"], tmp_out_dir, version, ver_type, is_stable)
copy_if_modified(tmp_out_dir, os.path.join(app.config.build_dir, "inc"))
print("Done")
write_version_note(template['version-note'], tmp_out_dir, version, ver_type, is_stable)
copy_if_modified(tmp_out_dir, os.path.join(app.config.build_dir, 'inc'))
print('Done')
def write_git_clone_inc_files(templates, out_dir, version, ver_type, is_stable):
def out_file(basename):
p = os.path.join(out_dir, "%s.inc" % basename)
print("Writing %s..." % p)
p = os.path.join(out_dir, '%s.inc' % basename)
print('Writing %s...' % p)
return p
if version == "master":
clone_args = ""
if version == 'master':
clone_args = ''
else:
clone_args = "-b %s " % version
clone_args = '-b %s ' % version
with open(out_file("git-clone-bash"), "w", encoding="utf-8") as f:
f.write(templates["git-clone-bash"] % locals())
with open(out_file('git-clone-bash'), 'w', encoding='utf-8') as f:
f.write(templates['git-clone-bash'] % locals())
with open(out_file("git-clone-windows"), "w", encoding="utf-8") as f:
f.write(templates["git-clone-windows"] % locals())
with open(out_file('git-clone-windows'), 'w', encoding='utf-8') as f:
f.write(templates['git-clone-windows'] % locals())
with open(out_file("git-clone-notes"), "w", encoding="utf-8") as f:
template = templates["git-clone-notes"]
with open(out_file('git-clone-notes'), 'w', encoding='utf-8') as f:
template = templates['git-clone-notes']
zipfile = template["zipfile"]
zipfile = template['zipfile']
if version == "master":
extra_note = template["master"]
zipfile_note = zipfile["unstable"]
if version == 'master':
extra_note = template['master']
zipfile_note = zipfile['unstable']
else:
extra_note = template["branch"] % {"clone_arg": version, "ver_type": ver_type}
zipfile_note = zipfile["stable"] if is_stable else zipfile["unstable"]
extra_note = template['branch'] % {'clone_arg': version, 'ver_type': ver_type}
zipfile_note = zipfile['stable'] if is_stable else zipfile['unstable']
f.write(template["template"] % locals())
f.write(template['template'] % locals())
print("Wrote git-clone-xxx.inc files")
print('Wrote git-clone-xxx.inc files')
def write_version_note(template, out_dir, version, ver_type, is_stable):
if version == "master":
content = template["master"]
elif ver_type == "tag" and is_stable:
content = template["stable"] % version
if version == 'master':
content = template['master']
elif ver_type == 'tag' and is_stable:
content = template['stable'] % version
else:
content = template["branch"] % (ver_type, version)
out_file = os.path.join(out_dir, "version-note.inc")
with open(out_file, "w", encoding='utf-8') as f:
content = template['branch'] % (ver_type, version)
out_file = os.path.join(out_dir, 'version-note.inc')
with open(out_file, 'w', encoding='utf-8') as f:
f.write(content)
print("%s written" % out_file)
print('%s written' % out_file)
def get_version():
@@ -196,22 +197,22 @@ def get_version():
"""
# Use git to look for a tag
try:
tag = subprocess.check_output(["git", "describe", "--exact-match"]).strip().decode('utf-8')
is_stable = re.match(r"v[0-9\.]+$", tag) is not None
return (tag, "tag", is_stable)
tag = subprocess.check_output(['git', 'describe', '--exact-match']).strip().decode('utf-8')
is_stable = re.match(r'v[0-9\.]+$', tag) is not None
return (tag, 'tag', is_stable)
except subprocess.CalledProcessError:
pass
# No tag, look at branch name from CI, this will give the correct branch name even if the ref for the branch we
# merge into has moved forward before the pipeline runs
branch = os.environ.get("CI_COMMIT_REF_NAME", None)
branch = os.environ.get('CI_COMMIT_REF_NAME', None)
if branch is not None:
return (branch, "branch", False)
return (branch, 'branch', False)
# Try to find the branch name even if docs are built locally
branch = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip().decode('utf-8')
if branch != "HEAD":
return (branch, "branch", False)
branch = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip().decode('utf-8')
if branch != 'HEAD':
return (branch, 'branch', False)
# As a last resort we return commit SHA-1, should never happen in CI/docs that should be published
return (subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).strip().decode('utf-8'), "commit", False)
return (subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode('utf-8'), 'commit', False)


@@ -1,4 +1,5 @@
import os.path
from docutils.parsers.rst import directives
from docutils.parsers.rst.directives.misc import Include as BaseInclude
from sphinx.util.docutils import SphinxDirective


@@ -1,7 +1,7 @@
# Extension to generate the KConfig reference list
import os.path
import sys
import subprocess
import sys
from .util import copy_if_modified
@@ -18,18 +18,18 @@ def generate_reference(app, project_description):
build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
# Generate 'kconfig.inc' file from components' Kconfig files
print("Generating kconfig.inc from kconfig contents")
print('Generating kconfig.inc from kconfig contents')
kconfig_inc_path = '{}/inc/kconfig.inc'.format(build_dir)
temp_sdkconfig_path = '{}/sdkconfig.tmp'.format(build_dir)
kconfigs = project_description["config_environment"]["COMPONENT_KCONFIGS"].split(";")
kconfig_projbuilds = project_description["config_environment"]["COMPONENT_KCONFIGS_PROJBUILD"].split(";")
kconfigs = project_description['config_environment']['COMPONENT_KCONFIGS'].split(';')
kconfig_projbuilds = project_description['config_environment']['COMPONENT_KCONFIGS_PROJBUILD'].split(';')
sdkconfig_renames = set()
# TODO: this should be generated in project description as well, if possible
for k in kconfigs + kconfig_projbuilds:
component_dir = os.path.dirname(k)
sdkconfig_rename = os.path.join(component_dir, "sdkconfig.rename")
sdkconfig_rename = os.path.join(component_dir, 'sdkconfig.rename')
if os.path.exists(sdkconfig_rename):
sdkconfig_renames.add(sdkconfig_rename)
@@ -37,27 +37,27 @@ def generate_reference(app, project_description):
kconfig_projbuilds_source_path = '{}/inc/kconfig_projbuilds_source.in'.format(build_dir)
prepare_kconfig_files_args = [sys.executable,
"{}/tools/kconfig_new/prepare_kconfig_files.py".format(app.config.idf_path),
"--env", "COMPONENT_KCONFIGS={}".format(" ".join(kconfigs)),
"--env", "COMPONENT_KCONFIGS_PROJBUILD={}".format(" ".join(kconfig_projbuilds)),
"--env", "COMPONENT_KCONFIGS_SOURCE_FILE={}".format(kconfigs_source_path),
"--env", "COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}".format(kconfig_projbuilds_source_path),
'{}/tools/kconfig_new/prepare_kconfig_files.py'.format(app.config.idf_path),
'--env', 'COMPONENT_KCONFIGS={}'.format(' '.join(kconfigs)),
'--env', 'COMPONENT_KCONFIGS_PROJBUILD={}'.format(' '.join(kconfig_projbuilds)),
'--env', 'COMPONENT_KCONFIGS_SOURCE_FILE={}'.format(kconfigs_source_path),
'--env', 'COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}'.format(kconfig_projbuilds_source_path),
]
subprocess.check_call(prepare_kconfig_files_args)
confgen_args = [sys.executable,
"{}/tools/kconfig_new/confgen.py".format(app.config.idf_path),
"--kconfig", "./Kconfig",
"--sdkconfig-rename", "./sdkconfig.rename",
"--config", temp_sdkconfig_path,
"--env", "COMPONENT_KCONFIGS={}".format(" ".join(kconfigs)),
"--env", "COMPONENT_KCONFIGS_PROJBUILD={}".format(" ".join(kconfig_projbuilds)),
"--env", "COMPONENT_SDKCONFIG_RENAMES={}".format(" ".join(sdkconfig_renames)),
"--env", "COMPONENT_KCONFIGS_SOURCE_FILE={}".format(kconfigs_source_path),
"--env", "COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}".format(kconfig_projbuilds_source_path),
"--env", "IDF_PATH={}".format(app.config.idf_path),
"--env", "IDF_TARGET={}".format(app.config.idf_target),
"--output", "docs", kconfig_inc_path + '.in'
'{}/tools/kconfig_new/confgen.py'.format(app.config.idf_path),
'--kconfig', './Kconfig',
'--sdkconfig-rename', './sdkconfig.rename',
'--config', temp_sdkconfig_path,
'--env', 'COMPONENT_KCONFIGS={}'.format(' '.join(kconfigs)),
'--env', 'COMPONENT_KCONFIGS_PROJBUILD={}'.format(' '.join(kconfig_projbuilds)),
'--env', 'COMPONENT_SDKCONFIG_RENAMES={}'.format(' '.join(sdkconfig_renames)),
'--env', 'COMPONENT_KCONFIGS_SOURCE_FILE={}'.format(kconfigs_source_path),
'--env', 'COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}'.format(kconfig_projbuilds_source_path),
'--env', 'IDF_PATH={}'.format(app.config.idf_path),
'--env', 'IDF_TARGET={}'.format(app.config.idf_target),
'--output', 'docs', kconfig_inc_path + '.in'
]
subprocess.check_call(confgen_args, cwd=app.config.idf_path)
copy_if_modified(kconfig_inc_path + '.in', kconfig_inc_path)


@@ -1,6 +1,7 @@
from sphinx.builders.latex import LaTeXBuilder
import os
from sphinx.builders.latex import LaTeXBuilder
# Overrides the default Sphinx latex build
class IdfLatexBuilder(LaTeXBuilder):
@@ -26,7 +27,7 @@ class IdfLatexBuilder(LaTeXBuilder):
def prepare_latex_macros(self, package_path, config):
PACKAGE_NAME = "espidf.sty"
PACKAGE_NAME = 'espidf.sty'
latex_package = ''
with open(package_path, 'r') as template:
@@ -36,7 +37,7 @@ class IdfLatexBuilder(LaTeXBuilder):
latex_package = latex_package.replace('<idf_target_title>', idf_target_title)
# Release name for the PDF front page, remove '_' as this is used for subscript in Latex
idf_release_name = "Release {}".format(config.version.replace('_', '-'))
idf_release_name = 'Release {}'.format(config.version.replace('_', '-'))
latex_package = latex_package.replace('<idf_release_name>', idf_release_name)
with open(os.path.join(self.outdir, PACKAGE_NAME), 'w') as package_file:
@@ -45,7 +46,7 @@ class IdfLatexBuilder(LaTeXBuilder):
def finish(self):
super().finish()
TEMPLATE_PATH = "../latex_templates/espidf.sty"
TEMPLATE_PATH = '../latex_templates/espidf.sty'
self.prepare_latex_macros(os.path.join(self.confdir,TEMPLATE_PATH), self.config)


@@ -1,14 +1,15 @@
# based on http://protips.readthedocs.io/link-roles.html
from __future__ import print_function
from __future__ import unicode_literals
import re
from __future__ import print_function, unicode_literals
import os
import re
import subprocess
from docutils import nodes
from collections import namedtuple
from sphinx.transforms.post_transforms import SphinxPostTransform
from docutils import nodes
from get_github_rev import get_github_rev
from sphinx.transforms.post_transforms import SphinxPostTransform
# Creates a dict of all submodules with the format {submodule_path : (url relative to git root), commit)}
@@ -27,7 +28,7 @@ def get_submodules():
rev = sub_info[0].lstrip('-')[0:7]
path = sub_info[1].lstrip('./')
config_key_arg = "submodule.{}.url".format(path)
config_key_arg = 'submodule.{}.url'.format(path)
rel_url = subprocess.check_output(['git', 'config', '--file', gitmodules_file, '--get', config_key_arg]).decode('utf-8').lstrip('./').rstrip('\n')
submodule_dict[path] = Submodule(rel_url, rev)
@@ -38,8 +39,8 @@ def get_submodules():
def url_join(*url_parts):
""" Make a URL out of multiple components, assume first part is the https:// part and
anything else is a path component """
result = "/".join(url_parts)
result = re.sub(r"([^:])//+", r"\1/", result) # remove any // that isn't in the https:// part
result = '/'.join(url_parts)
result = re.sub(r'([^:])//+', r'\1/', result) # remove any // that isn't in the https:// part
return result
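A standalone run of the helper above, with a made-up repository path, showing that the regex collapses doubled slashes in the path while leaving the `https://` prefix untouched:

import re

def url_join(*url_parts):
    result = '/'.join(url_parts)
    result = re.sub(r'([^:])//+', r'\1/', result)    # drop any // that isn't in the https:// part
    return result

print(url_join('https://github.com/', 'espressif/esp-idf', 'tree', 'v4.2'))
# https://github.com/espressif/esp-idf/tree/v4.2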
@@ -47,7 +48,7 @@ def github_link(link_type, idf_rev, submods, root_path, app_config):
def role(name, rawtext, text, lineno, inliner, options={}, content=[]):
msgs = []
BASE_URL = 'https://github.com/'
IDF_REPO = "espressif/esp-idf"
IDF_REPO = 'espressif/esp-idf'
def warning(msg):
system_msg = inliner.reporter.warning(msg)
@@ -90,31 +91,31 @@ def github_link(link_type, idf_rev, submods, root_path, app_config):
line_no = tuple(int(ln_group) for ln_group in line_no.groups() if ln_group) # tuple of (nnn,) or (nnn, NNN) for ranges
elif '#' in abs_path: # drop any other anchor from the line
abs_path = abs_path.split('#')[0]
warning("URL %s seems to contain an unusable anchor after the #, only line numbers are supported" % link)
warning('URL %s seems to contain an unusable anchor after the #, only line numbers are supported' % link)
is_dir = (link_type == 'tree')
if not os.path.exists(abs_path):
warning("IDF path %s does not appear to exist (absolute path %s)" % (rel_path, abs_path))
warning('IDF path %s does not appear to exist (absolute path %s)' % (rel_path, abs_path))
elif is_dir and not os.path.isdir(abs_path):
# note these "wrong type" warnings are not strictly needed as GitHub will apply a redirect,
# but they may become important in the future (plus make for cleaner links)
warning("IDF path %s is not a directory but role :%s: is for linking to a directory, try :%s_file:" % (rel_path, name, name))
warning('IDF path %s is not a directory but role :%s: is for linking to a directory, try :%s_file:' % (rel_path, name, name))
elif not is_dir and os.path.isdir(abs_path):
warning("IDF path %s is a directory but role :%s: is for linking to a file" % (rel_path, name))
warning('IDF path %s is a directory but role :%s: is for linking to a file' % (rel_path, name))
# check the line number is valid
if line_no:
if is_dir:
warning("URL %s contains a line number anchor but role :%s: is for linking to a directory" % (rel_path, name, name))
warning('URL %s contains a line number anchor but role :%s: is for linking to a directory' % (rel_path, name))
elif os.path.exists(abs_path) and not os.path.isdir(abs_path):
with open(abs_path, "r") as f:
with open(abs_path, 'r') as f:
lines = len(f.readlines())
if any(True for ln in line_no if ln > lines):
warning("URL %s specifies a range larger than file (file has %d lines)" % (rel_path, lines))
warning('URL %s specifies a range larger than file (file has %d lines)' % (rel_path, lines))
if tuple(sorted(line_no)) != line_no: # second line number comes before first one!
warning("URL %s specifies a backwards line number range" % rel_path)
warning('URL %s specifies a backwards line number range' % rel_path)
node = nodes.reference(rawtext, link_text, refuri=url, **options)
return [node], msgs
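To make the two warnings above concrete, here is a small self-contained check on a made-up link; the `#L<n>-L<m>` regex and the file length are assumptions for illustration only, not code from this commit:

import re

link = 'components/driver/uart.c#L20-L10'            # hypothetical link with a backwards range
m = re.search(r'#L(\d+)(?:-L(\d+))?$', link)          # assumed anchor format
line_no = tuple(int(g) for g in m.groups() if g)      # -> (20, 10)
lines = 14                                            # pretend the linked file has 14 lines

if any(ln > lines for ln in line_no):
    print('URL %s specifies a range larger than file (file has %d lines)' % (link, lines))
if tuple(sorted(line_no)) != line_no:                 # second line number comes before the first one
    print('URL %s specifies a backwards line number range' % link)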
@@ -148,7 +149,7 @@ class TranslationLinkNodeTransform(SphinxPostTransform):
doc_path = env.doc2path(docname, None, None)
return_path = '../' * doc_path.count('/') # path back to the root from 'docname'
# then take off 3 more paths for language/release/targetname and build the new URL
url = "{}.html".format(os.path.join(return_path, '../../..', language, env.config.release,
url = '{}.html'.format(os.path.join(return_path, '../../..', language, env.config.release,
env.config.idf_target, docname))
node.replace_self(nodes.reference(rawtext, link_text, refuri=url, **options))
else:
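A worked example of the URL computation above, with hypothetical docname, language, release and target values (`env.doc2path()` is stubbed with a literal string here):

import os

docname = 'api-reference/peripherals/uart'             # hypothetical document
doc_path = 'api-reference/peripherals/uart.rst'        # stand-in for env.doc2path(docname, None, None)
language, release, idf_target = 'zh_CN', 'v4.2', 'esp32'

return_path = '../' * doc_path.count('/')              # '../../'
url = '{}.html'.format(os.path.join(return_path, '../../..', language, release, idf_target, docname))
print(url)
# ../../../../../zh_CN/v4.2/esp32/api-reference/peripherals/uart.html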


@@ -1,20 +1,21 @@
# Extension to generate Doxygen XML include files, with IDF config & soc macros included
from __future__ import print_function
from __future__ import unicode_literals
from io import open
from __future__ import print_function, unicode_literals
import os
import os.path
import re
import subprocess
from io import open
from .util import copy_if_modified
ALL_KINDS = [
("function", "Functions"),
("union", "Unions"),
("struct", "Structures"),
("define", "Macros"),
("typedef", "Type Definitions"),
("enum", "Enumerations")
('function', 'Functions'),
('union', 'Unions'),
('struct', 'Structures'),
('define', 'Macros'),
('typedef', 'Type Definitions'),
('enum', 'Enumerations')
]
"""list of items that will be generated for a single API file
"""
@@ -30,27 +31,27 @@ def generate_doxygen(app, defines):
build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
# Call Doxygen to get XML files from the header files
print("Calling Doxygen to generate latest XML files")
print('Calling Doxygen to generate latest XML files')
doxy_env = os.environ
doxy_env.update({
"ENV_DOXYGEN_DEFINES": " ".join('{}={}'.format(key, value) for key, value in defines.items()),
"IDF_PATH": app.config.idf_path,
"IDF_TARGET": app.config.idf_target,
'ENV_DOXYGEN_DEFINES': ' '.join('{}={}'.format(key, value) for key, value in defines.items()),
'IDF_PATH': app.config.idf_path,
'IDF_TARGET': app.config.idf_target,
})
doxyfile_dir = os.path.join(app.config.docs_root, "doxygen")
doxyfile_main = os.path.join(doxyfile_dir, "Doxyfile_common")
doxyfile_target = os.path.join(doxyfile_dir, "Doxyfile_" + app.config.idf_target)
print("Running doxygen with doxyfiles {} and {}".format(doxyfile_main, doxyfile_target))
doxyfile_dir = os.path.join(app.config.docs_root, 'doxygen')
doxyfile_main = os.path.join(doxyfile_dir, 'Doxyfile_common')
doxyfile_target = os.path.join(doxyfile_dir, 'Doxyfile_' + app.config.idf_target)
print('Running doxygen with doxyfiles {} and {}'.format(doxyfile_main, doxyfile_target))
# It's possible to have doxygen log warnings to a file using WARN_LOGFILE directive,
# but in some cases it will still log an error to stderr and return success!
#
# So take all of stderr and redirect it to a logfile (will contain warnings and errors)
logfile = os.path.join(build_dir, "doxygen-warning-log.txt")
logfile = os.path.join(build_dir, 'doxygen-warning-log.txt')
with open(logfile, "w") as f:
with open(logfile, 'w') as f:
# note: run Doxygen in the build directory, so the xml & xml_in files end up in there
subprocess.check_call(["doxygen", doxyfile_main], env=doxy_env, cwd=build_dir, stderr=f)
subprocess.check_call(['doxygen', doxyfile_main], env=doxy_env, cwd=build_dir, stderr=f)
# Doxygen has generated XML files in 'xml' directory.
# Copy them to 'xml_in', only touching the files which have changed.
@@ -69,11 +70,11 @@ def convert_api_xml_to_inc(app, doxyfiles):
"""
build_dir = app.config.build_dir
xml_directory_path = "{}/xml".format(build_dir)
inc_directory_path = "{}/inc".format(build_dir)
xml_directory_path = '{}/xml'.format(build_dir)
inc_directory_path = '{}/inc'.format(build_dir)
if not os.path.isdir(xml_directory_path):
raise RuntimeError("Directory {} does not exist!".format(xml_directory_path))
raise RuntimeError('Directory {} does not exist!'.format(xml_directory_path))
if not os.path.exists(inc_directory_path):
os.makedirs(inc_directory_path)
@@ -83,16 +84,16 @@ def convert_api_xml_to_inc(app, doxyfiles):
print("Generating 'api_name.inc' files with Doxygen directives")
for header_file_path in header_paths:
api_name = get_api_name(header_file_path)
inc_file_path = inc_directory_path + "/" + api_name + ".inc"
inc_file_path = inc_directory_path + '/' + api_name + '.inc'
rst_output = generate_directives(header_file_path, xml_directory_path)
previous_rst_output = ''
if os.path.isfile(inc_file_path):
with open(inc_file_path, "r", encoding='utf-8') as inc_file_old:
with open(inc_file_path, 'r', encoding='utf-8') as inc_file_old:
previous_rst_output = inc_file_old.read()
if previous_rst_output != rst_output:
with open(inc_file_path, "w", encoding='utf-8') as inc_file:
with open(inc_file_path, 'w', encoding='utf-8') as inc_file:
inc_file.write(rst_output)
@@ -108,11 +109,11 @@ def get_doxyfile_input_paths(app, doxyfile_path):
print("Getting Doxyfile's INPUT")
with open(doxyfile_path, "r", encoding='utf-8') as input_file:
with open(doxyfile_path, 'r', encoding='utf-8') as input_file:
line = input_file.readline()
# read contents of Doxyfile until 'INPUT' statement
while line:
if line.find("INPUT") == 0:
if line.find('INPUT') == 0:
break
line = input_file.readline()
@@ -124,13 +125,13 @@ def get_doxyfile_input_paths(app, doxyfile_path):
# we have reached the end of 'INPUT' statement
break
# process only lines that are not comments
if line.find("#") == -1:
if line.find('#') == -1:
# extract header file path inside components folder
m = re.search("components/(.*\.h)", line) # noqa: W605 - regular expression
m = re.search('components/(.*\.h)', line) # noqa: W605 - regular expression
header_file_path = m.group(1)
# Replace env variable used for multi target header
header_file_path = header_file_path.replace("$(IDF_TARGET)", app.config.idf_target)
header_file_path = header_file_path.replace('$(IDF_TARGET)', app.config.idf_target)
doxyfile_INPUT.append(header_file_path)
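As an illustration of the extraction above, one hypothetical (non-comment) Doxyfile INPUT line and the header path it yields once `$(IDF_TARGET)` is substituted; the path is made up for this sketch:

import re

idf_target = 'esp32s2'
line = '  $(IDF_PATH)/components/soc/$(IDF_TARGET)/include/soc/gpio_struct.h \\\n'   # hypothetical INPUT line
if line.find('#') == -1:
    m = re.search(r'components/(.*\.h)', line)
    header_file_path = m.group(1).replace('$(IDF_TARGET)', idf_target)
    print(header_file_path)    # soc/esp32s2/include/soc/gpio_struct.h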
@@ -150,8 +151,8 @@ def get_api_name(header_file_path):
The name of API.
"""
api_name = ""
regex = r".*/(.*)\.h"
api_name = ''
regex = r'.*/(.*)\.h'
m = re.search(regex, header_file_path)
if m:
api_name = m.group(1)
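For example, the regex above maps a header path to its API name (the path is made up for illustration):

import re

m = re.search(r'.*/(.*)\.h', 'driver/include/driver/uart.h')
api_name = m.group(1) if m else ''
print(api_name)    # uart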
@@ -173,15 +174,15 @@ def generate_directives(header_file_path, xml_directory_path):
api_name = get_api_name(header_file_path)
# in the XML file name each "_" in the api name is expanded by Doxygen to "__"
xlt_api_name = api_name.replace("_", "__")
xml_file_path = "%s/%s_8h.xml" % (xml_directory_path, xlt_api_name)
xlt_api_name = api_name.replace('_', '__')
xml_file_path = '%s/%s_8h.xml' % (xml_directory_path, xlt_api_name)
rst_output = ""
rst_output = ''
rst_output = ".. File automatically generated by 'gen-dxd.py'\n"
rst_output += "\n"
rst_output += get_rst_header("Header File")
rst_output += "* :component_file:`" + header_file_path + "`\n"
rst_output += "\n"
rst_output += '\n'
rst_output += get_rst_header('Header File')
rst_output += '* :component_file:`' + header_file_path + '`\n'
rst_output += '\n'
try:
import xml.etree.cElementTree as ET
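A quick illustration of the Doxygen file-name mangling handled above, using a made-up API name and XML directory:

api_name = 'esp_wifi'                                  # hypothetical API name
xlt_api_name = api_name.replace('_', '__')             # Doxygen doubles each '_'
xml_file_path = '%s/%s_8h.xml' % ('_build/xml', xlt_api_name)
print(xml_file_path)                                   # _build/xml/esp__wifi_8h.xml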
@@ -206,10 +207,10 @@ def get_rst_header(header_name):
"""
rst_output = ""
rst_output += header_name + "\n"
rst_output += "^" * len(header_name) + "\n"
rst_output += "\n"
rst_output = ''
rst_output += header_name + '\n'
rst_output += '^' * len(header_name) + '\n'
rst_output += '\n'
return rst_output
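For one of the ALL_KINDS titles, the helper above produces a plain RST section header, e.g.:

header_name = 'Functions'
print(header_name + '\n' + '^' * len(header_name) + '\n')
# Functions
# ^^^^^^^^^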
@@ -226,14 +227,14 @@ def select_unions(innerclass_list):
"""
rst_output = ""
rst_output = ''
for line in innerclass_list.splitlines():
# union is denoted by "union" at the beginning of line
if line.find("union") == 0:
union_id, union_name = re.split(r"\t+", line)
rst_output += ".. doxygenunion:: "
if line.find('union') == 0:
union_id, union_name = re.split(r'\t+', line)
rst_output += '.. doxygenunion:: '
rst_output += union_name
rst_output += "\n"
rst_output += '\n'
return rst_output
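An example of the tab-separated innerclass entry the loop above consumes and the directive it emits; the refid and name are hypothetical:

import re

innerclass_list = 'unionwifi__config__t\twifi_config_t\n'        # hypothetical refid<TAB>name entry
rst_output = ''
for line in innerclass_list.splitlines():
    if line.find('union') == 0:
        union_id, union_name = re.split(r'\t+', line)
        rst_output += '.. doxygenunion:: ' + union_name + '\n'
print(rst_output)    # .. doxygenunion:: wifi_config_t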
@@ -251,20 +252,20 @@ def select_structs(innerclass_list):
"""
rst_output = ""
rst_output = ''
for line in innerclass_list.splitlines():
# structure is denoted by "struct" at the beginning of line
if line.find("struct") == 0:
if line.find('struct') == 0:
# skip structures that are part of union
# they are documented by 'doxygenunion' directive
if line.find("::") > 0:
if line.find('::') > 0:
continue
struct_id, struct_name = re.split(r"\t+", line)
rst_output += ".. doxygenstruct:: "
struct_id, struct_name = re.split(r'\t+', line)
rst_output += '.. doxygenstruct:: '
rst_output += struct_name
rst_output += "\n"
rst_output += " :members:\n"
rst_output += "\n"
rst_output += '\n'
rst_output += ' :members:\n'
rst_output += '\n'
return rst_output
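The struct variant above additionally skips entries whose name contains '::' (members of a union already covered by doxygenunion). With two hypothetical entries, only the first survives in this condensed sketch:

import re

innerclass_list = ('structwifi__ap__config__t\twifi_ap_config_t\n'
                   'structwifi__config__t_1_1anon\twifi_config_t::anon\n')   # second entry is skipped
rst_output = ''
for line in innerclass_list.splitlines():
    if line.find('struct') == 0 and line.find('::') < 0:
        struct_id, struct_name = re.split(r'\t+', line)
        rst_output += '.. doxygenstruct:: ' + struct_name + '\n    :members:\n'
print(rst_output)    # .. doxygenstruct:: wifi_ap_config_t  (with :members:)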
@@ -282,12 +283,12 @@ def get_directives(tree, kind):
"""
rst_output = ""
if kind in ["union", "struct"]:
innerclass_list = ""
rst_output = ''
if kind in ['union', 'struct']:
innerclass_list = ''
for elem in tree.iterfind('compounddef/innerclass'):
innerclass_list += elem.attrib["refid"] + "\t" + elem.text + "\n"
if kind == "union":
innerclass_list += elem.attrib['refid'] + '\t' + elem.text + '\n'
if kind == 'union':
rst_output += select_unions(innerclass_list)
else:
rst_output += select_structs(innerclass_list)
@@ -295,10 +296,10 @@ def get_directives(tree, kind):
for elem in tree.iterfind(
'compounddef/sectiondef/memberdef[@kind="%s"]' % kind):
name = elem.find('name')
rst_output += ".. doxygen%s:: " % kind
rst_output += name.text + "\n"
rst_output += '.. doxygen%s:: ' % kind
rst_output += name.text + '\n'
if rst_output:
all_kinds_dict = dict(ALL_KINDS)
rst_output = get_rst_header(all_kinds_dict[kind]) + rst_output + "\n"
rst_output = get_rst_header(all_kinds_dict[kind]) + rst_output + '\n'
return rst_output
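To show what the memberdef loop above produces, here is a trimmed, hypothetical piece of Doxygen XML run through the same iterfind expression:

import xml.etree.ElementTree as ET

xml_snippet = ('<doxygen><compounddef><sectiondef>'
               '<memberdef kind="function"><name>esp_wifi_init</name></memberdef>'
               '</sectiondef></compounddef></doxygen>')              # hypothetical, heavily trimmed
tree = ET.ElementTree(ET.fromstring(xml_snippet))
rst_output = ''
for elem in tree.iterfind('compounddef/sectiondef/memberdef[@kind="function"]'):
    rst_output += '.. doxygenfunction:: ' + elem.find('name').text + '\n'
print(rst_output)    # .. doxygenfunction:: esp_wifi_init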


@@ -15,10 +15,11 @@
# limitations under the License.
from __future__ import unicode_literals
from io import open
import os
import shutil
import sys
from io import open
try:
import urllib.request
@@ -33,10 +34,10 @@ def files_equal(path_1, path_2):
if not os.path.exists(path_1) or not os.path.exists(path_2):
return False
file_1_contents = ''
with open(path_1, "r", encoding='utf-8') as f_1:
with open(path_1, 'r', encoding='utf-8') as f_1:
file_1_contents = f_1.read()
file_2_contents = ''
with open(path_2, "r", encoding='utf-8') as f_2:
with open(path_2, 'r', encoding='utf-8') as f_2:
file_2_contents = f_2.read()
return file_1_contents == file_2_contents
@@ -63,7 +64,7 @@ def copy_if_modified(src_path, dst_path):
def download_file_if_missing(from_url, to_path):
filename_with_path = to_path + "/" + os.path.basename(from_url)
filename_with_path = to_path + '/' + os.path.basename(from_url)
exists = os.path.isfile(filename_with_path)
if exists:
print("The file '%s' already exists" % (filename_with_path))


@@ -35,8 +35,8 @@ def sanitize_version(original_version):
except KeyError:
version = original_version
if version == "master":
return "latest"
if version == 'master':
return 'latest'
version = version.replace('/', '-')
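Ignoring the environment-variable override handled by the try/except above, the mapping works out as follows for a few representative versions:

for original_version in ('master', 'release/v4.2', 'v4.2.1'):
    version = 'latest' if original_version == 'master' else original_version.replace('/', '-')
    print(original_version, '->', version)
# master -> latest
# release/v4.2 -> release-v4.2
# v4.2.1 -> v4.2.1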


@@ -8,8 +8,8 @@
try:
from conf_common import * # noqa: F403,F401
except ImportError:
import sys
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
from conf_common import * # noqa: F403,F401
@@ -27,7 +27,7 @@ html_logo = None
latex_logo = None
html_static_path = []
conditional_include_dict = {'esp32':["esp32_page.rst"],
'esp32s2':["esp32s2_page.rst"],
'SOC_BT_SUPPORTED':["bt_page.rst"],
conditional_include_dict = {'esp32':['esp32_page.rst'],
'esp32s2':['esp32s2_page.rst'],
'SOC_BT_SUPPORTED':['bt_page.rst'],
}


@@ -1,16 +1,16 @@
#!/usr/bin/env python3
import unittest
import os
import subprocess
import sys
import os
import unittest
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
ESP32_DOC = "esp32_page"
ESP32_S2_DOC = "esp32s2_page"
BT_DOC = "bt_page"
LINK_ROLES_DOC = "link_roles"
IDF_FORMAT_DOC = "idf_target_format"
ESP32_DOC = 'esp32_page'
ESP32_S2_DOC = 'esp32s2_page'
BT_DOC = 'bt_page'
LINK_ROLES_DOC = 'link_roles'
IDF_FORMAT_DOC = 'idf_target_format'
class DocBuilder():
@@ -24,7 +24,7 @@ class DocBuilder():
self.html_out_dir = os.path.join(CURRENT_DIR, build_dir, language, target, 'html')
def build(self, opt_args=[]):
args = [sys.executable, self.build_docs_py_path, "-b", self.build_dir, "-s", self.src_dir, "-t", self.target, "-l", self.language]
args = [sys.executable, self.build_docs_py_path, '-b', self.build_dir, '-s', self.src_dir, '-t', self.target, '-l', self.language]
args.extend(opt_args)
return subprocess.call(args)
@@ -33,65 +33,65 @@ class TestDocs(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.builder = DocBuilder("test", "_build/test_docs", "esp32s2", "en")
cls.builder = DocBuilder('test', '_build/test_docs', 'esp32s2', 'en')
cls.build_ret_flag = cls.builder.build()
def setUp(self):
if self.build_ret_flag:
self.fail("Build docs failed with return: {}".format(self.build_ret_flag))
self.fail('Build docs failed with return: {}'.format(self.build_ret_flag))
def assert_str_not_in_doc(self, doc_name, str_to_find):
with open(os.path.join(self.builder.html_out_dir, doc_name)) as f:
content = f.read()
self.assertFalse(str_to_find in content, "Found {} in {}".format(str_to_find, doc_name))
self.assertFalse(str_to_find in content, 'Found {} in {}'.format(str_to_find, doc_name))
def assert_str_in_doc(self, doc_name, str_to_find):
with open(os.path.join(self.builder.html_out_dir, doc_name)) as f:
content = f.read()
self.assertTrue(str_to_find in content, "Did not find {} in {}".format(str_to_find, doc_name))
self.assertTrue(str_to_find in content, 'Did not find {} in {}'.format(str_to_find, doc_name))
def test_only_dir(self):
# Test that ESP32 content was excluded
self.assert_str_not_in_doc(ESP32_S2_DOC + ".html", "!ESP32_CONTENT!")
self.assert_str_not_in_doc(ESP32_S2_DOC + '.html', '!ESP32_CONTENT!')
# Test that ESP32 S2 content was included
self.assert_str_in_doc(ESP32_S2_DOC + ".html", "!ESP32_S2_CONTENT!")
self.assert_str_in_doc(ESP32_S2_DOC + '.html', '!ESP32_S2_CONTENT!')
# Test that BT content was excluded
self.assert_str_not_in_doc(ESP32_S2_DOC + ".html", "!BT_CONTENT!")
self.assert_str_not_in_doc(ESP32_S2_DOC + '.html', '!BT_CONTENT!')
def test_toctree_filter(self):
# ESP32 page should NOT be built
esp32_doc = os.path.join(self.builder.html_out_dir, ESP32_DOC + ".html")
self.assertFalse(os.path.isfile(esp32_doc), "Found {}".format(esp32_doc))
self.assert_str_not_in_doc('index.html', "!ESP32_CONTENT!")
esp32_doc = os.path.join(self.builder.html_out_dir, ESP32_DOC + '.html')
self.assertFalse(os.path.isfile(esp32_doc), 'Found {}'.format(esp32_doc))
self.assert_str_not_in_doc('index.html', '!ESP32_CONTENT!')
esp32s2_doc = os.path.join(self.builder.html_out_dir, ESP32_S2_DOC + ".html")
self.assertTrue(os.path.isfile(esp32s2_doc), "{} not found".format(esp32s2_doc))
esp32s2_doc = os.path.join(self.builder.html_out_dir, ESP32_S2_DOC + '.html')
self.assertTrue(os.path.isfile(esp32s2_doc), '{} not found'.format(esp32s2_doc))
# Spot check a few other tags
# No Bluetooth on ESP32 S2
bt_doc = os.path.join(self.builder.html_out_dir, BT_DOC + ".html")
self.assertFalse(os.path.isfile(bt_doc), "Found {}".format(bt_doc))
self.assert_str_not_in_doc('index.html', "!BT_CONTENT!")
bt_doc = os.path.join(self.builder.html_out_dir, BT_DOC + '.html')
self.assertFalse(os.path.isfile(bt_doc), 'Found {}'.format(bt_doc))
self.assert_str_not_in_doc('index.html', '!BT_CONTENT!')
def test_link_roles(self):
print("test")
print('test')
class TestBuildSubset(unittest.TestCase):
def test_build_subset(self):
builder = DocBuilder("test", "_build/test_build_subset", "esp32", "en")
builder = DocBuilder('test', '_build/test_build_subset', 'esp32', 'en')
docs_to_build = "esp32_page.rst"
docs_to_build = 'esp32_page.rst'
self.assertFalse(builder.build(["-i", docs_to_build]))
self.assertFalse(builder.build(['-i', docs_to_build]))
# Check that we only built the input docs
bt_doc = os.path.join(builder.html_out_dir, BT_DOC + ".html")
esp32_doc = os.path.join(builder.html_out_dir, ESP32_DOC + ".html")
self.assertFalse(os.path.isfile(bt_doc), "Found {}".format(bt_doc))
self.assertTrue(os.path.isfile(esp32_doc), "Found {}".format(esp32_doc))
bt_doc = os.path.join(builder.html_out_dir, BT_DOC + '.html')
esp32_doc = os.path.join(builder.html_out_dir, ESP32_DOC + '.html')
self.assertFalse(os.path.isfile(bt_doc), 'Found {}'.format(bt_doc))
self.assertTrue(os.path.isfile(esp32_doc), '{} not found'.format(esp32_doc))
if __name__ == '__main__':


@@ -3,8 +3,8 @@
import os
import sys
import unittest
from unittest.mock import MagicMock
from tempfile import TemporaryDirectory
from unittest.mock import MagicMock
from sphinx.util import tags
@@ -14,9 +14,7 @@ except ImportError:
sys.path.append('..')
from idf_extensions import exclude_docs
from idf_extensions import format_idf_target
from idf_extensions import gen_idf_tools_links
from idf_extensions import link_roles
from idf_extensions import format_idf_target, gen_idf_tools_links, link_roles
class TestFormatIdfTarget(unittest.TestCase):
@@ -30,14 +28,14 @@ class TestFormatIdfTarget(unittest.TestCase):
def test_add_subs(self):
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_NAME}'], "ESP32")
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_PATH_NAME}'], "esp32")
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TOOLCHAIN_NAME}'], "esp32")
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_CFG_PREFIX}'], "ESP32")
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_NAME}'], 'ESP32')
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_PATH_NAME}'], 'esp32')
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TOOLCHAIN_NAME}'], 'esp32')
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_CFG_PREFIX}'], 'ESP32')
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TRM_EN_URL}'],
"https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_en.pdf")
'https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_en.pdf')
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TRM_CN_URL}'],
"https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_cn.pdf")
'https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_cn.pdf')
def test_sub(self):
content = ('This is a {IDF_TARGET_NAME}, with {IDF_TARGET_PATH_NAME}/soc.c, compiled with '
@@ -54,14 +52,14 @@ class TestFormatIdfTarget(unittest.TestCase):
content = ('{IDF_TARGET_TX_PIN:default="IO3", esp32="IO4", esp32s2="IO5"}'
'The {IDF_TARGET_NAME} UART {IDF_TARGET_TX_PIN} uses for TX')
expected = "The ESP32 UART IO4 uses for TX"
expected = 'The ESP32 UART IO4 uses for TX'
self.assertEqual(self.str_sub.substitute(content), expected)
def test_local_sub_default(self):
content = ('{IDF_TARGET_TX_PIN:default="IO3", esp32s2="IO5"}'
'The {IDF_TARGET_NAME} UART {IDF_TARGET_TX_PIN} uses for TX')
expected = "The ESP32 UART IO3 uses for TX"
expected = 'The ESP32 UART IO3 uses for TX'
self.assertEqual(self.str_sub.substitute(content), expected)
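The two tests above pin down the behaviour of local definitions such as {IDF_TARGET_TX_PIN:default="IO3", esp32="IO4", esp32s2="IO5"}. The extension's real parser is not shown in these hunks, so the following is only a hedged, simplified sketch of resolving such a definition for one target:

import re

idf_target = 'esp32'
content = ('{IDF_TARGET_TX_PIN:default="IO3", esp32="IO4", esp32s2="IO5"}'
           'The {IDF_TARGET_NAME} UART {IDF_TARGET_TX_PIN} uses for TX')

def_match = re.match(r'\{(IDF_TARGET_\w+):([^}]+)\}', content)      # assumed definition syntax
name, body = def_match.groups()
values = dict(re.findall(r'(\w+)="([^"]+)"', body))
value = values.get(idf_target, values['default'])
rest = content[def_match.end():]
print(rest.replace('{%s}' % name, value).replace('{IDF_TARGET_NAME}', 'ESP32'))
# The ESP32 UART IO4 uses for TX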
def test_local_sub_no_default(self):
@@ -76,12 +74,12 @@ class TestExclude(unittest.TestCase):
def setUp(self):
self.app = MagicMock()
self.app.tags = tags.Tags()
self.app.config.conditional_include_dict = {"esp32":["esp32.rst", "bt.rst"], "esp32s2":["esp32s2.rst"]}
self.app.config.conditional_include_dict = {'esp32':['esp32.rst', 'bt.rst'], 'esp32s2':['esp32s2.rst']}
self.app.config.docs_to_build = None
self.app.config.exclude_patterns = []
def test_update_exclude_pattern(self):
self.app.tags.add("esp32")
self.app.tags.add('esp32')
exclude_docs.update_exclude_patterns(self.app, self.app.config)
docs_to_build = set(self.app.config.conditional_include_dict['esp32'])
@@ -92,7 +90,7 @@ class TestExclude(unittest.TestCase):
class TestGenIDFToolLinks(unittest.TestCase):
def setUp(self):
self.app = MagicMock()
self.app.config.build_dir = "_build"
self.app.config.build_dir = '_build'
self.app.config.idf_path = os.environ['IDF_PATH']
def test_gen_idf_tool_links(self):


@@ -9,8 +9,8 @@
try:
from conf_common import * # noqa: F403,F401
except ImportError:
import sys
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
from conf_common import * # noqa: F403,F401