diff --git a/lib/init/grass.py b/lib/init/grass.py
index 380f335a041..77e068a5f26 100755
--- a/lib/init/grass.py
+++ b/lib/init/grass.py
@@ -1400,10 +1400,7 @@ def set_language(grass_config_dir):
         del os.environ['LC_ALL']  # Remove LC_ALL to not override LC_NUMERIC

     # From now on enforce the new language
-    if encoding:
-        gettext.install('grasslibs', gpath('locale'), codeset=encoding)
-    else:
-        gettext.install('grasslibs', gpath('locale'))
+    gettext.install("grasslibs", gpath("locale"))


 # TODO: grass_gui parameter is a hack and should be removed, see below
diff --git a/tools/Makefile b/tools/Makefile
index ba73ba93507..236f15aef83 100644
--- a/tools/Makefile
+++ b/tools/Makefile
@@ -5,10 +5,15 @@ SUBDIRS = timer g.html2man
 include $(MODULE_TOPDIR)/include/Make/Dir.make
 include $(MODULE_TOPDIR)/include/Make/Compile.make

-default: parsubdirs $(TOOLSDIR)/mkhtml.py $(TOOLSDIR)/g.echo$(EXE)
+default: parsubdirs $(TOOLSDIR)/mkhtml.py \
+	$(TOOLSDIR)/generate_last_commit_file.py \
+	$(TOOLSDIR)/g.echo$(EXE)

 $(TOOLSDIR)/mkhtml.py: mkhtml.py
 	$(INSTALL) $< $@

+$(TOOLSDIR)/generate_last_commit_file.py: generate_last_commit_file.py
+	$(INSTALL) $< $@
+
 $(TOOLSDIR)/g.echo$(EXE): $(OBJDIR)/g.echo.o
 	$(call linker_base,$(LINK),$(LDFLAGS) $(EXTRA_LDFLAGS),$(MANIFEST_OBJ))
diff --git a/tools/generate_last_commit_file.py b/tools/generate_last_commit_file.py
new file mode 100644
index 00000000000..69b1851d204
--- /dev/null
+++ b/tools/generate_last_commit_file.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python3
+
+"""
+Script for creating a core_modules_with_last_commit.json file containing
+all core modules with their last commit. Used by the GitHub "Create new
+release draft" action workflow.
+
+JSON file structure:
+
+"r.pack": {
+    "commit": "547ff44e6aecfb4c9cbf6a4717fc14e521bec0be",
+    "date": "2022-02-20T09:34:17+01:00"
+},
+
+commit key value is the commit hash
+date key value is the author date
+
+Usage:
+
+python tools/generate_last_commit_file.py .
+
+@author Tomas Zigo
+"""
+
+import json
+import os
+import subprocess
+import shutil
+import sys
+
+
+# Strict ISO 8601 format
+COMMIT_DATE_FORMAT = "%aI"
+
+
+def get_last_commit(src_dir):
+    """Generate core modules JSON object with the following structure
+
+    "r.pack": {
+        "commit": "547ff44e6aecfb4c9cbf6a4717fc14e521bec0be",
+        "date": "2022-02-20T09:34:17+01:00"
+    },
+
+    commit key value is the commit hash
+    date key value is the author date
+
+    :param str src_dir: root source code dir
+
+    :return JSON obj result: core modules with last commit and commit
+                             date
+    """
+    result = {}
+    join_sep = ","
+    if not shutil.which("git"):
+        sys.exit("Git command was not found. Please install it.")
+    for root, _, files in os.walk(src_dir):
+        if ".html{}".format(join_sep) not in join_sep.join(files) + join_sep:
+            continue
+        rel_path = os.path.relpath(root)
+        process_result = subprocess.run(
+            [
+                "git",
+                "log",
+                "-1",
+                f"--format=%H,{COMMIT_DATE_FORMAT}",
+                rel_path,
+            ],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+        )  # --format=%H,COMMIT_DATE_FORMAT commit hash,author date
+        if process_result.returncode == 0:
+            try:
+                text = process_result.stdout.decode().strip()
+                if not text:
+                    # Non-versioned directories are picked by the filter, but git log
+                    # returns nothing, which is fine, so silently skip these.
+                    continue
+                commit, date = text.split(",")
+            except ValueError as error:
+                sys.exit(
+                    f"Cannot parse output from git log for '{rel_path}': "
+                    f"{text} because {error}"
+                )
+            result[os.path.basename(rel_path)] = {
+                "commit": commit,
+                "date": date,
+            }
+        else:
+            sys.exit(process_result.stderr.decode())
+    return result
+
+
+def main():
+    if len(sys.argv) < 2:
+        sys.exit("Set root source dir script arg, please.")
+    src_dir = sys.argv[1]
+    with open(
+        os.path.join(
+            src_dir,
+            "core_modules_with_last_commit.json",
+        ),
+        "w",
+    ) as f:
+        json.dump(get_last_commit(src_dir), f, indent=4)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/mkhtml.py b/tools/mkhtml.py
index 2e3825e2f99..7c9c47805fa 100644
--- a/tools/mkhtml.py
+++ b/tools/mkhtml.py
@@ -25,68 +25,91 @@
 import locale
 import json
 import pathlib
-import shutil
 import subprocess
-import time

-try:
-    # Python 2 import
-    from HTMLParser import HTMLParser
-except:
-    # Python 3 import
-    from html.parser import HTMLParser
-
-from six.moves.urllib import request as urlrequest
-from six.moves.urllib.error import HTTPError, URLError
+from html.parser import HTMLParser

-try:
-    import urlparse
-except:
-    import urllib.parse as urlparse
+from urllib import request as urlrequest
+from urllib.error import HTTPError, URLError
+import urllib.parse as urlparse

 try:
     import grass.script as gs
 except ImportError:
     # During compilation GRASS GIS
-    _ = str
-
-    class gs:
-        def warning(message):
-            pass
-
-        def fatal(message):
-            pass
+    gs = None

+from generate_last_commit_file import COMMIT_DATE_FORMAT

 HEADERS = {
     "User-Agent": "Mozilla/5.0",
 }
 HTTP_STATUS_CODES = list(http.HTTPStatus)

-if sys.version_info[0] == 2:
-    PY2 = True
-else:
-    PY2 = False

-if not PY2:
-    unicode = str
+def get_version_branch(major_version, addons_git_repo_url):
+    """Check if a version branch for the current GRASS version exists;
+    if not, take the branch for the previous version.
+    For the official repo we assume that at least one version branch is present
+
+    :param major_version int: GRASS GIS major version
+    :param addons_git_repo_url str: addons Git repository URL
+
+    :return version_branch str: version branch
+    """
+    version_branch = f"grass{major_version}"
+    if gs:
+        branch = gs.Popen(
+            [
+                "git",
+                "ls-remote",
+                "--heads",
+                addons_git_repo_url,
+                f"refs/heads/{version_branch}",
+            ],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+        )
+        branch, stderr = branch.communicate()
+        if stderr:
+            gs.fatal(
+                _(
+                    "Failed to get branch from the Git repository"
+                    " <{repo_path}>.\n{error}"
+                ).format(
+                    repo_path=addons_git_repo_url,
+                    error=gs.decode(stderr),
+                )
+            )
+        if version_branch not in gs.decode(branch):
+            version_branch = "grass{}".format(int(major_version) - 1)
+    return version_branch


 grass_version = os.getenv("VERSION_NUMBER", "unknown")
 trunk_url = ""
 addons_url = ""
+grass_git_branch = "main"
+major, minor, patch = None, None, None
 if grass_version != "unknown":
     major, minor, patch = grass_version.split(".")
-    grass_git_branch = "releasebranch_{major}_{minor}".format(
-        major=major,
-        minor=minor,
-    )
-    base_url = "https://github.com/OSGeo"
-    trunk_url = "{base_url}/grass/tree/{branch}/".format(
-        base_url=base_url, branch=grass_git_branch
+    base_url = "https://github.com/OSGeo/"
+    trunk_url = urlparse.urljoin(
+        base_url,
+        urlparse.urljoin(
+            "grass/tree/",
+            grass_git_branch + "/",
+        ),
     )
-    addons_url = "{base_url}/grass-addons/tree/grass{major}/".format(
-        base_url=base_url, major=major
+    addons_url = urlparse.urljoin(
+        base_url,
+        urlparse.urljoin(
"grass-addons/tree/", + get_version_branch( + major, + urlparse.urljoin(base_url, "grass-addons/"), + ), + ), ) @@ -194,71 +217,211 @@ def download_git_commit(url, response_format, *args, **kwargs): ) -def get_last_git_commit(src_dir, is_addon, addon_path): - """Get last module/addon git commit +def get_default_git_log(src_dir, datetime_format="%A %b %d %H:%M:%S %Y"): + """Get default Git commit and commit date, when getting commit from + local Git, local JSON file and remote GitHub REST API server wasn't + successful. - :param str src_dir: module/addon source dir - :param bool is_addon: True if it is addon - :param str addon_path: addon path + :param str src_dir: addon source dir + :param str datetime_format: output commit datetime format + e.g. Sunday Jan 16 23:09:35 2022 - :return dict git_log: dict with key commit and date, if not - possible download commit from GitHub API server - values of keys have "unknown" string + :return dict: dict which store last commit and commnit date """ - unknown = "unknown" - git_log = {"commit": unknown, "date": unknown} - datetime_format = "%A %b %d %H:%M:%S %Y" # e.g. Sun Jan 16 23:09:35 2022 - if is_addon: - grass_addons_url = ( - "https://api.github.com/repos/osgeo/grass-addons/commits?path={path}" - "&page=1&per_page=1&sha=grass{major}".format( - path=addon_path, - major=major, + return { + "commit": "unknown", + "date": datetime.fromtimestamp(os.path.getmtime(src_dir)).strftime( + datetime_format + ), + } + + +def parse_git_commit( + commit, + src_dir, + git_log=None, +): + """Parse Git commit + + :param str commit: commit message + :param str src_dir: addon source dir + :param dict git_log: dict which store last commit and commnit + date + + :return dict git_log: dict which store last commit and commnit date + """ + if not git_log: + git_log = get_default_git_log(src_dir=src_dir) + if commit: + git_log["commit"], commit_date = commit.strip().split(",") + git_log["date"] = format_git_commit_date_from_local_git( + commit_datetime=commit_date, + ) + return git_log + + +def get_git_commit_from_file( + src_dir, + git_log=None, +): + """Get Git commit from JSON file + + :param str src_dir: addon source dir + :param dict git_log: dict which store last commit and commnit date + + :return dict git_log: dict which store last commit and commnit date + """ + # Accessed date time if getting commit from JSON file wasn't successful + if not git_log: + git_log = get_default_git_log(src_dir=src_dir) + json_file_path = os.path.join( + topdir, + "core_modules_with_last_commit.json", + ) + if os.path.exists(json_file_path): + with open(json_file_path) as f: + core_modules_with_last_commit = json.load(f) + if pgm in core_modules_with_last_commit: + core_module = core_modules_with_last_commit[pgm] + git_log["commit"] = core_module["commit"] + git_log["date"] = format_git_commit_date_from_local_git( + commit_datetime=core_module["date"], ) - ) # sha=git_branch_name - else: - core_module_path = os.path.join( - *(set(src_dir.split(os.path.sep)) ^ set(topdir.split(os.path.sep))) + return git_log + + +def get_git_commit_from_rest_api_for_addon_repo( + addon_path, + src_dir, + git_log=None, +): + """Get Git commit from remote GitHub REST API for addon repository + + :param str addon_path: addon path + :param str src_dir: addon source dir + :param dict git_log: dict which store last commit and commnit date + + :return dict git_log: dict which store last commit and commnit date + """ + # Accessed date time if getting commit from GitHub REST API wasn't successful + if not 
git_log: + git_log = get_default_git_log(src_dir=src_dir) + grass_addons_url = ( + "https://api.github.com/repos/osgeo/grass-addons/commits?" + "path={path}&page=1&per_page=1&sha=grass{major}".format( + path=addon_path, + major=major, ) - grass_modules_url = ( - "https://api.github.com/repos/osgeo/grass/commits?path={path}" - "&page=1&per_page=1&sha={branch}".format( - branch=grass_git_branch, - path=core_module_path, + ) # sha=git_branch_name + + response = download_git_commit( + url=grass_addons_url, + response_format="application/json", + ) + if response: + commit = json.loads(response.read()) + if commit: + git_log["commit"] = commit[0]["sha"] + git_log["date"] = format_git_commit_date_from_rest_api( + commit_datetime=commit[0]["commit"]["author"]["date"], ) - ) # sha=git_branch_name + return git_log - if shutil.which("git"): - if os.path.exists(src_dir): - git_log["date"] = time.ctime(os.path.getmtime(src_dir)) - stdout, stderr = subprocess.Popen( - args=["git", "log", "-1", src_dir], - stdout=subprocess.PIPE, + +def format_git_commit_date_from_rest_api( + commit_datetime, datetime_format="%A %b %d %H:%M:%S %Y" +): + """Format datetime from remote GitHub REST API + + :param str commit_datetime: commit datetime + :param str datetime_format: output commit datetime format + e.g. Sunday Jan 16 23:09:35 2022 + + :return str: output formatted commit datetime + """ + return datetime.strptime( + commit_datetime, + "%Y-%m-%dT%H:%M:%SZ", # ISO 8601 YYYY-MM-DDTHH:MM:SSZ + ).strftime(datetime_format) + + +def format_git_commit_date_from_local_git( + commit_datetime, datetime_format="%A %b %d %H:%M:%S %Y" +): + """Format datetime from local Git or JSON file + + :param str commit_datetime: commit datetime + :param str datetime_format: output commit datetime format + e.g. 
Sunday Jan 16 23:09:35 2022 + + :return str: output formatted commit datetime + """ + return datetime.fromisoformat( + commit_datetime, + ).strftime(datetime_format) + + +def has_src_code_git(src_dir, is_addon): + """Has core module or addon source code Git + + :param str src_dir: core module or addon root directory + :param bool is_addon: True if it is addon + + :return subprocess.CompletedProcess or None: subprocess.CompletedProcess + if core module or addon + source code has Git + """ + actual_dir = os.getcwd() + if is_addon: + os.chdir(src_dir) + else: + os.chdir(topdir) + try: + process_result = subprocess.run( + [ + "git", + "log", + "-1", + f"--format=%H,{COMMIT_DATE_FORMAT}", + src_dir, + ], stderr=subprocess.PIPE, - ).communicate() - stdout = decode(stdout) - stderr = decode(stderr) - - if stderr and "fatal: not a git repository" in stderr: - response = download_git_commit( - url=grass_addons_url if is_addon else grass_modules_url, - response_format="application/json", + stdout=subprocess.PIPE, + ) # --format=%H,COMMIT_DATE_FORMAT commit hash,author date + os.chdir(actual_dir) + return process_result if process_result.returncode == 0 else None + except FileNotFoundError: + os.chdir(actual_dir) + return None + + +def get_last_git_commit(src_dir, addon_path, is_addon): + """Get last module/addon git commit + + :param str src_dir: module/addon source dir + :param str addon_path: addon path + :param bool is_addon: True if it is addon + + :return dict git_log: dict with key commit and date, if not + possible download commit from GitHub REST API + server values of keys have "unknown" string + """ + process_result = has_src_code_git(src_dir=src_dir, is_addon=is_addon) + if process_result: + return parse_git_commit( + commit=process_result.stdout.decode(), + src_dir=src_dir, + ) + else: + if gs: + # Addons installation + return get_git_commit_from_rest_api_for_addon_repo( + addon_path=addon_path, + src_dir=src_dir, ) - if response: - commit = json.loads(response.read()) - if commit: - git_log["commit"] = commit[0]["sha"] - git_log["date"] = datetime.strptime( - commit[0]["commit"]["author"]["date"], - "%Y-%m-%dT%H:%M:%SZ", - ).strftime(datetime_format) + # During GRASS GIS compilation from source code without Git else: - if stdout: - commit = stdout.splitlines() - git_log["commit"] = commit[0].split(" ")[-1] - commit_date = commit[2].lstrip("Date:").strip() - git_log["date"] = commit_date.rsplit(" ", 1)[0] - return git_log + return get_git_commit_from_file(src_dir=src_dir) html_page_footer_pages_path = ( @@ -275,8 +438,8 @@ def get_last_git_commit(src_dir, is_addon, addon_path): header_base = """ - GRASS GIS Manual: ${PGM} - + + ${PGM} - GRASS GIS Manual @@ -287,7 +450,6 @@ def get_last_git_commit(src_dir, is_addon, addon_path):
GRASS logo -
""" header_nopgm = """

${PGM}

@@ -302,7 +464,7 @@ def get_last_git_commit(src_dir, is_addon, addon_path): """ sourcecode = string.Template( -"""

SOURCE CODE

+ """

SOURCE CODE

Available at: ${PGM} source code @@ -315,7 +477,7 @@ def get_last_git_commit(src_dir, is_addon, addon_path): ) footer_index = string.Template( -"""


+ """

Main index | ${INDEXNAMECAP} index | @@ -326,17 +488,18 @@ def get_last_git_commit(src_dir, is_addon, addon_path):

© 2003-${YEAR} -GRASS Development Team, +GRASS Development Team, GRASS GIS ${GRASS_VERSION} Reference Manual

-""") +""" +) footer_noindex = string.Template( -"""
+ """

Main index | Topics index | @@ -346,24 +509,22 @@ def get_last_git_commit(src_dir, is_addon, addon_path):

© 2003-${YEAR} -GRASS Development Team, +GRASS Development Team, GRASS GIS ${GRASS_VERSION} Reference Manual

-""") +""" +) + def read_file(name): try: - f = open(name, 'rb') - s = f.read() - f.close() - if PY2: - return s - else: - return decode(s, encoding='ISO-8859-1') + with open(name) as f: + s = f.read() + return s except IOError: return "" @@ -374,13 +535,13 @@ def __init__(self): HTMLParser.__init__(self) self.reset() self.idx = 1 - self.tag_curr = '' - self.tag_last = '' + self.tag_curr = "" + self.tag_last = "" self.process_text = False self.data = [] - self.tags_allowed = ('h1', 'h2', 'h3') - self.tags_ignored = ('img') - self.text = '' + self.tags_allowed = ("h1", "h2", "h3") + self.tags_ignored = "img" + self.text = "" def handle_starttag(self, tag, attrs): if tag in self.tags_allowed: @@ -390,11 +551,10 @@ def handle_starttag(self, tag, attrs): def handle_endtag(self, tag): if tag in self.tags_allowed: - self.data.append((tag, '%s_%d' % (tag, self.idx), - self.text)) + self.data.append((tag, "%s_%d" % (tag, self.idx), self.text)) self.idx += 1 self.process_text = False - self.text = '' + self.text = "" self.tag_curr = self.tag_last @@ -404,7 +564,7 @@ def handle_data(self, data): if self.tag_curr in self.tags_allowed or self.tag_curr in self.tags_ignored: self.text += data else: - self.text += '<%s>%s' % (self.tag_curr, data, self.tag_curr) + self.text += "<%s>%s" % (self.tag_curr, data, self.tag_curr) # instantiate the parser and fed it some HTML parser = MyHTMLParser() @@ -412,106 +572,213 @@ def handle_data(self, data): return parser.data + def escape_href(label): # remove html tags - label = re.sub('<[^<]+?>', '', label) + label = re.sub("<[^<]+?>", "", label) # fix   - label = label.replace(' ', '') + label = label.replace(" ", "") # fix " - label = label.replace('"', '') + label = label.replace('"', "") # replace space with underscore + lower - return label.replace(' ', '-').lower() + return label.replace(" ", "-").lower() + + +def write_toc(data, hamburger_menu_toc=False): + """Write Table of Contents + + :param tuple data: parsed data from MyHTMLParser class instance + :param bool hamburger_menu_toc: write hamburger menu TOC for the + mobile, tablet screen + """ -def write_toc(data): if not data: return fd = sys.stdout - fd.write('
\n') - fd.write('

Table of contents

\n') - fd.write('\n") + fd.write("
\n") + def update_toc(data): ret_data = [] - pat = re.compile(r'(<(h[2|3])>)(.+)()') + pat = re.compile(r"(<(h[2|3])>)(.+)()") idx = 1 for line in data.splitlines(): if pat.search(line): xline = pat.split(line) - line = xline[1] + '' % escape_href(xline[3]) + xline[3] + '' + xline[4] + line = ( + xline[1] + + '' % escape_href(xline[3]) + + xline[3] + + "" + + xline[4] + ) idx += 1 ret_data.append(line) - return '\n'.join(ret_data) + return "\n".join(ret_data) def get_addon_path(): """Check if pgm is in the addons list and get addon path - return: pgm path if pgm is addon else None + Make or update list of the official addons source + code paths g.extension prefix parameter plus /grass-addons directory + using Git repository + + :return str|None: pgm path if pgm is addon else None """ - addon_base = os.getenv('GRASS_ADDON_BASE') - if addon_base: - # addons_paths.json is file created during install extension - # check get_addons_paths() function in the g.extension.py file - addons_file = "addons_paths.json" - addons_paths = os.path.join(addon_base, addons_file) - if not os.path.exists(addons_paths): - # Compiled addon has own dir e.g. ~/.grass7/addons/db.join/ - # with bin/ docs/ etc/ scripts/ subdir, required for compilation - # addons on osgeo lxd container server and generation of - # modules.xml file (build-xml.py script), when addons_paths.json - # file is stored one level dir up - addons_paths = os.path.join( - os.path.abspath(os.path.join(addon_base, "..")), - addons_file, + addons_base_dir = os.getenv("GRASS_ADDON_BASE") + if addons_base_dir and major: + grass_addons_dir = pathlib.Path(addons_base_dir) / "grass-addons" + if gs: + call = gs.call + popen = gs.Popen + fatal = gs.fatal + else: + call = subprocess.call + popen = subprocess.Popen + fatal = sys.stderr.write + addons_branch = get_version_branch( + major_version=major, + addons_git_repo_url=urlparse.urljoin(base_url, "grass-addons/"), + ) + if not pathlib.Path(addons_base_dir).exists(): + pathlib.Path(addons_base_dir).mkdir(parents=True, exist_ok=True) + if not grass_addons_dir.exists(): + call( + [ + "git", + "clone", + "-q", + "--no-checkout", + f"--branch={addons_branch}", + "--filter=tree:0", + urlparse.urljoin(base_url, "grass-addons/"), + ], + cwd=addons_base_dir, ) - if not os.path.exists(addons_paths): - return - with open(addons_paths) as f: - addons_paths = json.load(f) - for addon in addons_paths["tree"]: - if pgm == pathlib.Path(addon["path"]).name: - return addon["path"] + addons_file_list = popen( + ["git", "ls-tree", "--name-only", "-r", addons_branch], + cwd=grass_addons_dir, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + addons_file_list, stderr = addons_file_list.communicate() + if stderr: + message = ( + "Failed to get addons files list from the" + " Git repository <{repo_path}>.\n{error}" + ) + if gs: + fatal( + _( + message, + ).format( + repo_path=grass_addons_dir, + error=gs.decode(stderr), + ) + ) + else: + message += "\n" + fatal( + message.format( + repo_path=grass_addons_dir, + error=stderr.decode(), + ) + ) + addon_paths = re.findall( + rf".*{pgm}*.", + gs.decode(addons_file_list) if gs else addons_file_list.decode(), + ) + for addon_path in addon_paths: + if pgm == pathlib.Path(addon_path).name: + return addon_path # process header src_data = read_file(src_file) -name = re.search('()', src_data, re.IGNORECASE) -pgm_desc = None +name = re.search("()", src_data, re.IGNORECASE) +pgm_desc = "GRASS GIS Reference Manual" if name: - pgm = name.group(2).strip().split('-', 1)[0].strip() - 
name_desc = re.search('()', src_data, re.IGNORECASE) + pgm = name.group(2).strip().split("-", 1)[0].strip() + name_desc = re.search( + "()", src_data, re.IGNORECASE + ) if name_desc: pgm_desc = name_desc.group(2).strip() -desc = re.search('()', src_data, - re.IGNORECASE) +desc = re.search("()", src_data, re.IGNORECASE) if desc: pgm = desc.group(2).strip() header_tmpl = string.Template(header_base + header_nopgm) @@ -521,7 +788,7 @@ def get_addon_path(): else: header_tmpl = string.Template(header_base + header_pgm_desc) -if not re.search('', src_data, re.IGNORECASE): +if not re.search("", src_data, re.IGNORECASE): tmp_data = read_file(tmp_file) """ Adjusting keywords html pages paths if add-on html man page @@ -530,12 +797,13 @@ def get_addon_path(): if html_page_footer_pages_path: new_keywords_paths = [] orig_keywords_paths = re.search( - r'KEYWORDS(.*?)', - tmp_data, re.DOTALL, + r"KEYWORDS(.*?)", + tmp_data, + re.DOTALL, ) if orig_keywords_paths: search_txt = 'href="' - for i in orig_keywords_paths.group(1).split(','): + for i in orig_keywords_paths.group(1).split(","): if search_txt in i: index = i.index(search_txt) + len(search_txt) new_keywords_paths.append( @@ -544,14 +812,24 @@ def get_addon_path(): if new_keywords_paths: tmp_data = tmp_data.replace( orig_keywords_paths.group(1), - ','.join(new_keywords_paths), + ",".join(new_keywords_paths), ) - if not re.search('', tmp_data, re.IGNORECASE): + if not re.search("", tmp_data, re.IGNORECASE): sys.stdout.write(header_tmpl.substitute(PGM=pgm, PGM_DESC=pgm_desc)) + if tmp_data: + header_logo_img_el = 'GRASS logo' for line in tmp_data.splitlines(True): - if not re.search('|', line, re.IGNORECASE): - sys.stdout.write(line) + # The cleanup happens on Makefile level too. + if not re.search( + "|| ", line, re.IGNORECASE + ): + if header_logo_img_el in line: + sys.stdout.write(line) + # create hamburger menu TOC + write_toc(create_toc(src_data), hamburger_menu_toc=True) + else: + sys.stdout.write(line) # create TOC write_toc(create_toc(src_data)) @@ -561,31 +839,31 @@ def get_addon_path(): # if is found, suppose a complete html is provided. 
# otherwise, generate module class reference: -if re.search('', src_data, re.IGNORECASE): +if re.search("", src_data, re.IGNORECASE): sys.exit() index_names = { - 'd' : 'display', - 'db': 'database', - 'g' : 'general', - 'i' : 'imagery', - 'm' : 'miscellaneous', - 'ps': 'postscript', - 'p' : 'paint', - 'r' : 'raster', - 'r3': 'raster3d', - 's' : 'sites', - 't' : 'temporal', - 'v' : 'vector' - } + "d": "display", + "db": "database", + "g": "general", + "i": "imagery", + "m": "miscellaneous", + "ps": "postscript", + "p": "paint", + "r": "raster", + "r3": "raster3d", + "s": "sites", + "t": "temporal", + "v": "vector", +} def to_title(name): """Convert name of command class/family to form suitable for title""" - if name == 'raster3d': - return '3D raster' - elif name == 'postscript': - return 'PostScript' + if name == "raster3d": + return "3D raster" + elif name == "postscript": + return "PostScript" else: return name.capitalize() @@ -595,17 +873,17 @@ def to_title(name): index_titles[key] = to_title(name) # process footer -index = re.search('()', src_data, re.IGNORECASE) +index = re.search("()", src_data, re.IGNORECASE) if index: index_name = index.group(2).strip() - if '|' in index_name: - index_name, index_name_cap = index_name.split('|', 1) + if "|" in index_name: + index_name, index_name_cap = index_name.split("|", 1) else: index_name_cap = to_title(index_name) else: - mod_class = pgm.split('.', 1)[0] - index_name = index_names.get(mod_class, '') - index_name_cap = index_titles.get(mod_class, '') + mod_class = pgm.split(".", 1)[0] + index_name = index_names.get(mod_class, "") + index_name_cap = index_titles.get(mod_class, "") year = os.getenv("VERSION_DATE") if not year: @@ -614,9 +892,9 @@ def to_title(name): # check the names of scripts to assign the right folder topdir = os.path.abspath(os.getenv("MODULE_TOPDIR")) curdir = os.path.abspath(os.path.curdir) -if curdir.startswith(topdir): +if curdir.startswith(topdir + os.path.sep): source_url = trunk_url - pgmdir = curdir.replace(topdir, '').lstrip(os.path.sep) + pgmdir = curdir.replace(topdir, "").lstrip(os.path.sep) else: # addons source_url = addons_url @@ -639,8 +917,8 @@ def to_title(name): ) else: url_source = urlparse.urljoin(source_url, pgmdir) -if sys.platform == 'win32': - url_source = url_source.replace(os.path.sep, '/') +if sys.platform == "win32": + url_source = url_source.replace(os.path.sep, "/") if index_name: branches = "branches"
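Note (not part of the patch): a minimal sketch of how the generated core_modules_with_last_commit.json can be consumed, mirroring what get_git_commit_from_file() and format_git_commit_date_from_local_git() in mkhtml.py do with it. The file location and the "r.pack" module name used here are assumptions for illustration only.

#!/usr/bin/env python3
# Minimal sketch, assuming core_modules_with_last_commit.json sits in the
# current directory and that "r.pack" has an entry in it (both assumptions).
import json
from datetime import datetime

with open("core_modules_with_last_commit.json") as f:
    modules = json.load(f)

entry = modules["r.pack"]  # {"commit": "<hash>", "date": "<ISO 8601 author date>"}
# git log --format=%aI writes a strict ISO 8601 date, so fromisoformat() parses it,
# and strftime() renders it the way mkhtml.py prints it in the manual footer.
date = datetime.fromisoformat(entry["date"]).strftime("%A %b %d %H:%M:%S %Y")
print(f'r.pack: commit {entry["commit"]} ({date})')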