diff --git a/etc/README.md b/etc/README.md
deleted file mode 100644
index 7824777..0000000
--- a/etc/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-The scripts in this directory should be considered *experimental*, and may be
-removed in future versions of Code Base Investigator.
diff --git a/etc/coverage.py b/etc/coverage.py
deleted file mode 100644
index ecc6019..0000000
--- a/etc/coverage.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019-2024 Intel Corporation
-# SPDX-License-Identifier: BSD-3-Clause
-"""
-Calculates the lines of code used by a single platform, as described by a
-compilation database, and outputs coverage in the P3 Analysis Library format.
-"""
-
-import argparse
-import json
-import logging
-import os
-import sys
-
-import codebasin.config as config
-import codebasin.finder as finder
-import codebasin.util as util
-from codebasin.walkers.exporter import Exporter
-
-if __name__ == "__main__":
-    # Read command-line arguments
-    desc = "Code Base Investigator Coverage Tool"
-    parser = argparse.ArgumentParser(description=desc)
-    parser.add_argument(
-        "ifile",
-        metavar="INPUT",
-        help="path to compilation database JSON file",
-    )
-    parser.add_argument(
-        "ofile",
-        metavar="OUTPUT",
-        help="path to coverage JSON file",
-    )
-    args = parser.parse_args()
-
-    dbpath = os.path.realpath(args.ifile)
-    covpath = os.path.realpath(args.ofile)
-    for path in [dbpath, covpath]:
-        if not util.valid_path(path):
-            raise ValueError(f"{path} is not a valid path.")
-        if not util.ensure_ext(path, [".json"]):
-            raise ValueError(f"{path} is not a JSON file.")
-
-    # Ensure regular CBI output goes to stderr
-    stderr_log = logging.StreamHandler(sys.stderr)
-    stderr_log.setFormatter(logging.Formatter("[%(levelname)-8s] %(message)s"))
-    logging.getLogger("codebasin").addHandler(stderr_log)
-    logging.getLogger("codebasin").setLevel(logging.WARNING)
-
-    # Run CBI configured as-if:
-    # - configuration contains a single (dummy) platform
-    # - codebase contains all files in the specified compilation database
-    db = config.load_database(dbpath, os.getcwd())
-    configuration = {"cli": db}
-    files = [e["file"] for e in db]
-    codebase = {"files": files, "platforms": ["cli"], "exclude_files": []}
-
-    state = finder.find(os.getcwd(), codebase, configuration)
-
-    exporter = Exporter(codebase)
-    exports = exporter.walk(state)
-    for p in codebase["platforms"]:
-        covarray = []
-        for filename in exports[p]:
-            covobject = {"file": filename, "regions": []}
-            for region in exports[p][filename]:
-                covobject["regions"].append(list(region))
-            covarray.append(covobject)
-        util._validate_json(covarray, "coverage")
-        json_string = json.dumps(covarray)
-        with open(covpath, "w") as fp:
-            fp.write(json_string)
diff --git a/etc/preprocess.py b/etc/preprocess.py
deleted file mode 100755
index b82bfb0..0000000
--- a/etc/preprocess.py
+++ /dev/null
@@ -1,124 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (C) 2019-2024 Intel Corporation
-# SPDX-License-Identifier: BSD-3-Clause
-"""
-Preprocess a source file using the CBI preprocessor.
-"""
-
-import argparse
-import logging
-import os
-import sys
-
-import codebasin.finder as finder
-from codebasin.platform import Platform
-from codebasin.preprocessor import macro_from_definition_string
-from codebasin.walkers.source_printer import (
-    PreprocessedSourcePrinter,
-    SourcePrinter,
-)
-
-if __name__ == "__main__":
-    # Read command-line arguments
-    parser = argparse.ArgumentParser(
-        description="Code Base Investigator Preprocessor",
-    )
-    parser.add_argument(
-        "-I",
-        dest="include_paths",
-        metavar="PATH",
-        action="append",
-        default=[],
-        help="add to the include path",
-    )
-    parser.add_argument(
-        "-include",
-        dest="include_files",
-        metavar="PATH",
-        action="append",
-        default=[],
-        help="add to the include files",
-    )
-    parser.add_argument(
-        "-D",
-        dest="defines",
-        metavar="DEFINE",
-        action="append",
-        default=[],
-        help="define a macro",
-    )
-    parser.add_argument(
-        "--passthrough",
-        dest="passthrough",
-        action="store_true",
-        default=False,
-        help="print source code without preprocessing",
-    )
-    parser.add_argument(
-        "--no-expand",
-        dest="expand",
-        action="store_false",
-        default=True,
-        help="do not expand macros in source code",
-    )
-    parser.add_argument(
-        "--summarize",
-        dest="summarize",
-        action="store_true",
-        default=False,
-        help="summarize code blocks with SLOC count",
-    )
-    parser.add_argument("filename", metavar="FILE", action="store")
-    args = parser.parse_args()
-
-    # Ensure regular CBI output goes to stderr
-    # Allows preprocessed output to print to stdout by default
-    stderr_log = logging.StreamHandler(sys.stderr)
-    stderr_log.setFormatter(logging.Formatter("[%(levelname)-8s] %(message)s"))
-    logging.getLogger("codebasin").addHandler(stderr_log)
-    logging.getLogger("codebasin").setLevel(logging.WARNING)
-
-    # Run CBI configured as-if:
-    # - codebase contains a single file (the file being preprocessed)
-    # - configuration contains a single platform (corresponding to flags)
-    file_path = os.path.realpath(args.filename)
-    codebase = {"files": [file_path], "platforms": ["cli"]}
-    configuration = {
-        "cli": [
-            {
-                "file": file_path,
-                "defines": args.defines,
-                "include_paths": args.include_paths,
-                "include_files": args.include_files,
-            },
-        ],
-    }
-
-    state = finder.find(
-        os.getcwd(),
-        codebase,
-        configuration,
-        summarize_only=args.summarize,
-    )
-    platform = Platform("cli", os.getcwd())
-    for path in args.include_paths:
-        platform.add_include_path(path)
-    for definition in args.defines:
-        macro = macro_from_definition_string(definition)
-        platform.define(macro.name, macro)
-
-    source_tree = state.get_tree(file_path)
-    node_associations = state.get_map(file_path)
-
-    if args.passthrough:
-        source_printer = SourcePrinter(source_tree)
-        source_printer.walk()
-    else:
-        source_printer = PreprocessedSourcePrinter(
-            source_tree,
-            node_associations,
-            platform,
-            state,
-            args.expand,
-        )
-        source_printer.walk()
diff --git a/etc/sloc_translate.py b/etc/sloc_translate.py
deleted file mode 100755
index c82fa0f..0000000
--- a/etc/sloc_translate.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3.6
-# Copyright (C) 2019-2024 Intel Corporation
-# SPDX-License-Identifier: BSD-3-Clause
-"""
-Parse source file, reporting sloc and physical lines.
-Can optionally print logical line regions and cleaned lines.
-"""
-
-import os
-import sys
-from pathlib import Path
-
-from codebasin.file_source import get_file_source
-from codebasin.util import safe_open_read_nofollow
-
-
-def file_sloc(path, verbose=False):
-    """
-    Process file in path, reporting total_sloc/loc.
-    Optionally print logical regions.
-    """
-    file_source = get_file_source(path)
-    if not file_source:
-        raise RuntimeError(
-            f"{path} doesn't appear to be a language this tool can process",
-        )
-    with safe_open_read_nofollow(
-        path,
-        mode="r",
-        errors="replace",
-    ) as source_file:
-        walker = file_source(source_file, relaxed=False)
-        try:
-            while True:
-                logical_line = next(walker)
-                if verbose:
-                    start = logical_line.current_physical_start
-                    end = logical_line.current_physical_end
-                    sloc = logical_line.local_sloc
-                    flushed = logical_line.flushed_line
-                    category = logical_line.category
-                    print(
-                        f"{path} [{start}, {end}) ({sloc}): "
-                        + f"{flushed} {category}",
-                    )
-        except StopIteration as it:
-            total_sloc, physical_loc = it.value
-
-    return (path, total_sloc, physical_loc)
-
-
-def walk_sloc(in_root, extensions, verbose=False):
-    """
-    Run file_sloc on each file that matches regexp under root path.
-    """
-    in_root = os.path.realpath(in_root)
-    for root, _, files in os.walk(in_root):
-        for current_file in files:
-            full_path = os.path.realpath(os.path.join(root, current_file))
-            if Path(full_path).suffix in extensions:
-                try:
-                    (filename, total_sloc, physical_loc) = file_sloc(full_path)
-                    if verbose:
-                        print(f"{filename}, {total_sloc}, {physical_loc}")
-                except FileNotFoundError:
-                    pass
-
-
-def sloc_translate(args):
-    """
-    Toplevel routine for script.
-    """
-    if len(args) == 2:
-        path = os.path.realpath(args[1])
-        (filename, total_sloc, physical_loc) = file_sloc(path, verbose=True)
-        print(f"{filename}, {total_sloc}, {physical_loc}")
-    elif len(args) == 3:
-        cleaned = [f".{x}" for x in args[2].split(",")]
-        walk_sloc(args[1], cleaned, verbose=True)
-    else:
-        print(
-            "Expected either 1 argument (a single file to parse"
-            + " and print) or 2 (a directory root & file pattern)",
-        )
-
-
-if __name__ == "__main__":
-    sloc_translate(sys.argv)