Made module level docstrings not required

marsninja committed Sep 17, 2023
1 parent 5557ea6 commit cdb9443
Showing 4 changed files with 29 additions and 4 deletions.
5 changes: 2 additions & 3 deletions examples/manual_code/circle.jac
@@ -1,5 +1,4 @@
"""
(Module docstring are Required in a valid Jac)
This module demonstrates a simple circle class and a function to calculate the area of a circle.
"""
import:py math;
@@ -54,8 +53,8 @@ with entry {c = Circle(RAD);} # Global also works here

 with entry:__main__ { # TODO: add name == option abstract feature
     # To run the program functionality
-    print(f"Area of a circle with radius 5: {calculate_area(RAD)}");
-    print(f"Area of a {c.shape_type.value} with radius 5: {c.area()}");
+    print(f"Area of a circle with radius {RAD} using function: {calculate_area(RAD)}");
+    print(f"Area of a {c.shape_type.value} with radius {RAD} using class: {c.area()}");
 }


2 changes: 2 additions & 0 deletions examples/manual_code/test.jac
@@ -0,0 +1,2 @@
"""This is a docstring"""
with entry { print("hello"); }
22 changes: 22 additions & 0 deletions jaclang/jac/lexer.py
@@ -14,8 +14,10 @@ def __init__(
         input_ir: str,
         base_path: str = "",
         prior: Transform | None = None,
+        fstr_override: bool = False,
     ) -> None:
         """Initialize lexer."""
+        self.fstr_override = fstr_override
         Transform.__init__(self, mod_path, input_ir, base_path, prior)  # type: ignore
         self.ir: Generator = self.ir

@@ -376,6 +378,26 @@ def transform(self, ir: str) -> Generator:
"""Tokenize the input."""
return self.tokenize(ir)

def tokenize(self, text: str) -> Generator:
"""Tokenize override for no module level docstring."""
has_doc_string_start = False
for tok in super().tokenize(text):
if (
tok.type != "DOC_STRING"
and not has_doc_string_start
and not self.fstr_override
):
dtok = Token()
dtok.type = "DOC_STRING"
dtok.value = '""""""'
dtok.lineno = 1
dtok.lineidx = 0
dtok.index = 0
dtok.end = 0
yield dtok
has_doc_string_start = True
yield tok

def error(self, t: Token) -> None:
"""Raise an error for illegal characters."""
self.cur_line = self.lineno
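For context, the injection pattern above can be sketched in isolation. The snippet below is a hypothetical, self-contained Python illustration, not jaclang's actual classes: SimpleToken and tokenize_with_default_docstring are made-up names standing in for the lexer's Token type and the tokenize override, and the suppress flag plays the role of fstr_override.

from dataclasses import dataclass
from typing import Iterable, Iterator


@dataclass
class SimpleToken:
    """Hypothetical stand-in for the lexer's Token class."""

    type: str
    value: str
    lineno: int = 1


def tokenize_with_default_docstring(
    tokens: Iterable[SimpleToken], suppress: bool = False
) -> Iterator[SimpleToken]:
    """Yield tokens, prepending an empty DOC_STRING if the stream lacks one.

    suppress mirrors the role of fstr_override: when re-lexing a fragment
    (such as an f-string piece) no synthetic docstring should be injected.
    """
    seen_first = False
    for tok in tokens:
        if tok.type != "DOC_STRING" and not seen_first and not suppress:
            # The module did not start with a docstring, so fabricate an
            # empty triple-quoted one at line 1 before the real stream.
            yield SimpleToken(type="DOC_STRING", value='""""""', lineno=1)
        seen_first = True
        yield tok


# Usage: a module whose first token is not a docstring gets one injected.
stream = [SimpleToken("KW_WITH", "with"), SimpleToken("KW_ENTRY", "entry")]
print([t.type for t in tokenize_with_default_docstring(stream)])
# Prints: ['DOC_STRING', 'KW_WITH', 'KW_ENTRY']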
4 changes: 3 additions & 1 deletion jaclang/jac/parser.py
@@ -1437,7 +1437,9 @@ def find_and_concat_fstr_pieces(tup: tuple) -> str:
         tree = JacParserExpr(
             mod_path="",
             input_ir=JacLexer(
-                mod_path="", input_ir=find_and_concat_fstr_pieces(tree)
+                mod_path="",
+                input_ir=find_and_concat_fstr_pieces(tree),
+                fstr_override=True,
             ).ir,
         ).ir_tup[2]
         kids = tree[2:]
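Design note: passing fstr_override=True here makes the condition "not self.fstr_override" in the new tokenize override false, so no synthetic DOC_STRING token is prepended when the lexer is re-run on the concatenated f-string pieces. The placeholder docstring is only injected when lexing a whole module.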
