7 changes: 3 additions & 4 deletions codebasin/finder.py
@@ -14,10 +14,9 @@

from tqdm import tqdm

-from codebasin import CodeBase, file_parser, platform, preprocessor
+from codebasin import CodeBase, file_parser, preprocessor
 from codebasin.language import FileLanguage
-from codebasin.platform import Platform
-from codebasin.preprocessor import CodeNode, Node, SourceTree, Visit
+from codebasin.preprocessor import CodeNode, Node, Platform, SourceTree, Visit

log = logging.getLogger(__name__)

@@ -229,7 +228,7 @@ def _potential_file_generator(
         leave=False,
         disable=not show_progress,
     ):
-        file_platform = platform.Platform(p, rootdir)
+        file_platform = Platform(p, rootdir)

         for path in e["include_paths"]:
             file_platform.add_include_path(path)
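With codebasin.platform deleted, call sites import Platform from codebasin.preprocessor and construct it directly. A minimal sketch of the updated pattern, assuming a config entry shaped like those consumed by _potential_file_generator (the platform name, include paths, and rootdir are illustrative placeholders):

    from codebasin.preprocessor import Platform

    rootdir = "/path/to/project"  # illustrative
    entry = {"include_paths": ["include"]}  # illustrative config entry

    file_platform = Platform("cpu", rootdir)
    for path in entry["include_paths"]:
        file_platform.add_include_path(path)
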
118 changes: 0 additions & 118 deletions codebasin/platform.py

This file was deleted.

111 changes: 109 additions & 2 deletions codebasin/preprocessor.py
@@ -1646,6 +1646,114 @@ def replace(
        return substituted_tokens


class Platform:
    """
    Represents a platform and everything associated with it.
    Contains a set of macro definitions, a list of include paths,
    and a list of includes to skip.
    """

    def __init__(self, name: str, _root_dir: str) -> None:
        self._definitions: dict[str, Macro | MacroFunction] = {}
        self._skip_includes: list[str] = []
        self._include_paths: list[str] = []
        self._root_dir = _root_dir
        self.name = name
        self.found_incl: dict[str, str | None] = {}

    def add_include_path(self, path: str) -> None:
        """
        Insert a new path into the list of include paths for this
        platform.
        """
        self._include_paths.append(path)

    def undefine(self, identifier: str) -> None:
        """
        Undefine a macro for this platform, if it's defined.
        """
        if identifier in self._definitions:
            del self._definitions[identifier]

    def define(self, identifier: str, macro: Macro | MacroFunction) -> None:
        """
        Define a new macro for this platform, only if it's not already
        defined.
        """
        if identifier not in self._definitions:
            self._definitions[identifier] = macro

    def add_include_to_skip(self, fn: str) -> None:
        """
        Add an include file to the list of includes to skip for this
        platform, if it's not already present.
        """
        if fn not in self._skip_includes:
            self._skip_includes.append(fn)

    def process_include(self, fn: str) -> bool:
        """
        Return True if this include file should be processed, or False
        if it should be skipped.
        """
        return fn not in self._skip_includes

    # FIXME: This should return a bool, but the usage relies on a str.
    def is_defined(self, identifier: str) -> str:
        """
        Return a string representing whether the macro named by
        'identifier' is defined.
        """
        if identifier in self._definitions:
            return "1"
        return "0"

    def get_macro(self, identifier: str) -> Macro | MacroFunction | None:
        """
        Return either a macro definition (if it's defined), or None.
        """
        if identifier in self._definitions:
            return self._definitions[identifier]
        return None

    def find_include_file(
        self,
        filename: str,
        this_path: str,
        is_system_include: bool = False,
    ) -> str | None:
        """
        Determine and return the full path to an include file named
        'filename', using the include paths for this platform.

        System includes do not include the rootdir, while local
        includes do.
        """
        try:
            return self.found_incl[filename]
        except KeyError:
            pass

        include_file = None

        local_paths = []
        if not is_system_include:
            local_paths += [this_path]

        # Determine the path to the include file, if it exists
        for path in local_paths + self._include_paths:
            test_path = os.path.abspath(os.path.join(path, filename))
            if os.path.isfile(test_path):
                include_file = test_path
                self.found_incl[filename] = include_file
                return include_file

        # TODO: Check this optimization is always valid.
        if include_file is not None:
            raise RuntimeError(f"Expected 'None', got '{filename}'")
        self.found_incl[filename] = None
        return None


class ExpanderHelper:
"""
Class to act as token stream for expansion stack.
@@ -1715,8 +1823,7 @@ class MacroExpander:
    A specialized token parser for recognizing and expanding macros.
    """

-    # FIXME: Cannot define Platform type without circular import.
-    def __init__(self, platform: Any) -> None:
+    def __init__(self, platform: Platform) -> None:
        self.platform = platform
        self.parser_stack: list[ExpanderHelper] = []
        self.no_expand: list[str] = []
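For downstream code, the relocated class behaves as before; only the import path changes, and MacroExpander can now carry a real Platform annotation. A minimal usage sketch, assuming only the API visible in this diff (the macro definition, platform name, and paths are illustrative):

    from codebasin import preprocessor
    from codebasin.preprocessor import MacroExpander, Platform

    p = Platform("Test", "/path/to/project")  # name and rootdir are illustrative
    p.add_include_path("/path/to/project/include")

    # Define OFFSET=4 (a no-op if OFFSET is already defined), then expand it.
    macro = preprocessor.macro_from_definition_string("OFFSET=4")
    p.define(macro.name, macro)
    tokens = preprocessor.Lexer("OFFSET + 1").tokenize()
    expanded = MacroExpander(p).expand(tokens)

    # Resolve a quoted include: the including file's directory is searched
    # before the platform include paths; results are cached in found_incl.
    header = p.find_include_file("config.h", "/path/to/project/src")
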
21 changes: 11 additions & 10 deletions tests/macro_expansion/test_macro_expansion.py
@@ -5,7 +5,8 @@
import unittest
from pathlib import Path

-from codebasin import CodeBase, finder, platform, preprocessor
+from codebasin import CodeBase, finder, preprocessor
+from codebasin.preprocessor import Platform


class TestMacroExpansion(unittest.TestCase):
@@ -62,7 +63,7 @@ def test_cat(self):
test_str = "CATTEST=first ## 2"
macro = preprocessor.macro_from_definition_string(test_str)
tokens = preprocessor.Lexer("CATTEST").tokenize()
p = platform.Platform("Test", self.rootdir)
p = Platform("Test", self.rootdir)
p._definitions = {macro.name: macro}
expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
expected_tokens = preprocessor.Lexer("first2").tokenize()
@@ -75,7 +76,7 @@ def test_stringify_quote(self):
test_str = "STR(x)= #x"
macro = preprocessor.macro_from_definition_string(test_str)
tokens = preprocessor.Lexer('STR(foo("4 + 5"))').tokenize()
p = platform.Platform("Test", self.rootdir)
p = Platform("Test", self.rootdir)
p._definitions = {macro.name: macro}
expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
expected_tokens = preprocessor.Lexer('"foo(\\"4 + 5\\")"').tokenize()
@@ -89,7 +90,7 @@ def test_stringify_ws(self):
        macro = preprocessor.macro_from_definition_string(test_str)
        to_expand_str = r'STR(L + 2-2 "\" \n")'
        tokens = preprocessor.Lexer(to_expand_str).tokenize()
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
        p._definitions = {macro.name: macro}
        expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
        expected_str = r'TEST "L + 2-2 \"\\\" \\n\""'
@@ -103,7 +104,7 @@ def test_stringify_nested(self):
        mac_xstr = preprocessor.macro_from_definition_string("xstr(s)=str(s)")
        mac_str = preprocessor.macro_from_definition_string("str(s)=#s")
        mac_def = preprocessor.macro_from_definition_string("foo=4")
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
        p._definitions = {x.name: x for x in [mac_xstr, mac_str, mac_def]}

        tokens = preprocessor.Lexer("str(foo)").tokenize()
@@ -148,7 +149,7 @@ def test_variadic(self):
        tokens = preprocessor.Lexer(
            'eprintf("%d, %f, %e", a, b, c)',
        ).tokenize()
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
        p._definitions = {macro.name: macro}
        expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
        self.assertTrue(len(expanded_tokens) == len(expected_expansion))
@@ -172,7 +173,7 @@ def test_self_reference_macros_1(self):
def_string = "FOO=(4 + FOO)"
macro = preprocessor.macro_from_definition_string(def_string)
tokens = preprocessor.Lexer("FOO").tokenize()
p = platform.Platform("Test", self.rootdir)
p = Platform("Test", self.rootdir)
p._definitions = {macro.name: macro}
expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
self.assertTrue(len(expanded_tokens) == len(expected_expansion))
@@ -201,7 +202,7 @@ def test_self_reference_macros_2(self):
def_string = "FOO=FOO"
macro = preprocessor.macro_from_definition_string(def_string)
tokens = preprocessor.Lexer("FOO").tokenize()
p = platform.Platform("Test", self.rootdir)
p = Platform("Test", self.rootdir)
p._definitions = {macro.name: macro}
expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
self.assertTrue(len(expanded_tokens) == len(expected_expansion))
@@ -226,7 +227,7 @@ def test_self_reference_macros_3(self):
def_string = "foo(x)=bar x"
macro = preprocessor.macro_from_definition_string(def_string)
tokens = preprocessor.Lexer("foo(foo) (2)").tokenize()
p = platform.Platform("Test", self.rootdir)
p = Platform("Test", self.rootdir)
p._definitions = {macro.name: macro}
expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
expected_tokens = preprocessor.Lexer("bar foo (2)").tokenize()
@@ -270,7 +271,7 @@ def test_indirect_self_reference_macros(self):
        x_tokens = preprocessor.Lexer("x").tokenize()
        y_tokens = preprocessor.Lexer("y").tokenize()

-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
        p._definitions = {x_macro.name: x_macro, y_macro.name: y_macro}

        x_expanded_tokens = preprocessor.MacroExpander(p).expand(x_tokens)
5 changes: 3 additions & 2 deletions tests/operators/test_operators.py
@@ -5,7 +5,8 @@
import unittest
from pathlib import Path

-from codebasin import CodeBase, finder, platform, preprocessor
+from codebasin import CodeBase, finder, preprocessor
+from codebasin.preprocessor import Platform


class TestOperators(unittest.TestCase):
@@ -52,7 +53,7 @@ def test_operators(self):
    def test_paths(self):
        input_str = r"FUNCTION(looks/2like/a/path/with_/bad%%identifiers)"
        tokens = preprocessor.Lexer(input_str).tokenize()
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
        macro = preprocessor.macro_from_definition_string("FUNCTION(x)=#x")
        p._definitions = {macro.name: macro}
        _ = preprocessor.MacroExpander(p).expand(tokens)