Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add visit to MessageReference and Term to resolve VariableReference #12

Merged
merged 2 commits into from
Nov 26, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ repos:
- id: "check-json"

- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: v0.7.4
rev: v0.8.0
hooks:
- id: ruff
args: [ "--fix" ]
Expand Down
5 changes: 3 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,10 @@ dependencies = [

[project.optional-dependencies]
dev = [
"fluent-runtime==0.4.0",
"isort==5.13.2",
"pre-commit==4.0.1",
"ruff==0.7.4",
"ruff==0.8.0",
"mypy==1.13.0",
"typing-extensions==4.12.2",
]
Expand Down Expand Up @@ -94,7 +95,7 @@ exclude = [
select = ["ALL"]
ignore = [
"A003",
"ANN002", "ANN003", "ANN101", "ANN102", "ANN401",
"ANN002", "ANN003", "ANN401",
"C901",
"D100", "D101", "D102", "D103", "D104", "D105", "D106", "D107", "D203", "D205", "D212",
"ERA001",
Expand Down
2 changes: 1 addition & 1 deletion src/ftl_extract/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from .__version__ import __version__
from .code_extractor import extract_fluent_keys

__all__ = ("extract_fluent_keys", "__version__")
__all__ = ("__version__", "extract_fluent_keys")
15 changes: 9 additions & 6 deletions src/ftl_extract/code_extractor.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
from ftl_extract.matcher import FluentKey


def find_py_files(path: Path) -> Iterator[Path]:
def find_py_files(*, path: Path) -> Iterator[Path]:
"""
First step: find all .py files in given path.

Expand All @@ -31,6 +31,7 @@ def find_py_files(path: Path) -> Iterator[Path]:


def parse_file(
*,
path: Path,
i18n_keys: str | Iterable[str],
ignore_attributes: str | Iterable[str],
Expand Down Expand Up @@ -66,7 +67,7 @@ def parse_file(
return matcher.fluent_keys


def post_process_fluent_keys(fluent_keys: dict[str, FluentKey], default_ftl_file: Path) -> None:
def post_process_fluent_keys(*, fluent_keys: dict[str, FluentKey], default_ftl_file: Path) -> None:
"""
Third step: post-process parsed `FluentKey`.

Expand All @@ -84,6 +85,7 @@ def post_process_fluent_keys(fluent_keys: dict[str, FluentKey], default_ftl_file


def find_conflicts(
*,
current_fluent_keys: dict[str, FluentKey],
new_fluent_keys: dict[str, FluentKey],
) -> None:
Expand Down Expand Up @@ -117,6 +119,7 @@ def find_conflicts(


def extract_fluent_keys(
*,
path: Path,
i18n_keys: str | Iterable[str],
ignore_attributes: str | Iterable[str],
Expand All @@ -143,22 +146,22 @@ def extract_fluent_keys(
"""
fluent_keys: dict[str, FluentKey] = {}

for file in find_py_files(path):
for file in find_py_files(path=path):
keys = parse_file(
path=file,
i18n_keys=i18n_keys,
ignore_attributes=ignore_attributes,
ignore_kwargs=ignore_kwargs,
default_ftl_file=default_ftl_file,
)
post_process_fluent_keys(keys, default_ftl_file)
find_conflicts(fluent_keys, keys)
post_process_fluent_keys(fluent_keys=keys, default_ftl_file=default_ftl_file)
find_conflicts(current_fluent_keys=fluent_keys, new_fluent_keys=keys)
fluent_keys.update(keys)

return fluent_keys


def sort_fluent_keys_by_path(fluent_keys: dict[str, FluentKey]) -> dict[Path, list[FluentKey]]:
def sort_fluent_keys_by_path(*, fluent_keys: dict[str, FluentKey]) -> dict[Path, list[FluentKey]]:
"""
Sort `FluentKey`s by their paths.

Expand Down
8 changes: 4 additions & 4 deletions src/ftl_extract/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from typing import Final, Literal
from typing import Final


I18N_LITERAL: Final[Literal["i18n"]] = "i18n"
GET_LITERAL: Final[Literal["get"]] = "get"
PATH_LITERAL: Final[Literal["_path"]] = "_path"
I18N_LITERAL: Final = "i18n"
GET_LITERAL: Final = "get"
PATH_LITERAL: Final = "_path"
IGNORE_ATTRIBUTES: Final[frozenset[str]] = frozenset(
{"set_locale", "use_locale", "use_context", "set_context"},
)
Expand Down
16 changes: 16 additions & 0 deletions src/ftl_extract/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,3 +35,19 @@ def __init__(
f"Translation {key!r} already exists with different elements: "
f"{self.current_translation} != {self.new_translation}",
)


class FTLExtractorCantFindReferenceError(FTLExtractorError):
    """Raised when a message reference used by `key` cannot be resolved."""

    def __init__(self, key: str, key_path: Path, reference_key: str) -> None:
        # Build the message first, then record the context for programmatic access.
        message = f"Can't find reference {reference_key!r} for key {key!r} at {key_path}"
        self.key = key
        self.key_path = key_path
        self.reference_key = reference_key
        super().__init__(message)


class FTLExtractorCantFindTermError(FTLExtractorError):
    """Raised when a term referenced by `key` cannot be resolved."""

    def __init__(self, key: str, key_path: Path, term_key: str) -> None:
        # Build the message first, then record the context for programmatic access.
        message = f"Can't find term {term_key!r} for key {key!r} at {key_path}"
        self.key = key
        self.key_path = key_path
        self.term_key = term_key
        super().__init__(message)
43 changes: 33 additions & 10 deletions src/ftl_extract/ftl_extractor.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

from copy import deepcopy
from typing import TYPE_CHECKING

from click import echo
Expand All @@ -22,6 +23,7 @@


def extract(
*,
code_path: Path,
output_path: Path,
language: Iterable[str],
Expand Down Expand Up @@ -49,11 +51,17 @@ def extract(
)

for lang in language:
# Import fluent keys from existing FTL files
stored_fluent_keys, leave_as_is = import_ftl_from_dir(output_path, lang)
# Import fluent keys and terms from existing FTL files
stored_fluent_keys, stored_terms, leave_as_is = import_ftl_from_dir(
path=output_path,
locale=lang,
)
for fluent_key in stored_fluent_keys.values():
fluent_key.path = fluent_key.path.relative_to(output_path / lang)

for term in stored_terms.values():
term.path = term.path.relative_to(output_path / lang)

keys_to_comment: dict[str, FluentKey] = {}
keys_to_add: dict[str, FluentKey] = {}

Expand All @@ -72,12 +80,24 @@ def extract(
stored_fluent_keys[key].code_path = fluent_key.code_path

# Second step: find keys that have different kwargs
# Make copy of in_code_fluent_keys and stored_fluent_keys to check references
in_code_fluent_keys_copy = deepcopy(in_code_fluent_keys)
stored_fluent_keys_copy = deepcopy(stored_fluent_keys)

for key, fluent_key in in_code_fluent_keys.items():
if key not in stored_fluent_keys:
continue

fluent_key_placeable_set = extract_kwargs(fluent_key)
stored_fluent_key_placeable_set = extract_kwargs(stored_fluent_keys[key])
fluent_key_placeable_set = extract_kwargs(
key=fluent_key,
terms=stored_terms,
all_fluent_keys=in_code_fluent_keys_copy,
)
stored_fluent_key_placeable_set = extract_kwargs(
key=stored_fluent_keys[key],
terms=stored_terms,
all_fluent_keys=stored_fluent_keys_copy,
)

if fluent_key_placeable_set != stored_fluent_key_placeable_set:
keys_to_comment[key] = stored_fluent_keys.pop(key)
Expand All @@ -88,20 +108,23 @@ def extract(
keys_to_comment[key] = stored_fluent_keys.pop(key)

for fluent_key in keys_to_comment.values():
comment_ftl_key(fluent_key, serializer)
comment_ftl_key(key=fluent_key, serializer=serializer)

# Comment Junk elements if needed
if comment_junks is True:
for fluent_key in leave_as_is:
if isinstance(fluent_key.translation, fl_ast.Junk):
comment_ftl_key(fluent_key, serializer)
comment_ftl_key(key=fluent_key, serializer=serializer)

sorted_fluent_keys = sort_fluent_keys_by_path(fluent_keys=stored_fluent_keys)

sorted_fluent_keys = sort_fluent_keys_by_path(stored_fluent_keys)
for path, keys in sort_fluent_keys_by_path(fluent_keys=keys_to_add).items():
sorted_fluent_keys.setdefault(path, []).extend(keys)

for path, keys in sort_fluent_keys_by_path(keys_to_add).items():
for path, keys in sort_fluent_keys_by_path(fluent_keys=keys_to_comment).items():
sorted_fluent_keys.setdefault(path, []).extend(keys)

for path, keys in sort_fluent_keys_by_path(keys_to_comment).items():
for path, keys in sort_fluent_keys_by_path(fluent_keys=stored_terms).items():
sorted_fluent_keys.setdefault(path, []).extend(keys)

leave_as_is_with_path: dict[Path, list[FluentKey]] = {}
Expand All @@ -114,7 +137,7 @@ def extract(

for path, keys in sorted_fluent_keys.items():
ftl, _ = generate_ftl(
keys,
fluent_keys=keys,
serializer=serializer,
leave_as_is=leave_as_is_with_path.get(path, []),
)
Expand Down
30 changes: 23 additions & 7 deletions src/ftl_extract/ftl_importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,13 @@


def import_from_ftl(
*,
path: Path,
locale: str,
) -> tuple[dict[str, FluentKey], Resource, list[FluentKey]]:
) -> tuple[dict[str, FluentKey], dict[str, FluentKey], Resource, list[FluentKey]]:
"""Import `FluentKey`s from FTL."""
ftl_keys = {}
ftl_keys: dict[str, FluentKey] = {}
terms: dict[str, FluentKey] = {}
leave_as_is = []

resource = parse(path.read_text(encoding="utf-8"), with_spans=True)
Expand All @@ -31,6 +33,15 @@ def import_from_ftl(
locale=locale,
position=position,
)
elif isinstance(entry, ast.Term):
terms[entry.id.name] = FluentKey(
code_path=Path(),
key=entry.id.name,
translation=entry,
path=path,
locale=locale,
position=position,
)
else:
leave_as_is.append(
FluentKey(
Expand All @@ -43,18 +54,23 @@ def import_from_ftl(
),
)

return ftl_keys, resource, leave_as_is
return ftl_keys, terms, resource, leave_as_is


def import_ftl_from_dir(
    *,
    path: Path,
    locale: str,
) -> tuple[dict[str, FluentKey], dict[str, FluentKey], list[FluentKey]]:
    """
    Import `FluentKey`s and terms from a directory of FTL files.

    :param path: Directory containing a `<locale>` subdirectory of `.ftl` files,
        or a single FTL file.
    :param locale: Locale the imported keys belong to.
    :return: Tuple of (messages, terms, entries to leave as-is, e.g. comments/junk).
    """
    ftl_files = (path / locale).rglob("*.ftl") if path.is_dir() else [path]
    ftl_keys: dict[str, FluentKey] = {}
    terms: dict[str, FluentKey] = {}
    leave_as_is: list[FluentKey] = []

    for ftl_file in ftl_files:
        keys, file_terms, _, as_is_keys = import_from_ftl(path=ftl_file, locale=locale)
        ftl_keys.update(keys)
        # Accumulate terms across all files; rebinding `terms` here would silently
        # discard terms imported from every file except the last one.
        terms.update(file_terms)
        leave_as_is.extend(as_is_keys)

    return ftl_keys, terms, leave_as_is
25 changes: 25 additions & 0 deletions src/ftl_extract/matcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
FTLExtractorDifferentPathsError,
FTLExtractorDifferentTranslationError,
)
from ftl_extract.utils import to_json_no_span

if TYPE_CHECKING:
from collections.abc import Iterable
Expand Down Expand Up @@ -48,6 +49,30 @@ class FluentKey:
locale: str | None = field(default=None)
position: int | float = field(default=inf)

def __repr__(self) -> str:
    """Compact single-line representation; AST is dumped without span info."""
    fields = (
        f"code_path={self.code_path}",
        f"key={self.key}",
        f"path={self.path}",
        f"locale={self.locale}",
        f"position={self.position}",
        f"translation={self.translation.to_json(fn=to_json_no_span)}",
    )
    return "FluentKey({})".format(",".join(fields))

def __str__(self) -> str:
    """Multi-line, tab-indented representation; AST is dumped without span info."""
    fields = (
        f"code_path={self.code_path}",
        f"key={self.key}",
        f"path={self.path}",
        f"locale={self.locale}",
        f"position={self.position}",
        f"translation={self.translation.to_json(fn=to_json_no_span)}",
    )
    return "FluentKey(\n\t{}\n)".format(",\n\t".join(fields))


class I18nMatcher(ast.NodeVisitor):
def __init__(
Expand Down
2 changes: 1 addition & 1 deletion src/ftl_extract/process/commentator.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,6 @@
from ftl_extract.matcher import FluentKey


def comment_ftl_key(key: FluentKey, serializer: FluentSerializer) -> None:
def comment_ftl_key(*, key: FluentKey, serializer: FluentSerializer) -> None:
    """Replace `key`'s translation in place with a comment holding its serialized form."""
    key.translation = ast.Comment(content=serializer.serialize_entry(key.translation).strip())
Loading