java-functional-lsp 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- java_functional_lsp/__init__.py +3 -0
- java_functional_lsp/analyzers/__init__.py +1 -0
- java_functional_lsp/analyzers/base.py +112 -0
- java_functional_lsp/analyzers/exception_checker.py +63 -0
- java_functional_lsp/analyzers/mutation_checker.py +152 -0
- java_functional_lsp/analyzers/null_checker.py +70 -0
- java_functional_lsp/analyzers/spring_checker.py +84 -0
- java_functional_lsp/server.py +145 -0
- java_functional_lsp-0.1.0.dist-info/METADATA +127 -0
- java_functional_lsp-0.1.0.dist-info/RECORD +13 -0
- java_functional_lsp-0.1.0.dist-info/WHEEL +4 -0
- java_functional_lsp-0.1.0.dist-info/entry_points.txt +2 -0
- java_functional_lsp-0.1.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""DeeperDive Java code quality analyzers using tree-sitter."""
|
|
@@ -0,0 +1,112 @@
|
|
|
1
|
+
"""Base analyzer class and diagnostic types."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Generator
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from enum import IntEnum
|
|
8
|
+
from typing import Any, Protocol
|
|
9
|
+
|
|
10
|
+
import tree_sitter_java as tsjava
|
|
11
|
+
from tree_sitter import Language, Node, Parser
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class Severity(IntEnum):
    """Diagnostic severity levels for lint findings.

    Values mirror the four LSP severity levels; the actual conversion to
    ``lsp.DiagnosticSeverity`` happens in the server's ``_SEVERITY_MAP``.
    IntEnum so severities can be compared/sorted numerically (lower = more severe).
    """

    ERROR = 1
    WARNING = 2
    INFO = 3
    HINT = 4
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@dataclass(frozen=True)
class Diagnostic:
    """A single immutable lint finding.

    Positions are 0-based and taken directly from tree-sitter node
    ``start_point``/``end_point`` pairs, so they map one-to-one onto LSP
    ``Position`` values. Frozen so instances are hashable and safe to share.
    """

    line: int  # 0-based
    col: int
    end_line: int
    end_col: int
    severity: Severity
    code: str  # rule ID
    message: str
    # Shown by editors as the diagnostic's origin; same for every rule.
    source: str = "deeperdive-java-linter"
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class Analyzer(Protocol):
    """Protocol for all analyzers.

    Structural typing: any object with a matching ``analyze`` method
    qualifies; the concrete checkers do not subclass this.
    """

    def analyze(self, tree: Any, source: bytes, config: dict[str, Any]) -> list[Diagnostic]:
        """Analyze a parsed tree and return diagnostics.

        Args:
            tree: a tree-sitter parse tree (``tree.root_node`` is walked).
            source: the raw UTF-8 source the tree was parsed from.
            config: linter configuration; ``config["rules"]`` maps rule IDs
                to severity strings (see ``severity_from_config``).
        """
        ...
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
# Lazily-initialized module-level singletons shared by get_parser()/get_language().
# NOTE(review): initialization is not lock-guarded; assumed single-threaded use — confirm.
_parser: Parser | None = None
_language: Language | None = None
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def get_parser() -> Parser:
    """Return the process-wide tree-sitter Java parser, creating it on first use.

    Also initializes the module-level ``_language`` singleton as a side effect,
    which ``get_language`` relies on.
    """
    global _parser, _language
    if _parser is not None:
        return _parser
    _language = Language(tsjava.language())
    _parser = Parser(_language)
    return _parser
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def get_language() -> Language:
    """Return the cached Java ``Language`` object used for tree-sitter queries.

    Delegates creation to ``get_parser``, which populates the module-level
    ``_language`` singleton on first call.
    """
    if _language is None:
        get_parser()
    assert _language is not None
    return _language
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def find_nodes(node: Node, type_name: str) -> Generator[Node, None, None]:
    """Yield *node* and every descendant whose ``type`` equals *type_name*.

    Traversal is pre-order (a node before its subtree, children left to
    right), implemented iteratively with an explicit stack.
    """
    pending = [node]
    while pending:
        current = pending.pop()
        if current.type == type_name:
            yield current
        # Push children reversed so the leftmost child is popped first.
        pending.extend(reversed(current.children))
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def find_nodes_multi(node: Node, type_names: set[str]) -> Generator[Node, None, None]:
    """Yield *node* and every descendant whose ``type`` is in *type_names*.

    Same pre-order traversal as ``find_nodes``, but matches against a set of
    node types in one pass instead of one type.
    """
    pending = [node]
    while pending:
        current = pending.pop()
        if current.type in type_names:
            yield current
        # Reversed push keeps children in left-to-right (pre-order) order.
        pending.extend(reversed(current.children))
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def find_ancestor(node: Node, type_name: str) -> Node | None:
    """Return the nearest enclosing node whose ``type`` equals *type_name*.

    Starts at ``node.parent`` (the node itself is never returned) and walks
    toward the root; returns ``None`` when no ancestor matches.
    """
    current = node.parent
    while current is not None:
        if current.type == type_name:
            return current
        current = current.parent
    return None
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def has_ancestor(node: Node, type_names: set[str]) -> bool:
    """Return True when some strict ancestor of *node* has a type in *type_names*.

    The node itself is not considered; the walk starts at ``node.parent``.
    """
    current = node.parent
    while current is not None:
        if current.type in type_names:
            return True
        current = current.parent
    return False
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def severity_from_config(config: dict[str, Any], rule_id: str, default: Severity = Severity.WARNING) -> Severity | None:
    """Resolve the configured severity for *rule_id*.

    Returns ``None`` when the rule is set to ``"off"`` (i.e. disabled) and
    *default* when the rule is absent or maps to an unknown level string.
    """
    level = config.get("rules", {}).get(rule_id)
    if level is None:
        return default
    if level == "off":
        return None
    level_to_severity = {
        "error": Severity.ERROR,
        "warning": Severity.WARNING,
        "info": Severity.INFO,
        "hint": Severity.HINT,
    }
    return level_to_severity.get(level, default)
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"""Exception handling rules: detect throw statements and catch-rethrow patterns."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from .base import Diagnostic, find_nodes, severity_from_config
|
|
8
|
+
|
|
9
|
+
# Rule ID -> user-facing diagnostic message for the exception rules.
_MESSAGES = {
    "throw-statement": ("Avoid throwing exceptions. Use Either.left(error) or Try.of(() -> ...).toEither()."),
    "catch-rethrow": (
        "Avoid catching and rethrowing. Use Try.of(() -> ...).toEither() to convert exceptions to values."
    ),
}
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class ExceptionChecker:
    """Detects throw statements and catch-rethrow anti-patterns.

    Two rules:
      * ``throw-statement`` — any ``throw`` in the file.
      * ``catch-rethrow`` — a catch clause whose body is exactly one throw.
    Diagnostic construction is shared via ``_diag`` so the two rules cannot
    drift apart.
    """

    def analyze(self, tree: Any, source: bytes, config: dict[str, Any]) -> list[Diagnostic]:
        """Run both exception rules over *tree* and return their diagnostics."""
        diagnostics: list[Diagnostic] = []

        # Rule: throw-statement
        severity = severity_from_config(config, "throw-statement")
        if severity is not None:
            for node in find_nodes(tree.root_node, "throw_statement"):
                diagnostics.append(self._diag(node, severity, "throw-statement"))

        # Rule: catch-rethrow
        severity = severity_from_config(config, "catch-rethrow")
        if severity is not None:
            for node in find_nodes(tree.root_node, "catch_clause"):
                if self._is_rethrow_only(node):
                    diagnostics.append(self._diag(node, severity, "catch-rethrow"))

        return diagnostics

    @staticmethod
    def _is_rethrow_only(catch_node: Any) -> bool:
        """True when the catch block's body is exactly one throw statement."""
        body = catch_node.child_by_field_name("body")
        if body is None:
            return False
        # Ignore braces and comments; only real statements count.
        statements = [
            c for c in body.children if c.type not in ("{", "}", "comment", "line_comment", "block_comment")
        ]
        return len(statements) == 1 and statements[0].type == "throw_statement"

    @staticmethod
    def _diag(node: Any, severity: Severity, rule_id: str) -> Diagnostic:
        """Build a Diagnostic spanning *node* for *rule_id*."""
        return Diagnostic(
            line=node.start_point[0],
            col=node.start_point[1],
            end_line=node.end_point[0],
            end_col=node.end_point[1],
            severity=severity,
            code=rule_id,
            message=_MESSAGES[rule_id],
        )
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
"""Mutation and imperative pattern rules: detect mutable variables, loops, and imperative unwrapping."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from .base import Diagnostic, find_nodes, find_nodes_multi, has_ancestor, severity_from_config
|
|
8
|
+
|
|
9
|
+
# Rule ID -> user-facing diagnostic message for the mutation rules.
_MESSAGES = {
    "mutable-variable": "Avoid reassigning variables. Use final + functional transforms (map, flatMap, fold).",
    "imperative-loop": "Replace imperative loop with .map(), .filter(), .flatMap(), or .foldLeft().",
    "mutable-dto": "Use @Value instead of @Data/@Setter for immutable DTOs.",
    "imperative-option-unwrap": "Avoid imperative unwrapping (isDefined/get). Use map(), flatMap(), or fold().",
}

# tree-sitter node types treated as imperative loops.
# NOTE(review): do_statement is not in this set — confirm do/while loops are intentionally exempt.
_LOOP_TYPES = {"enhanced_for_statement", "for_statement", "while_statement"}
# Scopes inside which an assignment_expression counts as a local mutation.
_METHOD_TYPES = {"method_declaration", "constructor_declaration", "lambda_expression"}
# Presence-check method names, as bytes to match tree-sitter node text.
_CHECK_METHODS = {b"isDefined", b"isEmpty", b"isPresent", b"isNone"}
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
class MutationChecker:
    """Detects mutable variables, imperative loops, and imperative unwrapping patterns.

    Runs four independent sub-checks; each appends to the shared diagnostics
    list and is individually disableable via its rule ID in the config.
    """

    def analyze(self, tree: Any, source: bytes, config: dict[str, Any]) -> list[Diagnostic]:
        """Run all mutation-related checks over *tree* and return their findings."""
        diagnostics: list[Diagnostic] = []

        self._check_mutable_dto(tree, diagnostics, config)
        self._check_imperative_loops(tree, diagnostics, config)
        self._check_imperative_option_unwrap(tree, diagnostics, config)
        self._check_mutable_variables(tree, diagnostics, config)

        return diagnostics

    def _check_mutable_dto(self, tree: Any, diagnostics: list[Diagnostic], config: dict[str, Any]) -> None:
        """Detect @Data or @Setter annotations on classes."""
        severity = severity_from_config(config, "mutable-dto")
        if severity is None:
            return

        # NOTE(review): only marker_annotation is matched, so annotations with
        # arguments (tree-sitter `annotation` nodes) are not flagged — confirm intended.
        for node in find_nodes(tree.root_node, "marker_annotation"):
            name_node = node.child_by_field_name("name")
            if name_node is None:
                continue
            ann_text = name_node.text
            if ann_text in (b"Data", b"Setter"):
                # Verify it's on a class declaration (annotation -> modifiers -> class).
                if node.parent and node.parent.type == "modifiers":
                    grandparent = node.parent.parent
                    if grandparent and grandparent.type == "class_declaration":
                        diagnostics.append(
                            Diagnostic(
                                line=name_node.start_point[0],
                                col=name_node.start_point[1],
                                end_line=name_node.end_point[0],
                                end_col=name_node.end_point[1],
                                severity=severity,
                                code="mutable-dto",
                                message=_MESSAGES["mutable-dto"],
                            )
                        )

    def _check_imperative_loops(self, tree: Any, diagnostics: list[Diagnostic], config: dict[str, Any]) -> None:
        """Detect for/while loops that could be functional operations."""
        severity = severity_from_config(config, "imperative-loop")
        if severity is None:
            return

        for node in find_nodes_multi(tree.root_node, _LOOP_TYPES):
            # Skip loops inside main methods: walk up the ancestors and `break`
            # if any enclosing method is named `main`. The `while/else` body runs
            # only when the walk finished WITHOUT breaking (loop not in main).
            parent = node.parent
            while parent:
                if parent.type == "method_declaration":
                    method_name_node = parent.child_by_field_name("name")
                    if method_name_node and method_name_node.text == b"main":
                        break
                parent = parent.parent
            else:
                # Highlight just the keyword (for/while)
                keyword = node.type.split("_")[0]  # "for" or "while" or "enhanced"
                if keyword == "enhanced":
                    keyword = "for"
                diagnostics.append(
                    Diagnostic(
                        line=node.start_point[0],
                        col=node.start_point[1],
                        # End on the same line, covering only the keyword text.
                        end_line=node.start_point[0],
                        end_col=node.start_point[1] + len(keyword),
                        severity=severity,
                        code="imperative-loop",
                        message=_MESSAGES["imperative-loop"],
                    )
                )

    def _check_imperative_option_unwrap(self, tree: Any, diagnostics: list[Diagnostic], config: dict[str, Any]) -> None:
        """Detect if(opt.isDefined()) { opt.get() } patterns."""
        severity = severity_from_config(config, "imperative-option-unwrap")
        if severity is None:
            return

        for if_node in find_nodes(tree.root_node, "if_statement"):
            condition = if_node.child_by_field_name("condition")
            if condition is None:
                continue

            # Look for method_invocation in condition
            for invocation in find_nodes(condition, "method_invocation"):
                name_node = invocation.child_by_field_name("name")
                obj_node = invocation.child_by_field_name("object")
                if name_node is None or obj_node is None:
                    continue
                if name_node.text not in _CHECK_METHODS:
                    continue

                # Check if the body contains .get() on the same object.
                # This is a raw byte-substring match over the whole if statement,
                # so it can also match inside nested strings/comments — accepted
                # as a heuristic.
                obj_name = obj_node.text
                body_text = if_node.text
                if obj_name is not None and body_text is not None and obj_name + b".get()" in body_text:
                    diagnostics.append(
                        Diagnostic(
                            line=if_node.start_point[0],
                            col=if_node.start_point[1],
                            end_line=if_node.end_point[0],
                            end_col=if_node.end_point[1],
                            severity=severity,
                            code="imperative-option-unwrap",
                            message=_MESSAGES["imperative-option-unwrap"],
                        )
                    )
                break  # Only check first invocation in condition

    def _check_mutable_variables(self, tree: Any, diagnostics: list[Diagnostic], config: dict[str, Any]) -> None:
        """Detect local variables that are reassigned (non-final, mutated)."""
        severity = severity_from_config(config, "mutable-variable")
        if severity is None:
            return

        for node in find_nodes(tree.root_node, "assignment_expression"):
            # Only flag reassignments inside method bodies
            # (methods, constructors, or lambdas — see _METHOD_TYPES).
            if not has_ancestor(node, _METHOD_TYPES):
                continue

            diagnostics.append(
                Diagnostic(
                    line=node.start_point[0],
                    col=node.start_point[1],
                    end_line=node.end_point[0],
                    end_col=node.end_point[1],
                    severity=severity,
                    code="mutable-variable",
                    message=_MESSAGES["mutable-variable"],
                )
            )
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
"""Null safety rules: detect null literals in arguments, returns, and assignments."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from .base import Diagnostic, find_nodes, severity_from_config
|
|
8
|
+
|
|
9
|
+
# Rule ID -> user-facing diagnostic message for the null-safety rules.
# Keys must stay in sync with the rule IDs produced by NullChecker._classify_null.
_MESSAGES = {
    "null-literal-arg": "Avoid passing null as argument. Use Option.none(), a default value, or overload the method.",
    "null-return": "Avoid returning null. Use Option.of(), Option.none(), or Either<Error, T>.",
    "null-assignment": "Avoid assigning null to local variables. Use Option<T> to represent absence.",
    "null-field-assignment": "Avoid null field initializers. Use Option<T> with Option.none() for optional fields.",
}
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class NullChecker:
    """Flags ``null`` literals used as arguments, return values, or initializers.

    Every ``null_literal`` node in the tree is classified by its syntactic
    context into one of four rule IDs; unclassified occurrences (e.g. in a
    comparison like ``x == null``) are ignored.
    """

    def analyze(self, tree: Any, source: bytes, config: dict[str, Any]) -> list[Diagnostic]:
        """Walk every null literal in *tree* and emit one diagnostic per classified use."""
        found: list[Diagnostic] = []

        for literal in find_nodes(tree.root_node, "null_literal"):
            enclosing = literal.parent
            if enclosing is None:
                continue

            rule = self._classify_null(literal, enclosing)
            if rule is None:
                continue

            level = severity_from_config(config, rule)
            if level is None:
                continue

            found.append(
                Diagnostic(
                    line=literal.start_point[0],
                    col=literal.start_point[1],
                    end_line=literal.end_point[0],
                    end_col=literal.end_point[1],
                    severity=level,
                    code=rule,
                    message=_MESSAGES[rule],
                )
            )

        return found

    def _classify_null(self, node: Any, parent: Any) -> str | None:
        """Map a null literal's syntactic context to a rule ID (None = not flagged)."""
        kind = parent.type

        # null passed in a call's argument list.
        if kind == "argument_list":
            return "null-literal-arg"

        # `return null;`
        if kind == "return_statement":
            return "null-return"

        # Anything else only matters when it is an initializer.
        if kind != "variable_declarator":
            return None

        enclosing = parent.parent
        if enclosing is None:
            return None
        if enclosing.type == "local_variable_declaration":
            return "null-assignment"
        if enclosing.type == "field_declaration":
            return "null-field-assignment"
        return None
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"""Spring configuration rules: detect field injection and component annotations."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from .base import Diagnostic, find_nodes, severity_from_config
|
|
8
|
+
|
|
9
|
+
# Rule ID -> user-facing diagnostic message for the Spring rules.
_MESSAGES = {
    "field-injection": "Avoid @Autowired field injection. Use constructor injection with @Value (Lombok) classes.",
    "component-annotation": "Avoid @Component/@Service/@Repository. Use @Configuration + @Bean instead.",
}

# Stereotype annotations flagged by the component-annotation rule
# (as bytes, matching tree-sitter node text).
_BAD_ANNOTATIONS = {b"Component", b"Service", b"Repository"}
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class SpringChecker:
    """Detects Spring anti-patterns: field injection and component scanning annotations.

    Both rules look for a marker annotation attached — via a ``modifiers``
    node — to a particular declaration kind, so they share one helper
    instead of duplicating the traversal and diagnostic construction.
    """

    def analyze(self, tree: Any, source: bytes, config: dict[str, Any]) -> list[Diagnostic]:
        """Run both Spring checks over *tree* and return their combined diagnostics."""
        diagnostics: list[Diagnostic] = []

        self._check_field_injection(tree, diagnostics, config)
        self._check_component_annotation(tree, diagnostics, config)

        return diagnostics

    def _check_field_injection(self, tree: Any, diagnostics: list[Diagnostic], config: dict[str, Any]) -> None:
        """Detect @Autowired on field declarations."""
        self._check_marker_annotations(
            tree,
            diagnostics,
            config,
            rule_id="field-injection",
            names={b"Autowired"},
            declaration_type="field_declaration",
        )

    def _check_component_annotation(self, tree: Any, diagnostics: list[Diagnostic], config: dict[str, Any]) -> None:
        """Detect @Component, @Service, @Repository on classes."""
        self._check_marker_annotations(
            tree,
            diagnostics,
            config,
            rule_id="component-annotation",
            names=_BAD_ANNOTATIONS,
            declaration_type="class_declaration",
        )

    def _check_marker_annotations(
        self,
        tree: Any,
        diagnostics: list[Diagnostic],
        config: dict[str, Any],
        *,
        rule_id: str,
        names: set[bytes],
        declaration_type: str,
    ) -> None:
        """Flag marker annotations in *names* attached to a *declaration_type* node.

        The tree shape checked is annotation -> modifiers -> declaration, so
        the annotation's grandparent identifies what it decorates. Does
        nothing when the rule is disabled in *config*.
        """
        severity = severity_from_config(config, rule_id)
        if severity is None:
            return

        for node in find_nodes(tree.root_node, "marker_annotation"):
            name_node = node.child_by_field_name("name")
            if name_node is None or name_node.text not in names:
                continue
            modifiers = node.parent
            if (
                modifiers
                and modifiers.type == "modifiers"
                and modifiers.parent
                and modifiers.parent.type == declaration_type
            ):
                diagnostics.append(
                    Diagnostic(
                        line=name_node.start_point[0],
                        col=name_node.start_point[1],
                        end_line=name_node.end_point[0],
                        end_col=name_node.end_point[1],
                        severity=severity,
                        code=rule_id,
                        message=_MESSAGES[rule_id],
                    )
                )
|
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
"""Main LSP server for java-functional-lsp.
|
|
2
|
+
|
|
3
|
+
Provides custom Java diagnostics via tree-sitter analysis.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Any
|
|
12
|
+
from urllib.parse import unquote, urlparse
|
|
13
|
+
|
|
14
|
+
from lsprotocol import types as lsp
|
|
15
|
+
from pygls.lsp.server import LanguageServer
|
|
16
|
+
|
|
17
|
+
from .analyzers.base import Analyzer, Severity, get_parser
|
|
18
|
+
from .analyzers.base import Diagnostic as LintDiagnostic
|
|
19
|
+
from .analyzers.exception_checker import ExceptionChecker
|
|
20
|
+
from .analyzers.mutation_checker import MutationChecker
|
|
21
|
+
from .analyzers.null_checker import NullChecker
|
|
22
|
+
from .analyzers.spring_checker import SpringChecker
|
|
23
|
+
|
|
24
|
+
logger = logging.getLogger(__name__)

# Internal Severity -> LSP DiagnosticSeverity; used by _to_lsp_diagnostic.
_SEVERITY_MAP = {
    Severity.ERROR: lsp.DiagnosticSeverity.Error,
    Severity.WARNING: lsp.DiagnosticSeverity.Warning,
    Severity.INFO: lsp.DiagnosticSeverity.Information,
    Severity.HINT: lsp.DiagnosticSeverity.Hint,
}

# Every analyzer runs on every document; list order determines the order
# diagnostics from different checkers appear in.
_ANALYZERS: list[Analyzer] = [NullChecker(), ExceptionChecker(), MutationChecker(), SpringChecker()]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class JavaFunctionalLspServer(LanguageServer):
    """pygls LanguageServer holding the shared tree-sitter parser and rule config."""

    def __init__(self) -> None:
        super().__init__("java-functional-lsp", "0.1.0")
        # One parser reused for every document (get_parser returns a singleton).
        self._parser = get_parser()
        # Rule configuration; populated from .deeperdive-linter.json in on_initialized.
        self._config: dict[str, Any] = {}


# Module-level singleton; the @server.feature handlers below register against it.
server = JavaFunctionalLspServer()
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _uri_to_path(uri: str) -> str:
|
|
47
|
+
"""Convert a file:// URI to a filesystem path."""
|
|
48
|
+
parsed = urlparse(uri)
|
|
49
|
+
return unquote(parsed.path)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def _load_config(workspace_root: str | None) -> dict[str, Any]:
|
|
53
|
+
"""Load .deeperdive-linter.json from workspace root if it exists."""
|
|
54
|
+
if not workspace_root:
|
|
55
|
+
return {}
|
|
56
|
+
config_path = Path(workspace_root) / ".deeperdive-linter.json"
|
|
57
|
+
if config_path.exists():
|
|
58
|
+
try:
|
|
59
|
+
result: dict[str, Any] = json.loads(config_path.read_text())
|
|
60
|
+
return result
|
|
61
|
+
except (json.JSONDecodeError, OSError) as e:
|
|
62
|
+
logger.warning("Failed to load config from %s: %s", config_path, e)
|
|
63
|
+
return {}
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _to_lsp_diagnostic(diag: LintDiagnostic) -> lsp.Diagnostic:
    """Convert an internal diagnostic to an LSP diagnostic.

    Positions pass through unchanged: the internal Diagnostic uses 0-based
    tree-sitter points, which match LSP's 0-based line/character offsets.
    """
    return lsp.Diagnostic(
        range=lsp.Range(
            start=lsp.Position(line=diag.line, character=diag.col),
            end=lsp.Position(line=diag.end_line, character=diag.end_col),
        ),
        # Unknown severities degrade to Warning instead of raising.
        severity=_SEVERITY_MAP.get(diag.severity, lsp.DiagnosticSeverity.Warning),
        code=diag.code,
        source=diag.source,
        message=diag.message,
    )
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def _analyze_document(source_text: str) -> list[lsp.Diagnostic]:
    """Parse *source_text* as Java and run every registered analyzer.

    A failing analyzer is logged and skipped so one broken rule cannot
    suppress diagnostics from the others.
    """
    source_bytes = source_text.encode("utf-8")
    tree = server._parser.parse(source_bytes)
    config = server._config

    all_diagnostics: list[LintDiagnostic] = []
    for analyzer in _ANALYZERS:
        try:
            all_diagnostics.extend(analyzer.analyze(tree, source_bytes, config))
        except Exception:
            # logger.exception records the full traceback; the previous
            # logger.error("... %s", e) dropped it, hiding where the rule broke.
            logger.exception("Analyzer %s failed", type(analyzer).__name__)

    return [_to_lsp_diagnostic(d) for d in all_diagnostics]
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _publish_diagnostics(uri: str) -> None:
    """Analyze a document and publish diagnostics.

    Always publishes, even an empty list, so previously reported
    diagnostics for the document get replaced rather than lingering.
    """
    doc = server.workspace.get_text_document(uri)
    diagnostics = _analyze_document(doc.source)
    server.text_document_publish_diagnostics(lsp.PublishDiagnosticsParams(uri=uri, diagnostics=diagnostics))
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
@server.feature(lsp.INITIALIZED)
def on_initialized(params: lsp.InitializedParams) -> None:
    """Load config after initialization.

    Prefers root_uri; falls back to root_path for clients that only send
    the latter. With neither, the config stays empty and all rules use
    their default severities.
    """
    root = None
    if server.workspace.root_uri:
        root = _uri_to_path(server.workspace.root_uri)
    elif server.workspace.root_path:
        root = server.workspace.root_path
    server._config = _load_config(root)
    logger.info(
        "java-functional-lsp initialized (workspace: %s, rules: %s)",
        root,
        list(server._config.get("rules", {}).keys()) or "all defaults",
    )
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
@server.feature(lsp.TEXT_DOCUMENT_DID_OPEN)
def on_did_open(params: lsp.DidOpenTextDocumentParams) -> None:
    """Analyze document when opened."""
    _publish_diagnostics(params.text_document.uri)
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
@server.feature(lsp.TEXT_DOCUMENT_DID_CHANGE)
def on_did_change(params: lsp.DidChangeTextDocumentParams) -> None:
    """Re-analyze document on change.

    Runs a full re-parse per change; no incremental parsing or debouncing here.
    """
    _publish_diagnostics(params.text_document.uri)
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
@server.feature(lsp.TEXT_DOCUMENT_DID_SAVE)
def on_did_save(params: lsp.DidSaveTextDocumentParams) -> None:
    """Re-analyze document on save."""
    _publish_diagnostics(params.text_document.uri)
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def main() -> None:
    """Entry point for the LSP server.

    Serves LSP over stdio (start_io); logging is configured before starting
    so it goes to stderr rather than polluting the protocol stream on stdout.
    """
    logging.basicConfig(level=logging.INFO, format="%(name)s %(levelname)s: %(message)s")
    server.start_io()
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
# Allow running this module directly, in addition to the installed entry point.
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: java-functional-lsp
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Java LSP server enforcing functional programming best practices — null safety, immutability, no exceptions
|
|
5
|
+
Project-URL: Homepage, https://github.com/aviadshiber/java-functional-lsp
|
|
6
|
+
Project-URL: Repository, https://github.com/aviadshiber/java-functional-lsp
|
|
7
|
+
Project-URL: Bug Tracker, https://github.com/aviadshiber/java-functional-lsp/issues
|
|
8
|
+
Project-URL: Changelog, https://github.com/aviadshiber/java-functional-lsp/releases
|
|
9
|
+
Author: Aviad S.
|
|
10
|
+
License: MIT
|
|
11
|
+
License-File: LICENSE
|
|
12
|
+
Keywords: functional-programming,java,linter,lombok,lsp,tree-sitter,vavr
|
|
13
|
+
Classifier: Development Status :: 3 - Alpha
|
|
14
|
+
Classifier: Environment :: Console
|
|
15
|
+
Classifier: Intended Audience :: Developers
|
|
16
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
17
|
+
Classifier: Programming Language :: Python :: 3
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
22
|
+
Classifier: Topic :: Software Development :: Quality Assurance
|
|
23
|
+
Requires-Python: >=3.10
|
|
24
|
+
Requires-Dist: pygls>=1.3.0
|
|
25
|
+
Requires-Dist: tree-sitter-java>=0.23.0
|
|
26
|
+
Requires-Dist: tree-sitter>=0.23.0
|
|
27
|
+
Description-Content-Type: text/markdown
|
|
28
|
+
|
|
29
|
+
# java-functional-lsp
|
|
30
|
+
|
|
31
|
+
[](https://github.com/aviadshiber/java-functional-lsp/actions/workflows/test.yml)
|
|
32
|
+
[](https://pypi.org/project/java-functional-lsp/)
|
|
33
|
+
[](https://pypi.org/project/java-functional-lsp/)
|
|
34
|
+
[](https://opensource.org/licenses/MIT)
|
|
35
|
+
|
|
36
|
+
A Java Language Server that enforces functional programming best practices. Designed for teams using **Vavr**, **Lombok**, and **Spring** with a functional-first approach.
|
|
37
|
+
|
|
38
|
+
## What it checks
|
|
39
|
+
|
|
40
|
+
| Rule | Detects | Suggests |
|
|
41
|
+
|------|---------|----------|
|
|
42
|
+
| `null-literal-arg` | `null` passed as method argument | `Option.none()` or default value |
|
|
43
|
+
| `null-return` | `return null` | `Option.of()`, `Option.none()`, or `Either` |
|
|
44
|
+
| `null-assignment` | `Type x = null` | `Option<Type>` |
|
|
45
|
+
| `null-field-assignment` | Field initialized to `null` | `Option<T>` with `Option.none()` |
|
|
46
|
+
| `throw-statement` | `throw new XxxException(...)` | `Either.left()` or `Try.of()` |
|
|
47
|
+
| `catch-rethrow` | catch block that wraps + rethrows | `Try.of().toEither()` |
|
|
48
|
+
| `mutable-variable` | Local variable reassignment | Final variables + functional transforms |
|
|
49
|
+
| `imperative-loop` | `for`/`while` loops | `.map()`/`.filter()`/`.flatMap()`/`.foldLeft()` |
|
|
50
|
+
| `mutable-dto` | `@Data` or `@Setter` on class | `@Value` (immutable) |
|
|
51
|
+
| `imperative-option-unwrap` | `if (opt.isDefined()) { opt.get() }` | `map()`/`flatMap()`/`fold()` |
|
|
52
|
+
| `field-injection` | `@Autowired` on field | Constructor injection |
|
|
53
|
+
| `component-annotation` | `@Component`/`@Service`/`@Repository` | `@Configuration` + `@Bean` |
|
|
54
|
+
|
|
55
|
+
## Install
|
|
56
|
+
|
|
57
|
+
```bash
|
|
58
|
+
pip install java-functional-lsp
|
|
59
|
+
```
|
|
60
|
+
|
|
61
|
+
Or from source:
|
|
62
|
+
|
|
63
|
+
```bash
|
|
64
|
+
pip install git+https://github.com/aviadshiber/java-functional-lsp.git
|
|
65
|
+
```
|
|
66
|
+
|
|
67
|
+
## Usage with Claude Code
|
|
68
|
+
|
|
69
|
+
Install the `deeperdive-java-linter` plugin from the DeeperDive marketplace, which registers this server as a Java LSP.
|
|
70
|
+
|
|
71
|
+
Or manually add to your Claude Code config:
|
|
72
|
+
|
|
73
|
+
```json
|
|
74
|
+
{
|
|
75
|
+
"lspServers": {
|
|
76
|
+
"java-functional": {
|
|
77
|
+
"command": "java-functional-lsp",
|
|
78
|
+
"extensionToLanguage": { ".java": "java" }
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
```
|
|
83
|
+
|
|
84
|
+
## Configuration
|
|
85
|
+
|
|
86
|
+
Create `.deeperdive-linter.json` in your project root to customize rules:
|
|
87
|
+
|
|
88
|
+
```json
|
|
89
|
+
{
|
|
90
|
+
"rules": {
|
|
91
|
+
"null-literal-arg": "warning",
|
|
92
|
+
"throw-statement": "info",
|
|
93
|
+
"imperative-loop": "hint",
|
|
94
|
+
"mutable-dto": "off"
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
Severity levels: `error`, `warning`, `info`, `hint`, `off`.
|
|
100
|
+
All rules default to `warning` when not configured.
|
|
101
|
+
|
|
102
|
+
## How it works
|
|
103
|
+
|
|
104
|
+
Uses [tree-sitter](https://tree-sitter.github.io/) with the Java grammar for fast, incremental AST parsing. No Java compiler or classpath needed — analysis runs on raw source files.
|
|
105
|
+
|
|
106
|
+
The server speaks the Language Server Protocol (LSP) via stdio, making it compatible with any LSP client.
|
|
107
|
+
|
|
108
|
+
## Development
|
|
109
|
+
|
|
110
|
+
```bash
|
|
111
|
+
# Clone and setup
|
|
112
|
+
git clone https://github.com/aviadshiber/java-functional-lsp.git
|
|
113
|
+
cd java-functional-lsp
|
|
114
|
+
uv sync
|
|
115
|
+
|
|
116
|
+
# Run checks
|
|
117
|
+
uv run ruff check src/ tests/
|
|
118
|
+
uv run ruff format --check src/ tests/
|
|
119
|
+
uv run mypy src/
|
|
120
|
+
uv run pytest
|
|
121
|
+
```
|
|
122
|
+
|
|
123
|
+
See [CONTRIBUTING.md](CONTRIBUTING.md) for full guidelines.
|
|
124
|
+
|
|
125
|
+
## License
|
|
126
|
+
|
|
127
|
+
MIT
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
java_functional_lsp/__init__.py,sha256=jAgfWgoUHOCUykj2m4Fw-RX-dsZIdpD4z6N_3Ez-Pso,117
|
|
2
|
+
java_functional_lsp/server.py,sha256=KIenYC_pXmASJQGbVsmONJb9MF-nb1_QBkvrkXW2KQM,4754
|
|
3
|
+
java_functional_lsp/analyzers/__init__.py,sha256=m1Iba7lX5ayNIEfolEqkUF4s_8vVyzif33gzn7pT__k,64
|
|
4
|
+
java_functional_lsp/analyzers/base.py,sha256=sZEn53NV1Igb7vm2V7r6ShCCADKanb1WX_QaGinE3OQ,3046
|
|
5
|
+
java_functional_lsp/analyzers/exception_checker.py,sha256=B8NgSTGiaA3X2gl4lx4S_nJjsuCzeMVCikl2PPzirmY,2548
|
|
6
|
+
java_functional_lsp/analyzers/mutation_checker.py,sha256=7_NwNreqU1NxwIh9eLgoo6sLp2QNKbmDyOFDY-RqpUY,7048
|
|
7
|
+
java_functional_lsp/analyzers/null_checker.py,sha256=YrVoBJP1-ZE2d7vt_tMX4iNUHfEUQrTzXxj_47whuzQ,2517
|
|
8
|
+
java_functional_lsp/analyzers/spring_checker.py,sha256=Asx81JeU7atBibtYw0otGivzki12SiTgGhoZM4f-gh8,3465
|
|
9
|
+
java_functional_lsp-0.1.0.dist-info/METADATA,sha256=MswI80b-wJ4qi1NLCf2s-mYiBOkTTzQC3taXuX98E30,4610
|
|
10
|
+
java_functional_lsp-0.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
|
|
11
|
+
java_functional_lsp-0.1.0.dist-info/entry_points.txt,sha256=WYL5uaImAwrV6k8z2y9erzfA1h8KSEhWN0wjwUkECVU,72
|
|
12
|
+
java_functional_lsp-0.1.0.dist-info/licenses/LICENSE,sha256=TKmXIfxUBfKDiWOTULE7TEZDQ5pNynIZJSRr3rqF3OE,1065
|
|
13
|
+
java_functional_lsp-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Aviad S.
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|