Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,7 @@ vim.lsp.config('dexter', {
filetypes = { 'elixir', 'eelixir', 'heex' },
init_options = {
followDelegates = true, -- jump through defdelegate to the target function
-- definitionStyle = "all", -- "all" returns all function heads; "first" jumps to the first one
-- stdlibPath = "", -- override Elixir stdlib path (auto-detected)
-- debug = false, -- verbose logging to stderr (view with :LspLog)
},
Expand Down Expand Up @@ -233,6 +234,21 @@ To override the binary path manually, add this to your `settings.json`:
}
```

To configure LSP options (see [LSP options](#lsp-options)):

```json
{
"lsp": {
"dexter": {
"initialization_options": {
"followDelegates": true,
"definitionStyle": "first"
}
}
}
}
```

### Emacs

The Emacs instructions assume you're using **use-package**.
Expand Down Expand Up @@ -478,6 +494,7 @@ If the persistent process can't start, dexter falls back to running `mix format`
Dexter reads `initializationOptions` from your editor configuration:

- **`followDelegates`** (boolean, default: `true`): follow `defdelegate` targets on lookup.
- **`definitionStyle`** (string, default: `"all"`): controls how many locations are returned when a function has multiple heads (clauses). `"all"` returns every definition site; `"first"` returns only the first one, which makes editors like Zed jump directly instead of showing a picker.
- **`stdlibPath`** (string): override the Elixir stdlib directory to index. Defaults to auto-detection; use this if your install is non-standard.
- **`debug`** (boolean, default: `false`): enable verbose logging to stderr. Logs timing and resolution details for every definition, hover, references, and rename request. Can also be enabled via the `DEXTER_DEBUG=true` environment variable.

Expand Down
131 changes: 131 additions & 0 deletions internal/lsp/elixir.go
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,137 @@ func (tf *TokenizedFile) FullExpressionAtCursor(line, col int) CursorContext {
return FullExpressionAtCursor(tf.tokens, tf.source, tf.lineStarts, line, col)
}

// ArityAtCallsite reports the call arity at the given expression position.
// It returns -1 when no arity can be determined. Recognized forms:
//   - Foo.bar(a, b)   → 2
//   - Foo.bar()       → 0
//   - &Foo.bar/2      → 2 (capture syntax)
//   - x |> Foo.bar(y) → 2 (the pipe injects one implicit first argument)
//   - Foo.bar         → -1 (no call suffix, so the arity is unknown)
//
// line is 0-based; startCol/endCol are the expression's 0-based column
// bounds, as found in CursorContext.ExprStart/ExprEnd.
func (tf *TokenizedFile) ArityAtCallsite(line, startCol, endCol int) int {
	return arityAtCallsite(tf.tokens, tf.source, tf.lineStarts, line, startCol, endCol)
}

// arityAtCallsite implements TokenizedFile.ArityAtCallsite: it inspects the
// token immediately after the expression ending at (line, endCol) to decide
// whether it is a parenthesized call or a &capture, derives the arity, and
// adds one when the expression is the right-hand side of a |> pipe.
// Returns -1 when no arity can be determined.
func arityAtCallsite(tokens []parser.Token, source []byte, lineStarts []int, line, startCol, endCol int) int {
	n := len(tokens)
	if n == 0 || endCol <= 0 {
		return -1
	}

	// Locate the last token of the expression (index of the char at endCol-1).
	endOffset := parser.LineColToOffset(lineStarts, line, endCol-1)
	if endOffset < 0 {
		return -1
	}
	endIdx := parser.TokenAtOffset(tokens, endOffset)
	if endIdx < 0 {
		return -1
	}

	// Scan forward past whitespace/comments for the token immediately after
	// the expression.
	j := endIdx + 1
	for j < n && (tokens[j].Kind == parser.TokEOL || tokens[j].Kind == parser.TokComment) {
		j++
	}

	arity := -1
	switch {
	case j < n && tokens[j].Kind == parser.TokOpenParen:
		// Keyword-aware counting: a trailing keyword list (`f(x, a: 1, b: 2)`)
		// is a single argument in Elixir, so its commas must not be counted.
		arity = countCallArgsKeywordAware(tokens, source, n, j)
	case j < n && tokens[j].Kind == parser.TokOther &&
		tokens[j].End-tokens[j].Start == 1 && source[tokens[j].Start] == '/':
		// Capture syntax: &Foo.bar/2
		k := j + 1
		if k < n && tokens[k].Kind == parser.TokNumber {
			if a, ok := parseNumberTokenArity(source, tokens[k]); ok {
				arity = a
			}
		}
	}

	if arity < 0 {
		return -1
	}

	// Pipe adjustment: if the expression is the RHS of a |>, add one for the
	// implicit first argument.
	startOffset := parser.LineColToOffset(lineStarts, line, startCol)
	if startOffset >= 0 {
		startIdx := parser.TokenAtOffset(tokens, startOffset)
		if startIdx > 0 {
			for k := startIdx - 1; k >= 0; k-- {
				kind := tokens[k].Kind
				if kind == parser.TokPipe {
					return arity + 1
				}
				if kind != parser.TokEOL && kind != parser.TokComment {
					break
				}
			}
		}
	}

	return arity
}

// countCallArgsKeywordAware counts top-level arguments inside a parenthesized
// call, starting at openIdx which must be a TokOpenParen. Unlike a plain
// comma count, it treats an Elixir spread keyword tail as the single argument
// it is: once a top-level argument starts with a keyword key (`returning:`),
// every later top-level comma belongs to the same keyword list and is not
// counted. Returns -1 if the parens are unbalanced.
func countCallArgsKeywordAware(tokens []parser.Token, source []byte, n, openIdx int) int {
	if openIdx >= n || tokens[openIdx].Kind != parser.TokOpenParen {
		return -1
	}
	depth := 1
	args := 0
	hasContent := false
	atArgStart := true    // next significant depth-1 token begins an argument
	inKeywordTail := false // we are inside the trailing keyword list
	for i := openIdx + 1; i < n && depth > 0; i++ {
		switch tokens[i].Kind {
		case parser.TokOpenParen, parser.TokOpenBracket, parser.TokOpenBrace, parser.TokOpenAngle:
			depth++
			hasContent = true
			atArgStart = false
		case parser.TokCloseParen, parser.TokCloseBracket, parser.TokCloseBrace, parser.TokCloseAngle:
			depth--
			if depth == 0 {
				if hasContent {
					return args + 1
				}
				return 0
			}
		case parser.TokComma:
			if depth == 1 {
				if !inKeywordTail {
					args++
				}
				hasContent = false
				atArgStart = true
				continue
			}
			hasContent = true
		case parser.TokEOL, parser.TokComment:
			// layout trivia never starts or ends an argument
		default:
			if depth == 1 && atArgStart && isKeywordKey(tokens, source, n, i) {
				inKeywordTail = true
			}
			hasContent = true
			atArgStart = false
		}
	}
	return -1
}

// isKeywordKey reports whether the token at index i looks like an Elixir
// keyword-argument key: an identifier with an attached trailing colon
// ("returning:"), either tokenized as one token or as an identifier followed
// by an immediately adjacent ":" token. A lone ":" and the "::" operator are
// excluded so atoms (`:ok`) and typespecs are not mistaken for keys.
func isKeywordKey(tokens []parser.Token, source []byte, n, i int) bool {
	t := tokens[i]
	if t.End > t.Start && source[t.End-1] == ':' {
		// Reject a bare ":" (atom prefix / operator) and the "::" operator.
		if t.End-t.Start == 1 || source[t.End-2] == ':' {
			return false
		}
		return true
	}
	// Identifier token with a separate ":" token glued right after it.
	j := i + 1
	return j < n && tokens[j].Kind == parser.TokOther &&
		tokens[j].End-tokens[j].Start == 1 &&
		source[tokens[j].Start] == ':' &&
		tokens[j].Start == t.End
}

// countCallArgs counts the top-level arguments of a parenthesized call whose
// opening paren sits at openIdx (which must be a TokOpenParen). It returns -1
// when openIdx is out of range, not an open paren, or the parens never close.
//
// NOTE(review): a trailing keyword list (e.g. `f(x, a: 1, b: 2)`) is a single
// argument in Elixir, but its top-level commas are counted here like any
// others, so the result is inflated by one per extra pair — confirm against
// callers that need exact arities.
func countCallArgs(tokens []parser.Token, n, openIdx int) int {
	if openIdx >= n || tokens[openIdx].Kind != parser.TokOpenParen {
		return -1
	}

	nesting := 1      // bracket depth; the call's own paren is level 1
	commas := 0       // top-level commas seen so far
	sawToken := false // true once the current argument has any real token

	for i := openIdx + 1; i < n; i++ {
		switch tokens[i].Kind {
		case parser.TokOpenParen, parser.TokOpenBracket, parser.TokOpenBrace, parser.TokOpenAngle:
			nesting++
			sawToken = true
		case parser.TokCloseParen, parser.TokCloseBracket, parser.TokCloseBrace, parser.TokCloseAngle:
			nesting--
			if nesting == 0 {
				// Empty parens mean zero args; otherwise args = commas + 1.
				if !sawToken {
					return 0
				}
				return commas + 1
			}
		case parser.TokComma:
			if nesting == 1 {
				commas++
				sawToken = false
			} else {
				sawToken = true
			}
		case parser.TokEOL, parser.TokComment:
			// layout trivia never starts an argument
		default:
			sawToken = true
		}
	}
	return -1 // ran out of tokens with parens still open
}
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Keyword args inflate arity, breaking definition lookup

High Severity

countCallArgs counts top-level commas to determine call arity, but Elixir's spread keyword arguments (e.g., Repo.insert(changeset, returning: true, on_conflict: :replace)) use commas between key-value pairs while the entire keyword tail counts as a single argument. The function returns arity 3 here instead of 2. When this inflated arity is passed to LookupFunctionByArity, the exact-match SQL filter finds zero rows, and go-to-definition silently returns nothing — a regression from the previous behavior that returned all arities.

Additional Locations (1)
Fix in Cursor Fix in Web

Reviewed by Cursor Bugbot for commit bdcc215. Configure here.


// parseNumberTokenArity interprets token t as a plain decimal arity.
// It returns (value, true) only when every byte of the token is an ASCII
// digit and the accumulated value stays within 0..255 (arities fit in a
// byte in practice); otherwise it returns (0, false).
func parseNumberTokenArity(source []byte, t parser.Token) (int, bool) {
	val := 0
	for i := t.Start; i < t.End; i++ {
		d := int(source[i]) - '0'
		if d < 0 || d > 9 {
			return 0, false
		}
		val = val*10 + d
		if val > 255 { // arity fits in a byte in practice
			return 0, false
		}
	}
	return val, true
}

// FirstDefmodule returns the first defmodule name found, or "".
func (tf *TokenizedFile) FirstDefmodule() string {
for i := 0; i < tf.n; i++ {
Expand Down
37 changes: 26 additions & 11 deletions internal/lsp/server.go
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ type Server struct {
client protocol.Client
followDelegates bool
debug bool
definitionStyle string // "all" (default) or "first": controls multi-head definition results
mixBin string // resolved path to the mix binary

formatters map[string]*formatterProcess // formatterExs path → persistent formatter
Expand Down Expand Up @@ -102,6 +103,7 @@ func NewServer(s *store.Store, projectRoot string) *Server {
projectRoot: projectRoot,
explicitRoot: projectRoot != "",
followDelegates: true,
definitionStyle: "all",
usingCache: make(map[string]*usingCacheEntry),
depsCache: make(map[string]bool),
}
Expand Down Expand Up @@ -298,6 +300,11 @@ func (s *Server) Initialize(ctx context.Context, params *protocol.InitializePara
if v, ok := opts["debug"].(bool); ok {
s.debug = v
}
if v, ok := opts["definitionStyle"].(string); ok {
if v == "all" || v == "first" {
s.definitionStyle = v
}
}
}
if os.Getenv("DEXTER_DEBUG") == "true" {
s.debug = true
Expand Down Expand Up @@ -549,6 +556,7 @@ func (s *Server) Definition(ctx context.Context, params *protocol.DefinitionPara

expr := tf.ResolveModuleExpr(exprCtx.Expr(), lineNum)
moduleRef, functionName := ExtractModuleAndFunction(expr)
callArity := tf.ArityAtCallsite(lineNum, exprCtx.ExprStart, exprCtx.ExprEnd)

if moduleRef != "" {
if aliasParent, inBlock := ExtractAliasBlockParent(lines, lineNum); inBlock {
Expand All @@ -558,7 +566,7 @@ func (s *Server) Definition(ctx context.Context, params *protocol.DefinitionPara

aliases := tf.ExtractAliasesInScope(lineNum)
s.mergeAliasesFromUse(text, aliases)
s.debugf("Definition: expr=%q module=%q function=%q", expr, moduleRef, functionName)
s.debugf("Definition: expr=%q module=%q function=%q arity=%d", expr, moduleRef, functionName, callArity)

// Bare identifier — check variable first (cheap tree-sitter lookup), then functions
if moduleRef == "" {
Expand Down Expand Up @@ -600,26 +608,26 @@ func (s *Server) Definition(ctx context.Context, params *protocol.DefinitionPara
var results []store.LookupResult
var err error
if s.followDelegates {
results, err = s.store.LookupFollowDelegate(fullModule, functionName)
results, err = s.store.LookupFollowDelegateByArity(fullModule, functionName, callArity)
} else {
results, err = s.store.LookupFunction(fullModule, functionName)
results, err = s.store.LookupFunctionByArity(fullModule, functionName, callArity)
}
if err == nil && len(results) > 0 {
s.debugf("Definition: found %d result(s) in store for %s.%s", len(results), fullModule, functionName)
return storeResultsToLocations(filterOutTypes(results)), nil
return s.applyDefinitionStyle(storeResultsToLocations(filterOutTypes(results))), nil
}

// fullModule may not directly define the function — try its use chain
// (e.g. `import MyApp.Factory` where MyApp.Factory uses ExMachina).
if results := s.lookupThroughUseOf(fullModule, functionName); len(results) > 0 {
s.debugf("Definition: found %d result(s) via use chain of %s for %s", len(results), fullModule, functionName)
return storeResultsToLocations(filterOutTypes(results)), nil
return s.applyDefinitionStyle(storeResultsToLocations(filterOutTypes(results))), nil
}

// Fallback for use-chain inline defs (not stored as module definitions)
if results := s.lookupThroughUse(text, functionName, aliases); len(results) > 0 {
s.debugf("Definition: found %d result(s) via current file use chain for %s", len(results), functionName)
return storeResultsToLocations(filterOutTypes(results)), nil
return s.applyDefinitionStyle(storeResultsToLocations(filterOutTypes(results))), nil
}

s.debugf("Definition: no result found for bare function %q in module %q", functionName, fullModule)
Expand All @@ -635,19 +643,19 @@ func (s *Server) Definition(ctx context.Context, params *protocol.DefinitionPara
var results []store.LookupResult
var err error
if s.followDelegates {
results, err = s.store.LookupFollowDelegate(fullModule, functionName)
results, err = s.store.LookupFollowDelegateByArity(fullModule, functionName, callArity)
} else {
results, err = s.store.LookupFunction(fullModule, functionName)
results, err = s.store.LookupFunctionByArity(fullModule, functionName, callArity)
}
if err == nil && len(results) > 0 {
s.debugf("Definition: found %d result(s) in store for %s.%s", len(results), fullModule, functionName)
return storeResultsToLocations(filterOutTypes(results)), nil
return s.applyDefinitionStyle(storeResultsToLocations(filterOutTypes(results))), nil
}
// Not directly defined — the function may have been injected by a
// `use` macro in fullModule's source (e.g. Oban.Worker injects `new`).
if results := s.lookupThroughUseOf(fullModule, functionName); len(results) > 0 {
s.debugf("Definition: found %d result(s) via use chain of %s for %s", len(results), fullModule, functionName)
return storeResultsToLocations(results), nil
return s.applyDefinitionStyle(storeResultsToLocations(results)), nil
}
s.debugf("Definition: no result for %s.%s", fullModule, functionName)
}
Expand All @@ -657,7 +665,14 @@ func (s *Server) Definition(ctx context.Context, params *protocol.DefinitionPara
if err != nil || len(results) == 0 {
return nil, nil
}
return storeResultsToLocations(results), nil
return s.applyDefinitionStyle(storeResultsToLocations(results)), nil
}

// applyDefinitionStyle trims multi-head definition results according to the
// configured definitionStyle: "first" keeps only the leading location, while
// "all" (the default) passes the slice through unchanged.
func (s *Server) applyDefinitionStyle(locations []protocol.Location) []protocol.Location {
	if s.definitionStyle != "first" || len(locations) <= 1 {
		return locations
	}
	return locations[:1]
}

func storeResultsToLocations(results []store.LookupResult) []protocol.Location {
Expand Down
Loading
Loading