Commit 76f4dbc

fix(issue_graph): eliminate double API call, add timeout, fix edge output

- Refactor fetchNode to return raw issue data along with node, avoiding second API call in crawl loop
- Add 30 second timeout to prevent runaway crawling
- Include parent edges in output by reversing direction for display
- Move cross-tool info from description to instructions.go (was already there)
- Shorter tool description focusing on core functionality

1 parent 91e4934 commit 76f4dbc
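
The timeout-and-partial-results behavior described in the commit message can be sketched in isolation. The snippet below is illustrative only and is not the code from issue_graph.go: crawl and its results slice are hypothetical stand-ins for graphCrawler.crawl, showing a context.WithTimeout bounding a loop, with the deadline treated as "keep what was gathered so far" rather than a hard failure.

package main

import (
    "context"
    "errors"
    "fmt"
    "time"
)

// crawl stands in for graphCrawler.crawl: it keeps gathering results
// until the work runs out or its context is cancelled.
func crawl(ctx context.Context, results *[]int) error {
    for i := 0; i < 1000; i++ {
        select {
        case <-ctx.Done():
            return ctx.Err()
        case <-time.After(10 * time.Millisecond):
            *results = append(*results, i)
        }
    }
    return nil
}

func main() {
    // Bound the crawl, as the commit does with its 30-second limit.
    ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
    defer cancel()

    var results []int
    if err := crawl(ctx, &results); err != nil {
        // Hitting the deadline is expected: keep the partial results.
        if !errors.Is(err, context.DeadlineExceeded) {
            fmt.Println("crawl failed:", err)
            return
        }
    }
    fmt.Printf("partial results: %d nodes\n", len(results))
}

The real change (in the issue_graph.go diff below) applies the same idea: it passes a 30-second crawlCtx to crawler.crawl and only returns an error when crawlCtx.Err() is not context.DeadlineExceeded, so a timed-out crawl still yields a graph built from the nodes fetched so far.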

File tree

2 files changed: +38 −31 lines

pkg/github/__toolsnaps__/issue_graph.snap

Lines changed: 1 addition & 1 deletion

@@ -3,7 +3,7 @@
     "title": "Get issue relationship graph",
     "readOnlyHint": true
   },
-  "description": "Get a graph representation of an issue or pull request and its related issues/PRs.\n\nThis tool helps understand the relationships between issues and PRs in a repository, especially useful for:\n- Understanding the scope of work for an issue or PR\n- Planning implementation for a task that's part of a larger epic\n- Identifying blockers or dependencies\n- Finding related work that might conflict or overlap\n- Understanding why a piece of work exists (tracing to parent epic)\n\nThe graph shows:\n- Node types: epic (large initiatives), batch (parent issues), task (regular issues), pr (pull requests)\n- Parent/child relationships from sub-issues and \"closes/fixes\" references\n- Related issues mentioned in bodies\n\nCall this tool early when working on an issue to gather appropriate context about the work hierarchy.\n\nWorks well with:\n- issue_read: After using issue_graph to identify important related issues, use issue_read to get full details of specific issues\n- pull_request_read: Use to get full PR details for PRs identified in the graph\n- search_issues: If the graph reveals related work areas, search for more issues in those areas\n- list_issues: List all issues in the repository to find additional context not captured in the graph",
+  "description": "Get a graph representation of an issue or pull request and its related issues/PRs.\n\nThis tool helps understand the relationships between issues and PRs in a repository, especially useful for:\n- Understanding the scope of work for an issue or PR\n- Planning implementation for a task that's part of a larger epic\n- Identifying blockers or dependencies\n- Finding related work that might conflict or overlap\n- Understanding why a piece of work exists (tracing to parent epic)\n\nThe graph shows:\n- Node types: epic (large initiatives), batch (parent issues), task (regular issues), pr (pull requests)\n- Parent/child relationships from sub-issues and \"closes/fixes\" references\n- Related issues mentioned in bodies\n\nCall this tool early when working on an issue to gather appropriate context about the work hierarchy.",
   "inputSchema": {
     "properties": {
       "issue_number": {

pkg/github/issue_graph.go

Lines changed: 37 additions & 30 deletions

@@ -8,6 +8,7 @@ import (
     "sort"
     "strings"
     "sync"
+    "time"

     "github.com/github/github-mcp-server/pkg/lockdown"
     "github.com/github/github-mcp-server/pkg/translations"
@@ -314,28 +315,29 @@ func (gc *graphCrawler) markRepoInaccessible(owner, repo string) {
 }

 // fetchNode fetches a single issue or PR and adds it to the graph
-func (gc *graphCrawler) fetchNode(ctx context.Context, owner, repo string, number, depth int) (*GraphNode, error) {
+// Returns both the node and the raw issue for further processing
+func (gc *graphCrawler) fetchNode(ctx context.Context, owner, repo string, number, depth int) (*GraphNode, *github.Issue, error) {
     key := nodeKey(owner, repo, number)

     // Check if already visited
     gc.mu.RLock()
     if node, exists := gc.nodes[key]; exists {
         gc.mu.RUnlock()
-        return node, nil
+        return node, nil, nil // Already visited, no issue to return
     }
     gc.mu.RUnlock()

     // Check if repo is known to be inaccessible
     if gc.isRepoInaccessible(owner, repo) {
-        return nil, nil
+        return nil, nil, nil
     }

     // Acquire semaphore
     select {
     case gc.sem <- struct{}{}:
         defer func() { <-gc.sem }()
     case <-ctx.Done():
-        return nil, ctx.Err()
+        return nil, nil, ctx.Err()
     }

     // Fetch issue/PR details
@@ -350,10 +352,10 @@ func (gc *graphCrawler) fetchNode(ctx context.Context, owner, repo string, numbe
                 if resp.StatusCode == 403 {
                     gc.markRepoInaccessible(owner, repo)
                 }
-                return nil, nil
+                return nil, nil, nil
             }
         }
-        return nil, fmt.Errorf("failed to get issue %s: %w", key, err)
+        return nil, nil, fmt.Errorf("failed to get issue %s: %w", key, err)
     }
     defer func() { _ = resp.Body.Close() }()

@@ -364,11 +366,11 @@ func (gc *graphCrawler) fetchNode(ctx context.Context, owner, repo string, numbe
             isSafeContent, err := gc.cache.IsSafeContent(ctx, login, owner, repo)
             if err != nil {
                 // Skip this node if we can't verify safety
-                return nil, nil
+                return nil, nil, nil
             }
             if !isSafeContent {
                 // Content is restricted, skip but don't fail
-                return nil, nil
+                return nil, nil, nil
             }
         }
     }
@@ -421,7 +423,7 @@ func (gc *graphCrawler) fetchNode(ctx context.Context, owner, repo string, numbe
     gc.nodes[key] = node
     gc.mu.Unlock()

-    return node, nil
+    return node, issue, nil
 }

 // crawl performs a BFS crawl from the focus node
@@ -467,8 +469,8 @@ func (gc *graphCrawler) crawl(ctx context.Context) error {
             continue
         }

-        // Fetch the node
-        node, err := gc.fetchNode(ctx, current.owner, current.repo, current.number, current.depth)
+        // Fetch the node and the raw issue data
+        node, issue, err := gc.fetchNode(ctx, current.owner, current.repo, current.number, current.depth)
         if err != nil {
             // Log error but continue crawling
             continue
@@ -478,19 +480,11 @@ func (gc *graphCrawler) crawl(ctx context.Context) error {
         }

         // Don't crawl further from nodes at max depth (they are leaf nodes for crawling)
-        if current.depth == MaxGraphDepth {
+        // Also skip if we didn't get issue data (already visited node)
+        if current.depth == MaxGraphDepth || issue == nil {
             continue
         }

-        // Get issue again to extract references (we need the full body)
-        issue, resp, err := gc.client.Issues.Get(ctx, current.owner, current.repo, current.number)
-        if err != nil {
-            continue
-        }
-        if resp != nil {
-            _ = resp.Body.Close()
-        }
-
         bodyRefs := extractIssueReferences(issue.GetBody(), current.owner, current.repo)

         // Process references and add edges
@@ -757,6 +751,18 @@ func formatGraphOutput(graph *IssueGraph) string {
         switch edge.Relation {
         case RelationTypeChild:
             parentChildEdges = append(parentChildEdges, edge)
+        case RelationTypeParent:
+            // Parent edges: from closes ref, so ref is parent of from
+            // Reverse the direction for display: parent → child
+            parentChildEdges = append(parentChildEdges, GraphEdge{
+                FromOwner:  edge.ToOwner,
+                FromRepo:   edge.ToRepo,
+                FromNumber: edge.ToNumber,
+                ToOwner:    edge.FromOwner,
+                ToRepo:     edge.FromRepo,
+                ToNumber:   edge.FromNumber,
+                Relation:   RelationTypeChild,
+            })
         case RelationTypeRelated:
             relatedEdges = append(relatedEdges, edge)
         }
@@ -799,13 +805,7 @@ The graph shows:
 - Parent/child relationships from sub-issues and "closes/fixes" references
 - Related issues mentioned in bodies

-Call this tool early when working on an issue to gather appropriate context about the work hierarchy.
-
-Works well with:
-- issue_read: After using issue_graph to identify important related issues, use issue_read to get full details of specific issues
-- pull_request_read: Use to get full PR details for PRs identified in the graph
-- search_issues: If the graph reveals related work areas, search for more issues in those areas
-- list_issues: List all issues in the repository to find additional context not captured in the graph`)),
+Call this tool early when working on an issue to gather appropriate context about the work hierarchy.`)),
         mcp.WithToolAnnotation(mcp.ToolAnnotation{
             Title:        t("TOOL_ISSUE_GRAPH_USER_TITLE", "Get issue relationship graph"),
             ReadOnlyHint: ToBoolPtr(true),
@@ -842,10 +842,17 @@ Works well with:
             return nil, fmt.Errorf("failed to get GitHub client: %w", err)
         }

+        // Add timeout to prevent runaway crawling
+        crawlCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
+        defer cancel()
+
         // Create crawler and build graph
         crawler := newGraphCrawler(client, cache, flags, owner, repo, issueNumber)
-        if err := crawler.crawl(ctx); err != nil {
-            return nil, fmt.Errorf("failed to crawl issue graph: %w", err)
+        if err := crawler.crawl(crawlCtx); err != nil {
+            // If timeout, continue with partial results; otherwise fail
+            if crawlCtx.Err() != context.DeadlineExceeded {
+                return nil, fmt.Errorf("failed to crawl issue graph: %w", err)
+            }
         }

         graph := crawler.buildGraph()
