package main
import (
"context"
"flag"
"os"
"path/filepath"
"intern/internal/ai/agent/anthropic"
"intern/internal/config"
"intern/internal/indexer"
"intern/internal/orchestrator"
"intern/internal/repository"
"intern/internal/repository/github"
"intern/internal/ticketing"
jiraraw "intern/internal/ticketing/jira-raw"
logger "github.com/jenish-jain/logger"
)
func main() {
initFlag := flag.Bool("init", false, "initialize sample config and state files")
buildIndexFlag := flag.Bool("build-index", false, "build file index for smart context selection")
flag.Parse()
logger.Init("debug")
if *initFlag {
writeSampleFiles()
logger.Info("Sample config.yaml, .env.example, and agent_state.jsonc created.")
return
}
if *buildIndexFlag {
buildIndex()
return
}
cfg, err := config.LoadConfig()
if err != nil {
logger.Error("Failed to load config: %v", err)
os.Exit(1)
}
jiraClient, err := jiraraw.NewRawClient(cfg.JiraURL, cfg.JiraEmail, cfg.JiraAPIToken)
// jiraClient, err := jira.NewClient(cfg.JiraURL, cfg.JiraEmail, cfg.JiraAPIToken)
if err != nil {
logger.Error("Failed to init JIRA client: %v", err)
os.Exit(1)
}
if err := jiraClient.HealthCheck(context.Background()); err != nil {
logger.Error("JIRA health check failed: %v", err)
os.Exit(1)
}
ticketingSvc := ticketing.NewTicketingService(jiraClient)
githubClient := github.NewClient(cfg.GitHubToken, cfg.GitHubOwner, cfg.GitHubRepo)
repoSvc := repository.NewRepositoryService(githubClient)
stateFile := "agent_state.jsonc"
state := orchestrator.NewState(stateFile)
_ = state.Load() // ignore error if file doesn't exist
agent := anthropic.NewClient(cfg.AnthropicAPIKey)
coordinator := orchestrator.NewCoordinator(ticketingSvc, repoSvc, agent, cfg, state)
logger.Info("Starting AI Intern Agent MVP...")
coordinator.Run(context.Background())
}
func writeSampleFiles() {
os.WriteFile(".env.example", []byte(`JIRA_URL="https://company.atlassian.net"
JIRA_EMAIL="ai-agent@company.com"
JIRA_API_TOKEN="your-jira-api-token"
JIRA_PROJECT_KEY="PROJ"
JIRA_TRANSITION_TO_DO="11"
JIRA_TRANSITION_IN_PROGRESS="21"
JIRA_TRANSITION_DONE="31"
GITHUB_TOKEN="your-github-token"
GITHUB_OWNER="company"
GITHUB_REPO="main-repo"
ANTHROPIC_API_KEY="your-anthropic-api-key"
AGENT_USERNAME="ai-intern"
POLLING_INTERVAL="30s"
MAX_CONCURRENT_TICKETS=1
WORKING_DIR="./workspace" # Will be ./workspace/{GITHUB_REPO} automatically
BASE_BRANCH="master"
BRANCH_PREFIX="feature/"
CONTEXT_MAX_FILES=40
CONTEXT_MAX_BYTES=32768 # bytes of content read per file when building context
PLAN_MAX_FILES=10
ALLOWED_WRITE_DIRS="internal,cmd,pkg,docs,config,."
`), 0644)
os.WriteFile("agent_state.jsonc", []byte(`{"processed":{}}`), 0644)
}
func buildIndex() {
logger.Info("Building file index for smart context selection...")
// Load config to get repository path
cfg, err := config.LoadConfig()
if err != nil {
logger.Error("Failed to load config", "error", err)
os.Exit(1)
}
// Determine repository root
workingDir := os.Getenv("AGENT_WORKING_DIR")
if workingDir == "" {
workingDir = cfg.WorkingDir
}
repoRoot := filepath.Join(workingDir, cfg.GitHubRepo)
// Check if repository exists
if _, err := os.Stat(repoRoot); os.IsNotExist(err) {
logger.Error("Repository not found", "path", repoRoot)
logger.Info("Make sure to clone the repository first or set AGENT_WORKING_DIR correctly")
os.Exit(1)
}
logger.Info("Indexing repository", "path", repoRoot)
// Build index
idx := indexer.New(repoRoot)
fileIndex, err := idx.BuildIndex()
if err != nil {
logger.Error("Failed to build index", "error", err)
os.Exit(1)
}
logger.Info("Index built successfully", "files", len(fileIndex.Files), "modules", len(fileIndex.Modules))
// Save index
if err := idx.SaveIndex(fileIndex); err != nil {
logger.Error("Failed to save index", "error", err)
os.Exit(1)
}
indexPath := filepath.Join(repoRoot, indexer.IndexDirName, indexer.IndexFileName)
logger.Info("Index saved successfully", "path", indexPath)
// Show some statistics
categoryCounts := make(map[string]int)
for _, meta := range fileIndex.Files {
categoryCounts[meta.Category]++
}
logger.Info("Index statistics:")
for category, count := range categoryCounts {
logger.Info(" - "+category, "count", count)
}
}
package anthropic
import (
"bytes"
"context"
"encoding/base64"
"encoding/json"
"fmt"
"io"
"net/http"
"time"
"intern/internal/ai/agent"
"intern/internal/util"
"github.com/jenish-jain/logger"
)
// Ensure Client implements agent.Agent
var _ agent.Agent = (*Client)(nil)
const url = "https://api.anthropic.com/v1/messages"
const anthropicVersion = "2023-06-01"
const model = "claude-sonnet-4-20250514"
// Client is an implementation of the agent.Agent interface for Anthropic's Claude API.
// It handles communication with the Anthropic API for code generation tasks.
type Client struct {
APIKey string // Anthropic API key for authentication
Model string // Claude model identifier to use (e.g., "claude-sonnet-4-20250514")
HTTP *http.Client // HTTP client with configured timeout
}
// NewClient creates a new Anthropic API client with default settings.
// The client is configured with a 60-second timeout and the latest Claude model.
func NewClient(apiKey string) *Client {
return &Client{
APIKey: apiKey,
Model: model,
HTTP: &http.Client{Timeout: 60 * time.Second},
}
}
// PlanChanges asks the model to emit a minimal JSON array of CodeChange items.
func (c *Client) PlanChanges(ctx context.Context, ticketKey, ticketSummary, ticketDescription, repoContext string) ([]agent.CodeChange, error) {
prompt := agent.BuildPlanChangesPrompt(ticketKey, ticketSummary, ticketDescription, repoContext, agent.PlanPromptOptions{AllowBase64: true})
logger.Debug("prompt in anthropic", "prompt", prompt)
reqBody := codeGenRequest{
Model: c.Model,
MaxTokens: 16000, // Increased for complex tickets (e.g., Next.js initialization with multiple files)
Messages: []messagePart{{Role: "user", Content: prompt}},
}
payload, err := json.Marshal(reqBody)
if err != nil {
return nil, fmt.Errorf("failed to marshal request: %w", err)
}
req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(payload))
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("x-api-key", c.APIKey)
req.Header.Set("anthropic-version", anthropicVersion)
resp, err := c.HTTP.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
b, readErr := io.ReadAll(resp.Body)
if readErr != nil {
return nil, fmt.Errorf("anthropic error %d: failed to read response body: %w", resp.StatusCode, readErr)
}
return nil, fmt.Errorf("anthropic error %d: %s", resp.StatusCode, string(b))
}
var cg codeGenResponse
if err := json.NewDecoder(resp.Body).Decode(&cg); err != nil {
return nil, err
}
if len(cg.Content) == 0 {
return nil, fmt.Errorf("empty anthropic response")
}
raw := agent.SanitizeResponse(cg.Content[0].Text)
logger.Debug("AI response (sanitized)", "length", len(raw), "preview", raw[:util.Min(500, len(raw))])
var changes []agent.CodeChange
if err := json.Unmarshal([]byte(raw), &changes); err != nil {
// Log the full response on error for debugging
logger.Error("Failed to parse AI response",
"error", err,
"response_length", len(raw),
"response_preview", raw[:util.Min(1000, len(raw))],
"stop_reason", cg.StopReason)
return nil, fmt.Errorf("invalid JSON from model: %w", err)
}
// Decode base64 content if provided
for i := range changes {
if changes[i].Content == "" && changes[i].ContentB64 != "" {
data, derr := base64.StdEncoding.DecodeString(changes[i].ContentB64)
if derr == nil {
changes[i].Content = string(data)
}
}
}
return changes, nil
}
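// Illustrative usage sketch (not part of the original source): planning changes
// for a hypothetical ticket with the Anthropic client above. The ticket values
// and repository context are made up; the API key is read from the environment
// as in the sample .env.

package anthropic

import (
	"context"
	"fmt"
	"os"
)

func examplePlanChangesSketch() {
	c := NewClient(os.Getenv("ANTHROPIC_API_KEY"))
	changes, err := c.PlanChanges(
		context.Background(),
		"PROJ-101",                            // hypothetical ticket key
		"Add health endpoint",                 // hypothetical summary
		"Expose GET /healthz returning 200.",  // hypothetical description
		"# FILE: main.go\npackage main",       // minimal repo context
	)
	if err != nil {
		fmt.Println("plan failed:", err)
		return
	}
	for _, ch := range changes {
		fmt.Println(ch.Operation, ch.Path)
	}
}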
package agent
import (
"fmt"
"strings"
)
// PlanPromptOptions configures the prompt generation
type PlanPromptOptions struct {
AllowBase64 bool
}
// BuildPlanChangesPrompt builds a strict JSON-only prompt for planning code changes.
// It asks for a JSON array of CodeChange with optional content_b64 to avoid escaping issues.
func BuildPlanChangesPrompt(ticketKey, ticketSummary, ticketDescription, repoContext string, opts PlanPromptOptions) string {
var rules []string
rules = append(rules, "Output ONLY compact JSON. No markdown, no backticks, no commentary.follow the instructions given in ticket carefully and adher to acceptance criteria if any in the ticket and make sure all are fullfilled and don't add any additional changes that are not in the ticket.")
rules = append(rules, "Schema: [{\"path\":\"relative/path.ext\",\"operation\":\"create|update\",\"content\":\"full file content\"}]")
if opts.AllowBase64 {
rules = append(rules, "You MAY use {\"content_b64\":\"<base64>\"} instead of content for large or complex content.")
}
rules = append(rules, "try compiling code if possible before creating a changeset.")
rules = append(rules, "Use POSIX-style relative paths under repo root.")
return fmt.Sprintf(
"You are a senior Go engineer\nTicket: %s - %s\nDescription:\n%s\n\nRepository context (truncated):\n%s\n\nRules:\n- %s\n\nJSON:",
strings.TrimSpace(ticketKey),
strings.TrimSpace(ticketSummary),
strings.TrimSpace(ticketDescription),
strings.TrimSpace(repoContext),
strings.Join(rules, "\n- "),
)
}
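// Illustrative sketch (not part of the original source): building a plan prompt
// for a hypothetical ticket and printing it. The expected model reply is a JSON
// array matching the schema rule above, e.g.
// [{"path":"internal/health/handler.go","operation":"create","content":"package health\n..."}].

package agent

import "fmt"

func exampleBuildPlanChangesPromptSketch() {
	prompt := BuildPlanChangesPrompt(
		"PROJ-101",                            // hypothetical ticket key
		"Add health endpoint",                 // hypothetical summary
		"Expose GET /healthz returning 200.",  // hypothetical description
		"# FILE: main.go\npackage main",       // truncated repo context
		PlanPromptOptions{AllowBase64: true},
	)
	fmt.Println(prompt)
}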
package agent
import (
"regexp"
"strings"
)
// SanitizeResponse strips markdown code fences from a model response and
// extracts the first JSON array, returning the trimmed input if no array is found.
func SanitizeResponse(s string) string {
	s = strings.TrimSpace(s)
	s = strings.TrimPrefix(s, "```json")
	s = strings.TrimPrefix(s, "```JSON")
	s = strings.TrimPrefix(s, "```")
s = strings.TrimSuffix(s, "```")
s = strings.TrimSpace(s)
// Extract first JSON array if extra text present
re := regexp.MustCompile(`(?s)\[.*\]`)
if m := re.FindString(s); m != "" {
return m
}
return s
}
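// Illustrative sketch (not part of the original source): SanitizeResponse
// stripping a fenced reply down to the raw JSON array expected by PlanChanges.

package agent

import "fmt"

func exampleSanitizeResponseSketch() {
	raw := "```json\n[{\"path\":\"main.go\",\"operation\":\"create\",\"content\":\"package main\"}]\n```"
	fmt.Println(SanitizeResponse(raw))
	// Prints: [{"path":"main.go","operation":"create","content":"package main"}]
}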
package ai
import (
"errors"
"fmt"
"io/fs"
"os"
"path/filepath"
"strings"
"intern/internal/indexer"
"intern/internal/util"
)
// BuildRepoContext reads a subset of files (small text/code files) to provide
// a lightweight context string for the LLM. It skips binaries, vendor, node_modules, and large files.
func BuildRepoContext(repoRoot string, maxFiles int, maxBytesPerFile int) string {
var b strings.Builder
count := 0
stop := errors.New("stop-walk")
_ = filepath.WalkDir(repoRoot, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return nil
}
rel, rErr := filepath.Rel(repoRoot, path)
if rErr != nil {
return nil
}
lower := strings.ToLower(rel)
// Skip common large/noise directories early
if d.IsDir() {
if lower == ".git" || strings.HasPrefix(lower, ".git/") ||
lower == "vendor" || strings.HasPrefix(lower, "vendor/") ||
lower == "node_modules" || strings.HasPrefix(lower, "node_modules/") ||
lower == ".idea" || lower == ".vscode" ||
lower == "build" || lower == "dist" || lower == "out" {
return fs.SkipDir
}
return nil
}
if count >= maxFiles {
return stop
}
// Skip obvious binaries
if hasAnySuffix(lower, ".png", ".jpg", ".jpeg", ".gif", ".pdf", ".zip", ".exe", ".bin", ".mp4", ".mov", ".dll") {
return nil
}
// Read up to maxBytesPerFile
data, rerr := os.ReadFile(path)
if rerr != nil {
return nil
}
if len(data) > maxBytesPerFile {
data = data[:maxBytesPerFile]
}
b.WriteString("\n\n# FILE: ")
b.WriteString(rel)
b.WriteString("\n")
b.Write(data)
count++
return nil
})
return b.String()
}
func hasAnySuffix(s string, suff ...string) bool {
for _, x := range suff {
if strings.HasSuffix(s, x) {
return true
}
}
return false
}
// BuildSmartRepoContext builds repository context using intelligent file selection
// based on keywords extracted from ticket description.
// Falls back to BuildRepoContext if index is not available or keywords are empty.
func BuildSmartRepoContext(repoRoot, ticketDescription string, maxFiles int) (string, error) {
// Try to use smart selection with index
idx := indexer.New(repoRoot)
// Check if index exists
if !idx.IndexExists() {
// No index available, fall back to simple context builder
return BuildRepoContext(repoRoot, maxFiles, 32*1024), nil
}
// Load index
fileIndex, err := idx.LoadIndex()
if err != nil {
// Failed to load index, fall back
return BuildRepoContext(repoRoot, maxFiles, 32*1024), nil
}
// Extract keywords from ticket description
keywords := indexer.ExtractKeywords(ticketDescription)
// If no keywords, fall back to simple selection
if len(keywords) == 0 {
return BuildRepoContext(repoRoot, maxFiles, 32*1024), nil
}
// Score files based on keywords
scores := indexer.ScoreFiles(fileIndex, keywords)
// Select top files
topScores := indexer.SelectTopFiles(scores, maxFiles)
// Build context from top files
var sb strings.Builder
sb.WriteString(fmt.Sprintf("# Repository Context (Smart Selection)\n"))
sb.WriteString(fmt.Sprintf("# Based on keywords: %v\n", keywords[:util.Min(5, len(keywords))]))
sb.WriteString(fmt.Sprintf("# Selected %d most relevant files\n\n", len(topScores)))
for _, fileScore := range topScores {
filePath := filepath.Join(repoRoot, fileScore.Path)
sb.WriteString(fmt.Sprintf("\n## FILE: %s (relevance: %.1f)\n", fileScore.Path, fileScore.Score))
// Extract minimal context for Go files, full content for others
		fileContext, err := indexer.ExtractMinimalContext(filePath)
if err != nil {
// If extraction fails, try reading file directly
data, readErr := os.ReadFile(filePath)
if readErr == nil {
sb.Write(data)
}
continue
}
		sb.WriteString(fileContext)
sb.WriteString("\n")
}
return sb.String(), nil
}
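// Illustrative usage sketch (not part of the original source): building context
// for a hypothetical checkout. BuildSmartRepoContext already falls back to the
// simple walker when no index is present, so the explicit fallback below only
// covers unexpected errors.

package ai

import "fmt"

func exampleBuildContextSketch() {
	repoRoot := "./workspace/main-repo" // hypothetical clone location
	ctxStr, err := BuildSmartRepoContext(repoRoot, "Fix login bug in internal/auth/login.go", 40)
	if err != nil {
		ctxStr = BuildRepoContext(repoRoot, 40, 32*1024)
	}
	fmt.Println("context bytes:", len(ctxStr))
}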
package config
import (
"fmt"
"strings"
"github.com/joho/godotenv"
"github.com/spf13/viper"
)
type Config struct {
JiraURL string
JiraEmail string
JiraAPIToken string
JiraProject string
JiraTransitions map[string]string
GitHubToken string
GitHubOwner string
GitHubRepo string
AnthropicAPIKey string
AgentUsername string
PollingInterval string
MaxConcurrentTickets int
WorkingDir string // Base working directory, will be joined with GitHubRepo to create ./workspace/{repoName}
BaseBranch string
BranchPrefix string
ContextMaxFiles int
ContextMaxBytes int
PlanMaxFiles int
	AllowedWriteDirs     []string // parsed from ALLOWED_WRITE_DIRS (comma-separated); defaults applied in LoadConfig
RunTestsBeforePR bool
RunVetBeforePR bool
}
func LoadConfig() (*Config, error) {
_ = godotenv.Load()
viper.AutomaticEnv()
cfg := &Config{
JiraURL: viper.GetString("JIRA_URL"),
JiraEmail: viper.GetString("JIRA_EMAIL"),
JiraAPIToken: viper.GetString("JIRA_API_TOKEN"),
JiraProject: viper.GetString("JIRA_PROJECT_KEY"),
JiraTransitions: map[string]string{
"To Do": viper.GetString("JIRA_TRANSITION_TO_DO"),
"In Progress": viper.GetString("JIRA_TRANSITION_IN_PROGRESS"),
"Done": viper.GetString("JIRA_TRANSITION_DONE"),
},
GitHubToken: viper.GetString("GITHUB_TOKEN"),
GitHubOwner: viper.GetString("GITHUB_OWNER"),
GitHubRepo: viper.GetString("GITHUB_REPO"),
AnthropicAPIKey: viper.GetString("ANTHROPIC_API_KEY"),
AgentUsername: viper.GetString("AGENT_USERNAME"),
PollingInterval: viper.GetString("POLLING_INTERVAL"),
MaxConcurrentTickets: viper.GetInt("MAX_CONCURRENT_TICKETS"),
WorkingDir: viper.GetString("WORKING_DIR"),
BaseBranch: viper.GetString("BASE_BRANCH"),
BranchPrefix: viper.GetString("BRANCH_PREFIX"),
ContextMaxFiles: viper.GetInt("CONTEXT_MAX_FILES"),
ContextMaxBytes: viper.GetInt("CONTEXT_MAX_BYTES"),
PlanMaxFiles: viper.GetInt("PLAN_MAX_FILES"),
RunTestsBeforePR: viper.GetBool("RUN_TESTS_BEFORE_PR"),
RunVetBeforePR: viper.GetBool("RUN_VET_BEFORE_PR"),
}
// Defaults
if cfg.ContextMaxFiles <= 0 {
cfg.ContextMaxFiles = 40
}
if cfg.ContextMaxBytes <= 0 {
cfg.ContextMaxBytes = 32 * 1024
}
if cfg.PlanMaxFiles <= 0 {
cfg.PlanMaxFiles = 20
}
allowed := viper.GetString("ALLOWED_WRITE_DIRS")
if strings.TrimSpace(allowed) == "" {
cfg.AllowedWriteDirs = []string{"internal", "cmd", "pkg", "docs", "config", "."}
} else {
parts := strings.Split(allowed, ",")
for i := range parts {
parts[i] = strings.TrimSpace(parts[i])
}
cfg.AllowedWriteDirs = parts
}
if err := cfg.Validate(); err != nil {
return nil, err
}
return cfg, nil
}
func (c *Config) Validate() error {
if c.JiraURL == "" || c.JiraEmail == "" || c.JiraAPIToken == "" || c.JiraProject == "" {
return fmt.Errorf("missing JIRA configuration")
}
if c.GitHubToken == "" || c.GitHubOwner == "" || c.GitHubRepo == "" {
return fmt.Errorf("missing GitHub configuration")
}
if c.AnthropicAPIKey == "" {
return fmt.Errorf("missing Anthropic API key")
}
if c.AgentUsername == "" {
return fmt.Errorf("missing agent username")
}
if c.PollingInterval == "" {
return fmt.Errorf("missing polling interval")
}
if c.MaxConcurrentTickets <= 0 {
return fmt.Errorf("max concurrent tickets must be > 0")
}
return nil
}
package config
// ValidateConfig validates cfg by delegating to (*Config).Validate.
func ValidateConfig(cfg *Config) error {
	return cfg.Validate()
}
package indexer
import (
"encoding/json"
"errors"
"io/fs"
"os"
"path/filepath"
"regexp"
"strings"
"time"
)
const (
IndexVersion = "1.0"
IndexFileName = "file_index.json"
IndexDirName = ".ai-intern"
ProjectIndexName = "PROJECT_INDEX.md"
)
// Excluded directories that should be skipped during indexing
var excludedDirs = []string{
".git", "vendor", "node_modules", ".idea", ".vscode",
"build", "dist", "out", ".ai-intern", "workspace",
}
// Binary and media file extensions that should be skipped during indexing
var binaryExts = []string{
".png", ".jpg", ".jpeg", ".gif", ".pdf", ".zip",
".exe", ".bin", ".mp4", ".mov", ".dll", ".so", ".dylib",
".tar", ".gz", ".bz2", ".7z", ".rar",
}
// Indexer generates and manages repository file indexes
type Indexer struct {
repoRoot string
}
// New creates a new repository indexer
func New(repoRoot string) *Indexer {
return &Indexer{repoRoot: repoRoot}
}
// BuildIndex scans the repository and creates a complete file index
func (idx *Indexer) BuildIndex() (*FileIndex, error) {
index := &FileIndex{
Version: IndexVersion,
IndexedAt: time.Now(),
RepoRoot: idx.repoRoot,
Files: make(map[string]FileMetadata),
Modules: make(map[string][]string),
}
err := filepath.WalkDir(idx.repoRoot, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return nil // Skip errors, continue walking
}
relPath, rErr := filepath.Rel(idx.repoRoot, path)
if rErr != nil {
return nil
}
// Skip excluded directories
if d.IsDir() {
if idx.shouldSkipDir(relPath) {
return fs.SkipDir
}
return nil
}
// Skip excluded files
if idx.shouldSkipFile(relPath) {
return nil
}
// Analyze file and add to index
metadata := idx.analyzeFile(path, relPath)
if metadata != nil {
index.Files[relPath] = *metadata
// Group by module
module := idx.extractModule(relPath)
if module != "" {
index.Modules[module] = append(index.Modules[module], relPath)
}
}
return nil
})
if err != nil {
return nil, err
}
return index, nil
}
// shouldSkipDir determines if a directory should be excluded from indexing
func (idx *Indexer) shouldSkipDir(relPath string) bool {
lower := strings.ToLower(relPath)
for _, excluded := range excludedDirs {
if lower == excluded || strings.HasPrefix(lower, excluded+"/") {
return true
}
}
return false
}
// shouldSkipFile determines if a file should be excluded from indexing
func (idx *Indexer) shouldSkipFile(relPath string) bool {
lower := strings.ToLower(relPath)
// Skip binary and media files
for _, ext := range binaryExts {
if strings.HasSuffix(lower, ext) {
return true
}
}
// Skip very large files (>1MB)
info, err := os.Stat(filepath.Join(idx.repoRoot, relPath))
if err == nil && info.Size() > 1*1024*1024 {
return true
}
return false
}
// analyzeFile examines a file and generates metadata
func (idx *Indexer) analyzeFile(absPath, relPath string) *FileMetadata {
info, err := os.Stat(absPath)
if err != nil {
return nil
}
metadata := &FileMetadata{
Path: relPath,
Size: info.Size(),
LastModified: info.ModTime(),
Category: idx.categorizeFile(relPath),
Importance: idx.calculateImportance(relPath),
Dependencies: idx.extractDependencies(absPath, relPath),
Summary: idx.generateSummary(relPath),
}
return metadata
}
// categorizeFile assigns a category to a file
func (idx *Indexer) categorizeFile(relPath string) string {
lower := strings.ToLower(relPath)
if strings.Contains(lower, "_test.go") || strings.Contains(lower, "/test/") {
return "test"
}
if strings.HasSuffix(lower, ".md") || strings.Contains(lower, "/docs/") {
return "doc"
}
if strings.Contains(lower, "config") || strings.Contains(lower, ".env") ||
strings.HasSuffix(lower, ".yaml") || strings.HasSuffix(lower, ".yml") ||
strings.HasSuffix(lower, ".json") {
return "config"
}
if strings.Contains(lower, "/cmd/") || strings.Contains(lower, "main.go") {
return "core"
}
if strings.Contains(lower, "internal/") {
return "core"
}
return "other"
}
// calculateImportance assigns an importance score (0-10)
func (idx *Indexer) calculateImportance(relPath string) float64 {
score := 5.0 // Default importance
lower := strings.ToLower(relPath)
// High importance for entry points
if strings.Contains(lower, "main.go") {
score += 5.0
}
// High importance for core orchestrator
if strings.Contains(lower, "/orchestrator/coordinator.go") {
score += 4.0
}
// High importance for core modules
if strings.Contains(lower, "/orchestrator/") || strings.Contains(lower, "/ai/") {
score += 2.0
}
// Medium importance for other internal packages
if strings.Contains(lower, "/internal/") {
score += 1.0
}
// Lower importance for tests
if strings.Contains(lower, "_test.go") {
score -= 2.0
}
// Lower importance for docs
if strings.HasSuffix(lower, ".md") {
score -= 1.0
}
// Clamp to 0-10 range
if score < 0 {
score = 0
}
if score > 10 {
score = 10
}
return score
}
// extractDependencies finds Go imports or module dependencies
func (idx *Indexer) extractDependencies(absPath, relPath string) []string {
if !strings.HasSuffix(relPath, ".go") {
return nil
}
content, err := os.ReadFile(absPath)
if err != nil {
return nil
}
// Extract import statements using regex
importRegex := regexp.MustCompile(`import\s+(?:\(([^)]+)\)|"([^"]+)")`)
matches := importRegex.FindAllStringSubmatch(string(content), -1)
deps := make(map[string]bool)
for _, match := range matches {
// Handle both single import and import blocks
if match[1] != "" {
// Import block
lines := strings.Split(match[1], "\n")
for _, line := range lines {
line = strings.TrimSpace(line)
if strings.HasPrefix(line, "\"") && strings.HasSuffix(line, "\"") {
dep := strings.Trim(line, "\"")
if dep != "" {
deps[dep] = true
}
}
}
} else if match[2] != "" {
// Single import
deps[match[2]] = true
}
}
	// Convert the set to a slice (iteration order is not deterministic)
result := make([]string, 0, len(deps))
for dep := range deps {
result = append(result, dep)
}
return result
}
// extractModule determines the module name from file path
func (idx *Indexer) extractModule(relPath string) string {
// For paths like "internal/orchestrator/coordinator.go", return "orchestrator"
parts := strings.Split(relPath, "/")
if len(parts) >= 2 {
if parts[0] == "internal" || parts[0] == "cmd" || parts[0] == "pkg" {
return parts[1]
}
}
if len(parts) >= 1 {
return parts[0]
}
return ""
}
// generateSummary creates a brief summary from the file path
func (idx *Indexer) generateSummary(relPath string) string {
base := filepath.Base(relPath)
name := strings.TrimSuffix(base, filepath.Ext(base))
// Convert snake_case or kebab-case to words
name = strings.ReplaceAll(name, "_", " ")
name = strings.ReplaceAll(name, "-", " ")
// Add context from directory
dir := filepath.Dir(relPath)
if dir != "." && !strings.Contains(dir, "/") {
return dir + " - " + name
}
return name
}
// SaveIndex writes the index to disk
func (idx *Indexer) SaveIndex(index *FileIndex) error {
indexDir := filepath.Join(idx.repoRoot, IndexDirName)
if err := os.MkdirAll(indexDir, 0755); err != nil {
return err
}
indexPath := filepath.Join(indexDir, IndexFileName)
data, err := json.MarshalIndent(index, "", " ")
if err != nil {
return err
}
if err := os.WriteFile(indexPath, data, 0644); err != nil {
return err
}
return nil
}
// LoadIndex reads the index from disk
func (idx *Indexer) LoadIndex() (*FileIndex, error) {
indexPath := filepath.Join(idx.repoRoot, IndexDirName, IndexFileName)
data, err := os.ReadFile(indexPath)
if err != nil {
if errors.Is(err, os.ErrNotExist) {
return nil, errors.New("index not found, run with --reindex to create")
}
return nil, err
}
var index FileIndex
if err := json.Unmarshal(data, &index); err != nil {
return nil, err
}
return &index, nil
}
// IndexExists checks if an index file exists
func (idx *Indexer) IndexExists() bool {
indexPath := filepath.Join(idx.repoRoot, IndexDirName, IndexFileName)
_, err := os.Stat(indexPath)
return err == nil
}
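// Illustrative usage sketch (not part of the original source): building,
// saving, and reloading an index for a hypothetical checkout, mirroring what
// the --build-index flag does in main.

package indexer

import "fmt"

func exampleIndexRoundTripSketch() error {
	idx := New("./workspace/main-repo") // hypothetical clone location
	fileIndex, err := idx.BuildIndex()
	if err != nil {
		return err
	}
	if err := idx.SaveIndex(fileIndex); err != nil {
		return err
	}
	loaded, err := idx.LoadIndex()
	if err != nil {
		return err
	}
	fmt.Println("files:", len(loaded.Files), "modules:", len(loaded.Modules))
	return nil
}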
package indexer
import (
"regexp"
"strings"
)
// Common English stop words that don't add semantic value
var stopWords = map[string]bool{
"a": true, "an": true, "and": true, "are": true, "as": true, "at": true,
"be": true, "but": true, "by": true, "for": true, "if": true, "in": true,
"into": true, "is": true, "it": true, "no": true, "not": true, "of": true,
"on": true, "or": true, "such": true, "that": true, "the": true, "their": true,
"then": true, "there": true, "these": true, "they": true, "this": true, "to": true,
"was": true, "will": true, "with": true, "when": true, "where": true, "which": true,
}
// ExtractKeywords extracts meaningful keywords from ticket description
// Returns a slice of normalized keywords useful for file matching
func ExtractKeywords(text string) []string {
if text == "" {
return nil
}
keywords := make(map[string]bool)
// Extract file paths (e.g., "internal/auth/login.go")
filePaths := extractFilePaths(text)
for _, path := range filePaths {
keywords[path] = true
// Also extract individual path segments for partial matching
segments := strings.Split(path, "/")
for _, seg := range segments {
seg = strings.TrimSpace(seg)
if seg != "" && len(seg) > 2 {
keywords[strings.ToLower(seg)] = true
}
}
}
// Extract technical terms and camelCase/snake_case identifiers
identifiers := extractIdentifiers(text)
for _, id := range identifiers {
keywords[id] = true
}
// Extract regular words (filter stop words)
words := extractWords(text)
for _, word := range words {
word = strings.ToLower(word)
if len(word) > 2 && !stopWords[word] {
keywords[word] = true
}
}
// Convert map to slice
result := make([]string, 0, len(keywords))
for kw := range keywords {
result = append(result, kw)
}
return result
}
// extractFilePaths finds file path patterns in text
// Matches patterns like: internal/auth/login.go, src/components/Button.tsx
func extractFilePaths(text string) []string {
// Match paths with directory separators and file extensions
pathRegex := regexp.MustCompile(`\b([a-zA-Z0-9_\-]+/[a-zA-Z0-9_\-/]*\.[a-zA-Z0-9]+)\b`)
matches := pathRegex.FindAllStringSubmatch(text, -1)
paths := make([]string, 0, len(matches))
for _, match := range matches {
if len(match) > 1 {
paths = append(paths, match[1])
}
}
return paths
}
// extractIdentifiers finds camelCase, PascalCase, and snake_case identifiers
// These often represent function names, types, or variables mentioned in tickets
func extractIdentifiers(text string) []string {
identifiers := make(map[string]bool)
// Match camelCase (e.g., getUserData) - starts with lowercase
camelRegex := regexp.MustCompile(`\b([a-z]+[A-Z][a-zA-Z0-9]*)\b`)
matches := camelRegex.FindAllStringSubmatch(text, -1)
for _, match := range matches {
if len(match) > 1 {
id := match[1]
identifiers[strings.ToLower(id)] = true
// Split camelCase into parts for partial matching
// e.g., "getUserData" -> ["get", "user", "data"]
parts := splitCamelCase(id)
for _, part := range parts {
if len(part) > 2 && !stopWords[part] {
identifiers[part] = true
}
}
}
}
// Match PascalCase (e.g., AuthService) - starts with uppercase
pascalRegex := regexp.MustCompile(`\b([A-Z][a-z]+[A-Z][a-zA-Z0-9]*)\b`)
matches = pascalRegex.FindAllStringSubmatch(text, -1)
for _, match := range matches {
if len(match) > 1 {
id := match[1]
identifiers[strings.ToLower(id)] = true
// Split PascalCase into parts for partial matching
// e.g., "AuthService" -> ["auth", "service"]
parts := splitCamelCase(id)
for _, part := range parts {
if len(part) > 2 && !stopWords[part] {
identifiers[part] = true
}
}
}
}
// Match snake_case (e.g., user_service, get_auth_token)
snakeRegex := regexp.MustCompile(`\b([a-z][a-z0-9_]+[a-z0-9])\b`)
matches = snakeRegex.FindAllStringSubmatch(text, -1)
for _, match := range matches {
if len(match) > 1 && strings.Contains(match[1], "_") {
id := match[1]
identifiers[id] = true
// Also extract individual parts
parts := strings.Split(id, "_")
for _, part := range parts {
if len(part) > 2 && !stopWords[part] {
identifiers[part] = true
}
}
}
}
result := make([]string, 0, len(identifiers))
for id := range identifiers {
result = append(result, id)
}
return result
}
// extractWords extracts individual words from text
func extractWords(text string) []string {
// Match word characters
wordRegex := regexp.MustCompile(`\b([a-zA-Z]{3,})\b`)
matches := wordRegex.FindAllStringSubmatch(text, -1)
words := make([]string, 0, len(matches))
for _, match := range matches {
if len(match) > 1 {
words = append(words, match[1])
}
}
return words
}
// splitCamelCase splits camelCase or PascalCase into individual words
// Example: "getUserData" -> ["get", "user", "data"]
func splitCamelCase(s string) []string {
var result []string
var current strings.Builder
for i, r := range s {
if i > 0 && r >= 'A' && r <= 'Z' {
if current.Len() > 0 {
result = append(result, strings.ToLower(current.String()))
current.Reset()
}
}
current.WriteRune(r)
}
if current.Len() > 0 {
result = append(result, strings.ToLower(current.String()))
}
return result
}
// NormalizeKeyword normalizes a keyword for consistent matching
func NormalizeKeyword(keyword string) string {
// Convert to lowercase
keyword = strings.ToLower(keyword)
// Remove common file extensions for matching
keyword = strings.TrimSuffix(keyword, ".go")
keyword = strings.TrimSuffix(keyword, ".md")
keyword = strings.TrimSuffix(keyword, ".json")
keyword = strings.TrimSuffix(keyword, ".yaml")
keyword = strings.TrimSuffix(keyword, ".yml")
// Trim special characters
keyword = strings.Trim(keyword, ".,;:!?()[]{}\"'")
return keyword
}
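// Illustrative sketch (not part of the original source): the kinds of keywords
// ExtractKeywords derives from a ticket description. The description is made up.

package indexer

import "fmt"

func exampleExtractKeywordsSketch() {
	kws := ExtractKeywords("Fix getUserData in internal/auth/login.go and update the auth_service config")
	// Expect entries such as "internal/auth/login.go", "auth", "login",
	// "getuserdata", "user", "data", "auth_service", "service", "config", ...
	fmt.Println("keywords:", len(kws))
}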
package indexer
import (
"bufio"
"fmt"
"go/ast"
"go/parser"
"go/token"
"io"
"os"
"strings"
)
// ExtractMinimalContext extracts only the essential parts of a Go file
// Returns: package declaration, imports, type definitions, function signatures (no bodies)
// This reduces token usage by 60-80% compared to full file content
func ExtractMinimalContext(filePath string) (string, error) {
// For non-Go files, return a simple summary
if !strings.HasSuffix(filePath, ".go") {
return extractNonGoContext(filePath)
}
fset := token.NewFileSet()
node, err := parser.ParseFile(fset, filePath, nil, parser.ParseComments)
if err != nil {
// If parsing fails, fall back to simple file reading
return extractNonGoContext(filePath)
}
var sb strings.Builder
// Write package declaration with package-level comments
if node.Doc != nil {
sb.WriteString(node.Doc.Text())
sb.WriteString("\n")
}
sb.WriteString(fmt.Sprintf("package %s\n\n", node.Name.Name))
// Write imports
if len(node.Imports) > 0 {
sb.WriteString("import (\n")
for _, imp := range node.Imports {
if imp.Doc != nil {
sb.WriteString("\t// " + strings.TrimSpace(imp.Doc.Text()) + "\n")
}
if imp.Name != nil {
sb.WriteString(fmt.Sprintf("\t%s %s\n", imp.Name.Name, imp.Path.Value))
} else {
sb.WriteString(fmt.Sprintf("\t%s\n", imp.Path.Value))
}
}
sb.WriteString(")\n\n")
}
// Write constants, variables, types, and function signatures
for _, decl := range node.Decls {
switch d := decl.(type) {
case *ast.GenDecl:
// Handle constants, variables, and types
if d.Doc != nil {
sb.WriteString(d.Doc.Text())
}
sb.WriteString(d.Tok.String())
if len(d.Specs) == 1 {
sb.WriteString(" ")
writeSpec(&sb, d.Specs[0], fset)
sb.WriteString("\n\n")
} else {
sb.WriteString(" (\n")
for _, spec := range d.Specs {
writeSpec(&sb, spec, fset)
sb.WriteString("\n")
}
sb.WriteString(")\n\n")
}
case *ast.FuncDecl:
// Handle function declarations (signature only, no body)
if d.Doc != nil {
sb.WriteString(d.Doc.Text())
}
sb.WriteString("func ")
if d.Recv != nil {
// Method receiver
sb.WriteString("(")
if len(d.Recv.List) > 0 {
writeField(&sb, d.Recv.List[0], fset)
}
sb.WriteString(") ")
}
sb.WriteString(d.Name.Name)
writeFuncType(&sb, d.Type, fset)
sb.WriteString("\n\n")
}
}
return sb.String(), nil
}
// writeSpec writes a single AST spec (type, const, or var declaration) to the string builder.
// It handles TypeSpec (type definitions), ValueSpec (const/var), and ImportSpec (imports).
// For ValueSpec, actual values are replaced with "..." to omit implementation details.
func writeSpec(sb *strings.Builder, spec ast.Spec, fset *token.FileSet) {
switch s := spec.(type) {
case *ast.TypeSpec:
if s.Doc != nil {
sb.WriteString(s.Doc.Text())
}
sb.WriteString(s.Name.Name)
sb.WriteString(" ")
writeExpr(sb, s.Type, fset)
case *ast.ValueSpec:
if s.Doc != nil {
sb.WriteString(s.Doc.Text())
}
for i, name := range s.Names {
if i > 0 {
sb.WriteString(", ")
}
sb.WriteString(name.Name)
}
if s.Type != nil {
sb.WriteString(" ")
writeExpr(sb, s.Type, fset)
}
if len(s.Values) > 0 {
sb.WriteString(" = ")
for i := range s.Values {
if i > 0 {
sb.WriteString(", ")
}
// For values, just write "..." to indicate there's a value
// We don't need the actual implementation
sb.WriteString("...")
}
}
case *ast.ImportSpec:
if s.Name != nil {
sb.WriteString(s.Name.Name)
sb.WriteString(" ")
}
sb.WriteString(s.Path.Value)
}
}
// writeExpr writes a Go type expression to the string builder.
// It recursively handles all Go type expressions including pointers, arrays, maps,
// structs, interfaces, function types, selectors, channels, and ellipsis.
func writeExpr(sb *strings.Builder, expr ast.Expr, fset *token.FileSet) {
switch e := expr.(type) {
case *ast.Ident:
sb.WriteString(e.Name)
case *ast.StarExpr:
sb.WriteString("*")
writeExpr(sb, e.X, fset)
case *ast.ArrayType:
sb.WriteString("[]")
writeExpr(sb, e.Elt, fset)
case *ast.MapType:
sb.WriteString("map[")
writeExpr(sb, e.Key, fset)
sb.WriteString("]")
writeExpr(sb, e.Value, fset)
case *ast.StructType:
sb.WriteString("struct {\n")
if e.Fields != nil {
for _, field := range e.Fields.List {
sb.WriteString("\t")
writeFieldInStruct(sb, field, fset)
sb.WriteString("\n")
}
}
sb.WriteString("}")
case *ast.InterfaceType:
if e.Methods != nil && len(e.Methods.List) > 0 {
sb.WriteString("interface {\n")
for _, method := range e.Methods.List {
sb.WriteString("\t")
writeFieldInInterface(sb, method, fset)
sb.WriteString("\n")
}
sb.WriteString("}")
} else {
sb.WriteString("interface{}")
}
case *ast.FuncType:
writeFuncType(sb, e, fset)
case *ast.SelectorExpr:
writeExpr(sb, e.X, fset)
sb.WriteString(".")
sb.WriteString(e.Sel.Name)
case *ast.ChanType:
if e.Dir == ast.RECV {
sb.WriteString("<-chan ")
} else if e.Dir == ast.SEND {
sb.WriteString("chan<- ")
} else {
sb.WriteString("chan ")
}
writeExpr(sb, e.Value, fset)
case *ast.Ellipsis:
sb.WriteString("...")
if e.Elt != nil {
writeExpr(sb, e.Elt, fset)
}
default:
// For complex expressions, just use a placeholder
sb.WriteString("...")
}
}
// writeField writes a generic AST field (function parameter, return value, etc.) to the string builder.
// It includes field names, types, and inline comments if present.
func writeField(sb *strings.Builder, field *ast.Field, fset *token.FileSet) {
if field.Doc != nil {
sb.WriteString(field.Doc.Text())
}
// Write field names
for i, name := range field.Names {
if i > 0 {
sb.WriteString(", ")
}
sb.WriteString(name.Name)
}
if len(field.Names) > 0 && field.Type != nil {
sb.WriteString(" ")
}
// Write field type
if field.Type != nil {
writeExpr(sb, field.Type, fset)
}
if field.Comment != nil {
sb.WriteString(" // ")
sb.WriteString(strings.TrimSpace(field.Comment.Text()))
}
}
// writeFieldInStruct writes a struct field to the string builder.
// It handles function-type fields specially by including the "func" keyword,
// and preserves struct tags (e.g., `json:"name"`) if present.
func writeFieldInStruct(sb *strings.Builder, field *ast.Field, fset *token.FileSet) {
if field.Doc != nil {
sb.WriteString(field.Doc.Text())
}
// Write field names
for i, name := range field.Names {
if i > 0 {
sb.WriteString(", ")
}
sb.WriteString(name.Name)
}
if len(field.Names) > 0 && field.Type != nil {
sb.WriteString(" ")
}
// Write field type
if field.Type != nil {
// Function types in structs need "func" keyword
if funcType, ok := field.Type.(*ast.FuncType); ok {
sb.WriteString("func")
writeFuncType(sb, funcType, fset)
} else {
writeExpr(sb, field.Type, fset)
}
}
// Write field tag if present
if field.Tag != nil {
sb.WriteString(" ")
sb.WriteString(field.Tag.Value)
}
if field.Comment != nil {
sb.WriteString(" // ")
sb.WriteString(strings.TrimSpace(field.Comment.Text()))
}
}
// writeFieldInInterface writes an interface method signature to the string builder.
// It omits the "func" keyword (which is implicit in interface methods) and
// handles embedded interfaces (anonymous fields without names).
func writeFieldInInterface(sb *strings.Builder, field *ast.Field, fset *token.FileSet) {
if field.Doc != nil {
sb.WriteString(field.Doc.Text())
}
// Write method name
for i, name := range field.Names {
if i > 0 {
sb.WriteString(", ")
}
sb.WriteString(name.Name)
}
if len(field.Names) > 0 && field.Type != nil {
sb.WriteString(" ")
}
// Write method signature (func type without "func" keyword)
if field.Type != nil {
if funcType, ok := field.Type.(*ast.FuncType); ok {
writeFuncType(sb, funcType, fset)
} else {
// Embedded interface
writeExpr(sb, field.Type, fset)
}
}
if field.Comment != nil {
sb.WriteString(" // ")
sb.WriteString(strings.TrimSpace(field.Comment.Text()))
}
}
// writeFuncType writes a function type signature (parameters and return values) to the string builder.
// It handles multiple parameters, multiple return values, and named/unnamed parameters.
// Return values with multiple entries or named returns are wrapped in parentheses.
func writeFuncType(sb *strings.Builder, ft *ast.FuncType, fset *token.FileSet) {
sb.WriteString("(")
if ft.Params != nil {
for i, param := range ft.Params.List {
if i > 0 {
sb.WriteString(", ")
}
writeField(sb, param, fset)
}
}
sb.WriteString(")")
if ft.Results != nil && len(ft.Results.List) > 0 {
sb.WriteString(" ")
		if len(ft.Results.List) > 1 || len(ft.Results.List[0].Names) > 0 {
sb.WriteString("(")
}
for i, result := range ft.Results.List {
if i > 0 {
sb.WriteString(", ")
}
writeField(sb, result, fset)
}
		if len(ft.Results.List) > 1 || len(ft.Results.List[0].Names) > 0 {
sb.WriteString(")")
}
}
}
// extractNonGoContext extracts context from non-Go files (config, docs, etc.).
// It returns roughly the first 50 lines or 2KB of the file, whichever limit is
// reached first, and appends a truncation marker when content was cut off.
func extractNonGoContext(filePath string) (string, error) {
file, err := os.Open(filePath)
if err != nil {
return "", err
}
defer file.Close()
var sb strings.Builder
sb.WriteString(fmt.Sprintf("# File: %s\n\n", filePath))
// For non-code files, include first 50 lines or 2KB, whichever is smaller
scanner := bufio.NewScanner(file)
lineCount := 0
maxLines := 50
maxBytes := 2048
bytesRead := 0
for scanner.Scan() && lineCount < maxLines && bytesRead < maxBytes {
line := scanner.Text()
sb.WriteString(line)
sb.WriteString("\n")
lineCount++
bytesRead += len(line) + 1
}
	if err := scanner.Err(); err != nil {
return sb.String(), err
}
if lineCount >= maxLines || bytesRead >= maxBytes {
sb.WriteString("\n... (truncated)\n")
}
return sb.String(), nil
}
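// Illustrative sketch (not part of the original source): extracting minimal
// context for a hypothetical Go file. Only the package clause, imports, type
// definitions, and function signatures are returned; bodies are omitted.

package indexer

import "fmt"

func exampleMinimalContextSketch() {
	out, err := ExtractMinimalContext("./workspace/main-repo/internal/orchestrator/coordinator.go") // hypothetical path
	if err != nil {
		fmt.Println("extract failed:", err)
		return
	}
	fmt.Println(out)
}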
package indexer
import (
"sort"
"strings"
)
// ScoreFiles ranks files in the index based on relevance to the given keywords
// Returns a sorted slice of FileScore (highest scores first)
func ScoreFiles(index *FileIndex, keywords []string) []FileScore {
if index == nil || len(keywords) == 0 {
return nil
}
scores := make([]FileScore, 0, len(index.Files))
// Score each file
for path, metadata := range index.Files {
score := scoreFile(path, metadata, keywords)
if score > 0 {
scores = append(scores, FileScore{
Path: path,
Score: score,
})
}
}
// Sort by score (highest first)
sort.Slice(scores, func(i, j int) bool {
return scores[i].Score > scores[j].Score
})
return scores
}
// scoreFile calculates relevance score for a single file based on keyword matches.
// It uses a tiered scoring system:
// - Exact path match: +15 points
// - Path contains keyword: +8 points
// - Path segment matches keyword: +5 points
// - Segment contains keyword: +2 points
// The final score is multiplied by a category-based multiplier.
func scoreFile(path string, metadata FileMetadata, keywords []string) float64 {
score := 0.0
// Start with base importance from metadata
score += metadata.Importance
// Normalize path for matching
lowerPath := strings.ToLower(path)
pathWithoutExt := strings.TrimSuffix(lowerPath, ".go")
pathWithoutExt = strings.TrimSuffix(pathWithoutExt, ".md")
pathWithoutExt = strings.TrimSuffix(pathWithoutExt, ".json")
for _, keyword := range keywords {
keyword = strings.ToLower(keyword)
// HIGHEST BOOST: Exact path match
// e.g., keyword "internal/auth/login.go" matches path exactly
if lowerPath == keyword {
score += 15.0
continue
}
// HIGH BOOST: Path contains full keyword as substring
// e.g., keyword "internal/auth" matches "internal/auth/login.go"
if strings.Contains(lowerPath, keyword) {
score += 8.0
continue
}
// MEDIUM BOOST: Path segment matches keyword
// e.g., keyword "auth" matches any path with "/auth/" or ending in "auth.go"
pathSegments := strings.Split(lowerPath, "/")
for _, segment := range pathSegments {
segmentWithoutExt := strings.TrimSuffix(segment, ".go")
segmentWithoutExt = strings.TrimSuffix(segmentWithoutExt, ".md")
segmentWithoutExt = strings.TrimSuffix(segmentWithoutExt, ".json")
if segmentWithoutExt == keyword {
score += 5.0
break
}
// SMALL BOOST: Segment contains keyword
// e.g., keyword "user" matches "user_service.go"
if strings.Contains(segmentWithoutExt, keyword) {
score += 2.0
break
}
}
}
// Apply category multipliers
score *= getCategoryMultiplier(metadata.Category)
return score
}
// getCategoryMultiplier returns a relevance multiplier based on file category.
// Categories are prioritized as follows:
// - core: 1.5x (most important for understanding codebase)
// - config: 1.2x (often relevant for configuration-related tasks)
// - other: 1.0x (neutral)
// - test: 0.7x (less relevant for code generation)
// - doc: 0.5x (least relevant for code context)
func getCategoryMultiplier(category string) float64 {
switch category {
case "core":
return 1.5 // Boost core files significantly
case "config":
return 1.2 // Config files often relevant
case "other":
return 1.0 // Neutral
case "test":
return 0.7 // Tests less likely to be needed for understanding
case "doc":
return 0.5 // Docs rarely needed in code context
default:
return 1.0
}
}
// SelectTopFiles returns the top N files by score
func SelectTopFiles(scores []FileScore, n int) []FileScore {
if n <= 0 || len(scores) == 0 {
return nil
}
if n >= len(scores) {
return scores
}
return scores[:n]
}
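// Illustrative sketch (not part of the original source): wiring keyword
// extraction, scoring, and top-N selection together, as BuildSmartRepoContext
// does. The index is assumed to have been loaded via (*Indexer).LoadIndex.

package indexer

import "fmt"

func exampleScoreAndSelectSketch(fileIndex *FileIndex) {
	keywords := ExtractKeywords("Add retry with backoff to internal/orchestrator/backoff.go")
	scores := ScoreFiles(fileIndex, keywords)
	for _, fs := range SelectTopFiles(scores, 10) {
		fmt.Printf("%s -> %.1f\n", fs.Path, fs.Score)
	}
}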
package orchestrator
import (
"context"
"math"
"math/rand"
"time"
)
type BackoffConfig struct {
Initial time.Duration
Max time.Duration
Multiplier float64
Jitter float64 // 0..1
MaxRetries int
}
func (b BackoffConfig) next(attempt int) time.Duration {
base := float64(b.Initial) * math.Pow(b.Multiplier, float64(attempt))
if base > float64(b.Max) {
base = float64(b.Max)
}
j := 1 + (rand.Float64()*2-1)*b.Jitter // 1±Jitter
return time.Duration(base * j)
}
// Retry runs op with backoff on transient errors until MaxRetries or context cancel.
// Returns the number of retry attempts performed.
func Retry(ctx context.Context, cfg BackoffConfig, op func() error) (err error, attempts int) {
for attempt := 0; attempt <= cfg.MaxRetries; attempt++ {
if attempt > 0 {
attempts++
}
err = op()
if err == nil || IsPermanent(err) {
return err, attempts
}
if !IsTransient(err) {
return err, attempts
}
d := cfg.next(attempt)
select {
case <-time.After(d):
case <-ctx.Done():
return ctx.Err(), attempts
}
}
return err, attempts
}
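// Illustrative sketch (not part of the original source): retrying a flaky
// operation with exponential backoff and jitter. The failing closure is
// hypothetical; it succeeds on the third call, so Retry reports two retries.

package orchestrator

import (
	"context"
	"errors"
	"fmt"
	"time"
)

func exampleRetrySketch(ctx context.Context) {
	cfg := BackoffConfig{Initial: 100 * time.Millisecond, Max: time.Second, Multiplier: 2, Jitter: 0.2, MaxRetries: 3}
	calls := 0
	err, attempts := Retry(ctx, cfg, func() error {
		calls++
		if calls < 3 {
			return MakeTransient(errors.New("temporary outage"))
		}
		return nil
	})
	fmt.Println("err:", err, "retries:", attempts) // err: <nil> retries: 2
}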
package orchestrator
import (
"regexp"
"strings"
)
var nonAlnum = regexp.MustCompile(`[^a-z0-9\-]+`)
// buildBranchName builds a branch name such as "feature/proj-123" from the
// configured prefix and the ticket key.
func buildBranchName(prefix, ticketKey string) string {
	// Lowercase before stripping so uppercase ticket keys (e.g., "PROJ-123") survive the slug regex.
	slug := strings.ToLower(strings.ReplaceAll(ticketKey, " ", "-"))
	slug = nonAlnum.ReplaceAllString(slug, "")
	if len(slug) > 30 {
		slug = slug[:30]
	}
	// Avoid a double slash when the configured prefix already ends with "/".
	return strings.TrimSuffix(prefix, "/") + "/" + strings.Trim(slug, "-")
}
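// Illustrative sketch (not part of the original source): the branch name
// produced for a typical ticket key with the sample BRANCH_PREFIX from
// .env.example.

package orchestrator

import "fmt"

func exampleBuildBranchNameSketch() {
	fmt.Println(buildBranchName("feature/", "PROJ-123")) // feature/proj-123
}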
package orchestrator
import (
"context"
"fmt"
"os"
"path/filepath"
"sync"
"time"
"intern/internal/ai"
"intern/internal/ai/agent"
"intern/internal/config"
"intern/internal/repository"
"intern/internal/ticketing"
logger "github.com/jenish-jain/logger"
)
type Coordinator struct {
Ticketing *ticketing.TicketingService
Repository *repository.RepositoryService
Agent agent.Agent
Cfg *config.Config
State *State
Metrics *Metrics
}
func NewCoordinator(ticketing *ticketing.TicketingService, repository *repository.RepositoryService, agent agent.Agent, cfg *config.Config, state *State) *Coordinator {
return &Coordinator{Ticketing: ticketing, Repository: repository, Agent: agent, Cfg: cfg, State: state, Metrics: NewMetrics()}
}
func (c *Coordinator) Run(ctx context.Context) {
interval, err := time.ParseDuration(c.Cfg.PollingInterval)
if err != nil {
interval = 30 * time.Second
}
workingDir := c.Cfg.WorkingDir
if workingDir == "" {
workingDir = "./workspace"
}
_ = os.MkdirAll(workingDir, 0755)
_ = os.Setenv("AGENT_WORKING_DIR", workingDir)
for {
select {
case <-ctx.Done():
return
default:
// Ensure local repo is up to date before each cycle
if err := c.prepareRepository(ctx); err != nil {
logger.Error("Repository preparation failed", "error", err)
backoffSleep(interval)
continue
}
tickets, err := func() ([]ticketing.Ticket, error) {
var out []ticketing.Ticket
err, attempts := Retry(ctx, BackoffConfig{Initial: time.Second, Max: 10 * time.Second, Multiplier: 2, Jitter: 0.2, MaxRetries: 3}, func() error {
t, e := c.Ticketing.GetTickets(ctx, c.Cfg.AgentUsername, c.Cfg.JiraProject)
if e != nil {
return MakeTransient(e)
}
out = t
return nil
})
c.Metrics.AddRetries(attempts)
return out, err
}()
if err != nil {
logger.Error("Failed to fetch tickets", "error", err)
backoffSleep(interval)
continue
}
if len(tickets) == 0 {
logger.Info("No tickets to process; sleeping", "interval", interval.String())
time.Sleep(interval)
continue
}
maxWorkers := c.Cfg.MaxConcurrentTickets
if maxWorkers <= 0 {
maxWorkers = 1
}
sem := make(chan struct{}, maxWorkers)
var wg sync.WaitGroup
for _, t := range tickets {
if c.State.IsProcessed(t.Key) {
continue
}
sem <- struct{}{}
wg.Add(1)
go func(key, summary, description string) {
defer wg.Done()
defer func() { <-sem }()
if err := c.processTicket(ctx, key, summary, description); err != nil {
logger.Error("Failed processing ticket", "key", key, "error", err)
return
}
c.State.MarkProcessed(key)
}(t.Key, t.Summary, t.Description)
}
wg.Wait()
// log metrics summary
s := c.Metrics.Snapshot()
logger.Info("Run summary", "tickets", s.TicketsProcessed, "prs", s.PRsCreated, "retries", s.Retries, "ai_failures", s.AIPlanFailures)
time.Sleep(interval)
}
}
}
func backoffSleep(base time.Duration) {
t := base
if t < time.Second*5 {
t = time.Second * 5
}
time.Sleep(t)
}
func (c *Coordinator) prepareRepository(ctx context.Context) error {
repoPath := filepath.Join(os.Getenv("AGENT_WORKING_DIR"), c.Cfg.GitHubRepo)
if _, err := os.Stat(filepath.Join(repoPath, ".git")); os.IsNotExist(err) {
logger.Info("Cloning repository...")
if err := c.Repository.CloneRepository(ctx, repoPath); err != nil {
return err
}
}
base := c.Cfg.BaseBranch
if base == "" {
base = "main"
}
_ = c.Repository.SwitchBranch(ctx, base)
if err := c.Repository.SyncWithRemote(ctx); err != nil {
logger.Error("Sync failed", "error", err)
}
return nil
}
func (c *Coordinator) processTicket(ctx context.Context, key, summary, description string) error {
branchName := buildBranchName(c.Cfg.BranchPrefix, key)
logger.Info("Creating branch", "branch", branchName)
if err := c.Repository.CreateBranch(ctx, branchName); err != nil {
return fmt.Errorf("create branch: %w", err)
}
_ = c.Repository.SwitchBranch(ctx, branchName)
repoRoot := filepath.Join(os.Getenv("AGENT_WORKING_DIR"), c.Cfg.GitHubRepo)
// Use smart context builder with ticket description for better file selection
ctxStr, ctxErr := ai.BuildSmartRepoContext(repoRoot, description, c.Cfg.ContextMaxFiles)
if ctxErr != nil {
// Fall back to simple context builder on error
logger.Warn("Smart context builder failed, falling back to simple builder", "error", ctxErr)
ctxStr = ai.BuildRepoContext(repoRoot, c.Cfg.ContextMaxFiles, c.Cfg.ContextMaxBytes)
}
var changes []agent.CodeChange
planErr, attempts := Retry(ctx, BackoffConfig{Initial: time.Second, Max: 10 * time.Second, Multiplier: 2, Jitter: 0.2, MaxRetries: 3}, func() error {
ch, e := c.Agent.PlanChanges(ctx, key, summary, description, ctxStr)
if e != nil {
return MakeTransient(e)
}
changes = ch
return nil
})
c.Metrics.AddRetries(attempts)
if planErr != nil {
c.Metrics.IncAIPlanFailures()
return fmt.Errorf("AI planning failed: %w", planErr)
}
valid, verr := validatePlannedChanges(repoRoot, changes, c.Cfg.AllowedWriteDirs, c.Cfg.PlanMaxFiles)
if verr != nil {
return fmt.Errorf("validation failed: %w", verr)
}
for _, ch := range valid {
abs := filepath.Join(repoRoot, ch.Path)
if err := os.MkdirAll(filepath.Dir(abs), 0755); err != nil {
return fmt.Errorf("mkdir: %w", err)
}
if err := os.WriteFile(abs, []byte(ch.Content), 0644); err != nil {
return fmt.Errorf("write: %w", err)
}
if err := c.Repository.AddFile(ctx, ch.Path); err != nil {
return fmt.Errorf("git add: %w", err)
}
}
if len(valid) > 0 {
if err := c.Repository.Commit(ctx, fmt.Sprintf("feat(%s): apply planned changes", key)); err != nil {
return fmt.Errorf("commit: %w", err)
}
}
changed, err := c.Repository.HasLocalChanges(ctx)
if err != nil {
logger.Error("status failed", "error", err)
}
if !changed && len(valid) == 0 {
logger.Info("No effective changes; skipping push/PR", "key", key)
return nil
}
// quality gates before push/PR
// reuse existing repoRoot
notes, ok := runQualityGates(ctx, c.Cfg, repoRoot)
if !ok {
logger.Error("Quality gates failed; skipping push/PR", "key", key)
return nil
}
if err := c.Repository.Push(ctx, branchName); err != nil {
return fmt.Errorf("push: %w", err)
}
base := c.Cfg.BaseBranch
if base == "" {
base = "main"
}
title := buildPRTitle(key, summary)
body := buildPRBody(key, summary, description, valid, notes)
var prURL string
prErr, prAttempts := Retry(ctx, BackoffConfig{Initial: time.Second, Max: 10 * time.Second, Multiplier: 2, Jitter: 0.2, MaxRetries: 3}, func() error {
u, e := c.Repository.CreatePullRequest(ctx, base, branchName, title, body)
if e != nil {
return MakeTransient(e)
}
prURL = u
return nil
})
c.Metrics.AddRetries(prAttempts)
if prErr != nil {
return fmt.Errorf("create PR: %w", prErr)
}
logger.Info("Created PR", "url", prURL)
c.Metrics.IncPRsCreated()
// Mark Done
if err := c.Ticketing.UpdateTicketStatus(ctx, key, "Done", c.Cfg.JiraTransitions); err != nil {
logger.Error("Failed to move ticket to Done", "error", err)
}
c.Metrics.IncTicketsProcessed()
return nil
}
package orchestrator
import "errors"
var (
// ErrTransient is a wrapper to mark transient failures (retryable)
ErrTransient = errors.New("transient")
// ErrPermanent is a wrapper to mark permanent failures (do not retry)
ErrPermanent = errors.New("permanent")
)
// MakeTransient wraps an error as transient
func MakeTransient(err error) error {
if err == nil {
return nil
}
return errors.Join(ErrTransient, err)
}
// MakePermanent wraps an error as permanent
func MakePermanent(err error) error {
if err == nil {
return nil
}
return errors.Join(ErrPermanent, err)
}
// IsTransient returns true if error contains ErrTransient
func IsTransient(err error) bool {
return err != nil && errors.Is(err, ErrTransient)
}
// IsPermanent returns true if error contains ErrPermanent
func IsPermanent(err error) bool {
return err != nil && errors.Is(err, ErrPermanent)
}
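// Illustrative sketch (not part of the original source): classifying errors so
// Retry knows whether to keep trying or give up immediately.

package orchestrator

import (
	"errors"
	"fmt"
)

func exampleClassifyErrorsSketch() {
	transient := MakeTransient(errors.New("HTTP 503 from JIRA"))
	permanent := MakePermanent(errors.New("HTTP 401: bad credentials"))
	fmt.Println(IsTransient(transient), IsPermanent(transient)) // true false
	fmt.Println(IsTransient(permanent), IsPermanent(permanent)) // false true
}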
package orchestrator
import "sync/atomic"
type Metrics struct {
ticketsProcessed int64
prsCreated int64
retries int64
aiPlanFailures int64
}
func NewMetrics() *Metrics { return &Metrics{} }
func (m *Metrics) IncTicketsProcessed() { atomic.AddInt64(&m.ticketsProcessed, 1) }
func (m *Metrics) IncPRsCreated() { atomic.AddInt64(&m.prsCreated, 1) }
func (m *Metrics) AddRetries(n int) {
if n > 0 {
atomic.AddInt64(&m.retries, int64(n))
}
}
func (m *Metrics) IncAIPlanFailures() { atomic.AddInt64(&m.aiPlanFailures, 1) }
type MetricsSnapshot struct {
TicketsProcessed int64
PRsCreated int64
Retries int64
AIPlanFailures int64
}
func (m *Metrics) Snapshot() MetricsSnapshot {
return MetricsSnapshot{
TicketsProcessed: atomic.LoadInt64(&m.ticketsProcessed),
PRsCreated: atomic.LoadInt64(&m.prsCreated),
Retries: atomic.LoadInt64(&m.retries),
AIPlanFailures: atomic.LoadInt64(&m.aiPlanFailures),
}
}
package orchestrator
import (
"fmt"
"intern/internal/ai/agent"
"strings"
)
func buildPRTitle(ticketKey, summary string) string {
if strings.TrimSpace(summary) == "" {
return ticketKey
}
return fmt.Sprintf("%s: %s", ticketKey, summary)
}
// buildPRBody renders a markdown body including ticket info, description and file list
func buildPRBody(ticketKey, summary, description string, changes []agent.CodeChange, notes []string) string {
var b strings.Builder
b.WriteString("## Ticket\n")
b.WriteString(fmt.Sprintf("- Key: %s\n", ticketKey))
if strings.TrimSpace(summary) != "" {
b.WriteString(fmt.Sprintf("- Summary: %s\n", summary))
}
b.WriteString("\n## Description\n")
if strings.TrimSpace(description) == "" {
b.WriteString("(no description provided)\n")
} else {
b.WriteString(description)
b.WriteString("\n")
}
b.WriteString("\n## Changeset\n")
if len(changes) == 0 {
b.WriteString("(no changes)\n")
} else {
for _, ch := range changes {
b.WriteString(fmt.Sprintf("- %s (%s)\n", ch.Path, ch.Operation))
}
}
if len(notes) > 0 {
b.WriteString("\n## Notes\n")
for _, n := range notes {
b.WriteString(fmt.Sprintf("- %s\n", n))
}
}
b.WriteString("\n## Checklist\n")
b.WriteString("- [ ] Code compiles\n")
b.WriteString("- [ ] Tests (if any) pass locally\n")
b.WriteString("- [ ] Review requested\n")
return b.String()
}
package orchestrator
import (
"context"
"fmt"
"os/exec"
"strings"
"time"
"intern/internal/config"
)
func runCommandCapture(ctx context.Context, dir string, name string, args ...string) (string, error) {
cmd := exec.CommandContext(ctx, name, args...)
cmd.Dir = dir
out, err := cmd.CombinedOutput()
return string(out), err
}
func truncateMiddle(s string, max int) string {
if len(s) <= max {
return s
}
head := max / 2
tail := max - head
return s[:head] + "\n...\n" + s[len(s)-tail:]
}
// runQualityGates executes optional go vet and go test before PR.
// Returns notes to include in PR body and ok=false when any enabled gate fails.
func runQualityGates(ctx context.Context, cfg *config.Config, repoRoot string) ([]string, bool) {
notes := []string{}
ok := true
	// Bound each command's runtime so a wedged vet/test run cannot hang the agent
	perCmdTimeout := 10 * time.Minute
if cfg.RunVetBeforePR {
ctxVet, cancel := context.WithTimeout(ctx, perCmdTimeout)
out, err := runCommandCapture(ctxVet, repoRoot, "go", "vet", "./...")
cancel()
if err != nil {
notes = append(notes, "go vet: FAILED")
notes = append(notes, fmt.Sprintf("```\n%s\n```", truncateMiddle(strings.TrimSpace(out), 8000)))
ok = false
} else {
notes = append(notes, "go vet: PASSED")
}
} else {
notes = append(notes, "go vet: skipped")
}
if cfg.RunTestsBeforePR {
ctxTest, cancel := context.WithTimeout(ctx, perCmdTimeout)
out, err := runCommandCapture(ctxTest, repoRoot, "go", "test", "./...")
cancel()
if err != nil {
notes = append(notes, "go test: FAILED")
notes = append(notes, fmt.Sprintf("```\n%s\n```", truncateMiddle(strings.TrimSpace(out), 8000)))
ok = false
} else {
			// keep the summary short: use the last non-empty line of the output
			summary := strings.TrimSpace(out)
			if idx := strings.LastIndex(summary, "\n"); idx > -1 {
				summary = summary[idx+1:]
			}
			notes = append(notes, fmt.Sprintf("go test: PASSED (%s)", strings.TrimSpace(summary)))
}
} else {
notes = append(notes, "go test: skipped")
}
return notes, ok
}
package orchestrator
import (
"encoding/json"
"os"
"sync"
)
type State struct {
Processed map[string]bool `json:"processed"`
mu sync.Mutex `json:"-"`
filePath string `json:"-"`
}
func NewState(filePath string) *State {
return &State{
Processed: make(map[string]bool),
filePath: filePath,
}
}
func (s *State) IsProcessed(key string) bool {
s.mu.Lock()
defer s.mu.Unlock()
return s.Processed[key]
}
func (s *State) MarkProcessed(key string) {
s.mu.Lock()
defer s.mu.Unlock()
s.Processed[key] = true
s.save()
}
func (s *State) save() {
f, err := os.Create(s.filePath)
if err != nil {
return
}
defer f.Close()
_ = json.NewEncoder(f).Encode(s)
}
func (s *State) Load() error {
f, err := os.Open(s.filePath)
if err != nil {
return err
}
defer f.Close()
return json.NewDecoder(f).Decode(s)
}
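// Example (illustrative sketch; the file name and ticket key are placeholders):
// after MarkProcessed("PROJ-123") the file on disk contains
// {"processed":{"PROJ-123":true}}, so the key is skipped on subsequent runs.
//
//	state := NewState("agent_state.json")
//	_ = state.Load() // a missing file on first run is fine
//	if !state.IsProcessed("PROJ-123") {
//		// handle the ticket, then:
//		state.MarkProcessed("PROJ-123")
//	}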
package orchestrator
import (
"fmt"
"intern/internal/ai/agent"
"path/filepath"
"strings"
logger "github.com/jenish-jain/logger"
)
// validatePlannedChanges filters the agent's proposed changes before they touch the
// working tree: it caps the list at maxFiles, rejects absolute paths, path traversal,
// paths whose first segment is not in allowedDirs (root-level files are allowed when
// "." is listed), and changes with empty content. It returns an error when nothing
// valid remains.
func validatePlannedChanges(root string, changes []agent.CodeChange, allowedDirs []string, maxFiles int) ([]agent.CodeChange, error) {
logger.Debug("Validating planned changes", "total_changes", len(changes), "allowed_dirs", allowedDirs)
if len(changes) > maxFiles {
logger.Debug("Truncating changes due to max files limit", "original", len(changes), "max", maxFiles)
changes = changes[:maxFiles]
}
var out []agent.CodeChange
for _, ch := range changes {
p := strings.TrimSpace(ch.Path)
if p == "" {
logger.Debug("Skipping empty path")
continue
}
// No absolute paths
if filepath.IsAbs(p) {
logger.Debug("Skipping absolute path", "path", p)
continue
}
// Normalize and reject path traversal outside the repository root
clean := filepath.Clean(p)
if clean == ".." || strings.HasPrefix(clean, ".."+string(filepath.Separator)) {
logger.Debug("Skipping path with traversal", "path", clean)
continue
}
// Enforce allowlist
first := firstSegment(clean)
// Allow root-level files if "." is in allowedDirs
if !inList(first, allowedDirs) && !(first == clean && inList(".", allowedDirs)) {
logger.Debug("Skipping path not in allowed directories", "path", clean, "first_segment", first, "allowed_dirs", allowedDirs)
continue
}
// Ensure content is present (content or content_b64 decoded earlier)
if strings.TrimSpace(ch.Content) == "" {
logger.Debug("Skipping file with empty content", "path", clean)
continue
}
logger.Debug("Accepting change", "path", clean, "operation", ch.Operation)
out = append(out, agent.CodeChange{Path: clean, Operation: ch.Operation, Content: ch.Content})
}
logger.Debug("Validation complete", "accepted_changes", len(out), "rejected_changes", len(changes)-len(out))
if len(out) == 0 {
return nil, fmt.Errorf("no valid changes after validation")
}
return out, nil
}
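// Example (illustrative sketch; paths, limits, and the planned slice are
// hypothetical): with allowedDirs = ["internal", "docs", "."] and maxFiles = 10,
// the validator keeps "internal/server/health.go" and the root-level "README.md"
// (allowed via "."), provided each carries content, and drops "/etc/passwd"
// (absolute), "../secrets.txt" (traversal), and "scripts/run.sh" (not in the
// allowlist).
//
//	valid, err := validatePlannedChanges(repoRoot, planned, []string{"internal", "docs", "."}, 10)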
func firstSegment(p string) string {
i := strings.IndexByte(p, filepath.Separator)
if i == -1 {
return p
}
return p[:i]
}
func inList(s string, list []string) bool {
for _, x := range list {
if s == x {
return true
}
}
return false
}
package github
import (
"context"
"fmt"
"os"
"path/filepath"
"time"
"intern/internal/repository"
"github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/config"
"github.com/go-git/go-git/v5/plumbing"
"github.com/go-git/go-git/v5/plumbing/object"
"github.com/go-git/go-git/v5/plumbing/transport/http"
gh "github.com/google/go-github/v58/github"
"golang.org/x/oauth2"
)
type githubClient struct {
ghClient *gh.Client
owner string
repo string
token string // Store the token for git operations
repoURL string // Optional: override repository URL for testing
}
func NewClient(token, owner, repo string) repository.RepositoryClient {
ts := oauth2.StaticTokenSource(&oauth2.Token{AccessToken: token})
client := gh.NewClient(oauth2.NewClient(context.Background(), ts))
return &githubClient{
ghClient: client,
owner: owner,
repo: repo,
token: token,
}
}
func (c *githubClient) HealthCheck(ctx context.Context) error {
repo, _, err := c.ghClient.Repositories.Get(ctx, c.owner, c.repo)
if err != nil {
return fmt.Errorf("GitHub health check failed: %w", err)
}
if repo == nil || repo.GetName() == "" {
return fmt.Errorf("GitHub health check: repo info missing")
}
return nil
}
func (c *githubClient) Raw() *gh.Client {
return c.ghClient
}
// Implement RepositoryClient interface methods
func (c *githubClient) CloneRepository(ctx context.Context, destPath string) error {
// Use override URL if set (for testing), otherwise use GitHub URL
url := c.repoURL
if url == "" {
url = fmt.Sprintf("https://github.com/%s/%s.git", c.owner, c.repo)
}
_, err := git.PlainCloneContext(ctx, destPath, false, &git.CloneOptions{
URL: url,
Auth: &http.BasicAuth{Username: c.owner, Password: c.token}, // Using token as password
Progress: os.Stdout,
})
if err != nil {
return fmt.Errorf("failed to clone repository %s/%s: %w", c.owner, c.repo, err)
}
return nil
}
func (c *githubClient) SyncWithRemote(ctx context.Context) error {
repoPath := filepath.Join(os.Getenv("AGENT_WORKING_DIR"), c.repo) // Assuming working dir is set
repo, err := git.PlainOpen(repoPath)
if err != nil {
return fmt.Errorf("failed to open repository at %s: %w", repoPath, err)
}
w, err := repo.Worktree()
if err != nil {
return fmt.Errorf("failed to get worktree: %w", err)
}
err = w.PullContext(ctx, &git.PullOptions{
Auth: &http.BasicAuth{Username: c.owner, Password: c.token},
Progress: os.Stdout,
})
if err != nil && err != git.NoErrAlreadyUpToDate {
return fmt.Errorf("failed to pull from remote: %w", err)
}
return nil
}
func (c *githubClient) ListFiles(ctx context.Context, path string) ([]string, error) {
repoPath := filepath.Join(os.Getenv("AGENT_WORKING_DIR"), c.repo)
var files []string
err := filepath.Walk(filepath.Join(repoPath, path), func(p string, info os.FileInfo, err error) error {
if err != nil {
return err
}
// Skip .git directory
if info.IsDir() && info.Name() == ".git" {
return filepath.SkipDir
}
if !info.IsDir() {
relPath, _ := filepath.Rel(repoPath, p)
files = append(files, relPath)
}
return nil
})
if err != nil {
return nil, fmt.Errorf("failed to list files in %s: %w", path, err)
}
return files, nil
}
func (c *githubClient) CreateBranch(ctx context.Context, branchName string) error {
repoPath := filepath.Join(os.Getenv("AGENT_WORKING_DIR"), c.repo)
repo, err := git.PlainOpen(repoPath)
if err != nil {
return fmt.Errorf("failed to open repository at %s: %w", repoPath, err)
}
headRef, err := repo.Head()
if err != nil {
return fmt.Errorf("failed to get HEAD ref: %w", err)
}
newRef := plumbing.ReferenceName(fmt.Sprintf("refs/heads/%s", branchName))
err = repo.Storer.SetReference(plumbing.NewHashReference(newRef, headRef.Hash()))
if err != nil {
return fmt.Errorf("failed to create local branch %s: %w", branchName, err)
}
return nil
}
func (c *githubClient) SwitchBranch(ctx context.Context, branchName string) error {
repoPath := filepath.Join(os.Getenv("AGENT_WORKING_DIR"), c.repo)
repo, err := git.PlainOpen(repoPath)
if err != nil {
return fmt.Errorf("failed to open repository at %s: %w", repoPath, err)
}
w, err := repo.Worktree()
if err != nil {
return fmt.Errorf("failed to get worktree: %w", err)
}
err = w.Checkout(&git.CheckoutOptions{
Branch: plumbing.ReferenceName(fmt.Sprintf("refs/heads/%s", branchName)),
})
if err != nil {
return fmt.Errorf("failed to switch to branch %s: %w", branchName, err)
}
return nil
}
func (c *githubClient) AddFile(ctx context.Context, filePath string) error {
repoPath := filepath.Join(os.Getenv("AGENT_WORKING_DIR"), c.repo)
repo, err := git.PlainOpen(repoPath)
if err != nil {
return fmt.Errorf("failed to open repository at %s: %w", repoPath, err)
}
w, err := repo.Worktree()
if err != nil {
return fmt.Errorf("failed to get worktree: %w", err)
}
_, err = w.Add(filePath)
if err != nil {
return fmt.Errorf("failed to add file %s: %w", filePath, err)
}
return nil
}
func (c *githubClient) Commit(ctx context.Context, message string) error {
repoPath := filepath.Join(os.Getenv("AGENT_WORKING_DIR"), c.repo)
repo, err := git.PlainOpen(repoPath)
if err != nil {
return fmt.Errorf("failed to open repository at %s: %w", repoPath, err)
}
w, err := repo.Worktree()
if err != nil {
return fmt.Errorf("failed to get worktree: %w", err)
}
_, err = w.Commit(message, &git.CommitOptions{
Author: &object.Signature{
Name: "AI Intern",
Email: "ai-intern@example.com",
When: time.Now(),
},
})
if err != nil {
return fmt.Errorf("failed to commit changes: %w", err)
}
return nil
}
func (c *githubClient) Push(ctx context.Context, branchName string) error {
repoPath := filepath.Join(os.Getenv("AGENT_WORKING_DIR"), c.repo)
repo, err := git.PlainOpen(repoPath)
if err != nil {
return fmt.Errorf("failed to open repository at %s: %w", repoPath, err)
}
refspec := fmt.Sprintf("refs/heads/%s:refs/heads/%s", branchName, branchName)
err = repo.PushContext(ctx, &git.PushOptions{
Auth: &http.BasicAuth{Username: c.owner, Password: c.token},
RefSpecs: []config.RefSpec{config.RefSpec(refspec)},
})
if err != nil && err != git.NoErrAlreadyUpToDate {
return fmt.Errorf("failed to push to remote: %w", err)
}
return nil
}
func (c *githubClient) CreatePullRequest(ctx context.Context, baseBranch, headBranch, title, body string) (string, error) {
// Determine a valid base branch: prefer provided, else repo default
base := baseBranch
if base == "" {
repo, _, err := c.ghClient.Repositories.Get(ctx, c.owner, c.repo)
if err == nil && repo != nil && repo.GetDefaultBranch() != "" {
base = repo.GetDefaultBranch()
}
}
// If still empty, fall back to a conventional default; it is validated below
if base == "" {
base = "master"
}
// Validate base exists; if not, fallback to repo default if available
if _, _, err := c.ghClient.Git.GetRef(ctx, c.owner, c.repo, fmt.Sprintf("refs/heads/%s", base)); err != nil {
repo, _, rerr := c.ghClient.Repositories.Get(ctx, c.owner, c.repo)
if rerr == nil && repo != nil && repo.GetDefaultBranch() != "" {
base = repo.GetDefaultBranch()
}
}
newPR := &gh.NewPullRequest{
Title: gh.String(title),
Head: gh.String(headBranch),
Base: gh.String(base),
Body: gh.String(body),
}
pr, _, err := c.ghClient.PullRequests.Create(ctx, c.owner, c.repo, newPR)
if err != nil {
return "", fmt.Errorf("failed to create pull request: %w", err)
}
if pr == nil || pr.GetHTMLURL() == "" {
return "", fmt.Errorf("pull request created but URL missing")
}
return pr.GetHTMLURL(), nil
}
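// Example (illustrative sketch; branch names, base branch, paths, and messages are
// hypothetical): a typical sequence for publishing a ticket's changes with a
// *githubClient c looks like:
//
//	_ = c.CreateBranch(ctx, "feature/PROJ-123")
//	_ = c.SwitchBranch(ctx, "feature/PROJ-123")
//	_ = c.AddFile(ctx, "internal/server/health.go")
//	_ = c.Commit(ctx, "PROJ-123: add health endpoint")
//	_ = c.Push(ctx, "feature/PROJ-123")
//	prURL, err := c.CreatePullRequest(ctx, "main", "feature/PROJ-123", "PROJ-123: add health endpoint", body)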
func (c *githubClient) HasLocalChanges(ctx context.Context) (bool, error) {
repoPath := filepath.Join(os.Getenv("AGENT_WORKING_DIR"), c.repo)
repo, err := git.PlainOpen(repoPath)
if err != nil {
return false, fmt.Errorf("failed to open repository at %s: %w", repoPath, err)
}
w, err := repo.Worktree()
if err != nil {
return false, fmt.Errorf("failed to get worktree: %w", err)
}
st, err := w.Status()
if err != nil {
return false, fmt.Errorf("failed to get status: %w", err)
}
return !st.IsClean(), nil
}
// Code generated by MockGen. DO NOT EDIT.
// Source: intern/internal/github (interfaces: Client)
//
// Generated by this command:
//
// mockgen -destination=internal/github/mocks/mock_github.go -package=mocks intern/internal/github Client
//
// Package mocks is a generated GoMock package.
package mocks
import (
context "context"
reflect "reflect"
github "github.com/google/go-github/v58/github"
gomock "go.uber.org/mock/gomock"
)
// MockClient is a mock of Client interface.
type MockClient struct {
ctrl *gomock.Controller
recorder *MockClientMockRecorder
isgomock struct{}
}
// MockClientMockRecorder is the mock recorder for MockClient.
type MockClientMockRecorder struct {
mock *MockClient
}
// NewMockClient creates a new mock instance.
func NewMockClient(ctrl *gomock.Controller) *MockClient {
mock := &MockClient{ctrl: ctrl}
mock.recorder = &MockClientMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockClient) EXPECT() *MockClientMockRecorder {
return m.recorder
}
// HealthCheck mocks base method.
func (m *MockClient) HealthCheck(ctx context.Context) error {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "HealthCheck", ctx)
ret0, _ := ret[0].(error)
return ret0
}
// HealthCheck indicates an expected call of HealthCheck.
func (mr *MockClientMockRecorder) HealthCheck(ctx any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "HealthCheck", reflect.TypeOf((*MockClient)(nil).HealthCheck), ctx)
}
// Raw mocks base method.
func (m *MockClient) Raw() *github.Client {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Raw")
ret0, _ := ret[0].(*github.Client)
return ret0
}
// Raw indicates an expected call of Raw.
func (mr *MockClientMockRecorder) Raw() *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Raw", reflect.TypeOf((*MockClient)(nil).Raw))
}
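// Example (illustrative test sketch; assumes a *testing.T named t):
//
//	ctrl := gomock.NewController(t)
//	m := NewMockClient(ctrl)
//	m.EXPECT().HealthCheck(gomock.Any()).Return(nil)
//	if err := m.HealthCheck(context.Background()); err != nil {
//		t.Fatal(err)
//	}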
package repository
import (
"context"
)
// RepositoryClient abstracts the repository operations the agent needs:
// cloning, syncing, branching, committing, pushing, and raising pull requests.
type RepositoryClient interface {
CloneRepository(ctx context.Context, destPath string) error
SyncWithRemote(ctx context.Context) error
ListFiles(ctx context.Context, path string) ([]string, error)
CreateBranch(ctx context.Context, branchName string) error
SwitchBranch(ctx context.Context, branchName string) error
AddFile(ctx context.Context, filePath string) error
Commit(ctx context.Context, message string) error
Push(ctx context.Context, branchName string) error
CreatePullRequest(ctx context.Context, baseBranch, headBranch, title, body string) (string, error)
HasLocalChanges(ctx context.Context) (bool, error)
}
type RepositoryService struct {
Client RepositoryClient
}
func NewRepositoryService(client RepositoryClient) *RepositoryService {
return &RepositoryService{Client: client}
}
func (r *RepositoryService) CloneRepository(ctx context.Context, destPath string) error {
return r.Client.CloneRepository(ctx, destPath)
}
func (r *RepositoryService) SyncWithRemote(ctx context.Context) error {
return r.Client.SyncWithRemote(ctx)
}
func (r *RepositoryService) ListFiles(ctx context.Context, path string) ([]string, error) {
return r.Client.ListFiles(ctx, path)
}
func (r *RepositoryService) CreateBranch(ctx context.Context, branchName string) error {
return r.Client.CreateBranch(ctx, branchName)
}
func (r *RepositoryService) SwitchBranch(ctx context.Context, branchName string) error {
return r.Client.SwitchBranch(ctx, branchName)
}
func (r *RepositoryService) AddFile(ctx context.Context, filePath string) error {
return r.Client.AddFile(ctx, filePath)
}
func (r *RepositoryService) Commit(ctx context.Context, message string) error {
return r.Client.Commit(ctx, message)
}
func (r *RepositoryService) Push(ctx context.Context, branchName string) error {
return r.Client.Push(ctx, branchName)
}
func (r *RepositoryService) CreatePullRequest(ctx context.Context, baseBranch, headBranch, title, body string) (string, error) {
return r.Client.CreatePullRequest(ctx, baseBranch, headBranch, title, body)
}
func (r *RepositoryService) HasLocalChanges(ctx context.Context) (bool, error) {
return r.Client.HasLocalChanges(ctx)
}
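// Example (illustrative sketch; token, owner, repo, and the destination path are
// placeholders): callers such as the agent's entry point wire the service to a
// concrete client, e.g. the GitHub implementation:
//
//	svc := repository.NewRepositoryService(github.NewClient(token, "acme", "main-repo"))
//	if err := svc.CloneRepository(ctx, "./workspace/main-repo"); err != nil {
//		// handle clone failure
//	}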
package jiraraw
import (
"bytes"
"context"
"encoding/base64"
"encoding/json"
"fmt"
"intern/internal/ticketing"
"io"
"net/http"
"net/url"
"strings"
"time"
"github.com/jenish-jain/logger"
)
// Client interface matches the existing JIRA client interface
type Client interface {
HealthCheck(ctx context.Context) error
GetTickets(ctx context.Context, assignee, project string) ([]ticketing.Ticket, error)
UpdateTicketStatus(ctx context.Context, ticketKey, status string, transitions map[string]string) error
}
// client implements the raw JIRA HTTP client
type client struct {
baseURL string
httpClient *http.Client
authHeader string
}
// ClientConfig holds configuration for the JIRA raw client
type ClientConfig struct {
BaseURL string
Email string
APIToken string
Timeout time.Duration
}
// NewClient creates a new JIRA raw client
func NewClient(config ClientConfig) (Client, error) {
if config.BaseURL == "" {
return nil, fmt.Errorf("base URL is required")
}
if config.Email == "" {
return nil, fmt.Errorf("email is required")
}
if config.APIToken == "" {
return nil, fmt.Errorf("API token is required")
}
// Ensure base URL doesn't end with slash
baseURL := strings.TrimSuffix(config.BaseURL, "/")
// Create auth header for basic authentication
auth := base64.StdEncoding.EncodeToString([]byte(config.Email + ":" + config.APIToken))
// Set default timeout if not provided
timeout := config.Timeout
if timeout == 0 {
timeout = 30 * time.Second
}
return &client{
baseURL: baseURL,
httpClient: &http.Client{
Timeout: timeout,
},
authHeader: "Basic " + auth,
}, nil
}
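// Example (illustrative sketch; URL, email, and timeout are placeholders):
//
//	c, err := NewClient(ClientConfig{
//		BaseURL:  "https://example.atlassian.net",
//		Email:    "agent@example.com",
//		APIToken: os.Getenv("JIRA_API_TOKEN"),
//		Timeout:  15 * time.Second, // zero falls back to the 30s default
//	})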
// makeRequest performs HTTP requests to JIRA API
func (c *client) makeRequest(ctx context.Context, method, endpoint string, body interface{}) (*http.Response, error) {
var reqBody io.Reader
if body != nil {
jsonBody, err := json.Marshal(body)
if err != nil {
return nil, fmt.Errorf("failed to marshal request body: %w", err)
}
reqBody = bytes.NewBuffer(jsonBody)
}
reqURL := c.baseURL + endpoint
req, err := http.NewRequestWithContext(ctx, method, reqURL, reqBody)
if err != nil {
return nil, fmt.Errorf("failed to create request: %w", err)
}
req.Header.Set("Authorization", c.authHeader)
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Accept", "application/json")
logger.Debug("making JIRA API request", "method", method, "url", reqURL)
resp, err := c.httpClient.Do(req)
if err != nil {
return nil, fmt.Errorf("failed to execute request: %w", err)
}
return resp, nil
}
// handleErrorResponse processes error responses from JIRA API
func (c *client) handleErrorResponse(resp *http.Response) error {
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return fmt.Errorf("failed to read error response body: %w", err)
}
var errorResp ErrorResponse
if err := json.Unmarshal(body, &errorResp); err != nil {
// If we can't parse as JSON error, return the raw response
return fmt.Errorf("JIRA API error (status %d): %s", resp.StatusCode, string(body))
}
var errorMsg strings.Builder
if len(errorResp.ErrorMessages) > 0 {
errorMsg.WriteString(strings.Join(errorResp.ErrorMessages, "; "))
}
if len(errorResp.Errors) > 0 {
if errorMsg.Len() > 0 {
errorMsg.WriteString("; ")
}
for key, value := range errorResp.Errors {
errorMsg.WriteString(fmt.Sprintf("%s: %s", key, value))
}
}
if errorMsg.Len() == 0 {
errorMsg.WriteString(fmt.Sprintf("HTTP %d", resp.StatusCode))
}
return fmt.Errorf("JIRA API error: %s", errorMsg.String())
}
// HealthCheck verifies the connection to JIRA by getting current user info
func (c *client) HealthCheck(ctx context.Context) error {
resp, err := c.makeRequest(ctx, "GET", "/rest/api/3/myself", nil)
if err != nil {
return fmt.Errorf("JIRA health check failed: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return c.handleErrorResponse(resp)
}
var myself MyselfResponse
if err := json.NewDecoder(resp.Body).Decode(&myself); err != nil {
return fmt.Errorf("failed to decode user info: %w", err)
}
if myself.EmailAddress == "" {
return fmt.Errorf("JIRA health check: user email address missing")
}
logger.Debug("JIRA health check successful", "user", myself.EmailAddress)
return nil
}
// GetTickets retrieves tickets from JIRA using JQL
func (c *client) GetTickets(ctx context.Context, assignee, project string) ([]ticketing.Ticket, error) {
// Escape assignee and project for JQL
assigneeEscaped := strings.ReplaceAll(assignee, "'", "\\'")
projectEscaped := strings.ReplaceAll(project, "'", "\\'")
jql := fmt.Sprintf("assignee = '%s' AND project = '%s' AND statusCategory = 'To Do' ORDER BY priority ASC",
assigneeEscaped, projectEscaped)
logger.Debug("fetching tickets from JIRA", "query", jql)
// Build query parameters for GET request
params := url.Values{}
params.Set("jql", jql)
params.Set("maxResults", "100")
params.Set("fields", "id,key,summary,description,status,priority,assignee,reporter")
params.Set("expand", "schema,names")
endpoint := "/rest/api/3/search/jql?" + params.Encode()
resp, err := c.makeRequest(ctx, "GET", endpoint, nil)
if err != nil {
return nil, fmt.Errorf("failed to search JIRA issues: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, c.handleErrorResponse(resp)
}
var searchResp SearchResponse
if err := json.NewDecoder(resp.Body).Decode(&searchResp); err != nil {
return nil, fmt.Errorf("failed to decode search response: %w", err)
}
var tickets []ticketing.Ticket
for _, issue := range searchResp.Issues {
ticket := issue.ToTicket()
logger.Debug("converted JIRA issue", "key", ticket.Key, "status", ticket.Status)
tickets = append(tickets, ticket)
}
logger.Debug("fetched tickets from JIRA", "count", len(tickets))
return tickets, nil
}
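// Example (illustrative sketch; the assignee and project are placeholders): for
// assignee "ai-intern" and project "PROJ", GetTickets issues a GET against
// /rest/api/3/search/jql with the JQL:
//
//	assignee = 'ai-intern' AND project = 'PROJ' AND statusCategory = 'To Do' ORDER BY priority ASC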
// UpdateTicketStatus transitions a ticket to a new status
func (c *client) UpdateTicketStatus(ctx context.Context, ticketKey, status string, transitions map[string]string) error {
transitionID, ok := transitions[status]
if !ok {
return fmt.Errorf("no transition ID found for status: %s", status)
}
// URL encode the ticket key to handle special characters
ticketKeyEscaped := url.PathEscape(ticketKey)
transitionReq := TransitionRequest{
Transition: struct {
ID string `json:"id"`
}{
ID: transitionID,
},
}
endpoint := fmt.Sprintf("/rest/api/3/issue/%s/transitions", ticketKeyEscaped)
resp, err := c.makeRequest(ctx, "POST", endpoint, transitionReq)
if err != nil {
return fmt.Errorf("failed to transition ticket %s: %w", ticketKey, err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusNoContent {
return c.handleErrorResponse(resp)
}
logger.Debug("successfully transitioned ticket", "ticket", ticketKey, "status", status)
return nil
}
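// Example (illustrative sketch; transition IDs vary per JIRA workflow and these
// values are placeholders):
//
//	transitions := map[string]string{"To Do": "11", "In Progress": "21", "Done": "31"}
//	err := c.UpdateTicketStatus(ctx, "PROJ-123", "In Progress", transitions)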
package jiraraw
import (
"fmt"
"intern/internal/ticketing"
)
// NewRawClient creates a raw HTTP JIRA client that satisfies ticketing.Client,
// providing a drop-in replacement for the go-jira library based client.
func NewRawClient(jiraURL, email, apiToken string) (ticketing.Client, error) {
config := ClientConfig{
BaseURL: jiraURL,
Email: email,
APIToken: apiToken,
}
client, err := NewClient(config)
if err != nil {
return nil, fmt.Errorf("failed to create JIRA raw client: %w", err)
}
return client, nil
}
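// Example (illustrative sketch; the URL and credentials are placeholders):
//
//	client, err := NewRawClient("https://example.atlassian.net", "agent@example.com", apiToken)
//	if err != nil {
//		// handle construction error
//	}
//	svc := ticketing.NewTicketingService(client)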
package jiraraw
import (
"intern/internal/ticketing"
"strings"
)
// JIRA API Response Types
// MyselfResponse represents the response from /rest/api/3/myself
type MyselfResponse struct {
AccountID string `json:"accountId"`
DisplayName string `json:"displayName"`
EmailAddress string `json:"emailAddress"`
Key string `json:"key"`
Name string `json:"name"`
Self string `json:"self"`
}
// SearchRequest represents the request body for issue search
type SearchRequest struct {
JQL string `json:"jql"`
StartAt int `json:"startAt"`
MaxResults int `json:"maxResults"`
Fields []string `json:"fields"`
}
// SearchResponse represents the response from the issue search endpoint (/rest/api/3/search/jql)
type SearchResponse struct {
Expand string `json:"expand"`
StartAt int `json:"startAt"`
MaxResults int `json:"maxResults"`
Total int `json:"total"`
Issues []Issue `json:"issues"`
}
// Issue represents a JIRA issue
type Issue struct {
Expand string `json:"expand"`
ID string `json:"id"`
Self string `json:"self"`
Key string `json:"key"`
Fields struct {
Summary string `json:"summary"`
Description interface{} `json:"description"` // Can be string or Atlassian Document Format object
Status struct {
Self string `json:"self"`
Description string `json:"description"`
IconURL string `json:"iconUrl"`
Name string `json:"name"`
ID string `json:"id"`
StatusCategory struct {
Self string `json:"self"`
ID int `json:"id"`
Key string `json:"key"`
ColorName string `json:"colorName"`
Name string `json:"name"`
} `json:"statusCategory"`
} `json:"status"`
Priority struct {
Self string `json:"self"`
IconURL string `json:"iconUrl"`
Name string `json:"name"`
ID string `json:"id"`
} `json:"priority"`
Assignee *User `json:"assignee"`
Reporter *User `json:"reporter"`
} `json:"fields"`
}
// User represents a JIRA user
type User struct {
Self string `json:"self"`
AccountID string `json:"accountId"`
EmailAddress string `json:"emailAddress,omitempty"`
DisplayName string `json:"displayName,omitempty"`
Name string `json:"name,omitempty"`
Key string `json:"key,omitempty"`
}
// Atlassian Document Format types for description parsing
type Document struct {
Type string `json:"type"`
Version int `json:"version"`
Content []Content `json:"content"`
}
type Content struct {
Type string `json:"type"`
Content []TextBlock `json:"content,omitempty"`
Text string `json:"text,omitempty"`
}
type TextBlock struct {
Type string `json:"type"`
Text string `json:"text"`
}
// TransitionRequest represents the request body for issue transition
type TransitionRequest struct {
Transition struct {
ID string `json:"id"`
} `json:"transition"`
}
// TransitionResponse represents the response from transition endpoint
type TransitionResponse struct {
// Empty response for successful transitions
}
// ErrorResponse represents JIRA API error response
type ErrorResponse struct {
ErrorMessages []string `json:"errorMessages"`
Errors map[string]string `json:"errors"`
}
// extractTextFromDescription extracts plain text from JIRA description field
// which can be a string or Atlassian Document Format object
func extractTextFromDescription(desc interface{}) string {
if desc == nil {
return ""
}
// Handle string description
if descStr, ok := desc.(string); ok {
return descStr
}
// Handle Atlassian Document Format
if descMap, ok := desc.(map[string]interface{}); ok {
return extractTextFromDocument(descMap)
}
// Unknown description format: fall back to an empty description rather than failing
return ""
}
// extractTextFromDocument recursively extracts text from Atlassian Document Format
func extractTextFromDocument(doc map[string]interface{}) string {
var result strings.Builder
// Check if this is a text node
if text, ok := doc["text"].(string); ok {
result.WriteString(text)
}
// Process content array
if content, ok := doc["content"].([]interface{}); ok {
for _, item := range content {
if itemMap, ok := item.(map[string]interface{}); ok {
result.WriteString(extractTextFromDocument(itemMap))
}
}
}
return result.String()
}
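// Example (illustrative sketch): a minimal Atlassian Document Format description
// such as
//
//	{"type":"doc","version":1,"content":[{"type":"paragraph","content":[{"type":"text","text":"Fix the login bug"}]}]}
//
// decodes into a map[string]interface{}, and extractTextFromDescription returns
// "Fix the login bug".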
// ToTicket converts a JIRA Issue to ticketing.Ticket
func (i *Issue) ToTicket() ticketing.Ticket {
ticket := ticketing.Ticket{
ID: i.ID,
Key: i.Key,
Summary: i.Fields.Summary,
Description: extractTextFromDescription(i.Fields.Description),
Status: i.Fields.Status.Name,
Priority: i.Fields.Priority.Name,
URL: i.Self,
}
// Handle assignee
if i.Fields.Assignee != nil {
ticket.Assignee = getUserName(i.Fields.Assignee)
}
// Handle reporter
if i.Fields.Reporter != nil {
ticket.Reporter = getUserName(i.Fields.Reporter)
}
return ticket
}
// getUserName extracts display name from user, falling back to name or key
func getUserName(user *User) string {
if user == nil {
return ""
}
if user.DisplayName != "" {
return user.DisplayName
}
if user.Name != "" {
return user.Name
}
return user.Key
}
package jira
import (
"context"
"fmt"
"intern/internal/ticketing"
"github.com/andygrunwald/go-jira"
"github.com/jenish-jain/logger"
)
type Client interface {
HealthCheck(ctx context.Context) error
// TicketingClient methods
GetTickets(ctx context.Context, assignee, project string) ([]ticketing.Ticket, error)
UpdateTicketStatus(ctx context.Context, ticketKey, status string, transitions map[string]string) error
}
type client struct {
jiraClient *jira.Client
}
func NewClient(jiraURL, email, apiToken string) (Client, error) {
tp := jira.BasicAuthTransport{
Username: email,
Password: apiToken,
}
c, err := jira.NewClient(tp.Client(), jiraURL)
if err != nil {
return nil, fmt.Errorf("failed to create JIRA client: %w", err)
}
return &client{jiraClient: c}, nil
}
func (c *client) HealthCheck(ctx context.Context) error {
me, _, err := c.jiraClient.User.GetSelfWithContext(ctx)
if err != nil {
return fmt.Errorf("JIRA health check failed: %w", err)
}
if me == nil || me.EmailAddress == "" {
return fmt.Errorf("JIRA health check: user info missing")
}
return nil
}
func (c *client) GetTickets(ctx context.Context, assignee, project string) ([]ticketing.Ticket, error) {
jql := fmt.Sprintf("assignee = '%s' AND project = '%s' AND statusCategory = 'To Do' ORDER BY priority ASC", assignee, project)
logger.Debug("fetching tickets from JIRA", "query", jql)
issues, _, err := c.jiraClient.Issue.SearchWithContext(ctx, jql, nil)
if err != nil {
return nil, fmt.Errorf("failed to fetch JIRA tickets: %w", err)
}
var tickets []ticketing.Ticket
for _, issue := range issues {
tickets = append(tickets, ticketing.Ticket{
ID: issue.ID,
Key: issue.Key,
Summary: issue.Fields.Summary,
Description: issue.Fields.Description,
Status: issue.Fields.Status.Name,
Priority: issue.Fields.Priority.Name,
Assignee: getUserName(issue.Fields.Assignee),
Reporter: getUserName(issue.Fields.Reporter),
URL: issue.Self,
})
}
logger.Debug("fetched tickets from JIRA", "tickets", tickets)
return tickets, nil
}
func (c *client) UpdateTicketStatus(ctx context.Context, ticketKey, status string, transitions map[string]string) error {
transitionID, ok := transitions[status]
if !ok {
return fmt.Errorf("no transition ID found for status: %s", status)
}
_, err := c.jiraClient.Issue.DoTransitionWithContext(ctx, ticketKey, transitionID)
if err != nil {
return fmt.Errorf("failed to transition ticket %s to %s: %w", ticketKey, status, err)
}
return nil
}
func getUserName(user *jira.User) string {
if user == nil {
return ""
}
if user.DisplayName != "" {
return user.DisplayName
}
return user.Name
}
package ticketing
import (
"context"
)
// TicketingService wraps a ticketing Client and exposes the operations the orchestrator needs.
type TicketingService struct {
Client Client
}
func NewTicketingService(client Client) *TicketingService {
return &TicketingService{Client: client}
}
func (t *TicketingService) GetTickets(ctx context.Context, assignee, project string) ([]Ticket, error) {
return t.Client.GetTickets(ctx, assignee, project)
}
func (t *TicketingService) UpdateTicketStatus(ctx context.Context, ticketKey, status string, transitions map[string]string) error {
return t.Client.UpdateTicketStatus(ctx, ticketKey, status, transitions)
}
package util
// Min returns the minimum of two integers
func Min(a, b int) int {
if a < b {
return a
}
return b
}
// Max returns the maximum of two integers
func Max(a, b int) int {
if a > b {
return a
}
return b
}