package main
import (
"flag"
"fmt"
"os"
"github.com/adicens/systemverilog-lsp/internal/lsp"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
)
var version = "dev"
// parseFlags parses command line flags and returns the configuration
func parseFlags() (mode string, verbose bool, ver bool) {
flag.StringVar(&mode, "mode", "stdio", "Connection mode: stdio, tcp, socket")
flag.BoolVar(&verbose, "verbose", false, "Enable verbose logging")
flag.BoolVar(&ver, "version", false, "Print version and exit")
flag.Parse()
return mode, verbose, ver
}
// printVersion prints the version and exits
func printVersion() {
fmt.Printf("SystemVerilog Language Server %s\n", version)
os.Exit(0)
}
// configureLogging sets up the logging configuration
func configureLogging(verbose bool, mode string) {
zerolog.TimeFieldFormat = zerolog.TimeFormatUnix
if verbose {
zerolog.SetGlobalLevel(zerolog.DebugLevel)
} else {
zerolog.SetGlobalLevel(zerolog.InfoLevel)
}
// Pretty print for development
if mode == "stdio" && verbose {
log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
}
}
// isValidMode checks if the given mode is valid
func isValidMode(mode string) bool {
switch mode {
case "stdio", "tcp", "socket":
return true
default:
return false
}
}
// runServer starts the server in the specified mode
func runServer(mode string) error {
if !isValidMode(mode) {
return fmt.Errorf("invalid mode: %s", mode)
}
server := lsp.NewServer()
switch mode {
case "stdio":
return server.RunStdio()
case "tcp":
return server.RunTCP(":4389")
case "socket":
return server.RunSocket("/tmp/svls.sock")
default:
return fmt.Errorf("invalid mode: %s", mode)
}
}
func main() {
mode, verbose, ver := parseFlags()
if ver {
printVersion()
}
configureLogging(verbose, mode)
log.Info().
Str("version", version).
Str("mode", mode).
Msg("Starting SystemVerilog Language Server")
if err := runServer(mode); err != nil {
log.Fatal().Err(err).Msg("Server failed")
}
}
package main
import (
"fmt"
"io/ioutil"
"log"
"os"
"github.com/adicens/systemverilog-lsp/internal/lsp"
"github.com/adicens/systemverilog-lsp/internal/symbols"
)
func main() {
if len(os.Args) < 2 {
log.Fatal("Usage: go run main.go <systemverilog_file>")
}
filename := os.Args[1]
content, err := ioutil.ReadFile(filename)
if err != nil {
log.Fatalf("Error reading file: %v", err)
}
fmt.Printf("Testing document manager with: %s\n", filename)
fmt.Printf("File size: %d bytes\n", len(content))
// Create document manager
dm := lsp.NewDocumentManager()
// Open document
doc, err := dm.OpenDocument(filename, 1, string(content))
if err != nil {
log.Fatalf("Error opening document: %v", err)
}
fmt.Printf("\nDocument opened successfully!\n")
fmt.Printf("URI: %s\n", doc.URI)
fmt.Printf("Version: %d\n", doc.GetVersion())
fmt.Printf("Content length: %d\n", len(doc.GetContent()))
fmt.Printf("Has errors: %v\n", doc.HasErrors())
fmt.Printf("Error count: %d\n", doc.GetErrorCount())
// Show document manager stats
stats := dm.GetStats()
fmt.Printf("\nDocument Manager Statistics:\n")
fmt.Printf("Document count: %d\n", stats["document_count"])
fmt.Printf("Total symbols: %d\n", stats["total_symbols"])
fmt.Printf("Total scopes: %d\n", stats["total_scopes"])
fmt.Printf("Total errors: %d\n", stats["total_errors"])
// Show diagnostics
diagnostics := dm.GetDiagnostics(filename)
fmt.Printf("\nDiagnostics: %d\n", len(diagnostics))
for i, diag := range diagnostics {
fmt.Printf(" %d. %s (line %d, col %d)\n",
i+1, diag.Message, diag.Range.Start.Line+1, diag.Range.Start.Character+1)
}
// Show symbols
symbolTable := doc.GetSymbolTable()
allSymbols := symbolTable.GetAllSymbols()
fmt.Printf("\nSymbols found: %d\n", len(allSymbols))
for _, symbol := range allSymbols {
fmt.Printf(" %s %s (%s) at line %d\n",
symbol.Type.String(),
symbol.Name,
symbol.DataType,
symbol.Position.Line)
}
// Test finding symbols across documents
fmt.Printf("\nTesting symbol search:\n")
// Find modules
modules := dm.FindSymbolsOfType(symbols.SymbolTypeModule)
fmt.Printf("Modules found: %d\n", len(modules))
for _, module := range modules {
fmt.Printf(" Module: %s\n", module.Name)
}
// Find parameters
parameters := dm.FindSymbolsOfType(symbols.SymbolTypeParameter)
fmt.Printf("Parameters found: %d\n", len(parameters))
for _, param := range parameters {
fmt.Printf(" Parameter: %s\n", param.Name)
}
// Find ports
ports := dm.FindSymbolsOfType(symbols.SymbolTypePort)
fmt.Printf("Ports found: %d\n", len(ports))
for _, port := range ports {
fmt.Printf(" Port: %s\n", port.Name)
}
// Test document update
fmt.Printf("\nTesting document update...\n")
updatedContent := string(content) + "\n// Updated content\n"
updatedDoc, err := dm.UpdateDocument(filename, 2, updatedContent)
if err != nil {
log.Fatalf("Error updating document: %v", err)
}
fmt.Printf("Document updated!\n")
fmt.Printf("New version: %d\n", updatedDoc.GetVersion())
fmt.Printf("New content length: %d\n", len(updatedDoc.GetContent()))
// Test closing document
fmt.Printf("\nTesting document close...\n")
err = dm.CloseDocument(filename)
if err != nil {
log.Fatalf("Error closing document: %v", err)
}
fmt.Printf("Document closed!\n")
fmt.Printf("Document count: %d\n", dm.GetDocumentCount())
// Try to access closed document
_, exists := dm.GetDocument(filename)
fmt.Printf("Document still exists: %v\n", exists)
}
package main
import (
"fmt"
"io/ioutil"
"log"
"os"
"github.com/adicens/systemverilog-lsp/internal/parser"
)
func main() {
if len(os.Args) < 2 {
log.Fatal("Usage: go run main.go <systemverilog_file>")
}
filename := os.Args[1]
content, err := ioutil.ReadFile(filename)
if err != nil {
log.Fatalf("Error reading file: %v", err)
}
fmt.Printf("Parsing file: %s\n", filename)
fmt.Printf("File size: %d bytes\n", len(content))
p := parser.NewParser(string(content))
ast := p.Parse()
fmt.Printf("\nParsing completed!\n")
fmt.Printf("Errors: %d\n", len(ast.Errors))
if len(ast.Errors) > 0 {
fmt.Printf("\nParsing errors:\n")
for i, err := range ast.Errors {
fmt.Printf(" %d. %s (line %d, col %d)\n", i+1, err.Message, err.Position.Line, err.Position.Column)
}
}
if ast.Root != nil {
switch node := ast.Root.(type) {
case *parser.ModuleNode:
fmt.Printf("\nModule: %s\n", node.Name)
fmt.Printf("Parameters: %d\n", len(node.Parameters))
fmt.Printf("Ports: %d\n", len(node.Ports))
fmt.Printf("Items: %d\n", len(node.Items))
if len(node.Parameters) > 0 {
fmt.Printf("\nParameters:\n")
for _, param := range node.Parameters {
fmt.Printf(" - %s = %s\n", param.Name, param.DefaultValue)
}
}
if len(node.Ports) > 0 {
fmt.Printf("\nPorts:\n")
for _, port := range node.Ports {
direction := "input"
if port.Direction == parser.PortDirectionOutput {
direction = "output"
} else if port.Direction == parser.PortDirectionInout {
direction = "inout"
}
width := ""
if port.Width != nil {
width = fmt.Sprintf("[%s:%s]", port.Width.High, port.Width.Low)
}
fmt.Printf(" - %s %s %s%s\n", direction, port.DataType, width, port.Name)
}
}
case *parser.InterfaceNode:
fmt.Printf("\nInterface: %s\n", node.Name)
fmt.Printf("Parameters: %d\n", len(node.Parameters))
fmt.Printf("Ports: %d\n", len(node.Ports))
fmt.Printf("Items: %d\n", len(node.Items))
case *parser.ClassNode:
fmt.Printf("\nClass: %s\n", node.Name)
if node.Extends != "" {
fmt.Printf("Extends: %s\n", node.Extends)
}
fmt.Printf("Items: %d\n", len(node.Items))
}
}
}
package main
import (
"fmt"
"io/ioutil"
"log"
"os"
"github.com/adicens/systemverilog-lsp/internal/parser"
"github.com/adicens/systemverilog-lsp/internal/symbols"
)
func main() {
if len(os.Args) < 2 {
log.Fatal("Usage: go run main.go <systemverilog_file>")
}
filename := os.Args[1]
content, err := ioutil.ReadFile(filename)
if err != nil {
log.Fatalf("Error reading file: %v", err)
}
fmt.Printf("Building symbol table for: %s\n", filename)
fmt.Printf("File size: %d bytes\n", len(content))
// Parse the file
p := parser.NewParser(string(content))
ast := p.Parse()
if len(ast.Errors) > 0 {
fmt.Printf("Parsing errors: %d\n", len(ast.Errors))
for _, err := range ast.Errors {
fmt.Printf(" - %s (line %d)\n", err.Message, err.Position.Line)
}
}
// Build symbol table
builder := symbols.NewSymbolBuilder(filename)
table, err := builder.Build(ast)
if err != nil {
log.Fatalf("Error building symbol table: %v", err)
}
// Display symbol table statistics
stats := table.GetStats()
fmt.Printf("\nSymbol Table Statistics:\n")
fmt.Printf("Total symbols: %d\n", stats["total_symbols"])
fmt.Printf("Total scopes: %d\n", stats["total_scopes"])
fmt.Printf("Total documents: %d\n", stats["total_documents"])
// Display symbols by type
if symbolCounts, ok := stats["symbol_counts"].(map[string]int); ok {
fmt.Printf("\nSymbols by type:\n")
for symbolType, count := range symbolCounts {
fmt.Printf(" %s: %d\n", symbolType, count)
}
}
// Display all symbols
fmt.Printf("\nAll symbols:\n")
allSymbols := table.GetAllSymbols()
for _, symbol := range allSymbols {
fmt.Printf(" %s %s (%s) at %s:%d:%d\n",
symbol.Type.String(),
symbol.Name,
symbol.DataType,
symbol.Position.File,
symbol.Position.Line,
symbol.Position.Column)
// Show attributes for some symbol types
if symbol.Type == symbols.SymbolTypeParameter {
if defaultValue, exists := symbol.GetAttribute("default_value"); exists {
fmt.Printf(" default: %s\n", defaultValue)
}
}
if symbol.Type == symbols.SymbolTypePort {
if direction, exists := symbol.GetAttribute("direction"); exists {
dirStr := "input"
if direction == parser.PortDirectionOutput {
dirStr = "output"
} else if direction == parser.PortDirectionInout {
dirStr = "inout"
}
fmt.Printf(" direction: %s\n", dirStr)
}
}
}
// Show scope hierarchy
fmt.Printf("\nScope hierarchy:\n")
printScopeHierarchy(table.GetRootScope(), 0)
}
func printScopeHierarchy(scope *symbols.Scope, depth int) {
indent := ""
for i := 0; i < depth; i++ {
indent += " "
}
fmt.Printf("%s%s (%s) - %d symbols\n",
indent,
scope.Name,
scope.ScopeType.String(),
len(scope.GetAllSymbols()))
for _, child := range scope.Children {
printScopeHierarchy(child, depth+1)
}
}
package analyzer
import (
"github.com/adicens/systemverilog-lsp/internal/parser"
"github.com/adicens/systemverilog-lsp/internal/symbols"
)
// Analyzer performs semantic analysis on SystemVerilog code
type Analyzer struct {
typeChecker *TypeChecker
usageAnalyzer *UsageAnalyzer
diagnostics []Diagnostic
}
// Diagnostic represents a semantic diagnostic
type Diagnostic struct {
Range DiagnosticRange
Severity DiagnosticSeverity
Message string
Source string
}
// DiagnosticRange represents a range in the source code
type DiagnosticRange struct {
Start Position
End Position
}
// Position represents a position in the source code
type Position struct {
Line int
Character int
}
// DiagnosticSeverity represents the severity of a diagnostic
type DiagnosticSeverity int
const (
DiagnosticSeverityError DiagnosticSeverity = iota
DiagnosticSeverityWarning
DiagnosticSeverityInformation
DiagnosticSeverityHint
)
// NewAnalyzer creates a new semantic analyzer
func NewAnalyzer() *Analyzer {
return &Analyzer{
typeChecker: NewTypeChecker(),
usageAnalyzer: NewUsageAnalyzer(),
diagnostics: make([]Diagnostic, 0),
}
}
// Analyze performs semantic analysis on an AST
func (a *Analyzer) Analyze(ast *parser.AST, symbolTable *symbols.SymbolTable) []Diagnostic {
a.diagnostics = make([]Diagnostic, 0)
// Perform type checking
typeErrors := a.typeChecker.Check(ast, symbolTable)
a.diagnostics = append(a.diagnostics, typeErrors...)
// Analyze symbol usage
usageErrors := a.usageAnalyzer.Analyze(ast, symbolTable)
a.diagnostics = append(a.diagnostics, usageErrors...)
return a.diagnostics
}
// GetDiagnostics returns all diagnostics from the last analysis
func (a *Analyzer) GetDiagnostics() []Diagnostic {
return a.diagnostics
}
// ClearDiagnostics clears all diagnostics
func (a *Analyzer) ClearDiagnostics() {
a.diagnostics = make([]Diagnostic, 0)
}
package analyzer
import (
"fmt"
"strings"
"github.com/adicens/systemverilog-lsp/internal/parser"
"github.com/adicens/systemverilog-lsp/internal/symbols"
)
// TypeChecker performs type checking on SystemVerilog code
type TypeChecker struct {
currentScope *symbols.Scope
errors []Diagnostic
}
// NewTypeChecker creates a new type checker
func NewTypeChecker() *TypeChecker {
return &TypeChecker{
errors: make([]Diagnostic, 0),
}
}
// Check performs type checking on an AST
func (tc *TypeChecker) Check(ast *parser.AST, symbolTable *symbols.SymbolTable) []Diagnostic {
tc.errors = make([]Diagnostic, 0)
if ast.Root != nil {
tc.checkNode(ast.Root, symbolTable)
}
return tc.errors
}
// checkNode recursively checks a node
func (tc *TypeChecker) checkNode(node parser.Node, symbolTable *symbols.SymbolTable) {
if node == nil {
return
}
switch n := node.(type) {
case *parser.ModuleNode:
tc.checkModule(n, symbolTable)
case *parser.InterfaceNode:
tc.checkInterface(n, symbolTable)
case *parser.ClassNode:
tc.checkClass(n, symbolTable)
case *parser.FunctionNode:
tc.checkFunction(n, symbolTable)
case *parser.TaskNode:
tc.checkTask(n, symbolTable)
case *parser.VariableNode:
tc.checkVariable(n, symbolTable)
case *parser.AssignmentNode:
tc.checkAssignment(n, symbolTable)
case *parser.InstanceNode:
tc.checkInstance(n, symbolTable)
case *parser.AlwaysNode:
tc.checkAlways(n, symbolTable)
}
// Check children
for _, child := range node.Children() {
tc.checkNode(child, symbolTable)
}
}
// checkModule checks a module declaration
func (tc *TypeChecker) checkModule(module *parser.ModuleNode, symbolTable *symbols.SymbolTable) {
// Check parameters
for _, param := range module.Parameters {
tc.checkParameter(param, symbolTable)
}
// Check ports
for _, port := range module.Ports {
tc.checkPort(port, symbolTable)
}
// Check module items
for _, item := range module.Items {
tc.checkNode(item, symbolTable)
}
}
// checkInterface checks an interface declaration
func (tc *TypeChecker) checkInterface(iface *parser.InterfaceNode, symbolTable *symbols.SymbolTable) {
// Check parameters
for _, param := range iface.Parameters {
tc.checkParameter(param, symbolTable)
}
// Check interface items
for _, item := range iface.Items {
tc.checkNode(item, symbolTable)
}
}
// checkClass checks a class declaration
func (tc *TypeChecker) checkClass(class *parser.ClassNode, symbolTable *symbols.SymbolTable) {
// Check if base class exists
if class.Extends != "" {
if _, exists := symbolTable.FindSymbol(class.Extends); !exists {
tc.addError(
class.Range(),
fmt.Sprintf("Base class '%s' not found", class.Extends),
DiagnosticSeverityError,
)
}
}
// Check class items
for _, item := range class.Items {
tc.checkNode(item, symbolTable)
}
}
// checkFunction checks a function declaration
func (tc *TypeChecker) checkFunction(function *parser.FunctionNode, symbolTable *symbols.SymbolTable) {
// Check return type
if function.ReturnType != "" && !tc.isValidType(function.ReturnType) {
tc.addError(
function.Range(),
fmt.Sprintf("Unknown return type '%s'", function.ReturnType),
DiagnosticSeverityError,
)
}
// Check parameters
for _, param := range function.Parameters {
tc.checkParameter(param, symbolTable)
}
// Check body
for _, stmt := range function.Body {
tc.checkNode(stmt, symbolTable)
}
}
// checkTask checks a task declaration
func (tc *TypeChecker) checkTask(task *parser.TaskNode, symbolTable *symbols.SymbolTable) {
// Check parameters
for _, param := range task.Parameters {
tc.checkParameter(param, symbolTable)
}
// Check body
for _, stmt := range task.Body {
tc.checkNode(stmt, symbolTable)
}
}
// checkVariable checks a variable declaration
func (tc *TypeChecker) checkVariable(variable *parser.VariableNode, symbolTable *symbols.SymbolTable) {
// Check data type
if variable.DataType != "" && !tc.isValidType(variable.DataType) {
// Check if it's a user-defined type
if _, exists := symbolTable.FindSymbol(variable.DataType); !exists {
tc.addError(
variable.Range(),
fmt.Sprintf("Unknown data type '%s'", variable.DataType),
DiagnosticSeverityError,
)
}
}
// TODO: Check initial value type matches variable type
}
// checkAssignment checks an assignment statement
func (tc *TypeChecker) checkAssignment(assignment *parser.AssignmentNode, symbolTable *symbols.SymbolTable) {
// TODO: Check LHS and RHS types match
// For now, just check if LHS symbol exists
lhs := strings.TrimSpace(assignment.LHS)
if lhs != "" {
// Extract the base identifier (before any array indexing or bit selection)
baseName := tc.extractBaseIdentifier(lhs)
if _, exists := symbolTable.FindSymbol(baseName); !exists {
tc.addError(
assignment.Range(),
fmt.Sprintf("Undefined symbol '%s'", baseName),
DiagnosticSeverityError,
)
}
}
}
// checkInstance checks a module/interface instantiation
func (tc *TypeChecker) checkInstance(instance *parser.InstanceNode, symbolTable *symbols.SymbolTable) {
// Check if module exists
if _, exists := symbolTable.FindSymbol(instance.ModuleName); !exists {
tc.addError(
instance.Range(),
fmt.Sprintf("Module '%s' not found", instance.ModuleName),
DiagnosticSeverityError,
)
}
// TODO: Check parameters match module definition
// TODO: Check port connections match module definition
}
// checkAlways checks an always block
func (tc *TypeChecker) checkAlways(always *parser.AlwaysNode, symbolTable *symbols.SymbolTable) {
// Check sensitivity list
for _, signal := range always.Sensitivity {
if signal != "*" {
// Extract signal name from posedge/negedge
signalName := signal
if strings.HasPrefix(signal, "posedge ") {
signalName = strings.TrimSpace(signal[8:])
} else if strings.HasPrefix(signal, "negedge ") {
signalName = strings.TrimSpace(signal[8:])
}
if signalName != "" {
if _, exists := symbolTable.FindSymbol(signalName); !exists {
tc.addError(
always.Range(),
fmt.Sprintf("Signal '%s' in sensitivity list not found", signalName),
DiagnosticSeverityError,
)
}
}
}
}
// Check body
for _, stmt := range always.Body {
tc.checkNode(stmt, symbolTable)
}
}
// checkParameter checks a parameter declaration
func (tc *TypeChecker) checkParameter(param *parser.ParameterNode, symbolTable *symbols.SymbolTable) {
// Check data type if specified
if param.DataType != "" && !tc.isValidType(param.DataType) {
tc.addError(
param.Range(),
fmt.Sprintf("Unknown parameter type '%s'", param.DataType),
DiagnosticSeverityError,
)
}
// TODO: Check default value type matches parameter type
}
// checkPort checks a port declaration
func (tc *TypeChecker) checkPort(port *parser.PortNode, symbolTable *symbols.SymbolTable) {
// Check data type
if port.DataType != "" && !tc.isValidType(port.DataType) {
// Check if it's a user-defined type or interface
if _, exists := symbolTable.FindSymbol(port.DataType); !exists {
tc.addError(
port.Range(),
fmt.Sprintf("Unknown port type '%s'", port.DataType),
DiagnosticSeverityError,
)
}
}
}
// isValidType checks if a type is a valid built-in SystemVerilog type
func (tc *TypeChecker) isValidType(typeName string) bool {
validTypes := []string{
"logic", "bit", "byte", "int", "integer", "real", "time",
"wire", "reg", "string", "shortint", "longint", "shortreal",
"chandle", "event", "void",
}
for _, validType := range validTypes {
if typeName == validType {
return true
}
}
return false
}
// extractBaseIdentifier extracts the base identifier from a complex expression
func (tc *TypeChecker) extractBaseIdentifier(expr string) string {
// Remove array indexing
if idx := strings.Index(expr, "["); idx != -1 {
expr = expr[:idx]
}
// Remove bit selection
if idx := strings.Index(expr, "."); idx != -1 {
expr = expr[:idx]
}
return strings.TrimSpace(expr)
}
// addError adds a type checking error
func (tc *TypeChecker) addError(r parser.Range, message string, severity DiagnosticSeverity) {
tc.errors = append(tc.errors, Diagnostic{
Range: DiagnosticRange{
Start: Position{
Line: r.Start.Line - 1,
Character: r.Start.Column - 1,
},
End: Position{
Line: r.End.Line - 1,
Character: r.End.Column - 1,
},
},
Severity: severity,
Message: message,
Source: "type-checker",
})
}
package analyzer
import (
"fmt"
"github.com/adicens/systemverilog-lsp/internal/parser"
"github.com/adicens/systemverilog-lsp/internal/symbols"
)
// UsageAnalyzer analyzes symbol usage in SystemVerilog code
type UsageAnalyzer struct {
symbolUsage map[string]*UsageInfo
errors []Diagnostic
}
// UsageInfo tracks usage information for a symbol
type UsageInfo struct {
Declaration *symbols.Symbol
References []parser.Position
IsUsed bool
IsAssigned bool
IsRead bool
}
// NewUsageAnalyzer creates a new usage analyzer
func NewUsageAnalyzer() *UsageAnalyzer {
return &UsageAnalyzer{
symbolUsage: make(map[string]*UsageInfo),
errors: make([]Diagnostic, 0),
}
}
// Analyze performs usage analysis on an AST
func (ua *UsageAnalyzer) Analyze(ast *parser.AST, symbolTable *symbols.SymbolTable) []Diagnostic {
ua.errors = make([]Diagnostic, 0)
ua.symbolUsage = make(map[string]*UsageInfo)
// Initialize usage info for all symbols
ua.initializeUsageInfo(symbolTable)
// Analyze the AST
if ast.Root != nil {
ua.analyzeNode(ast.Root, symbolTable)
}
// Check for unused symbols
ua.checkUnusedSymbols()
return ua.errors
}
// initializeUsageInfo initializes usage information for all symbols
func (ua *UsageAnalyzer) initializeUsageInfo(symbolTable *symbols.SymbolTable) {
// Get all symbols and initialize their usage info
for _, symbolType := range []symbols.SymbolType{
symbols.SymbolTypeVariable,
symbols.SymbolTypeParameter,
symbols.SymbolTypePort,
symbols.SymbolTypeFunction,
symbols.SymbolTypeTask,
} {
syms := symbolTable.GetSymbolsByType(symbolType)
for _, sym := range syms {
ua.symbolUsage[sym.ID] = &UsageInfo{
Declaration: sym,
References: make([]parser.Position, 0),
IsUsed: false,
IsAssigned: false,
IsRead: false,
}
}
}
}
// analyzeNode recursively analyzes a node for symbol usage
func (ua *UsageAnalyzer) analyzeNode(node parser.Node, symbolTable *symbols.SymbolTable) {
if node == nil {
return
}
switch n := node.(type) {
case *parser.AssignmentNode:
ua.analyzeAssignment(n, symbolTable)
case *parser.InstanceNode:
ua.analyzeInstance(n, symbolTable)
case *parser.FunctionNode:
// Mark function as declared
if sym, exists := symbolTable.FindSymbol(n.Name); exists {
if usage, ok := ua.symbolUsage[sym.ID]; ok {
usage.IsUsed = true
}
}
case *parser.TaskNode:
// Mark task as declared
if sym, exists := symbolTable.FindSymbol(n.Name); exists {
if usage, ok := ua.symbolUsage[sym.ID]; ok {
usage.IsUsed = true
}
}
}
// Analyze children
for _, child := range node.Children() {
ua.analyzeNode(child, symbolTable)
}
}
// analyzeAssignment analyzes an assignment for symbol usage
func (ua *UsageAnalyzer) analyzeAssignment(assignment *parser.AssignmentNode, symbolTable *symbols.SymbolTable) {
// Mark LHS as assigned
lhsName := ua.extractIdentifier(assignment.LHS)
if lhsName != "" {
if sym, exists := symbolTable.FindSymbol(lhsName); exists {
if usage, ok := ua.symbolUsage[sym.ID]; ok {
usage.IsUsed = true
usage.IsAssigned = true
usage.References = append(usage.References, assignment.Range().Start)
}
}
}
// Mark RHS symbols as read
// TODO: Parse RHS expression properly to extract all referenced symbols
// For now, this is simplified
}
// analyzeInstance analyzes a module instantiation
func (ua *UsageAnalyzer) analyzeInstance(instance *parser.InstanceNode, symbolTable *symbols.SymbolTable) {
// Mark module as used
if sym, exists := symbolTable.FindSymbol(instance.ModuleName); exists {
if usage, ok := ua.symbolUsage[sym.ID]; ok {
usage.IsUsed = true
usage.References = append(usage.References, instance.Range().Start)
}
}
// Mark connected signals as used
for _, connection := range instance.Connections {
connName := ua.extractIdentifier(connection)
if connName != "" {
if sym, exists := symbolTable.FindSymbol(connName); exists {
if usage, ok := ua.symbolUsage[sym.ID]; ok {
usage.IsUsed = true
usage.IsRead = true
usage.References = append(usage.References, instance.Range().Start)
}
}
}
}
}
// checkUnusedSymbols checks for unused symbols and generates warnings
func (ua *UsageAnalyzer) checkUnusedSymbols() {
for _, usage := range ua.symbolUsage {
if !usage.IsUsed && usage.Declaration != nil {
// Don't warn about unused ports or parameters
if usage.Declaration.Type == symbols.SymbolTypePort ||
usage.Declaration.Type == symbols.SymbolTypeParameter {
continue
}
// Generate warning for unused symbol
ua.addWarning(
usage.Declaration.Position,
fmt.Sprintf("Symbol '%s' is declared but never used", usage.Declaration.Name),
)
}
// Check for write-only variables
if usage.IsAssigned && !usage.IsRead &&
usage.Declaration != nil &&
usage.Declaration.Type == symbols.SymbolTypeVariable {
ua.addWarning(
usage.Declaration.Position,
fmt.Sprintf("Variable '%s' is assigned but never read", usage.Declaration.Name),
)
}
}
}
// GetUsageInfo returns usage information for a symbol
func (ua *UsageAnalyzer) GetUsageInfo(symbolID string) *UsageInfo {
return ua.symbolUsage[symbolID]
}
// GetReferences returns all references to a symbol
func (ua *UsageAnalyzer) GetReferences(symbolID string) []parser.Position {
if usage, ok := ua.symbolUsage[symbolID]; ok {
return usage.References
}
return nil
}
// extractIdentifier extracts a simple identifier from an expression
func (ua *UsageAnalyzer) extractIdentifier(expr string) string {
// TODO: Implement proper expression parsing
// For now, just return the expression as-is
return expr
}
// addWarning adds a usage warning
func (ua *UsageAnalyzer) addWarning(pos symbols.Position, message string) {
ua.errors = append(ua.errors, Diagnostic{
Range: DiagnosticRange{
Start: Position{
Line: pos.Line - 1,
Character: pos.Column - 1,
},
End: Position{
Line: pos.Line - 1,
Character: pos.Column - 1,
},
},
Severity: DiagnosticSeverityWarning,
Message: message,
Source: "usage-analyzer",
})
}
package lsp
import (
"fmt"
"strings"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// CallHierarchyItem represents a call hierarchy item
type CallHierarchyItem struct {
Name string `json:"name"`
Kind lsp.SymbolKind `json:"kind"`
Detail string `json:"detail,omitempty"`
URI lsp.DocumentURI `json:"uri"`
Range lsp.Range `json:"range"`
SelectionRange lsp.Range `json:"selectionRange"`
Data interface{} `json:"data,omitempty"`
}
// CallHierarchyIncomingCall represents an incoming call
type CallHierarchyIncomingCall struct {
From CallHierarchyItem `json:"from"`
FromRanges []lsp.Range `json:"fromRanges"`
}
// CallHierarchyOutgoingCall represents an outgoing call
type CallHierarchyOutgoingCall struct {
To CallHierarchyItem `json:"to"`
FromRanges []lsp.Range `json:"fromRanges"`
}
// CallHierarchyProvider provides call hierarchy functionality
type CallHierarchyProvider struct {
documents *DocumentManager
}
// NewCallHierarchyProvider creates a new call hierarchy provider
func NewCallHierarchyProvider(documents *DocumentManager) *CallHierarchyProvider {
return &CallHierarchyProvider{
documents: documents,
}
}
// PrepareCallHierarchy prepares the call hierarchy for a given position
func (p *CallHierarchyProvider) PrepareCallHierarchy(params *lsp.TextDocumentPositionParams) ([]CallHierarchyItem, error) {
doc, exists := p.documents.GetDocument(string(params.TextDocument.URI))
if !exists {
return nil, fmt.Errorf("document not found: %s", params.TextDocument.URI)
}
// Get the word at the position
content := doc.GetContent()
word := getWordAtPosition(content, params.Position)
if word == "" {
return nil, nil
}
// Find the symbol at the position
symbol := p.findSymbolAtPosition(doc, params.Position, word)
if symbol == nil {
return nil, nil
}
// Only functions and tasks can have call hierarchies
if symbol.Type != symbols.SymbolTypeFunction && symbol.Type != symbols.SymbolTypeTask {
return nil, nil
}
// Create call hierarchy item
item := p.symbolToCallHierarchyItem(symbol, string(params.TextDocument.URI))
return []CallHierarchyItem{item}, nil
}
// IncomingCalls finds all incoming calls to the given call hierarchy item
func (p *CallHierarchyProvider) IncomingCalls(params *CallHierarchyIncomingCallsParams) ([]CallHierarchyIncomingCall, error) {
// Get the target symbol from the call hierarchy item
targetSymbol := params.Item.Name
var incomingCalls []CallHierarchyIncomingCall
// Search all documents for calls to this function/task
docs := p.documents.GetAllDocuments()
for _, doc := range docs {
if doc.AST == nil {
continue
}
// Find all function calls in the document
calls := p.findFunctionCalls(doc, targetSymbol)
// Group calls by the containing function/task
callsByContainer := make(map[*symbols.Symbol][]lsp.Range)
for _, call := range calls {
container := p.findContainingFunction(doc, call.Start)
if container != nil {
callsByContainer[container] = append(callsByContainer[container], call)
}
}
// Create incoming call entries
for container, ranges := range callsByContainer {
fromItem := p.symbolToCallHierarchyItem(container, doc.URI)
incomingCalls = append(incomingCalls, CallHierarchyIncomingCall{
From: fromItem,
FromRanges: ranges,
})
}
}
return incomingCalls, nil
}
// OutgoingCalls finds all outgoing calls from the given call hierarchy item
func (p *CallHierarchyProvider) OutgoingCalls(params *CallHierarchyOutgoingCallsParams) ([]CallHierarchyOutgoingCall, error) {
// Get the document containing the function/task
doc, exists := p.documents.GetDocument(string(params.Item.URI))
if !exists {
return nil, fmt.Errorf("document not found: %s", params.Item.URI)
}
// Find the function/task symbol
symbol := p.findSymbolByName(doc, params.Item.Name)
if symbol == nil {
return nil, nil
}
// Find all function calls within this function/task
// For simplicity in this implementation, we'll use text-based search
// A real implementation would parse the AST to find actual function calls
// within the function body
var outgoingCalls []CallHierarchyOutgoingCall
// Find the function/task body range
content := doc.GetContent()
lines := strings.Split(content, "\n")
// Find the function definition and its body
functionStart := -1
functionEnd := -1
for lineNum, line := range lines {
// Check for function definition (more flexible pattern to handle return types)
if strings.Contains(line, "function ") && strings.Contains(line, params.Item.Name+"(") {
functionStart = lineNum + 1 // Start after the function definition line
}
if strings.Contains(line, "task ") && strings.Contains(line, params.Item.Name+"(") {
functionStart = lineNum + 1 // Start after the function definition line
}
if functionStart != -1 && (strings.Contains(line, "endfunction") || strings.Contains(line, "endtask")) {
functionEnd = lineNum - 1 // End before the endfunction/endtask line
break
}
}
// If we couldn't find the function body, return empty
if functionStart == -1 || functionEnd == -1 || functionStart > functionEnd {
return outgoingCalls, nil
}
// Look for simple function call patterns only within the function body
for lineNum := functionStart; lineNum <= functionEnd; lineNum++ {
line := lines[lineNum]
// Simple pattern matching for function calls
for i := 0; i < len(line)-1; i++ {
if line[i] == '(' && i > 0 {
// Find the start of the identifier
start := i - 1
for start >= 0 && isWordCharacter(line[start]) {
start--
}
start++
if start < i {
funcName := line[start:i]
if funcName != "" && funcName != params.Item.Name {
// Try to find this function in the workspace
targetSymbol := p.findSymbolInWorkspace(funcName)
if targetSymbol != nil {
targetURI := p.findSymbolDocument(targetSymbol)
if targetURI != "" {
// Check if we already have this call
found := false
for j := range outgoingCalls {
if outgoingCalls[j].To.Name == funcName {
found = true
outgoingCalls[j].FromRanges = append(outgoingCalls[j].FromRanges, lsp.Range{
Start: lsp.Position{Line: lineNum, Character: start},
End: lsp.Position{Line: lineNum, Character: i},
})
break
}
}
if !found {
toItem := p.symbolToCallHierarchyItem(targetSymbol, targetURI)
outgoingCalls = append(outgoingCalls, CallHierarchyOutgoingCall{
To: toItem,
FromRanges: []lsp.Range{{
Start: lsp.Position{Line: lineNum, Character: start},
End: lsp.Position{Line: lineNum, Character: i},
}},
})
}
}
}
}
}
}
}
}
return outgoingCalls, nil
}
// Helper functions
// getWordAtPosition extracts the word at the given position
func getWordAtPosition(content string, pos lsp.Position) string {
lines := strings.Split(content, "\n")
if int(pos.Line) >= len(lines) {
return ""
}
currentLine := lines[pos.Line]
character := int(pos.Character)
if character >= len(currentLine) {
character = len(currentLine) - 1
}
if character < 0 {
return ""
}
// Find word boundaries
start := character
end := character
// Go backwards to find start of word
for start > 0 && isWordCharacter(currentLine[start-1]) {
start--
}
// Go forwards to find end of word
for end < len(currentLine) && isWordCharacter(currentLine[end]) {
end++
}
if start >= end {
return ""
}
return currentLine[start:end]
}
func (p *CallHierarchyProvider) findSymbolAtPosition(doc *Document, pos lsp.Position, word string) *symbols.Symbol {
if doc.SymbolTable == nil {
return nil
}
// Search through all symbols to find one with matching name
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, symbol := range allSymbols {
if symbol.Name == word {
// For now, return the first match
// In a real implementation, we would check position to find the exact symbol
return symbol
}
}
return nil
}
func (p *CallHierarchyProvider) findSymbolByName(doc *Document, name string) *symbols.Symbol {
if doc.SymbolTable == nil {
return nil
}
// Search through all symbols to find one with matching name
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, symbol := range allSymbols {
if symbol.Name == name {
return symbol
}
}
return nil
}
func (p *CallHierarchyProvider) findSymbolInWorkspace(name string) *symbols.Symbol {
docs := p.documents.GetAllDocuments()
for _, doc := range docs {
if doc.SymbolTable != nil {
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, symbol := range allSymbols {
if symbol.Name == name {
return symbol
}
}
}
}
return nil
}
func (p *CallHierarchyProvider) findSymbolDocument(symbol *symbols.Symbol) string {
docs := p.documents.GetAllDocuments()
for _, doc := range docs {
if doc.SymbolTable != nil {
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, s := range allSymbols {
if s == symbol {
return doc.URI
}
}
}
}
return ""
}
func (p *CallHierarchyProvider) symbolToCallHierarchyItem(symbol *symbols.Symbol, uri string) CallHierarchyItem {
detail := ""
if symbol.Type == symbols.SymbolTypeFunction {
detail = "function"
if params, exists := symbol.GetAttribute("parameters"); exists && params != nil {
// Format parameters properly
paramStr := p.formatParameters(params)
detail = fmt.Sprintf("function(%s)", paramStr)
}
if retType, exists := symbol.GetAttribute("returnType"); exists && retType != nil && retType != "" {
detail = fmt.Sprintf("%s -> %s", detail, retType)
}
} else if symbol.Type == symbols.SymbolTypeTask {
detail = "task"
if params, exists := symbol.GetAttribute("parameters"); exists && params != nil {
// Format parameters properly
paramStr := p.formatParameters(params)
detail = fmt.Sprintf("task(%s)", paramStr)
}
}
return CallHierarchyItem{
Name: symbol.Name,
Kind: symbolTypeToLSPKind(symbol.Type),
Detail: detail,
URI: lsp.DocumentURI(uri),
Range: p.symbolPositionToRange(symbol.Position),
SelectionRange: p.symbolPositionToRange(symbol.Position),
}
}
// formatParameters formats parameter information for display
func (p *CallHierarchyProvider) formatParameters(params interface{}) string {
switch p := params.(type) {
case []interface{}:
var paramStrs []string
for _, param := range p {
if paramMap, ok := param.(map[string]interface{}); ok {
paramType := ""
paramName := ""
if t, exists := paramMap["type"]; exists {
paramType = fmt.Sprintf("%v", t)
}
if n, exists := paramMap["name"]; exists {
paramName = fmt.Sprintf("%v", n)
}
if paramType != "" && paramName != "" {
paramStrs = append(paramStrs, fmt.Sprintf("%s %s", paramType, paramName))
}
}
}
return strings.Join(paramStrs, ", ")
case string:
return p
default:
return fmt.Sprintf("%v", params)
}
}
func (p *CallHierarchyProvider) findFunctionCalls(doc *Document, targetName string) []lsp.Range {
var calls []lsp.Range
// Simple text-based search for function calls
// This is a simplified implementation - a real implementation would
// parse the AST to find actual function call nodes
lines := strings.Split(doc.Content, "\n")
for lineNum, line := range lines {
// Look for function call patterns: targetName(
idx := 0
for {
pos := strings.Index(line[idx:], targetName)
if pos == -1 {
break
}
actualPos := idx + pos
// Check if this is followed by '('
if actualPos+len(targetName) < len(line) && line[actualPos+len(targetName)] == '(' {
// Check word boundary before
if actualPos == 0 || !isWordCharacter(line[actualPos-1]) {
calls = append(calls, lsp.Range{
Start: lsp.Position{Line: lineNum, Character: actualPos},
End: lsp.Position{Line: lineNum, Character: actualPos + len(targetName)},
})
}
}
idx = actualPos + 1
}
}
return calls
}
func (p *CallHierarchyProvider) findFunctionCallsInRange(doc *Document, rangeToSearch lsp.Range) []lsp.Range {
var calls []lsp.Range
// Extract the content within the range
lines := strings.Split(doc.Content, "\n")
startLine := int(rangeToSearch.Start.Line)
endLine := int(rangeToSearch.End.Line)
if startLine >= len(lines) || endLine >= len(lines) {
return calls
}
// Search for function calls within the range
for lineNum := startLine; lineNum <= endLine; lineNum++ {
line := lines[lineNum]
// Skip function/task definitions
if strings.Contains(line, "function ") || strings.Contains(line, "task ") {
continue
}
// Simple pattern matching for function calls
// Look for identifier followed by '('
for i := 0; i < len(line)-1; i++ {
if line[i] == '(' && i > 0 {
// Find the start of the identifier
start := i - 1
for start >= 0 && isWordCharacter(line[start]) {
start--
}
start++
if start < i {
// Extract the identifier to check it's not a keyword
identifier := line[start:i]
// Skip SystemVerilog keywords and constructs that use parentheses
if identifier == "if" || identifier == "for" || identifier == "while" ||
identifier == "case" || identifier == "casez" || identifier == "casex" ||
identifier == "function" || identifier == "task" || identifier == "module" ||
identifier == "interface" || identifier == "class" || identifier == "package" {
continue
}
calls = append(calls, lsp.Range{
Start: lsp.Position{Line: lineNum, Character: start},
End: lsp.Position{Line: lineNum, Character: i},
})
}
}
}
}
return calls
}
func (p *CallHierarchyProvider) findContainingFunction(doc *Document, pos lsp.Position) *symbols.Symbol {
if doc.SymbolTable == nil {
return nil
}
// Get all functions and tasks
allSymbols := doc.SymbolTable.GetAllSymbols()
var candidates []*symbols.Symbol
for _, symbol := range allSymbols {
if symbol.Type == symbols.SymbolTypeFunction || symbol.Type == symbols.SymbolTypeTask {
// Check if this function/task starts before the given position
if symbol.Position.Line <= int(pos.Line) {
candidates = append(candidates, symbol)
}
}
}
if len(candidates) == 0 {
return nil
}
// Find the containing function by looking for the function boundaries
// This is a more accurate approach that finds the actual function body
content := doc.GetContent()
lines := strings.Split(content, "\n")
for _, candidate := range candidates {
// Find the function body range for this candidate
functionStart := -1
functionEnd := -1
for lineNum, line := range lines {
// Check for function definition (more flexible pattern to handle return types)
if strings.Contains(line, "function ") && strings.Contains(line, candidate.Name+"(") {
functionStart = lineNum + 1 // Start after the function definition line
}
if strings.Contains(line, "task ") && strings.Contains(line, candidate.Name+"(") {
functionStart = lineNum + 1 // Start after the function definition line
}
if functionStart != -1 && (strings.Contains(line, "endfunction") || strings.Contains(line, "endtask")) {
functionEnd = lineNum - 1 // End before the endfunction/endtask line
break
}
}
// Check if the position is within this function's body
if functionStart != -1 && functionEnd != -1 &&
int(pos.Line) >= functionStart && int(pos.Line) <= functionEnd {
return candidate
}
}
return nil
}
func (p *CallHierarchyProvider) extractFunctionNameFromCall(content string, pos lsp.Position) string {
lines := strings.Split(content, "\n")
if int(pos.Line) >= len(lines) {
return ""
}
line := lines[pos.Line]
character := int(pos.Character)
if character >= len(line) {
character = len(line) - 1
}
if character < 0 {
return ""
}
// Find word boundaries - go both backward and forward
start := character
end := character
// Go backwards to find start of word
for start > 0 && isWordCharacter(line[start-1]) {
start--
}
// Go forwards to find end of word
for end < len(line) && isWordCharacter(line[end]) {
end++
}
if start >= end {
return ""
}
return line[start:end]
}
// symbolPositionToRange converts internal position to LSP range
func (p *CallHierarchyProvider) symbolPositionToRange(position symbols.Position) lsp.Range {
// For now, create a range that starts and ends at the symbol position
// In a more sophisticated implementation, we would track the full range of the symbol
lspPos := lsp.Position{
Line: position.Line,
Character: position.Column,
}
return lsp.Range{
Start: lspPos,
End: lspPos,
}
}
// symbolTypeToLSPKind converts internal symbol type to LSP symbol kind
func symbolTypeToLSPKind(symbolType symbols.SymbolType) lsp.SymbolKind {
switch symbolType {
case symbols.SymbolTypeModule:
return lsp.SKModule
case symbols.SymbolTypeInterface:
return lsp.SKInterface
case symbols.SymbolTypeClass:
return lsp.SKClass
case symbols.SymbolTypeFunction:
return lsp.SKFunction
case symbols.SymbolTypeTask:
return lsp.SKFunction // LSP doesn't have a specific task kind
case symbols.SymbolTypeVariable:
return lsp.SKVariable
case symbols.SymbolTypeParameter:
return lsp.SKConstant
case symbols.SymbolTypePort:
return lsp.SKField
case symbols.SymbolTypeInstance:
return lsp.SKVariable
case symbols.SymbolTypeGenerate:
return lsp.SKNamespace
case symbols.SymbolTypeAlways:
return lsp.SKFunction
case symbols.SymbolTypeInitial:
return lsp.SKFunction
case symbols.SymbolTypeConstraint:
return lsp.SKFunction
case symbols.SymbolTypeTypedef:
return lsp.SKClass
case symbols.SymbolTypeEnum:
return lsp.SKEnum
case symbols.SymbolTypeStruct:
return lsp.SKStruct
case symbols.SymbolTypeUnion:
return lsp.SKStruct
case symbols.SymbolTypePackage:
return lsp.SKPackage
case symbols.SymbolTypeProperty:
return lsp.SKProperty
case symbols.SymbolTypeSequence:
return lsp.SKFunction
case symbols.SymbolTypeAssertion:
return lsp.SKFunction
case symbols.SymbolTypeCoverage:
return lsp.SKFunction
default:
return lsp.SKVariable
}
}
// Request parameter types
type CallHierarchyIncomingCallsParams struct {
Item CallHierarchyItem `json:"item"`
}
type CallHierarchyOutgoingCallsParams struct {
Item CallHierarchyItem `json:"item"`
}
package lsp
import (
"context"
"fmt"
"regexp"
"strings"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// CodeActionsProvider handles textDocument/codeAction requests
type CodeActionsProvider struct {
documentManager *DocumentManager
}
// NewCodeActionsProvider creates a new code actions provider
func NewCodeActionsProvider(dm *DocumentManager) *CodeActionsProvider {
return &CodeActionsProvider{
documentManager: dm,
}
}
// ProvideCodeActions handles code action requests
func (cap *CodeActionsProvider) ProvideCodeActions(ctx context.Context, params *lsp.CodeActionParams) ([]CodeAction, error) {
uri := string(params.TextDocument.URI)
// Get the document
doc, exists := cap.documentManager.GetDocument(uri)
if !exists {
return nil, nil
}
var actions []CodeAction
// Handle diagnostics-based code actions (quick fixes)
if len(params.Context.Diagnostics) > 0 {
for _, diagnostic := range params.Context.Diagnostics {
diagnosticActions := cap.getQuickFixesForDiagnostic(doc, diagnostic, params.Range)
actions = append(actions, diagnosticActions...)
}
}
// Handle refactoring code actions
refactoringActions := cap.getRefactoringActions(doc, params.Range)
actions = append(actions, refactoringActions...)
return actions, nil
}
// getQuickFixesForDiagnostic returns quick fixes for a specific diagnostic
func (cap *CodeActionsProvider) getQuickFixesForDiagnostic(doc *Document, diagnostic lsp.Diagnostic, range_ lsp.Range) []CodeAction {
var actions []CodeAction
message := diagnostic.Message
// Quick fix for undefined symbols
if strings.Contains(message, "undefined") || strings.Contains(message, "not declared") {
// Extract symbol name from error message
re := regexp.MustCompile(`'(\w+)'`)
matches := re.FindStringSubmatch(message)
if len(matches) > 1 {
symbolName := matches[1]
action := cap.createDeclareSymbolAction(doc, symbolName, diagnostic.Range)
if action != nil {
actions = append(actions, *action)
}
}
}
// Quick fix for missing semicolon
if strings.Contains(message, "missing semicolon") || strings.Contains(message, "expected ';'") {
action := cap.createAddSemicolonAction(doc, diagnostic.Range)
if action != nil {
actions = append(actions, *action)
}
}
// Quick fix for unused symbols
if strings.Contains(message, "unused") || strings.Contains(message, "never used") {
action := cap.createRemoveUnusedAction(doc, diagnostic.Range)
if action != nil {
actions = append(actions, *action)
}
}
// Quick fix for missing module imports
if strings.Contains(message, "module") && (strings.Contains(message, "not found") || strings.Contains(message, "unknown")) {
re := regexp.MustCompile(`module\s*'(\w+)'`)
matches := re.FindStringSubmatch(message)
if len(matches) > 1 {
moduleName := matches[1]
action := cap.createImportModuleAction(doc, moduleName, diagnostic.Range)
if action != nil {
actions = append(actions, *action)
}
}
}
return actions
}
// getRefactoringActions returns available refactoring actions for the selection
func (cap *CodeActionsProvider) getRefactoringActions(doc *Document, range_ lsp.Range) []CodeAction {
var actions []CodeAction
// Check if we have a selection
if range_.Start.Line != range_.End.Line || range_.Start.Character != range_.End.Character {
// Extract to function/task
action := cap.createExtractFunctionAction(doc, range_)
if action != nil {
actions = append(actions, *action)
}
}
// Get word at cursor position
content := doc.GetContent()
word := cap.getWordAtPosition(content, range_.Start.Line, range_.Start.Character)
if word != "" {
// Quick access to rename
renameAction := cap.createRenameAction(word, range_)
if renameAction != nil {
actions = append(actions, *renameAction)
}
// Inline variable
if cap.isVariable(doc, word) {
inlineAction := cap.createInlineVariableAction(doc, word, range_)
if inlineAction != nil {
actions = append(actions, *inlineAction)
}
}
}
// Convert always block styles
alwaysAction := cap.createConvertAlwaysBlockAction(doc, range_)
if alwaysAction != nil {
actions = append(actions, *alwaysAction)
}
return actions
}
// createDeclareSymbolAction creates a code action to declare an undefined symbol
func (cap *CodeActionsProvider) createDeclareSymbolAction(doc *Document, symbolName string, range_ lsp.Range) *CodeAction {
// Create text edit to declare the symbol
declaration := fmt.Sprintf("logic %s;\n", symbolName)
// Find a good insertion point (e.g., after module declaration)
insertLine := cap.findDeclarationInsertPoint(doc)
edit := lsp.TextEdit{
Range: lsp.Range{
Start: lsp.Position{Line: insertLine, Character: 0},
End: lsp.Position{Line: insertLine, Character: 0},
},
NewText: " " + declaration,
}
return &CodeAction{
Title: fmt.Sprintf("Declare '%s' as logic", symbolName),
Kind: QuickFix,
Edit: &lsp.WorkspaceEdit{
Changes: map[string][]lsp.TextEdit{
doc.URI: {edit},
},
},
}
}
// createAddSemicolonAction creates a code action to add a missing semicolon
func (cap *CodeActionsProvider) createAddSemicolonAction(doc *Document, range_ lsp.Range) *CodeAction {
// Add semicolon at the end of the line
lines := strings.Split(doc.GetContent(), "\n")
if range_.End.Line >= len(lines) {
return nil
}
line := lines[range_.End.Line]
endChar := len(strings.TrimRight(line, " \t"))
edit := lsp.TextEdit{
Range: lsp.Range{
Start: lsp.Position{Line: range_.End.Line, Character: endChar},
End: lsp.Position{Line: range_.End.Line, Character: endChar},
},
NewText: ";",
}
return &CodeAction{
Title: "Add missing semicolon",
Kind: QuickFix,
Edit: &lsp.WorkspaceEdit{
Changes: map[string][]lsp.TextEdit{
doc.URI: {edit},
},
},
}
}
// createRemoveUnusedAction creates a code action to remove or comment out unused code
func (cap *CodeActionsProvider) createRemoveUnusedAction(doc *Document, range_ lsp.Range) *CodeAction {
// Get the lines to remove
lines := strings.Split(doc.GetContent(), "\n")
// Create text edit to comment out the range
var commentedLines []string
for i := range_.Start.Line; i <= range_.End.Line && i < len(lines); i++ {
commentedLines = append(commentedLines, "// "+lines[i])
}
edit := lsp.TextEdit{
Range: range_,
NewText: strings.Join(commentedLines, "\n"),
}
return &CodeAction{
Title: "Comment out unused code",
Kind: QuickFix,
Edit: &lsp.WorkspaceEdit{
Changes: map[string][]lsp.TextEdit{
doc.URI: {edit},
},
},
}
}
// createImportModuleAction creates a code action to import a missing module
func (cap *CodeActionsProvider) createImportModuleAction(doc *Document, moduleName string, range_ lsp.Range) *CodeAction {
// Create import statement
importStmt := fmt.Sprintf("`include \"%s.sv\"\n", moduleName)
// Insert at the beginning of the file
edit := lsp.TextEdit{
Range: lsp.Range{
Start: lsp.Position{Line: 0, Character: 0},
End: lsp.Position{Line: 0, Character: 0},
},
NewText: importStmt,
}
return &CodeAction{
Title: fmt.Sprintf("Import module '%s'", moduleName),
Kind: QuickFix,
Edit: &lsp.WorkspaceEdit{
Changes: map[string][]lsp.TextEdit{
doc.URI: {edit},
},
},
}
}
// createExtractFunctionAction creates a code action to extract selected code into a function
func (cap *CodeActionsProvider) createExtractFunctionAction(doc *Document, range_ lsp.Range) *CodeAction {
// Get selected text
content := doc.GetContent()
lines := strings.Split(content, "\n")
var selectedLines []string
for i := range_.Start.Line; i <= range_.End.Line && i < len(lines); i++ {
if i == range_.Start.Line && i == range_.End.Line {
// Single line selection
line := lines[i]
if range_.Start.Character < len(line) && range_.End.Character <= len(line) {
selectedLines = append(selectedLines, line[range_.Start.Character:range_.End.Character])
}
} else if i == range_.Start.Line {
// First line of multi-line selection
line := lines[i]
if range_.Start.Character < len(line) {
selectedLines = append(selectedLines, line[range_.Start.Character:])
}
} else if i == range_.End.Line {
// Last line of multi-line selection
line := lines[i]
if range_.End.Character <= len(line) {
selectedLines = append(selectedLines, line[:range_.End.Character])
}
} else {
// Middle lines
selectedLines = append(selectedLines, lines[i])
}
}
if len(selectedLines) == 0 {
return nil
}
// Create function
functionName := "extracted_function"
functionBody := strings.Join(selectedLines, "\n")
function := fmt.Sprintf("function void %s();\n%s\nendfunction\n\n", functionName, functionBody)
// Create edits
edits := []lsp.TextEdit{
// Replace selection with function call
{
Range: range_,
NewText: fmt.Sprintf("%s();", functionName),
},
// Insert function definition
{
Range: lsp.Range{
Start: lsp.Position{Line: cap.findFunctionInsertPoint(doc), Character: 0},
End: lsp.Position{Line: cap.findFunctionInsertPoint(doc), Character: 0},
},
NewText: function,
},
}
return &CodeAction{
Title: "Extract to function",
Kind: RefactorExtract,
Edit: &lsp.WorkspaceEdit{
Changes: map[string][]lsp.TextEdit{
doc.URI: edits,
},
},
}
}
// createRenameAction creates a code action for quick access to rename
func (cap *CodeActionsProvider) createRenameAction(symbolName string, range_ lsp.Range) *CodeAction {
return &CodeAction{
Title: fmt.Sprintf("Rename '%s'", symbolName),
Kind: RefactorRename,
Command: &lsp.Command{
Title: "Rename",
Command: "editor.action.rename",
},
}
}
// createInlineVariableAction creates a code action to inline a variable
func (cap *CodeActionsProvider) createInlineVariableAction(doc *Document, varName string, range_ lsp.Range) *CodeAction {
// Find variable declaration and all usages
declaration, value := cap.findVariableDeclaration(doc, varName)
if declaration == nil || value == "" {
return nil
}
// Find all references
references := cap.findVariableReferences(doc, varName)
// Create edits
edits := []lsp.TextEdit{
// Remove declaration
{
Range: *declaration,
NewText: "",
},
}
// Replace all references with value
for _, ref := range references {
edits = append(edits, lsp.TextEdit{
Range: ref,
NewText: value,
})
}
return &CodeAction{
Title: fmt.Sprintf("Inline variable '%s'", varName),
Kind: RefactorInline,
Edit: &lsp.WorkspaceEdit{
Changes: map[string][]lsp.TextEdit{
doc.URI: edits,
},
},
}
}
// createConvertAlwaysBlockAction creates a code action to convert between always block styles
func (cap *CodeActionsProvider) createConvertAlwaysBlockAction(doc *Document, range_ lsp.Range) *CodeAction {
// Check if cursor is in an always block
content := doc.GetContent()
lines := strings.Split(content, "\n")
if range_.Start.Line >= len(lines) {
return nil
}
// Find the always block
line := lines[range_.Start.Line]
var newBlock string
var title string
if strings.Contains(line, "always @") {
// Convert to always_comb or always_ff
if strings.Contains(line, "posedge") || strings.Contains(line, "negedge") {
newBlock = "always_ff @" + line[strings.Index(line, "@")+1:]
title = "Convert to always_ff"
} else {
newBlock = "always_comb"
title = "Convert to always_comb"
}
} else if strings.Contains(line, "always_comb") {
// Convert to always @(*)
newBlock = "always @(*)"
title = "Convert to always @(*)"
} else if strings.Contains(line, "always_ff") {
// Extract the sensitivity list
start := strings.Index(line, "@")
if start != -1 {
newBlock = "always " + line[start:]
title = "Convert to always @"
} else {
return nil
}
} else {
return nil
}
// Find the range of the always keyword
start := strings.Index(line, "always")
if start == -1 {
return nil
}
end := start
for end < len(line) && line[end] != '\n' && (line[end] != 'b' || (end+5 < len(line) && line[end:end+5] != "begin")) {
end++
}
edit := lsp.TextEdit{
Range: lsp.Range{
Start: lsp.Position{Line: range_.Start.Line, Character: start},
End: lsp.Position{Line: range_.Start.Line, Character: end},
},
NewText: newBlock,
}
return &CodeAction{
Title: title,
Kind: Refactor,
Edit: &lsp.WorkspaceEdit{
Changes: map[string][]lsp.TextEdit{
doc.URI: {edit},
},
},
}
}
// Helper functions
// getWordAtPosition extracts the word at the given position
func (cap *CodeActionsProvider) getWordAtPosition(content string, line, character int) string {
lines := strings.Split(content, "\n")
if line >= len(lines) {
return ""
}
currentLine := lines[line]
if character >= len(currentLine) {
character = len(currentLine) - 1
}
if character < 0 {
return ""
}
// Find word boundaries
start := character
end := character
// Go backwards to find start of word
for start > 0 && isWordCharacter(currentLine[start-1]) {
start--
}
// Go forwards to find end of word
for end < len(currentLine) && isWordCharacter(currentLine[end]) {
end++
}
if start >= end {
return ""
}
return currentLine[start:end]
}
// isVariable checks if a symbol is a variable
func (cap *CodeActionsProvider) isVariable(doc *Document, symbolName string) bool {
symbolTable := doc.GetSymbolTable()
if symbol, found := symbolTable.FindSymbol(symbolName); found {
return symbol.Type == symbols.SymbolTypeVariable ||
symbol.Type == symbols.SymbolTypePort ||
symbol.Type == symbols.SymbolTypeParameter
}
return false
}
// findDeclarationInsertPoint finds a good place to insert declarations
func (cap *CodeActionsProvider) findDeclarationInsertPoint(doc *Document) int {
lines := strings.Split(doc.GetContent(), "\n")
// Look for module declaration
for i, line := range lines {
if strings.Contains(line, "module") && !strings.HasPrefix(strings.TrimSpace(line), "//") {
// Skip past the port list
for j := i + 1; j < len(lines); j++ {
if strings.Contains(lines[j], ");") {
return j + 1
}
}
return i + 1
}
}
return 0
}
// findFunctionInsertPoint finds a good place to insert functions
func (cap *CodeActionsProvider) findFunctionInsertPoint(doc *Document) int {
lines := strings.Split(doc.GetContent(), "\n")
// Look for existing functions or endmodule
lastFunction := -1
for i, line := range lines {
if strings.Contains(line, "function") || strings.Contains(line, "task") {
lastFunction = i
} else if strings.Contains(line, "endmodule") {
if lastFunction != -1 {
// Find the end of the last function
for j := lastFunction; j < i; j++ {
if strings.Contains(lines[j], "endfunction") || strings.Contains(lines[j], "endtask") {
return j + 2 // Add some spacing
}
}
}
return i // Insert before endmodule
}
}
return len(lines) - 1
}
// findVariableDeclaration finds a variable declaration and its initial value
func (cap *CodeActionsProvider) findVariableDeclaration(doc *Document, varName string) (*lsp.Range, string) {
lines := strings.Split(doc.GetContent(), "\n")
// Simple regex to find variable declaration with assignment
pattern := fmt.Sprintf(`\b(\w+\s+)?%s\s*=\s*([^;]+);`, regexp.QuoteMeta(varName))
re := regexp.MustCompile(pattern)
for i, line := range lines {
matches := re.FindStringSubmatch(line)
if len(matches) > 2 {
// Found declaration with value
start := strings.Index(line, matches[0])
end := start + len(matches[0])
return &lsp.Range{
Start: lsp.Position{Line: i, Character: start},
End: lsp.Position{Line: i, Character: end},
}, strings.TrimSpace(matches[2])
}
}
return nil, ""
}
// findVariableReferences finds all references to a variable
func (cap *CodeActionsProvider) findVariableReferences(doc *Document, varName string) []lsp.Range {
var references []lsp.Range
lines := strings.Split(doc.GetContent(), "\n")
// Simple word boundary regex
pattern := fmt.Sprintf(`\b%s\b`, regexp.QuoteMeta(varName))
re := regexp.MustCompile(pattern)
for i, line := range lines {
matches := re.FindAllStringIndex(line, -1)
for _, match := range matches {
references = append(references, lsp.Range{
Start: lsp.Position{Line: i, Character: match[0]},
End: lsp.Position{Line: i, Character: match[1]},
})
}
}
return references
}
package lsp
import (
"context"
"strings"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// CompletionProvider handles textDocument/completion requests
type CompletionProvider struct {
documentManager *DocumentManager
}
// NewCompletionProvider creates a new completion provider
func NewCompletionProvider(dm *DocumentManager) *CompletionProvider {
return &CompletionProvider{
documentManager: dm,
}
}
// ProvideCompletion handles completion requests
func (cp *CompletionProvider) ProvideCompletion(ctx context.Context, params *lsp.CompletionParams) (*lsp.CompletionList, error) {
uri := string(params.TextDocument.URI)
line := params.Position.Line
character := params.Position.Character
// Get the document
doc, exists := cp.documentManager.GetDocument(uri)
if !exists {
return &lsp.CompletionList{
IsIncomplete: false,
Items: []lsp.CompletionItem{},
}, nil
}
// Get the text at the current position to determine context
content := doc.GetContent()
context := cp.getCompletionContext(content, line, character)
var items []lsp.CompletionItem
// Add SystemVerilog keywords
items = append(items, cp.getKeywordCompletions(context)...)
// Add symbols from the current document
symbolTable := doc.GetSymbolTable()
items = append(items, cp.getSymbolCompletions(symbolTable, context)...)
// Add symbols from all other documents for cross-file references
items = append(items, cp.getCrossFileCompletions(context)...)
return &lsp.CompletionList{
IsIncomplete: false,
Items: items,
}, nil
}
// getCompletionContext analyzes the text at the cursor position to determine completion context
func (cp *CompletionProvider) getCompletionContext(content string, line, character int) string {
lines := strings.Split(content, "\n")
if line >= len(lines) {
return ""
}
currentLine := lines[line]
if character > len(currentLine) {
character = len(currentLine)
}
// Get the word being typed (go back to find word boundary)
start := character
for start > 0 && (isAlphaNumeric(currentLine[start-1]) || currentLine[start-1] == '_') {
start--
}
return currentLine[start:character]
}
// getKeywordCompletions returns SystemVerilog keyword completions
func (cp *CompletionProvider) getKeywordCompletions(context string) []lsp.CompletionItem {
keywords := []string{
"module", "endmodule", "interface", "endinterface", "class", "endclass",
"function", "endfunction", "task", "endtask", "package", "endpackage",
"program", "endprogram", "generate", "endgenerate",
"always", "always_comb", "always_ff", "always_latch",
"initial", "final", "forever", "repeat", "while", "for", "foreach",
"if", "else", "case", "casex", "casez", "default", "endcase",
"begin", "end", "fork", "join", "join_any", "join_none",
"input", "output", "inout", "ref", "const",
"logic", "bit", "byte", "shortint", "int", "longint", "integer",
"real", "shortreal", "string", "reg", "wire",
"signed", "unsigned", "packed", "unpacked",
"parameter", "localparam", "typedef", "enum", "struct", "union",
"import", "export", "bind", "config", "design", "instance",
"genvar", "static", "automatic", "extern", "pure", "virtual",
"protected", "local", "rand", "randc", "constraint", "solve",
"before", "soft", "inside", "dist", "with", "matches",
"assert", "assume", "cover", "restrict", "property", "sequence",
"clocking", "endclocking", "default", "input", "output", "global",
"disable", "iff", "throughout", "within", "intersect", "first_match",
}
var items []lsp.CompletionItem
for _, keyword := range keywords {
if strings.HasPrefix(keyword, strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: keyword,
Kind: lsp.CIKKeyword,
Detail: "SystemVerilog keyword",
Documentation: "SystemVerilog language keyword",
InsertText: keyword,
})
}
}
return items
}
// getSymbolCompletions returns symbol completions from the current document
func (cp *CompletionProvider) getSymbolCompletions(symbolTable *symbols.SymbolTable, context string) []lsp.CompletionItem {
var items []lsp.CompletionItem
// Get all symbols from the symbol table
stats := symbolTable.GetStats()
if stats["total_symbols"].(int) == 0 {
return items
}
// Get symbols by type
modules := symbolTable.GetSymbolsByType(symbols.SymbolTypeModule)
interfaces := symbolTable.GetSymbolsByType(symbols.SymbolTypeInterface)
classes := symbolTable.GetSymbolsByType(symbols.SymbolTypeClass)
functions := symbolTable.GetSymbolsByType(symbols.SymbolTypeFunction)
tasks := symbolTable.GetSymbolsByType(symbols.SymbolTypeTask)
parameters := symbolTable.GetSymbolsByType(symbols.SymbolTypeParameter)
variables := symbolTable.GetSymbolsByType(symbols.SymbolTypeVariable)
ports := symbolTable.GetSymbolsByType(symbols.SymbolTypePort)
// Add module completions
for _, symbol := range modules {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKModule,
Detail: "module",
Documentation: "SystemVerilog module",
InsertText: symbol.Name,
})
}
}
// Add interface completions
for _, symbol := range interfaces {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKInterface,
Detail: "interface",
Documentation: "SystemVerilog interface",
InsertText: symbol.Name,
})
}
}
// Add class completions
for _, symbol := range classes {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKClass,
Detail: "class",
Documentation: "SystemVerilog class",
InsertText: symbol.Name,
})
}
}
// Add function completions
for _, symbol := range functions {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKFunction,
Detail: "function",
Documentation: "SystemVerilog function",
InsertText: symbol.Name,
})
}
}
// Add task completions
for _, symbol := range tasks {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKFunction,
Detail: "task",
Documentation: "SystemVerilog task",
InsertText: symbol.Name,
})
}
}
// Add parameter completions
for _, symbol := range parameters {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKConstant,
Detail: "parameter",
Documentation: "SystemVerilog parameter",
InsertText: symbol.Name,
})
}
}
// Add variable completions
for _, symbol := range variables {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKVariable,
Detail: "variable",
Documentation: "SystemVerilog variable",
InsertText: symbol.Name,
})
}
}
// Add port completions
for _, symbol := range ports {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKField,
Detail: "port",
Documentation: "SystemVerilog port",
InsertText: symbol.Name,
})
}
}
return items
}
// getCrossFileCompletions returns symbol completions from other documents
func (cp *CompletionProvider) getCrossFileCompletions(context string) []lsp.CompletionItem {
var items []lsp.CompletionItem
// Get modules from all documents (these are typically accessible across files)
modules := cp.documentManager.FindSymbolsOfType(symbols.SymbolTypeModule)
for _, symbol := range modules {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKModule,
Detail: "module (external)",
Documentation: "SystemVerilog module from another file",
InsertText: symbol.Name,
})
}
}
// Get interfaces from all documents
interfaces := cp.documentManager.FindSymbolsOfType(symbols.SymbolTypeInterface)
for _, symbol := range interfaces {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKInterface,
Detail: "interface (external)",
Documentation: "SystemVerilog interface from another file",
InsertText: symbol.Name,
})
}
}
// Get classes from all documents
classes := cp.documentManager.FindSymbolsOfType(symbols.SymbolTypeClass)
for _, symbol := range classes {
if strings.HasPrefix(strings.ToLower(symbol.Name), strings.ToLower(context)) {
items = append(items, lsp.CompletionItem{
Label: symbol.Name,
Kind: lsp.CIKClass,
Detail: "class (external)",
Documentation: "SystemVerilog class from another file",
InsertText: symbol.Name,
})
}
}
return items
}
// isAlphaNumeric checks if a character is alphanumeric
func isAlphaNumeric(c byte) bool {
return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9')
}
package lsp
import (
"context"
"strings"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// DefinitionProvider handles textDocument/definition requests
type DefinitionProvider struct {
documentManager *DocumentManager
}
// NewDefinitionProvider creates a new definition provider
func NewDefinitionProvider(dm *DocumentManager) *DefinitionProvider {
return &DefinitionProvider{
documentManager: dm,
}
}
// ProvideDefinition handles definition requests
func (dp *DefinitionProvider) ProvideDefinition(ctx context.Context, params *lsp.TextDocumentPositionParams) ([]lsp.Location, error) {
uri := string(params.TextDocument.URI)
line := params.Position.Line
character := params.Position.Character
// Get the document
doc, exists := dp.documentManager.GetDocument(uri)
if !exists {
return []lsp.Location{}, nil
}
// Get the word at the current position
content := doc.GetContent()
word := dp.getWordAtPosition(content, line, character)
if word == "" {
return []lsp.Location{}, nil
}
// Find the symbol definition
locations := dp.findSymbolDefinition(word, uri)
return locations, nil
}
// getWordAtPosition extracts the word at the given position
func (dp *DefinitionProvider) getWordAtPosition(content string, line, character int) string {
lines := strings.Split(content, "\n")
if line >= len(lines) {
return ""
}
currentLine := lines[line]
if character >= len(currentLine) {
character = len(currentLine) - 1
}
if character < 0 {
return ""
}
// Find word boundaries
start := character
end := character
// Go backwards to find start of word
for start > 0 && (isWordCharacter(currentLine[start-1])) {
start--
}
// Go forwards to find end of word
for end < len(currentLine) && (isWordCharacter(currentLine[end])) {
end++
}
if start >= end {
return ""
}
return currentLine[start:end]
}
// findSymbolDefinition finds the definition location for a symbol
func (dp *DefinitionProvider) findSymbolDefinition(symbolName, requestingURI string) []lsp.Location {
var locations []lsp.Location
// First, try to find the symbol in the current document
if doc, exists := dp.documentManager.GetDocument(requestingURI); exists {
symbolTable := doc.GetSymbolTable()
if symbol, found := symbolTable.FindSymbol(symbolName); found {
location := dp.symbolToLocation(symbol)
if location != nil {
locations = append(locations, *location)
return locations // Return early if found in current document
}
}
}
// If not found in current document, search all documents
symbols := dp.documentManager.FindSymbol(symbolName)
for _, symbol := range symbols {
location := dp.symbolToLocation(symbol)
if location != nil {
locations = append(locations, *location)
}
}
// Prioritize certain symbol types for definition lookup
if len(locations) > 1 {
locations = dp.prioritizeDefinitions(locations, symbolName)
}
return locations
}
// symbolToLocation converts a symbol to an LSP location
func (dp *DefinitionProvider) symbolToLocation(symbol *symbols.Symbol) *lsp.Location {
if symbol == nil {
return nil
}
// Use the file from the symbol's position, or fall back to empty if not set
uri := symbol.Position.File
if uri == "" {
return nil
}
return &lsp.Location{
URI: lsp.DocumentURI(uri),
Range: lsp.Range{
Start: lsp.Position{
Line: symbol.Position.Line - 1, // LSP uses 0-based line numbers
Character: symbol.Position.Column - 1, // LSP uses 0-based column numbers
},
End: lsp.Position{
Line: symbol.Position.Line - 1,
Character: symbol.Position.Column + len(symbol.Name) - 1,
},
},
}
}
// prioritizeDefinitions prioritizes definitions based on symbol type
func (dp *DefinitionProvider) prioritizeDefinitions(locations []lsp.Location, symbolName string) []lsp.Location {
// For now, just return the first location
// In the future, we could implement more sophisticated prioritization:
// 1. Prefer declarations over usages
// 2. Prefer local scope over global scope
// 3. Prefer modules/interfaces/classes over variables
if len(locations) > 0 {
return []lsp.Location{locations[0]}
}
return locations
}
// isWordCharacter checks if a character is part of a word (identifier)
func isWordCharacter(c byte) bool {
return (c >= 'a' && c <= 'z') ||
(c >= 'A' && c <= 'Z') ||
(c >= '0' && c <= '9') ||
c == '_' || c == '$' // SystemVerilog allows $ in identifiers
}
package lsp
import (
"fmt"
"sync"
"time"
"github.com/adicens/systemverilog-lsp/internal/analyzer"
"github.com/adicens/systemverilog-lsp/internal/parser"
"github.com/adicens/systemverilog-lsp/internal/symbols"
)
// Document represents a single SystemVerilog document
type Document struct {
URI string
Version int
Content string
AST *parser.AST
SymbolTable *symbols.SymbolTable
Diagnostics []analyzer.Diagnostic
LastUpdated time.Time
mutex sync.RWMutex
}
// DocumentManager manages all open documents
type DocumentManager struct {
documents map[string]*Document
analyzer *analyzer.Analyzer
mutex sync.RWMutex
}
// NewDocumentManager creates a new document manager
func NewDocumentManager() *DocumentManager {
return &DocumentManager{
documents: make(map[string]*Document),
analyzer: analyzer.NewAnalyzer(),
}
}
// OpenDocument opens a new document
func (dm *DocumentManager) OpenDocument(uri string, version int, content string) (*Document, error) {
dm.mutex.Lock()
defer dm.mutex.Unlock()
// Parse the document
p := parser.NewParser(content)
ast := p.Parse()
// Build symbol table
builder := symbols.NewSymbolBuilder(uri)
symbolTable, err := builder.Build(ast)
if err != nil {
// If symbol table building fails, create empty symbol table
symbolTable = symbols.NewSymbolTable()
}
// Create document
doc := &Document{
URI: uri,
Version: version,
Content: content,
AST: ast,
SymbolTable: symbolTable,
Diagnostics: make([]analyzer.Diagnostic, 0),
LastUpdated: time.Now(),
}
// Run semantic analysis
if ast != nil && symbolTable != nil {
doc.Diagnostics = dm.analyzer.Analyze(ast, symbolTable)
}
dm.documents[uri] = doc
return doc, nil
}
// UpdateDocument updates an existing document
func (dm *DocumentManager) UpdateDocument(uri string, version int, content string) (*Document, error) {
dm.mutex.Lock()
defer dm.mutex.Unlock()
doc, exists := dm.documents[uri]
if !exists {
return nil, fmt.Errorf("document not found: %s", uri)
}
doc.mutex.Lock()
defer doc.mutex.Unlock()
// Update document content
doc.Version = version
doc.Content = content
doc.LastUpdated = time.Now()
// Re-parse the document
p := parser.NewParser(content)
ast := p.Parse()
doc.AST = ast
// Rebuild symbol table
builder := symbols.NewSymbolBuilder(uri)
symbolTable, err := builder.Build(ast)
if err != nil {
// If symbol table building fails, create empty symbol table
symbolTable = symbols.NewSymbolTable()
}
doc.SymbolTable = symbolTable
// Run semantic analysis
if ast != nil && symbolTable != nil {
doc.Diagnostics = dm.analyzer.Analyze(ast, symbolTable)
}
return doc, nil
}
// CloseDocument closes a document
func (dm *DocumentManager) CloseDocument(uri string) error {
dm.mutex.Lock()
defer dm.mutex.Unlock()
if _, exists := dm.documents[uri]; !exists {
return fmt.Errorf("document not found: %s", uri)
}
delete(dm.documents, uri)
return nil
}
// GetDocument retrieves a document by URI
func (dm *DocumentManager) GetDocument(uri string) (*Document, bool) {
dm.mutex.RLock()
defer dm.mutex.RUnlock()
doc, exists := dm.documents[uri]
return doc, exists
}
// GetAllDocuments returns all open documents
func (dm *DocumentManager) GetAllDocuments() map[string]*Document {
dm.mutex.RLock()
defer dm.mutex.RUnlock()
docs := make(map[string]*Document)
for uri, doc := range dm.documents {
docs[uri] = doc
}
return docs
}
// GetDocumentCount returns the number of open documents
func (dm *DocumentManager) GetDocumentCount() int {
dm.mutex.RLock()
defer dm.mutex.RUnlock()
return len(dm.documents)
}
// GetDocumentURIs returns all document URIs
func (dm *DocumentManager) GetDocumentURIs() []string {
dm.mutex.RLock()
defer dm.mutex.RUnlock()
uris := make([]string, 0, len(dm.documents))
for uri := range dm.documents {
uris = append(uris, uri)
}
return uris
}
// FindSymbol finds a symbol by name across all documents
func (dm *DocumentManager) FindSymbol(name string) []*symbols.Symbol {
dm.mutex.RLock()
defer dm.mutex.RUnlock()
var results []*symbols.Symbol
for _, doc := range dm.documents {
doc.mutex.RLock()
if symbol, exists := doc.SymbolTable.FindSymbol(name); exists {
results = append(results, symbol)
}
doc.mutex.RUnlock()
}
return results
}
// FindSymbolsOfType finds all symbols of a specific type across all documents
func (dm *DocumentManager) FindSymbolsOfType(symbolType symbols.SymbolType) []*symbols.Symbol {
dm.mutex.RLock()
defer dm.mutex.RUnlock()
var results []*symbols.Symbol
for _, doc := range dm.documents {
doc.mutex.RLock()
symbols := doc.SymbolTable.GetSymbolsByType(symbolType)
results = append(results, symbols...)
doc.mutex.RUnlock()
}
return results
}
// GetSymbolReferences finds all references to a symbol across all documents
func (dm *DocumentManager) GetSymbolReferences(symbolID string) []symbols.Position {
dm.mutex.RLock()
defer dm.mutex.RUnlock()
var results []symbols.Position
for _, doc := range dm.documents {
doc.mutex.RLock()
refs := doc.SymbolTable.GetSymbolReferences(symbolID)
results = append(results, refs...)
doc.mutex.RUnlock()
}
return results
}
// GetDiagnostics returns diagnostics for a document
func (dm *DocumentManager) GetDiagnostics(uri string) []Diagnostic {
dm.mutex.RLock()
defer dm.mutex.RUnlock()
doc, exists := dm.documents[uri]
if !exists {
return []Diagnostic{}
}
doc.mutex.RLock()
defer doc.mutex.RUnlock()
var diagnostics []Diagnostic
// Add parser errors
for _, err := range doc.AST.Errors {
diagnostics = append(diagnostics, Diagnostic{
Range: DiagnosticRange{
Start: DiagnosticPosition{
Line: err.Position.Line - 1, // LSP uses 0-based line numbers
Character: err.Position.Column - 1, // LSP uses 0-based column numbers
},
End: DiagnosticPosition{
Line: err.Position.Line - 1,
Character: err.Position.Column - 1,
},
},
Severity: DiagnosticSeverityError,
Message: err.Message,
Source: "systemverilog-lsp",
})
}
// Add semantic diagnostics
for _, diag := range doc.Diagnostics {
diagnostics = append(diagnostics, Diagnostic{
Range: DiagnosticRange{
Start: DiagnosticPosition{
Line: diag.Range.Start.Line,
Character: diag.Range.Start.Character,
},
End: DiagnosticPosition{
Line: diag.Range.End.Line,
Character: diag.Range.End.Character,
},
},
Severity: DiagnosticSeverity(diag.Severity),
Message: diag.Message,
Source: diag.Source,
})
}
return diagnostics
}
// GetStats returns statistics about the document manager
func (dm *DocumentManager) GetStats() map[string]interface{} {
dm.mutex.RLock()
defer dm.mutex.RUnlock()
stats := make(map[string]interface{})
stats["document_count"] = len(dm.documents)
totalSymbols := 0
totalScopes := 0
totalErrors := 0
for _, doc := range dm.documents {
doc.mutex.RLock()
docStats := doc.SymbolTable.GetStats()
totalSymbols += docStats["total_symbols"].(int)
totalScopes += docStats["total_scopes"].(int)
totalErrors += len(doc.AST.Errors)
doc.mutex.RUnlock()
}
stats["total_symbols"] = totalSymbols
stats["total_scopes"] = totalScopes
stats["total_errors"] = totalErrors
return stats
}
// Document methods
// GetContent returns the document content
func (d *Document) GetContent() string {
d.mutex.RLock()
defer d.mutex.RUnlock()
return d.Content
}
// GetVersion returns the document version
func (d *Document) GetVersion() int {
d.mutex.RLock()
defer d.mutex.RUnlock()
return d.Version
}
// GetAST returns the document AST
func (d *Document) GetAST() *parser.AST {
d.mutex.RLock()
defer d.mutex.RUnlock()
return d.AST
}
// GetSymbolTable returns the document symbol table
func (d *Document) GetSymbolTable() *symbols.SymbolTable {
d.mutex.RLock()
defer d.mutex.RUnlock()
return d.SymbolTable
}
// GetLastUpdated returns when the document was last updated
func (d *Document) GetLastUpdated() time.Time {
d.mutex.RLock()
defer d.mutex.RUnlock()
return d.LastUpdated
}
// HasErrors returns true if the document has parsing errors
func (d *Document) HasErrors() bool {
d.mutex.RLock()
defer d.mutex.RUnlock()
return len(d.AST.Errors) > 0
}
// GetErrorCount returns the number of parsing errors
func (d *Document) GetErrorCount() int {
d.mutex.RLock()
defer d.mutex.RUnlock()
return len(d.AST.Errors)
}
// Diagnostic types for LSP integration
// Diagnostic represents a diagnostic message
type Diagnostic struct {
Range DiagnosticRange `json:"range"`
Severity DiagnosticSeverity `json:"severity"`
Message string `json:"message"`
Source string `json:"source"`
}
// DiagnosticRange represents a range in a document
type DiagnosticRange struct {
Start DiagnosticPosition `json:"start"`
End DiagnosticPosition `json:"end"`
}
// DiagnosticPosition represents a position in a document
type DiagnosticPosition struct {
Line int `json:"line"`
Character int `json:"character"`
}
// DiagnosticSeverity represents the severity of a diagnostic
type DiagnosticSeverity int
const (
DiagnosticSeverityError DiagnosticSeverity = 1
DiagnosticSeverityWarning DiagnosticSeverity = 2
DiagnosticSeverityInformation DiagnosticSeverity = 3
DiagnosticSeverityHint DiagnosticSeverity = 4
)
package lsp
import (
"context"
"fmt"
"path/filepath"
"regexp"
"strings"
"github.com/sourcegraph/go-lsp"
)
// DocumentLinksProvider provides document links for include files
type DocumentLinksProvider struct {
documentManager *DocumentManager
includePaths []string
includeRegex *regexp.Regexp
}
// NewDocumentLinksProvider creates a new document links provider
func NewDocumentLinksProvider(dm *DocumentManager) *DocumentLinksProvider {
return &DocumentLinksProvider{
documentManager: dm,
includePaths: []string{},
includeRegex: regexp.MustCompile("`include\\s+\"([^\"]+)\""),
}
}
// SetIncludePaths sets the include paths for resolving include files
func (p *DocumentLinksProvider) SetIncludePaths(paths []string) {
p.includePaths = paths
}
// ProvideDocumentLinks provides document links for a document
func (p *DocumentLinksProvider) ProvideDocumentLinks(ctx context.Context, params *DocumentLinkParams) ([]DocumentLink, error) {
doc, exists := p.documentManager.GetDocument(string(params.TextDocument.URI))
if !exists {
return nil, nil
}
content := doc.GetContent()
lines := strings.Split(content, "\n")
var links []DocumentLink
for lineNum, line := range lines {
matches := p.includeRegex.FindAllStringSubmatch(line, -1)
for _, match := range matches {
if len(match) > 1 {
includeFile := match[1]
startCol := strings.Index(line, match[0])
// Find the position of the filename within the match
fileStartCol := startCol + strings.Index(match[0], includeFile)
link := DocumentLink{
Range: lsp.Range{
Start: lsp.Position{
Line: lineNum,
Character: fileStartCol,
},
End: lsp.Position{
Line: lineNum,
Character: fileStartCol + len(includeFile),
},
},
}
// Try to resolve the file path
if resolvedPath := p.resolveIncludePath(includeFile); resolvedPath != "" {
target := fmt.Sprintf("file://%s", resolvedPath)
link.Target = &target
}
links = append(links, link)
}
}
}
return links, nil
}
// resolveIncludePath tries to resolve an include file path
func (p *DocumentLinksProvider) resolveIncludePath(includeFile string) string {
// First try as absolute path
if filepath.IsAbs(includeFile) {
return includeFile
}
// Try each include path
for _, includePath := range p.includePaths {
fullPath := filepath.Join(includePath, includeFile)
// In a real implementation, we would check if the file exists
// For now, just return the constructed path
return fullPath
}
// Try relative to current directory
return includeFile
}
package lsp
import (
"context"
"sort"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// DocumentSymbolProvider provides document symbol functionality
type DocumentSymbolProvider struct {
documentManager *DocumentManager
}
// NewDocumentSymbolProvider creates a new document symbol provider
func NewDocumentSymbolProvider(documentManager *DocumentManager) *DocumentSymbolProvider {
return &DocumentSymbolProvider{
documentManager: documentManager,
}
}
// ProvideDocumentSymbols provides document symbols for the given document
func (p *DocumentSymbolProvider) ProvideDocumentSymbols(ctx context.Context, params *lsp.DocumentSymbolParams) ([]lsp.SymbolInformation, error) {
uri := string(params.TextDocument.URI)
// Get document from document manager
document, exists := p.documentManager.GetDocument(uri)
if !exists || document == nil {
return []lsp.SymbolInformation{}, nil
}
// Get symbol table for the document
symbolTable := document.GetSymbolTable()
if symbolTable == nil {
return []lsp.SymbolInformation{}, nil
}
// Get all symbols for this document
documentSymbols, exists := symbolTable.GetDocumentSymbols(uri)
if !exists || documentSymbols == nil {
return []lsp.SymbolInformation{}, nil
}
// Convert to LSP SymbolInformation
return p.buildSymbolInformation(documentSymbols.Symbols, uri), nil
}
// buildSymbolInformation converts internal symbols to LSP SymbolInformation
func (p *DocumentSymbolProvider) buildSymbolInformation(symbolMap map[string]*symbols.Symbol, uri string) []lsp.SymbolInformation {
result := make([]lsp.SymbolInformation, 0)
// Convert all symbols to SymbolInformation
for _, symbol := range symbolMap {
// Only include symbols from the requested document
if symbol.Position.File != uri {
continue
}
symbolInfo := lsp.SymbolInformation{
Name: symbol.Name,
Kind: p.symbolTypeToLSPKind(symbol.Type),
Location: lsp.Location{
URI: lsp.DocumentURI(uri),
Range: p.symbolPositionToRange(symbol.Position),
},
ContainerName: p.getContainerName(symbol),
}
result = append(result, symbolInfo)
}
// Sort by position
sort.Slice(result, func(i, j int) bool {
return result[i].Location.Range.Start.Line < result[j].Location.Range.Start.Line ||
(result[i].Location.Range.Start.Line == result[j].Location.Range.Start.Line &&
result[i].Location.Range.Start.Character < result[j].Location.Range.Start.Character)
})
return result
}
// symbolTypeToLSPKind converts internal symbol type to LSP SymbolKind
func (p *DocumentSymbolProvider) symbolTypeToLSPKind(symbolType symbols.SymbolType) lsp.SymbolKind {
switch symbolType {
case symbols.SymbolTypeModule:
return lsp.SKModule
case symbols.SymbolTypeInterface:
return lsp.SKInterface
case symbols.SymbolTypeClass:
return lsp.SKClass
case symbols.SymbolTypeFunction:
return lsp.SKFunction
case symbols.SymbolTypeTask:
return lsp.SKMethod // Tasks are similar to methods
case symbols.SymbolTypeVariable:
return lsp.SKVariable
case symbols.SymbolTypeParameter:
return lsp.SKConstant // Parameters are similar to constants
case symbols.SymbolTypePort:
return lsp.SKField // Ports are similar to fields
case symbols.SymbolTypeInstance:
return lsp.SKObject // Module instances are objects
case symbols.SymbolTypeGenerate:
return lsp.SKNamespace // Generate blocks create namespaces
case symbols.SymbolTypeAlways:
return lsp.SKFunction // Always blocks are like functions
case symbols.SymbolTypeInitial:
return lsp.SKConstructor // Initial blocks are like constructors
case symbols.SymbolTypeConstraint:
return lsp.SKOperator // Constraints are like operators
case symbols.SymbolTypeTypedef:
return lsp.SKTypeParameter
case symbols.SymbolTypeEnum:
return lsp.SKEnum
case symbols.SymbolTypeStruct:
return lsp.SKStruct
case symbols.SymbolTypeUnion:
return lsp.SKStruct // Unions are similar to structs
case symbols.SymbolTypePackage:
return lsp.SKPackage
case symbols.SymbolTypeProperty:
return lsp.SKProperty
case symbols.SymbolTypeSequence:
return lsp.SKEvent // Sequences are like events
case symbols.SymbolTypeAssertion:
return lsp.SKBoolean // Assertions are boolean-like
case symbols.SymbolTypeCoverage:
return lsp.SKEvent // Coverage points are like events
default:
return lsp.SKVariable
}
}
// getContainerName returns the container name for hierarchical display
func (p *DocumentSymbolProvider) getContainerName(symbol *symbols.Symbol) string {
if symbol.Scope == nil || symbol.Scope.Name == "" || symbol.Scope.Name == "root" {
return ""
}
// Find the top-level structural container (module, interface, class, package)
current := symbol.Scope
var topLevelContainer *symbols.Scope = nil
for current != nil && current.Name != "" && current.Name != "root" {
// Check if this is a structural container
if current.ScopeType == symbols.SymbolTypeModule ||
current.ScopeType == symbols.SymbolTypeInterface ||
current.ScopeType == symbols.SymbolTypeClass ||
current.ScopeType == symbols.SymbolTypePackage {
topLevelContainer = current
}
current = current.Parent
}
if topLevelContainer == nil {
return ""
}
return topLevelContainer.Name
}
// symbolPositionToRange converts internal position to LSP range
func (p *DocumentSymbolProvider) symbolPositionToRange(position symbols.Position) lsp.Range {
// For now, create a range that starts and ends at the symbol position
// In a more sophisticated implementation, we would track the full range of the symbol
lspPos := lsp.Position{
Line: position.Line,
Character: position.Column,
}
return lsp.Range{
Start: lspPos,
End: lspPos,
}
}
// Helper method to create a range with proper end position (could be enhanced)
func (p *DocumentSymbolProvider) createSymbolRange(symbol *symbols.Symbol) lsp.Range {
start := lsp.Position{
Line: symbol.Position.Line,
Character: symbol.Position.Column,
}
// For now, estimate end position by adding symbol name length
// In a real implementation, we would track actual ranges during parsing
end := lsp.Position{
Line: symbol.Position.Line,
Character: symbol.Position.Column + len(symbol.Name),
}
return lsp.Range{
Start: start,
End: end,
}
}
package lsp
import (
"context"
"strings"
"github.com/adicens/systemverilog-lsp/internal/parser"
)
// FoldingRangeProvider provides folding ranges for code folding
type FoldingRangeProvider struct {
documentManager *DocumentManager
}
// NewFoldingRangeProvider creates a new folding range provider
func NewFoldingRangeProvider(dm *DocumentManager) *FoldingRangeProvider {
return &FoldingRangeProvider{
documentManager: dm,
}
}
// ProvideFoldingRanges provides folding ranges for a document
func (p *FoldingRangeProvider) ProvideFoldingRanges(ctx context.Context, params *FoldingRangeParams) ([]FoldingRange, error) {
doc, exists := p.documentManager.GetDocument(string(params.TextDocument.URI))
if !exists {
return nil, nil
}
var ranges []FoldingRange
// Extract AST-based folding ranges
if doc.AST != nil && doc.AST.Root != nil {
p.extractFoldingRanges(doc.AST.Root, &ranges)
}
// Extract comment folding ranges
commentRanges := p.extractCommentFoldingRanges(doc.GetContent())
ranges = append(ranges, commentRanges...)
// Return empty array instead of nil
if ranges == nil {
ranges = []FoldingRange{}
}
return ranges, nil
}
// extractFoldingRanges recursively extracts folding ranges from an AST node
func (p *FoldingRangeProvider) extractFoldingRanges(node parser.Node, ranges *[]FoldingRange) {
if node == nil {
return
}
// Check if this node should create a folding range
if p.shouldCreateFoldingRange(node) {
r := node.Range()
startLine := r.Start.Line - 1 // Convert to 0-based
endLine := r.End.Line - 1
// Only create a folding range if it spans multiple lines
// For single-line constructs, don't create a folding range
if endLine > startLine && endLine-startLine > 1 {
foldingRange := FoldingRange{
StartLine: startLine,
EndLine: endLine,
}
// Set the kind based on node type
switch n := node.(type) {
case *parser.ModuleNode, *parser.InterfaceNode, *parser.ClassNode,
*parser.FunctionNode, *parser.TaskNode:
kind := "region"
foldingRange.Kind = &kind
case *parser.AlwaysNode:
kind := "region"
foldingRange.Kind = &kind
// For always blocks, try to get more precise range
if len(n.Body) > 0 {
// Adjust end line based on body content
lastBodyNode := n.Body[len(n.Body)-1]
if lastBodyNode != nil {
bodyEndLine := lastBodyNode.Range().End.Line - 1
if bodyEndLine > foldingRange.StartLine {
foldingRange.EndLine = bodyEndLine
}
}
}
case *parser.GenerateNode, *parser.IfNode:
kind := "region"
foldingRange.Kind = &kind
}
*ranges = append(*ranges, foldingRange)
}
}
// Process children
for _, child := range node.Children() {
p.extractFoldingRanges(child, ranges)
}
}
// shouldCreateFoldingRange determines if a node should create a folding range
func (p *FoldingRangeProvider) shouldCreateFoldingRange(node parser.Node) bool {
switch node.(type) {
case *parser.ModuleNode,
*parser.InterfaceNode,
*parser.ClassNode,
*parser.FunctionNode,
*parser.TaskNode,
*parser.AlwaysNode,
*parser.GenerateNode,
*parser.IfNode:
return true
default:
return false
}
}
// extractCommentFoldingRanges extracts folding ranges for comment blocks
func (p *FoldingRangeProvider) extractCommentFoldingRanges(content string) []FoldingRange {
var ranges []FoldingRange
lines := strings.Split(content, "\n")
// Track block comment state
inBlockComment := false
blockCommentStart := -1
for i, line := range lines {
// Check for block comment start
if !inBlockComment && strings.Contains(line, "/*") {
inBlockComment = true
blockCommentStart = i
// Check if comment ends on same line
if strings.Contains(line, "*/") {
// Single line block comment, don't create folding range
inBlockComment = false
blockCommentStart = -1
}
} else if inBlockComment && strings.Contains(line, "*/") {
// End of block comment
inBlockComment = false
if blockCommentStart >= 0 && i > blockCommentStart {
kind := "comment"
ranges = append(ranges, FoldingRange{
StartLine: blockCommentStart,
EndLine: i,
Kind: &kind,
})
}
blockCommentStart = -1
}
}
// Extract consecutive line comment blocks
lineCommentStart := -1
for i, line := range lines {
trimmed := strings.TrimSpace(line)
if strings.HasPrefix(trimmed, "//") {
if lineCommentStart == -1 {
lineCommentStart = i
}
} else {
// End of line comment block
if lineCommentStart >= 0 && i-1 > lineCommentStart {
kind := "comment"
ranges = append(ranges, FoldingRange{
StartLine: lineCommentStart,
EndLine: i - 1,
Kind: &kind,
})
}
lineCommentStart = -1
}
}
// Handle comment block that extends to end of file
if lineCommentStart >= 0 && len(lines)-1 > lineCommentStart {
kind := "comment"
ranges = append(ranges, FoldingRange{
StartLine: lineCommentStart,
EndLine: len(lines) - 1,
Kind: &kind,
})
}
return ranges
}
package lsp
import (
"context"
"fmt"
"strings"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// HoverProvider handles textDocument/hover requests
type HoverProvider struct {
documentManager *DocumentManager
}
// NewHoverProvider creates a new hover provider
func NewHoverProvider(dm *DocumentManager) *HoverProvider {
return &HoverProvider{
documentManager: dm,
}
}
// ProvideHover handles hover requests
func (hp *HoverProvider) ProvideHover(ctx context.Context, params *lsp.TextDocumentPositionParams) (*lsp.Hover, error) {
uri := string(params.TextDocument.URI)
line := params.Position.Line
character := params.Position.Character
// Get the document
doc, exists := hp.documentManager.GetDocument(uri)
if !exists {
return nil, nil
}
// Get the word at the current position
content := doc.GetContent()
word := hp.getWordAtPosition(content, line, character)
if word == "" {
return nil, nil
}
// Find the symbol information
hoverInfo := hp.getSymbolHoverInfo(word, uri)
if hoverInfo == nil {
return nil, nil
}
return hoverInfo, nil
}
// getWordAtPosition extracts the word at the given position
func (hp *HoverProvider) getWordAtPosition(content string, line, character int) string {
lines := strings.Split(content, "\n")
if line >= len(lines) {
return ""
}
currentLine := lines[line]
if character >= len(currentLine) {
character = len(currentLine) - 1
}
if character < 0 {
return ""
}
// Find word boundaries
start := character
end := character
// Go backwards to find start of word
for start > 0 && (isWordCharacter(currentLine[start-1])) {
start--
}
// Go forwards to find end of word
for end < len(currentLine) && (isWordCharacter(currentLine[end])) {
end++
}
if start >= end {
return ""
}
return currentLine[start:end]
}
// getSymbolHoverInfo gets hover information for a symbol
func (hp *HoverProvider) getSymbolHoverInfo(symbolName, requestingURI string) *lsp.Hover {
// First, try to find the symbol in the current document
if doc, exists := hp.documentManager.GetDocument(requestingURI); exists {
symbolTable := doc.GetSymbolTable()
if symbol, found := symbolTable.FindSymbol(symbolName); found {
return hp.createHoverFromSymbol(symbol)
}
}
// If not found in current document, search all documents
symbols := hp.documentManager.FindSymbol(symbolName)
if len(symbols) > 0 {
// Use the first symbol found (could be enhanced to prioritize)
return hp.createHoverFromSymbol(symbols[0])
}
// Check if it's a SystemVerilog keyword
if keywordInfo := hp.getKeywordHoverInfo(symbolName); keywordInfo != nil {
return keywordInfo
}
return nil
}
// createHoverFromSymbol creates hover information from a symbol
func (hp *HoverProvider) createHoverFromSymbol(symbol *symbols.Symbol) *lsp.Hover {
if symbol == nil {
return nil
}
var content strings.Builder
// Add symbol signature
content.WriteString("```systemverilog\n")
content.WriteString(hp.formatSymbolSignature(symbol))
content.WriteString("\n```\n\n")
// Add symbol type information
content.WriteString(fmt.Sprintf("**Type:** %s\n\n", symbol.Type.String()))
// Add data type if available
if symbol.DataType != "" {
content.WriteString(fmt.Sprintf("**Data Type:** %s\n\n", symbol.DataType))
}
// Add scope information
if symbol.Scope != nil {
content.WriteString(fmt.Sprintf("**Scope:** %s\n\n", symbol.Scope.Name))
}
// Add location information
if symbol.Position.File != "" {
content.WriteString(fmt.Sprintf("**Location:** %s:%d:%d\n\n",
symbol.Position.File, symbol.Position.Line, symbol.Position.Column))
}
// Add attributes if any
if len(symbol.Attributes) > 0 {
content.WriteString("**Attributes:**\n")
for key, value := range symbol.Attributes {
content.WriteString(fmt.Sprintf("- %s: %v\n", key, value))
}
content.WriteString("\n")
}
// Add references count
references := symbol.GetReferences()
if len(references) > 0 {
content.WriteString(fmt.Sprintf("**References:** %d\n\n", len(references)))
}
return &lsp.Hover{
Contents: []lsp.MarkedString{
lsp.RawMarkedString(content.String()),
},
}
}
// formatSymbolSignature formats a symbol into a signature string
func (hp *HoverProvider) formatSymbolSignature(symbol *symbols.Symbol) string {
switch symbol.Type {
case symbols.SymbolTypeModule:
return hp.formatModuleSignature(symbol)
case symbols.SymbolTypeInterface:
return hp.formatInterfaceSignature(symbol)
case symbols.SymbolTypeClass:
return hp.formatClassSignature(symbol)
case symbols.SymbolTypeFunction:
return hp.formatFunctionSignature(symbol)
case symbols.SymbolTypeTask:
return hp.formatTaskSignature(symbol)
case symbols.SymbolTypeVariable:
return hp.formatVariableSignature(symbol)
case symbols.SymbolTypeParameter:
return hp.formatParameterSignature(symbol)
case symbols.SymbolTypePort:
return hp.formatPortSignature(symbol)
default:
return fmt.Sprintf("%s %s", symbol.Type.String(), symbol.Name)
}
}
// formatModuleSignature formats a module signature
func (hp *HoverProvider) formatModuleSignature(symbol *symbols.Symbol) string {
// Basic module signature
signature := fmt.Sprintf("module %s", symbol.Name)
// Add parameters if any
if params, exists := symbol.GetAttribute("parameters"); exists {
if paramList, ok := params.([]string); ok && len(paramList) > 0 {
signature += fmt.Sprintf(" #(%s)", strings.Join(paramList, ", "))
}
}
// Add ports if any
if ports, exists := symbol.GetAttribute("ports"); exists {
if portList, ok := ports.([]string); ok && len(portList) > 0 {
signature += fmt.Sprintf(" (%s)", strings.Join(portList, ", "))
}
}
return signature
}
// formatInterfaceSignature formats an interface signature
func (hp *HoverProvider) formatInterfaceSignature(symbol *symbols.Symbol) string {
signature := fmt.Sprintf("interface %s", symbol.Name)
// Add parameters if any
if params, exists := symbol.GetAttribute("parameters"); exists {
if paramList, ok := params.([]string); ok && len(paramList) > 0 {
signature += fmt.Sprintf(" #(%s)", strings.Join(paramList, ", "))
}
}
return signature
}
// formatClassSignature formats a class signature
func (hp *HoverProvider) formatClassSignature(symbol *symbols.Symbol) string {
signature := fmt.Sprintf("class %s", symbol.Name)
// Add inheritance if any
if extends, exists := symbol.GetAttribute("extends"); exists {
if extendClass, ok := extends.(string); ok && extendClass != "" {
signature += fmt.Sprintf(" extends %s", extendClass)
}
}
return signature
}
// formatFunctionSignature formats a function signature
func (hp *HoverProvider) formatFunctionSignature(symbol *symbols.Symbol) string {
signature := "function"
// Add return type if available
if symbol.DataType != "" {
signature += fmt.Sprintf(" %s", symbol.DataType)
}
signature += fmt.Sprintf(" %s", symbol.Name)
// Add parameters if any
if params, exists := symbol.GetAttribute("parameters"); exists {
if paramList, ok := params.([]string); ok && len(paramList) > 0 {
signature += fmt.Sprintf("(%s)", strings.Join(paramList, ", "))
} else {
signature += "()"
}
}
return signature
}
// formatTaskSignature formats a task signature
func (hp *HoverProvider) formatTaskSignature(symbol *symbols.Symbol) string {
signature := fmt.Sprintf("task %s", symbol.Name)
// Add parameters if any
if params, exists := symbol.GetAttribute("parameters"); exists {
if paramList, ok := params.([]string); ok && len(paramList) > 0 {
signature += fmt.Sprintf("(%s)", strings.Join(paramList, ", "))
} else {
signature += "()"
}
}
return signature
}
// formatVariableSignature formats a variable signature
func (hp *HoverProvider) formatVariableSignature(symbol *symbols.Symbol) string {
if symbol.DataType != "" {
return fmt.Sprintf("%s %s", symbol.DataType, symbol.Name)
}
return fmt.Sprintf("var %s", symbol.Name)
}
// formatParameterSignature formats a parameter signature
func (hp *HoverProvider) formatParameterSignature(symbol *symbols.Symbol) string {
signature := "parameter"
if symbol.DataType != "" {
signature += fmt.Sprintf(" %s", symbol.DataType)
}
signature += fmt.Sprintf(" %s", symbol.Name)
// Add default value if any
if value, exists := symbol.GetAttribute("default_value"); exists {
signature += fmt.Sprintf(" = %v", value)
}
return signature
}
// formatPortSignature formats a port signature
func (hp *HoverProvider) formatPortSignature(symbol *symbols.Symbol) string {
signature := ""
// Add direction if available
if direction, exists := symbol.GetAttribute("direction"); exists {
signature += fmt.Sprintf("%s ", direction)
}
// Add data type
if symbol.DataType != "" {
signature += fmt.Sprintf("%s ", symbol.DataType)
}
signature += symbol.Name
// Add width if available
if width, exists := symbol.GetAttribute("width"); exists {
signature += fmt.Sprintf(" %v", width)
}
return signature
}
// getKeywordHoverInfo provides hover information for SystemVerilog keywords
func (hp *HoverProvider) getKeywordHoverInfo(keyword string) *lsp.Hover {
keywordDocs := map[string]string{
"module": "A module is the basic building block of SystemVerilog design hierarchy. It encapsulates functionality and can be instantiated multiple times.",
"interface": "An interface is a named bundle of nets and variables. It provides a mechanism for communication between modules.",
"class": "A class is a data type that contains data and methods. Classes support object-oriented programming in SystemVerilog.",
"function": "A function is a callable piece of code that returns a value. Functions execute in zero simulation time.",
"task": "A task is a callable piece of code that does not return a value. Tasks can consume simulation time.",
"always": "An always block is a procedural block that executes continuously during simulation.",
"always_comb": "An always_comb block is used for combinational logic. It is sensitive to all inputs.",
"always_ff": "An always_ff block is used for sequential logic. It typically uses a clock edge.",
"initial": "An initial block executes once at the beginning of simulation.",
"generate": "Generate constructs allow conditional or iterative instantiation of modules, interfaces, or other constructs.",
"parameter": "A parameter is a constant that can be set at compile time or module instantiation.",
"localparam": "A localparam is a local parameter that cannot be overridden during module instantiation.",
"logic": "The logic data type can represent 0, 1, X, or Z values. It can be used for both combinational and sequential logic.",
"bit": "The bit data type can represent 0 or 1 values. It is a 2-state data type.",
"reg": "The reg data type can store values. It can represent 0, 1, X, or Z.",
"wire": "The wire data type represents a connection between components. It cannot store values.",
"input": "Declares an input port of a module, interface, or program.",
"output": "Declares an output port of a module, interface, or program.",
"inout": "Declares a bidirectional port of a module, interface, or program.",
"begin": "Begins a sequential block of statements.",
"end": "Ends a sequential block of statements.",
"if": "Conditional statement that executes code based on a boolean expression.",
"else": "Alternative branch for an if statement.",
"case": "Multi-way conditional statement that compares an expression against multiple values.",
"default": "Default case in a case statement, executed when no other cases match.",
"for": "Loop construct that executes a block of statements a specified number of times.",
"while": "Loop construct that executes while a condition is true.",
"repeat": "Loop construct that executes a block a fixed number of times.",
"forever": "Loop construct that executes indefinitely.",
"constraint": "Defines constraints for random variables in a class.",
"assert": "SystemVerilog assertion used for verification.",
"assume": "SystemVerilog assumption used for verification.",
"cover": "SystemVerilog coverage statement used for verification.",
}
if doc, exists := keywordDocs[strings.ToLower(keyword)]; exists {
return &lsp.Hover{
Contents: []lsp.MarkedString{
lsp.RawMarkedString(fmt.Sprintf("```systemverilog\n%s\n```\n\n**SystemVerilog Keyword**\n\n%s",
keyword, doc)),
},
}
}
return nil
}
package lsp
import (
"fmt"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// InlayHintsProvider provides inline hints for SystemVerilog code
type InlayHintsProvider struct {
documentManager *DocumentManager
}
// NewInlayHintsProvider creates a new inlay hints provider
func NewInlayHintsProvider(dm *DocumentManager) *InlayHintsProvider {
return &InlayHintsProvider{
documentManager: dm,
}
}
// InlayHintKind represents the kind of an inlay hint
type InlayHintKind int
const (
// InlayHintKindType is a type hint
InlayHintKindType InlayHintKind = 1
// InlayHintKindParameter is a parameter hint
InlayHintKindParameter InlayHintKind = 2
)
// InlayHint represents a single inlay hint in a document
type InlayHint struct {
// Position where the hint should be displayed
Position lsp.Position `json:"position"`
// Label to display
Label string `json:"label"`
// Kind of the hint
Kind *InlayHintKind `json:"kind,omitempty"`
// Tooltip text
Tooltip *string `json:"tooltip,omitempty"`
// Padding left
PaddingLeft *bool `json:"paddingLeft,omitempty"`
// Padding right
PaddingRight *bool `json:"paddingRight,omitempty"`
}
// InlayHintParams represents the parameters for textDocument/inlayHint request
type InlayHintParams struct {
// TextDocument identifies the document
TextDocument lsp.TextDocumentIdentifier `json:"textDocument"`
// Range to compute hints for
Range lsp.Range `json:"range"`
}
// ProvideInlayHints generates inlay hints for the given document range
func (p *InlayHintsProvider) ProvideInlayHints(params InlayHintParams) ([]InlayHint, error) {
doc, exists := p.documentManager.GetDocument(string(params.TextDocument.URI))
if !exists {
return nil, fmt.Errorf("document not found: %s", params.TextDocument.URI)
}
hints := []InlayHint{}
// Get symbols for the document
if doc.SymbolTable != nil {
// Convert symbol map to slice for methods that need it
symbolMap := doc.SymbolTable.GetAllSymbols()
symbolList := make([]*symbols.Symbol, 0, len(symbolMap))
for _, sym := range symbolMap {
symbolList = append(symbolList, sym)
}
// Process symbol table to find opportunities for hints
// Add type hints for variable declarations
hints = append(hints, p.generateTypeHints(doc)...)
// Add parameter hints for function/task calls
hints = append(hints, p.generateParameterHints(doc, symbolList)...)
// Add port direction hints
hints = append(hints, p.generatePortDirectionHints(doc, symbolList)...)
}
// Filter hints by the requested range
filteredHints := []InlayHint{}
for _, hint := range hints {
if isPositionInRange(hint.Position, params.Range) {
filteredHints = append(filteredHints, hint)
}
}
return filteredHints, nil
}
// generateTypeHints generates type hints for variable declarations
func (p *InlayHintsProvider) generateTypeHints(doc *Document) []InlayHint {
hints := []InlayHint{}
// Get all variable symbols and show type hints for them
if doc.SymbolTable != nil {
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, sym := range allSymbols {
if sym.Type == symbols.SymbolTypeVariable || sym.Type == symbols.SymbolTypePort {
// Check if we should show a type hint
if sym.DataType != "" && sym.DataType != "logic" {
trueVal := true
hint := InlayHint{
Position: lsp.Position{Line: sym.Position.Line - 1, Character: sym.Position.Column + len(sym.Name)},
Label: ": " + sym.DataType,
Kind: (*InlayHintKind)(intPtr(int(InlayHintKindType))),
PaddingLeft: &trueVal,
PaddingRight: &trueVal,
}
hints = append(hints, hint)
}
}
}
}
return hints
}
// generateParameterHints generates parameter hints for function/task calls
func (p *InlayHintsProvider) generateParameterHints(doc *Document, symbolList []*symbols.Symbol) []InlayHint {
hints := []InlayHint{}
// For now, we'll show parameter hints for function and task definitions
// Since we don't have function call nodes in the AST yet
for _, sym := range symbolList {
if sym.Type == symbols.SymbolTypeFunction || sym.Type == symbols.SymbolTypeTask {
// Show hints for parameter names in the definition
if sym.Attributes != nil && len(sym.Attributes) > 0 {
// Parameter hints could be shown when viewing the function signature
// This is a simplified implementation
}
}
}
return hints
}
// generatePortDirectionHints generates hints for port directions
func (p *InlayHintsProvider) generatePortDirectionHints(doc *Document, symbolList []*symbols.Symbol) []InlayHint {
hints := []InlayHint{}
// Show hints for port directions based on symbol information
for _, sym := range symbolList {
if sym.Type == symbols.SymbolTypePort {
// Check if port direction might be implicit
if sym.Attributes != nil {
// Check for direction attribute
if direction, ok := sym.Attributes["direction"].(string); ok && direction == "" {
direction = "inout" // Default in SystemVerilog
trueVal := true
tooltip := "Default port direction in SystemVerilog"
hint := InlayHint{
Position: lsp.Position{Line: sym.Position.Line - 1, Character: sym.Position.Column - 1},
Label: "[" + direction + "]",
Kind: (*InlayHintKind)(intPtr(int(InlayHintKindType))),
Tooltip: &tooltip,
PaddingRight: &trueVal,
}
hints = append(hints, hint)
}
}
}
}
return hints
}
// findSymbol finds a symbol by name and type
func (p *InlayHintsProvider) findSymbol(symbolList []*symbols.Symbol, name string, symbolType symbols.SymbolType) *symbols.Symbol {
for _, sym := range symbolList {
if sym.Name == name && sym.Type == symbolType {
return sym
}
}
return nil
}
// isPositionInRange checks if a position is within a range
func isPositionInRange(pos lsp.Position, r lsp.Range) bool {
if pos.Line < r.Start.Line || pos.Line > r.End.Line {
return false
}
if pos.Line == r.Start.Line && pos.Character < r.Start.Character {
return false
}
if pos.Line == r.End.Line && pos.Character > r.End.Character {
return false
}
return true
}
// intPtr returns a pointer to an int
func intPtr(i int) *int {
return &i
}
package lsp
import (
"context"
"fmt"
"strings"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// ReferencesProvider handles textDocument/references requests
type ReferencesProvider struct {
documentManager *DocumentManager
}
// NewReferencesProvider creates a new references provider
func NewReferencesProvider(dm *DocumentManager) *ReferencesProvider {
return &ReferencesProvider{
documentManager: dm,
}
}
// ProvideReferences handles references requests
func (rp *ReferencesProvider) ProvideReferences(ctx context.Context, params *lsp.ReferenceParams) ([]lsp.Location, error) {
uri := string(params.TextDocument.URI)
line := params.Position.Line
character := params.Position.Character
includeDeclaration := params.Context.IncludeDeclaration
// Get the document
doc, exists := rp.documentManager.GetDocument(uri)
if !exists {
return []lsp.Location{}, nil
}
// Get the word at the current position
content := doc.GetContent()
word := rp.getWordAtPosition(content, line, character)
if word == "" {
return []lsp.Location{}, nil
}
// Find all references to this symbol
locations := rp.findSymbolReferences(word, uri, includeDeclaration)
return locations, nil
}
// getWordAtPosition extracts the word at the given position
func (rp *ReferencesProvider) getWordAtPosition(content string, line, character int) string {
lines := strings.Split(content, "\n")
if line >= len(lines) {
return ""
}
currentLine := lines[line]
if character >= len(currentLine) {
return ""
}
if character < 0 {
return ""
}
// Find word boundaries
start := character
end := character
// Go backwards to find start of word
for start > 0 && (isWordCharacter(currentLine[start-1])) {
start--
}
// Go forwards to find end of word
for end < len(currentLine) && (isWordCharacter(currentLine[end])) {
end++
}
if start >= end {
return ""
}
return currentLine[start:end]
}
// findSymbolReferences finds all references to a symbol
func (rp *ReferencesProvider) findSymbolReferences(symbolName, requestingURI string, includeDeclaration bool) []lsp.Location {
var locations []lsp.Location
// First, find the symbol definition to get the symbol ID
var targetSymbol *symbols.Symbol
var found bool
// Try to find the symbol in the current document first
if doc, exists := rp.documentManager.GetDocument(requestingURI); exists {
symbolTable := doc.GetSymbolTable()
if symbol, symbolFound := symbolTable.FindSymbol(symbolName); symbolFound {
targetSymbol = symbol
found = true
}
}
// If not found in current document, search all documents
if !found {
allSymbols := rp.documentManager.FindSymbol(symbolName)
if len(allSymbols) > 0 {
// Take the first matching symbol (could be enhanced with better logic)
targetSymbol = allSymbols[0]
found = true
}
}
if !found || targetSymbol == nil {
// Fallback to text search across all documents when symbol not found in symbol table
textSearchRefs := rp.findReferencesInAllDocuments(symbolName, symbols.Position{})
locations = append(locations, textSearchRefs...)
} else {
// Include the declaration if requested
if includeDeclaration {
if declLocation := rp.symbolToLocation(targetSymbol); declLocation != nil {
locations = append(locations, *declLocation)
}
}
// Get all references from the symbol
references := targetSymbol.GetReferences()
for _, ref := range references {
location := rp.positionToLocation(ref)
if location != nil {
locations = append(locations, *location)
}
}
// Also search for references across all documents by symbol name
// This provides a fallback in case reference tracking is incomplete
additionalRefs := rp.findReferencesInAllDocuments(symbolName, targetSymbol.Position)
locations = append(locations, additionalRefs...)
}
// Remove duplicates
locations = rp.removeDuplicateLocations(locations)
return locations
}
// findReferencesInAllDocuments searches for symbol usage in all open documents
func (rp *ReferencesProvider) findReferencesInAllDocuments(symbolName string, declarationPos symbols.Position) []lsp.Location {
var locations []lsp.Location
// Get all document URIs
allDocs := rp.documentManager.GetAllDocuments()
for uri, doc := range allDocs {
content := doc.GetContent()
refs := rp.findSymbolOccurrences(content, symbolName, uri)
// Filter out the declaration itself if we have position info
for _, ref := range refs {
// Skip if this is the declaration position (only if we have valid declaration info)
if declarationPos.File != "" &&
ref.URI == lsp.DocumentURI(declarationPos.File) &&
ref.Range.Start.Line == declarationPos.Line-1 &&
ref.Range.Start.Character == declarationPos.Column-1 {
continue
}
locations = append(locations, ref)
}
}
return locations
}
// findSymbolOccurrences finds all occurrences of a symbol name in content
func (rp *ReferencesProvider) findSymbolOccurrences(content, symbolName, uri string) []lsp.Location {
var locations []lsp.Location
lines := strings.Split(content, "\n")
for lineIdx, line := range lines {
startCol := 0
for {
// Find the next occurrence of the symbol
idx := strings.Index(line[startCol:], symbolName)
if idx == -1 {
break
}
actualCol := startCol + idx
// Check if this is a complete word (not part of another identifier)
if rp.isCompleteWord(line, actualCol, len(symbolName)) {
location := &lsp.Location{
URI: lsp.DocumentURI(uri),
Range: lsp.Range{
Start: lsp.Position{
Line: lineIdx,
Character: actualCol,
},
End: lsp.Position{
Line: lineIdx,
Character: actualCol + len(symbolName),
},
},
}
locations = append(locations, *location)
}
startCol = actualCol + len(symbolName)
}
}
return locations
}
// isCompleteWord checks if the occurrence is a complete word
func (rp *ReferencesProvider) isCompleteWord(line string, start, length int) bool {
// Check character before
if start > 0 && isWordCharacter(line[start-1]) {
return false
}
// Check character after
end := start + length
if end < len(line) && isWordCharacter(line[end]) {
return false
}
return true
}
// symbolToLocation converts a symbol to an LSP location
func (rp *ReferencesProvider) symbolToLocation(symbol *symbols.Symbol) *lsp.Location {
if symbol == nil {
return nil
}
// Use the file from the symbol's position, or fall back to empty if not set
uri := symbol.Position.File
if uri == "" {
return nil
}
return &lsp.Location{
URI: lsp.DocumentURI(uri),
Range: lsp.Range{
Start: lsp.Position{
Line: symbol.Position.Line - 1, // LSP uses 0-based line numbers
Character: symbol.Position.Column - 1, // LSP uses 0-based column numbers
},
End: lsp.Position{
Line: symbol.Position.Line - 1,
Character: symbol.Position.Column + len(symbol.Name) - 1,
},
},
}
}
// positionToLocation converts a position to an LSP location
func (rp *ReferencesProvider) positionToLocation(pos symbols.Position) *lsp.Location {
if pos.File == "" {
return nil
}
return &lsp.Location{
URI: lsp.DocumentURI(pos.File),
Range: lsp.Range{
Start: lsp.Position{
Line: pos.Line - 1, // LSP uses 0-based line numbers
Character: pos.Column - 1, // LSP uses 0-based column numbers
},
End: lsp.Position{
Line: pos.Line - 1,
Character: pos.Column, // Single character range for references
},
},
}
}
// removeDuplicateLocations removes duplicate locations from the list
func (rp *ReferencesProvider) removeDuplicateLocations(locations []lsp.Location) []lsp.Location {
seen := make(map[string]bool)
var unique []lsp.Location
for _, loc := range locations {
key := fmt.Sprintf("%s:%d:%d:%d:%d",
string(loc.URI),
loc.Range.Start.Line,
loc.Range.Start.Character,
loc.Range.End.Line,
loc.Range.End.Character)
if !seen[key] {
seen[key] = true
unique = append(unique, loc)
}
}
return unique
}
package lsp
import (
"context"
"fmt"
"regexp"
"strings"
"github.com/sourcegraph/go-lsp"
"github.com/sourcegraph/jsonrpc2"
)
// RenameProvider handles textDocument/rename and textDocument/prepareRename requests
type RenameProvider struct {
documentManager *DocumentManager
referencesProvider *ReferencesProvider
}
// NewRenameProvider creates a new rename provider
func NewRenameProvider(dm *DocumentManager, rp *ReferencesProvider) *RenameProvider {
return &RenameProvider{
documentManager: dm,
referencesProvider: rp,
}
}
// PreparRenameParams are params for textDocument/prepareRename request
type PrepareRenameParams struct {
lsp.TextDocumentPositionParams
}
// PrepareRenameResult is the result of prepareRename request
type PrepareRenameResult struct {
Range lsp.Range `json:"range"`
Placeholder string `json:"placeholder"`
}
// PrepareRename validates if rename is possible at the given position
func (rp *RenameProvider) PrepareRename(ctx context.Context, params *PrepareRenameParams) (*PrepareRenameResult, error) {
uri := string(params.TextDocument.URI)
line := params.Position.Line
character := params.Position.Character
// Get the document
doc, exists := rp.documentManager.GetDocument(uri)
if !exists {
return nil, &jsonrpc2.Error{
Code: jsonrpc2.CodeInvalidParams,
Message: "Document not found",
}
}
// Get the word at the current position
content := doc.GetContent()
wordInfo := rp.getWordInfoAtPosition(content, line, character)
if wordInfo == nil {
return nil, &jsonrpc2.Error{
Code: jsonrpc2.CodeInvalidParams,
Message: "No symbol found at position",
}
}
// Check if the symbol can be renamed
if !rp.isRenameable(wordInfo.text) {
return nil, &jsonrpc2.Error{
Code: jsonrpc2.CodeInvalidParams,
Message: fmt.Sprintf("'%s' cannot be renamed", wordInfo.text),
}
}
// Check if symbol exists in symbol table
symbolTable := doc.GetSymbolTable()
if _, found := symbolTable.FindSymbol(wordInfo.text); !found {
// Try text-based search fallback
if !rp.textContainsWord(content, wordInfo.text) {
return nil, &jsonrpc2.Error{
Code: jsonrpc2.CodeInvalidParams,
Message: fmt.Sprintf("Symbol '%s' not found", wordInfo.text),
}
}
}
return &PrepareRenameResult{
Range: lsp.Range{
Start: lsp.Position{Line: line, Character: wordInfo.start},
End: lsp.Position{Line: line, Character: wordInfo.end},
},
Placeholder: wordInfo.text,
}, nil
}
// Rename performs the rename operation across all references
func (rp *RenameProvider) Rename(ctx context.Context, params *lsp.RenameParams) (*lsp.WorkspaceEdit, error) {
uri := string(params.TextDocument.URI)
line := params.Position.Line
character := params.Position.Character
newName := params.NewName
// Validate the new name
if err := rp.validateNewName(newName); err != nil {
return nil, &jsonrpc2.Error{
Code: jsonrpc2.CodeInvalidParams,
Message: err.Error(),
}
}
// Get the document
doc, exists := rp.documentManager.GetDocument(uri)
if !exists {
return nil, &jsonrpc2.Error{
Code: jsonrpc2.CodeInvalidParams,
Message: "Document not found",
}
}
// Get the word at the current position
content := doc.GetContent()
wordInfo := rp.getWordInfoAtPosition(content, line, character)
if wordInfo == nil {
return nil, &jsonrpc2.Error{
Code: jsonrpc2.CodeInvalidParams,
Message: "No symbol found at position",
}
}
oldName := wordInfo.text
// Check for name conflicts
if err := rp.checkNameConflicts(oldName, newName, uri); err != nil {
return nil, &jsonrpc2.Error{
Code: jsonrpc2.CodeInvalidParams,
Message: err.Error(),
}
}
// Find all references using the references provider
refParams := &lsp.ReferenceParams{
TextDocumentPositionParams: lsp.TextDocumentPositionParams{
TextDocument: params.TextDocument,
Position: params.Position,
},
Context: lsp.ReferenceContext{
IncludeDeclaration: true,
},
}
locations, err := rp.referencesProvider.ProvideReferences(ctx, refParams)
if err != nil {
return nil, err
}
// Group edits by URI and validate each location
editsByURI := make(map[string][]lsp.TextEdit)
for _, location := range locations {
uri := string(location.URI)
// Validate that the location actually contains the old name
if doc, exists := rp.documentManager.GetDocument(uri); exists {
content := doc.GetContent()
lines := strings.Split(content, "\n")
// Check if the location is valid
if location.Range.Start.Line < len(lines) {
line := lines[location.Range.Start.Line]
startChar := location.Range.Start.Character
endChar := location.Range.End.Character
// Ensure the range is valid
if startChar >= 0 && endChar <= len(line) && startChar < endChar {
actualText := line[startChar:endChar]
// Only add the edit if the text matches the old name
if actualText == oldName {
textEdit := lsp.TextEdit{
Range: location.Range,
NewText: newName,
}
if _, ok := editsByURI[uri]; !ok {
editsByURI[uri] = []lsp.TextEdit{}
}
editsByURI[uri] = append(editsByURI[uri], textEdit)
}
}
}
}
}
// Sort edits in reverse order to avoid position shifts
for uri, edits := range editsByURI {
sortEditsReverse(edits)
editsByURI[uri] = edits
}
// Create workspace edit
changes := make(map[string][]lsp.TextEdit)
for uri, edits := range editsByURI {
changes[uri] = edits
}
return &lsp.WorkspaceEdit{
Changes: changes,
}, nil
}
// wordInfo holds information about a word at a position
type wordInfo struct {
text string
start int
end int
}
// getWordInfoAtPosition extracts word information at the given position
func (rp *RenameProvider) getWordInfoAtPosition(content string, line, character int) *wordInfo {
lines := strings.Split(content, "\n")
if line >= len(lines) || line < 0 {
return nil
}
currentLine := lines[line]
if character >= len(currentLine) || character < 0 {
return nil
}
// Find word boundaries
start := character
end := character
// Go backwards to find start of word
for start > 0 && isRenameWordChar(currentLine[start-1]) {
start--
}
// Go forwards to find end of word
for end < len(currentLine) && isRenameWordChar(currentLine[end]) {
end++
}
if start >= end {
return nil
}
word := currentLine[start:end]
if !rp.isValidIdentifier(word) {
return nil
}
return &wordInfo{
text: word,
start: start,
end: end,
}
}
// isRenameWordChar checks if a character is valid in a SystemVerilog identifier
func isRenameWordChar(ch byte) bool {
return (ch >= 'a' && ch <= 'z') ||
(ch >= 'A' && ch <= 'Z') ||
(ch >= '0' && ch <= '9') ||
ch == '_' || ch == '$'
}
// isValidIdentifier checks if a string is a valid SystemVerilog identifier
func (rp *RenameProvider) isValidIdentifier(identifier string) bool {
if len(identifier) == 0 {
return false
}
// System functions/tasks start with $
if strings.HasPrefix(identifier, "$") {
// System functions have specific format: $name
validPattern := regexp.MustCompile(`^\$[a-zA-Z_][a-zA-Z0-9_]*$`)
return validPattern.MatchString(identifier)
}
// Must start with letter or underscore
if !((identifier[0] >= 'a' && identifier[0] <= 'z') ||
(identifier[0] >= 'A' && identifier[0] <= 'Z') ||
identifier[0] == '_') {
return false
}
// Rest can be letters, numbers, underscore, or dollar sign
validPattern := regexp.MustCompile(`^[a-zA-Z_][a-zA-Z0-9_$]*$`)
return validPattern.MatchString(identifier)
}
// isRenameable checks if a symbol can be renamed
func (rp *RenameProvider) isRenameable(word string) bool {
// Check if it's a keyword
if isSystemVerilogKeyword(word) {
return false
}
// Check if it's a built-in symbol
if isBuiltInSymbol(word) {
return false
}
return true
}
// validateNewName validates the new name for rename
func (rp *RenameProvider) validateNewName(newName string) error {
if newName == "" || strings.TrimSpace(newName) == "" {
return fmt.Errorf("new name cannot be empty")
}
newName = strings.TrimSpace(newName)
if !rp.isValidIdentifier(newName) {
return fmt.Errorf("new name must be a valid SystemVerilog identifier")
}
if isSystemVerilogKeyword(newName) {
return fmt.Errorf("new name cannot be a SystemVerilog keyword")
}
if isBuiltInSymbol(newName) {
return fmt.Errorf("new name cannot be a built-in symbol")
}
return nil
}
// checkNameConflicts checks for naming conflicts
func (rp *RenameProvider) checkNameConflicts(oldName, newName, uri string) error {
if oldName == newName {
return nil
}
// Check all documents for conflicts
for _, doc := range rp.documentManager.GetAllDocuments() {
symbolTable := doc.GetSymbolTable()
if existingSymbol, found := symbolTable.FindSymbol(newName); found {
// Check if it's in the same scope
if oldSymbol, found := symbolTable.FindSymbol(oldName); found {
if rp.symbolsInSameScope(oldSymbol, existingSymbol) {
return fmt.Errorf("symbol '%s' already exists in this scope", newName)
}
}
}
}
return nil
}
// symbolsInSameScope checks if two symbols are in the same scope
func (rp *RenameProvider) symbolsInSameScope(sym1, sym2 interface{}) bool {
// Simplified check - in a real implementation, we'd check the scope hierarchy
// For now, we'll assume symbols with the same parent scope conflict
return true
}
// textContainsWord checks if the text contains the word as a whole word
func (rp *RenameProvider) textContainsWord(text, word string) bool {
pattern := fmt.Sprintf(`\b%s\b`, regexp.QuoteMeta(word))
re := regexp.MustCompile(pattern)
return re.MatchString(text)
}
// sortEditsReverse sorts text edits in reverse order by position
func sortEditsReverse(edits []lsp.TextEdit) {
for i := 0; i < len(edits)-1; i++ {
for j := i + 1; j < len(edits); j++ {
if comparePositions(edits[i].Range.Start, edits[j].Range.Start) < 0 {
edits[i], edits[j] = edits[j], edits[i]
}
}
}
}
// comparePositions compares two positions
func comparePositions(p1, p2 lsp.Position) int {
if p1.Line != p2.Line {
return p1.Line - p2.Line
}
return p1.Character - p2.Character
}
// isSystemVerilogKeyword checks if a word is a SystemVerilog keyword
func isSystemVerilogKeyword(word string) bool {
keywords := map[string]bool{
// Verilog keywords
"always": true, "and": true, "assign": true, "automatic": true, "begin": true,
"buf": true, "bufif0": true, "bufif1": true, "case": true, "casex": true,
"casez": true, "cmos": true, "deassign": true, "default": true, "defparam": true,
"disable": true, "edge": true, "else": true, "end": true, "endcase": true,
"endfunction": true, "endmodule": true, "endprimitive": true, "endspecify": true,
"endtable": true, "endtask": true, "event": true, "for": true, "force": true,
"forever": true, "fork": true, "function": true, "highz0": true, "highz1": true,
"if": true, "ifnone": true, "initial": true, "inout": true, "input": true,
"integer": true, "join": true, "large": true, "macromodule": true, "medium": true,
"module": true, "nand": true, "negedge": true, "nmos": true, "nor": true,
"not": true, "notif0": true, "notif1": true, "or": true, "output": true,
"parameter": true, "pmos": true, "posedge": true, "primitive": true, "pull0": true,
"pull1": true, "pulldown": true, "pullup": true, "rcmos": true, "real": true,
"realtime": true, "reg": true, "release": true, "repeat": true, "rnmos": true,
"rpmos": true, "rtran": true, "rtranif0": true, "rtranif1": true, "scalared": true,
"small": true, "specify": true, "specparam": true, "strong0": true, "strong1": true,
"supply0": true, "supply1": true, "table": true, "task": true, "time": true,
"tran": true, "tranif0": true, "tranif1": true, "tri": true, "tri0": true,
"tri1": true, "triand": true, "trior": true, "trireg": true, "vectored": true,
"wait": true, "wand": true, "weak0": true, "weak1": true, "while": true,
"wire": true, "wor": true, "xnor": true, "xor": true,
// SystemVerilog keywords
"alias": true, "always_comb": true, "always_ff": true, "always_latch": true,
"assert": true, "assume": true, "before": true, "bind": true, "bins": true,
"binsof": true, "break": true, "chandle": true,
"class": true, "clocking": true, "const": true, "constraint": true, "context": true,
"continue": true, "cover": true, "covergroup": true, "coverpoint": true, "cross": true,
"dist": true, "do": true, "endclass": true, "endclocking": true, "endgroup": true,
"endinterface": true, "endpackage": true, "endprogram": true, "endproperty": true,
"endsequence": true, "enum": true, "expect": true, "export": true, "extends": true,
"extern": true, "final": true, "first_match": true, "foreach": true, "forkjoin": true,
"iff": true, "ignore_bins": true, "illegal_bins": true, "import": true, "inside": true,
"interface": true, "intersect": true, "join_any": true, "join_none": true,
"local": true, "matches": true, "modport": true,
"new": true, "null": true, "package": true, "packed": true, "priority": true,
"program": true, "property": true, "protected": true, "pure": true, "rand": true,
"randc": true, "randcase": true, "randsequence": true, "ref": true, "return": true,
"sequence": true, "shortreal": true, "solve": true, "static": true,
"struct": true, "super": true, "tagged": true, "this": true,
"throughout": true, "timeprecision": true, "timeunit": true, "type": true,
"typedef": true, "union": true, "unique": true, "var": true, "virtual": true,
"void": true, "wait_order": true, "wildcard": true, "with": true, "within": true,
"generate": true, "endgenerate": true, "genvar": true,
}
return keywords[word]
}
// isBuiltInSymbol checks if a word is a built-in symbol
func isBuiltInSymbol(word string) bool {
// System functions and tasks start with $
if strings.HasPrefix(word, "$") {
return true
}
// Built-in types
builtInTypes := map[string]bool{
"bit": true, "logic": true, "reg": true, "wire": true, "byte": true,
"shortint": true, "int": true, "longint": true, "integer": true,
"time": true, "real": true, "realtime": true, "string": true,
"event": true, "chandle": true,
}
return builtInTypes[word]
}
package lsp
import (
"context"
"sort"
"github.com/adicens/systemverilog-lsp/internal/parser"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// SemanticTokensProvider provides semantic tokens for syntax highlighting
type SemanticTokensProvider struct {
documentManager *DocumentManager
}
// NewSemanticTokensProvider creates a new semantic tokens provider
func NewSemanticTokensProvider(dm *DocumentManager) *SemanticTokensProvider {
return &SemanticTokensProvider{
documentManager: dm,
}
}
// Token types (must match the order in GetLegend)
const (
TokenTypeNamespace = iota
TokenTypeClass
TokenTypeEnum
TokenTypeInterface
TokenTypeStruct
TokenTypeTypeParameter
TokenTypeParameter
TokenTypeVariable
TokenTypeProperty
TokenTypeEnumMember
TokenTypeEvent
TokenTypeFunction
TokenTypeMethod
TokenTypeMacro
TokenTypeKeyword
TokenTypeModifier
TokenTypeComment
TokenTypeString
TokenTypeNumber
TokenTypeRegexp
TokenTypeOperator
)
// Token modifiers
const (
TokenModifierDeclaration = 1 << iota
TokenModifierDefinition
TokenModifierReadonly
TokenModifierStatic
TokenModifierDeprecated
TokenModifierAbstract
TokenModifierAsync
TokenModifierModification
TokenModifierDocumentation
TokenModifierDefaultLibrary
)
// SemanticToken represents a semantic token
type SemanticToken struct {
Line int
StartChar int
Length int
TokenType int
Modifiers int
}
// GetLegend returns the semantic tokens legend
func (p *SemanticTokensProvider) GetLegend() SemanticTokensLegend {
return SemanticTokensLegend{
TokenTypes: []string{
"namespace",
"class",
"enum",
"interface",
"struct",
"typeParameter",
"parameter",
"variable",
"property",
"enumMember",
"event",
"function",
"method",
"macro",
"keyword",
"modifier",
"comment",
"string",
"number",
"regexp",
"operator",
},
TokenModifiers: []string{
"declaration",
"definition",
"readonly",
"static",
"deprecated",
"abstract",
"async",
"modification",
"documentation",
"defaultLibrary",
},
}
}
// ProvideSemanticTokens provides semantic tokens for a document
func (p *SemanticTokensProvider) ProvideSemanticTokens(ctx context.Context, params *SemanticTokensParams) (*SemanticTokens, error) {
doc, exists := p.documentManager.GetDocument(string(params.TextDocument.URI))
if !exists {
return nil, nil
}
tokens := p.extractTokens(doc)
encodedTokens := p.encodeTokens(tokens)
return &SemanticTokens{
Data: encodedTokens,
}, nil
}
// ProvideSemanticTokensRange provides semantic tokens for a range
func (p *SemanticTokensProvider) ProvideSemanticTokensRange(ctx context.Context, params *SemanticTokensRangeParams) (*SemanticTokens, error) {
doc, exists := p.documentManager.GetDocument(string(params.TextDocument.URI))
if !exists {
return nil, nil
}
tokens := p.extractTokensInRange(doc, params.Range)
encodedTokens := p.encodeTokens(tokens)
return &SemanticTokens{
Data: encodedTokens,
}, nil
}
// extractTokens extracts semantic tokens from a document
func (p *SemanticTokensProvider) extractTokens(doc *Document) []SemanticToken {
var tokens []SemanticToken
if doc.AST != nil && doc.AST.Root != nil {
p.extractTokensFromNode(doc.AST.Root, doc.SymbolTable, &tokens, doc)
}
// Sort tokens by position
sort.Slice(tokens, func(i, j int) bool {
if tokens[i].Line != tokens[j].Line {
return tokens[i].Line < tokens[j].Line
}
return tokens[i].StartChar < tokens[j].StartChar
})
return tokens
}
// extractTokensInRange extracts semantic tokens in a specific range
func (p *SemanticTokensProvider) extractTokensInRange(doc *Document, r lsp.Range) []SemanticToken {
allTokens := p.extractTokens(doc)
var tokens []SemanticToken
for _, token := range allTokens {
if token.Line >= r.Start.Line && token.Line <= r.End.Line {
tokens = append(tokens, token)
}
}
return tokens
}
// extractTokensFromNode recursively extracts tokens from an AST node
func (p *SemanticTokensProvider) extractTokensFromNode(node parser.Node, symbolTable *symbols.SymbolTable, tokens *[]SemanticToken, doc *Document) {
if node == nil {
return
}
switch n := node.(type) {
case *parser.FileNode:
// Process all top-level items in the file
for _, item := range n.Items {
p.extractTokensFromNode(item, symbolTable, tokens, doc)
}
case *parser.ModuleNode:
// Module name
*tokens = append(*tokens, SemanticToken{
Line: n.Range().Start.Line - 1,
StartChar: n.Range().Start.Column - 1,
Length: len(n.Name),
TokenType: TokenTypeNamespace,
Modifiers: TokenModifierDeclaration | TokenModifierDefinition,
})
// Process parameters
for _, param := range n.Parameters {
p.extractTokensFromNode(param, symbolTable, tokens, doc)
}
// Process ports
for _, port := range n.Ports {
p.extractTokensFromNode(port, symbolTable, tokens, doc)
}
// Process items
for _, item := range n.Items {
p.extractTokensFromNode(item, symbolTable, tokens, doc)
}
case *parser.InterfaceNode:
// Interface name
*tokens = append(*tokens, SemanticToken{
Line: n.Range().Start.Line - 1,
StartChar: n.Range().Start.Column - 1,
Length: len(n.Name),
TokenType: TokenTypeInterface,
Modifiers: TokenModifierDeclaration | TokenModifierDefinition,
})
case *parser.ClassNode:
// Class name
*tokens = append(*tokens, SemanticToken{
Line: n.Range().Start.Line - 1,
StartChar: n.Range().Start.Column - 1,
Length: len(n.Name),
TokenType: TokenTypeClass,
Modifiers: TokenModifierDeclaration | TokenModifierDefinition,
})
case *parser.FunctionNode:
// Function name - need to find the actual position of the name
// The node start position is at "function" keyword, but we need the name position
// Debug: print when we find a function
if n.Name != "" {
*tokens = append(*tokens, SemanticToken{
Line: n.Range().Start.Line - 1,
StartChar: n.Range().Start.Column - 1,
Length: len(n.Name),
TokenType: TokenTypeFunction,
Modifiers: TokenModifierDeclaration | TokenModifierDefinition,
})
}
case *parser.TaskNode:
// Task name
*tokens = append(*tokens, SemanticToken{
Line: n.Range().Start.Line - 1,
StartChar: n.Range().Start.Column - 1,
Length: len(n.Name),
TokenType: TokenTypeFunction,
Modifiers: TokenModifierDeclaration | TokenModifierDefinition,
})
case *parser.ParameterNode:
// Parameter name
*tokens = append(*tokens, SemanticToken{
Line: n.Range().Start.Line - 1,
StartChar: n.Range().Start.Column - 1,
Length: len(n.Name),
TokenType: TokenTypeParameter,
Modifiers: TokenModifierDeclaration | TokenModifierReadonly,
})
case *parser.VariableNode:
// Variable name
*tokens = append(*tokens, SemanticToken{
Line: n.Range().Start.Line - 1,
StartChar: n.Range().Start.Column - 1,
Length: len(n.Name),
TokenType: TokenTypeVariable,
Modifiers: TokenModifierDeclaration,
})
case *parser.PortNode:
// Port name
modifiers := TokenModifierDeclaration
if n.Direction == parser.PortDirectionInput {
modifiers |= TokenModifierReadonly
}
*tokens = append(*tokens, SemanticToken{
Line: n.Range().Start.Line - 1,
StartChar: n.Range().Start.Column - 1,
Length: len(n.Name),
TokenType: TokenTypeProperty,
Modifiers: modifiers,
})
}
// Process children
for _, child := range node.Children() {
p.extractTokensFromNode(child, symbolTable, tokens, doc)
}
}
// encodeTokens encodes semantic tokens according to LSP specification
func (p *SemanticTokensProvider) encodeTokens(tokens []SemanticToken) []uint32 {
if len(tokens) == 0 {
return []uint32{}
}
encoded := make([]uint32, 0, len(tokens)*5)
prevLine := 0
prevChar := 0
for _, token := range tokens {
// Delta encoding as per LSP spec
deltaLine := token.Line - prevLine
deltaStart := token.StartChar
if deltaLine == 0 {
deltaStart = token.StartChar - prevChar
}
encoded = append(encoded,
uint32(deltaLine),
uint32(deltaStart),
uint32(token.Length),
uint32(token.TokenType),
uint32(token.Modifiers),
)
prevLine = token.Line
prevChar = token.StartChar
}
return encoded
}
package lsp
import (
"context"
"encoding/json"
"fmt"
"net"
"os"
"github.com/rs/zerolog/log"
"github.com/sourcegraph/go-lsp"
"github.com/sourcegraph/jsonrpc2"
)
// Server represents the SystemVerilog Language Server
type Server struct {
conn *jsonrpc2.Conn
config *Config
documentManager *DocumentManager
completionProvider *CompletionProvider
definitionProvider *DefinitionProvider
hoverProvider *HoverProvider
referencesProvider *ReferencesProvider
documentSymbolProvider *DocumentSymbolProvider
workspaceSymbolProvider *WorkspaceSymbolProvider
renameProvider *RenameProvider
signatureHelpProvider *SignatureHelpProvider
codeActionsProvider *CodeActionsProvider
inlayHintsProvider *InlayHintsProvider
callHierarchyProvider *CallHierarchyProvider
typeHierarchyProvider *TypeHierarchyProvider
semanticTokensProvider *SemanticTokensProvider
foldingRangeProvider *FoldingRangeProvider
documentLinksProvider *DocumentLinksProvider
shutdown bool
}
// Config holds server configuration
type Config struct {
MaxFileSize int64
EnableTrace bool
}
// NewServer creates a new LSP server instance
func NewServer() *Server {
documentManager := NewDocumentManager()
referencesProvider := NewReferencesProvider(documentManager)
return &Server{
config: &Config{
MaxFileSize: 100 * 1024 * 1024, // 100MB
EnableTrace: false,
},
documentManager: documentManager,
completionProvider: NewCompletionProvider(documentManager),
definitionProvider: NewDefinitionProvider(documentManager),
hoverProvider: NewHoverProvider(documentManager),
referencesProvider: referencesProvider,
documentSymbolProvider: NewDocumentSymbolProvider(documentManager),
workspaceSymbolProvider: NewWorkspaceSymbolProvider(documentManager),
renameProvider: NewRenameProvider(documentManager, referencesProvider),
signatureHelpProvider: NewSignatureHelpProvider(documentManager),
codeActionsProvider: NewCodeActionsProvider(documentManager),
inlayHintsProvider: NewInlayHintsProvider(documentManager),
callHierarchyProvider: NewCallHierarchyProvider(documentManager),
typeHierarchyProvider: NewTypeHierarchyProvider(documentManager),
semanticTokensProvider: NewSemanticTokensProvider(documentManager),
foldingRangeProvider: NewFoldingRangeProvider(documentManager),
documentLinksProvider: NewDocumentLinksProvider(documentManager),
}
}
// RunStdio runs the server using stdio for communication
func (s *Server) RunStdio() error {
log.Info().Msg("Running LSP server in stdio mode")
stream := jsonrpc2.NewBufferedStream(
&stdioReadWriteCloser{},
jsonrpc2.VSCodeObjectCodec{},
)
conn := jsonrpc2.NewConn(
context.Background(),
stream,
jsonrpc2.HandlerWithError(s.handle),
)
s.conn = conn
<-conn.DisconnectNotify()
return nil
}
// RunTCP runs the server on a TCP port
func (s *Server) RunTCP(addr string) error {
log.Info().Str("addr", addr).Msg("Running LSP server in TCP mode")
listener, err := net.Listen("tcp", addr)
if err != nil {
return fmt.Errorf("failed to listen: %w", err)
}
defer listener.Close()
for {
conn, err := listener.Accept()
if err != nil {
log.Error().Err(err).Msg("Failed to accept connection")
continue
}
go s.handleConnection(conn)
}
}
// RunSocket runs the server on a Unix socket
func (s *Server) RunSocket(path string) error {
log.Info().Str("path", path).Msg("Running LSP server in socket mode")
// Remove existing socket
os.Remove(path)
listener, err := net.Listen("unix", path)
if err != nil {
return fmt.Errorf("failed to listen: %w", err)
}
defer listener.Close()
for {
conn, err := listener.Accept()
if err != nil {
log.Error().Err(err).Msg("Failed to accept connection")
continue
}
go s.handleConnection(conn)
}
}
func (s *Server) handleConnection(netConn net.Conn) {
stream := jsonrpc2.NewBufferedStream(netConn, jsonrpc2.VSCodeObjectCodec{})
conn := jsonrpc2.NewConn(
context.Background(),
stream,
jsonrpc2.HandlerWithError(s.handle),
)
<-conn.DisconnectNotify()
netConn.Close()
}
// handle processes incoming JSON-RPC requests
func (s *Server) handle(ctx context.Context, conn *jsonrpc2.Conn, req *jsonrpc2.Request) (interface{}, error) {
method := req.Method
log.Debug().
Str("method", method).
Msg("Handling request")
switch method {
case "initialize":
var params lsp.InitializeParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.initialize(ctx, ¶ms)
case "initialized":
return nil, nil
case "shutdown":
s.shutdown = true
return nil, nil
case "exit":
if s.shutdown {
os.Exit(0)
} else {
os.Exit(1)
}
return nil, nil
case "textDocument/didOpen":
var params lsp.DidOpenTextDocumentParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return nil, s.didOpen(ctx, ¶ms)
case "textDocument/didChange":
var params lsp.DidChangeTextDocumentParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return nil, s.didChange(ctx, ¶ms)
case "textDocument/didClose":
var params lsp.DidCloseTextDocumentParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return nil, s.didClose(ctx, ¶ms)
case "textDocument/completion":
var params lsp.CompletionParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.completionProvider.ProvideCompletion(ctx, ¶ms)
case "textDocument/definition":
var params lsp.TextDocumentPositionParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.definitionProvider.ProvideDefinition(ctx, ¶ms)
case "textDocument/hover":
var params lsp.TextDocumentPositionParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.hoverProvider.ProvideHover(ctx, ¶ms)
case "textDocument/references":
var params lsp.ReferenceParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.referencesProvider.ProvideReferences(ctx, ¶ms)
case "textDocument/documentSymbol":
var params lsp.DocumentSymbolParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.documentSymbolProvider.ProvideDocumentSymbols(ctx, ¶ms)
case "workspace/symbol":
var params lsp.WorkspaceSymbolParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.workspaceSymbolProvider.ProvideWorkspaceSymbols(params.Query), nil
case "textDocument/prepareRename":
var params PrepareRenameParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.renameProvider.PrepareRename(ctx, ¶ms)
case "textDocument/rename":
var params lsp.RenameParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.renameProvider.Rename(ctx, ¶ms)
case "textDocument/signatureHelp":
var params lsp.TextDocumentPositionParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.signatureHelpProvider.ProvideSignatureHelp(ctx, ¶ms)
case "textDocument/codeAction":
var params lsp.CodeActionParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.codeActionsProvider.ProvideCodeActions(ctx, ¶ms)
case "textDocument/inlayHint":
var params InlayHintParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.inlayHintsProvider.ProvideInlayHints(params)
case "textDocument/semanticTokens/full":
var params SemanticTokensParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.semanticTokensProvider.ProvideSemanticTokens(ctx, ¶ms)
case "textDocument/semanticTokens/range":
var params SemanticTokensRangeParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.semanticTokensProvider.ProvideSemanticTokensRange(ctx, ¶ms)
case "textDocument/foldingRange":
var params FoldingRangeParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.foldingRangeProvider.ProvideFoldingRanges(ctx, ¶ms)
case "textDocument/documentLink":
var params DocumentLinkParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.documentLinksProvider.ProvideDocumentLinks(ctx, ¶ms)
case "textDocument/prepareCallHierarchy":
var params lsp.TextDocumentPositionParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.callHierarchyProvider.PrepareCallHierarchy(¶ms)
case "callHierarchy/incomingCalls":
var params CallHierarchyIncomingCallsParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.callHierarchyProvider.IncomingCalls(¶ms)
case "callHierarchy/outgoingCalls":
var params CallHierarchyOutgoingCallsParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.callHierarchyProvider.OutgoingCalls(¶ms)
case "textDocument/prepareTypeHierarchy":
var params lsp.TextDocumentPositionParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.typeHierarchyProvider.PrepareTypeHierarchy(¶ms)
case "typeHierarchy/supertypes":
var params TypeHierarchySupertypesParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.typeHierarchyProvider.Supertypes(¶ms)
case "typeHierarchy/subtypes":
var params TypeHierarchySubtypesParams
if err := json.Unmarshal(*req.Params, ¶ms); err != nil {
return nil, err
}
return s.typeHierarchyProvider.Subtypes(¶ms)
default:
return nil, &jsonrpc2.Error{
Code: jsonrpc2.CodeMethodNotFound,
Message: fmt.Sprintf("method not found: %s", method),
}
}
}
// initialize handles the initialize request
func (s *Server) initialize(ctx context.Context, params *lsp.InitializeParams) (interface{}, error) {
log.Info().
Str("rootUri", string(params.RootURI)).
Msg("Initializing server")
syncOptions := &lsp.TextDocumentSyncOptions{
OpenClose: true,
Change: lsp.TDSKIncremental,
}
return &CustomInitializeResult{
Capabilities: CustomServerCapabilities{
ServerCapabilities: lsp.ServerCapabilities{
TextDocumentSync: &lsp.TextDocumentSyncOptionsOrKind{
Options: syncOptions,
},
CompletionProvider: &lsp.CompletionOptions{
TriggerCharacters: []string{".", "::", "(", "[", " "},
},
HoverProvider: true,
DefinitionProvider: true,
ReferencesProvider: true,
DocumentSymbolProvider: true,
WorkspaceSymbolProvider: true,
RenameProvider: true,
SignatureHelpProvider: &lsp.SignatureHelpOptions{
TriggerCharacters: []string{"(", ","},
},
CodeActionProvider: true,
},
// LSP 3.17 features - using custom types since go-lsp doesn't support them
CallHierarchyProvider: true,
TypeHierarchyProvider: true,
InlayHintProvider: true,
SemanticTokensProvider: &SemanticTokensOptions{
Legend: s.semanticTokensProvider.GetLegend(),
Range: true,
Full: true,
},
FoldingRangeProvider: true,
DocumentLinkProvider: &DocumentLinkOptions{
ResolveProvider: false,
},
},
}, nil
}
// didOpen handles textDocument/didOpen notifications
func (s *Server) didOpen(ctx context.Context, params *lsp.DidOpenTextDocumentParams) error {
uri := string(params.TextDocument.URI)
version := params.TextDocument.Version
content := params.TextDocument.Text
log.Info().
Str("uri", uri).
Int("version", version).
Int("content_length", len(content)).
Msg("Document opened")
// Add document to document manager
_, err := s.documentManager.OpenDocument(uri, version, content)
if err != nil {
log.Error().Err(err).Str("uri", uri).Msg("Failed to open document")
return err
}
// Send diagnostics
s.publishDiagnostics(ctx, uri)
return nil
}
// didChange handles textDocument/didChange notifications
func (s *Server) didChange(ctx context.Context, params *lsp.DidChangeTextDocumentParams) error {
uri := string(params.TextDocument.URI)
version := params.TextDocument.Version
log.Debug().
Str("uri", uri).
Int("version", version).
Int("changes", len(params.ContentChanges)).
Msg("Document changed")
// For now, we'll assume full document sync (incremental sync can be added later)
if len(params.ContentChanges) > 0 {
// Get the full content from the last change (assuming full document sync)
content := params.ContentChanges[len(params.ContentChanges)-1].Text
// Update document in document manager
_, err := s.documentManager.UpdateDocument(uri, version, content)
if err != nil {
log.Error().Err(err).Str("uri", uri).Msg("Failed to update document")
return err
}
// Send diagnostics
s.publishDiagnostics(ctx, uri)
}
return nil
}
// didClose handles textDocument/didClose notifications
func (s *Server) didClose(ctx context.Context, params *lsp.DidCloseTextDocumentParams) error {
uri := string(params.TextDocument.URI)
log.Info().Str("uri", uri).Msg("Document closed")
// Remove document from document manager
err := s.documentManager.CloseDocument(uri)
if err != nil {
log.Error().Err(err).Str("uri", uri).Msg("Failed to close document")
return err
}
// Send empty diagnostics to clear any existing diagnostics
s.publishEmptyDiagnostics(ctx, uri)
return nil
}
// publishDiagnostics sends diagnostics for a document to the client
func (s *Server) publishDiagnostics(ctx context.Context, uri string) {
if s.conn == nil {
return
}
diagnostics := s.documentManager.GetDiagnostics(uri)
// Convert our diagnostics to LSP diagnostics
lspDiagnostics := make([]lsp.Diagnostic, len(diagnostics))
for i, diag := range diagnostics {
lspDiagnostics[i] = lsp.Diagnostic{
Range: lsp.Range{
Start: lsp.Position{
Line: diag.Range.Start.Line,
Character: diag.Range.Start.Character,
},
End: lsp.Position{
Line: diag.Range.End.Line,
Character: diag.Range.End.Character,
},
},
Severity: lsp.DiagnosticSeverity(diag.Severity),
Message: diag.Message,
Source: diag.Source,
}
}
params := &lsp.PublishDiagnosticsParams{
URI: lsp.DocumentURI(uri),
Diagnostics: lspDiagnostics,
}
err := s.conn.Notify(ctx, "textDocument/publishDiagnostics", params)
if err != nil {
log.Error().Err(err).Str("uri", uri).Msg("Failed to publish diagnostics")
} else {
log.Debug().
Str("uri", uri).
Int("count", len(diagnostics)).
Msg("Published diagnostics")
}
}
// publishEmptyDiagnostics sends empty diagnostics to clear existing ones
func (s *Server) publishEmptyDiagnostics(ctx context.Context, uri string) {
if s.conn == nil {
return
}
params := &lsp.PublishDiagnosticsParams{
URI: lsp.DocumentURI(uri),
Diagnostics: []lsp.Diagnostic{},
}
err := s.conn.Notify(ctx, "textDocument/publishDiagnostics", params)
if err != nil {
log.Error().Err(err).Str("uri", uri).Msg("Failed to clear diagnostics")
} else {
log.Debug().Str("uri", uri).Msg("Cleared diagnostics")
}
}
// IO helpers for stdio mode
type stdioReadWriteCloser struct{}
func (stdioReadWriteCloser) Read(p []byte) (int, error) {
return os.Stdin.Read(p)
}
func (stdioReadWriteCloser) Write(p []byte) (int, error) {
return os.Stdout.Write(p)
}
func (stdioReadWriteCloser) Close() error {
return nil
}
package lsp
import (
"context"
"fmt"
"strings"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// SignatureHelpProvider handles textDocument/signatureHelp requests
type SignatureHelpProvider struct {
documentManager *DocumentManager
}
// NewSignatureHelpProvider creates a new signature help provider
func NewSignatureHelpProvider(dm *DocumentManager) *SignatureHelpProvider {
return &SignatureHelpProvider{
documentManager: dm,
}
}
// ProvideSignatureHelp handles signature help requests
func (shp *SignatureHelpProvider) ProvideSignatureHelp(ctx context.Context, params *lsp.TextDocumentPositionParams) (*lsp.SignatureHelp, error) {
uri := string(params.TextDocument.URI)
line := params.Position.Line
character := params.Position.Character
// Get the document
doc, exists := shp.documentManager.GetDocument(uri)
if !exists {
return nil, nil
}
// Get the content and find the current context
content := doc.GetContent()
// Find the function/task call context
callInfo := shp.findCallContext(content, line, character)
if callInfo == nil {
return nil, nil
}
// Get the symbol information for the function/task
signatureHelp := shp.getSignatureHelp(callInfo, uri)
if signatureHelp == nil {
return nil, nil
}
return signatureHelp, nil
}
// callContext represents the context of a function/task call
type callContext struct {
functionName string
activeParam int
paramStartPos int
}
// findCallContext finds the function/task call context at the given position
func (shp *SignatureHelpProvider) findCallContext(content string, line, character int) *callContext {
lines := strings.Split(content, "\n")
if line < 0 || line >= len(lines) {
return nil
}
currentLine := lines[line]
if character < 0 || character > len(currentLine) {
return nil
}
// Look backwards from cursor position to find opening parenthesis
parenDepth := 0
commaCount := 0
// Check if cursor is at or near opening parenthesis
if character >= 0 && character <= len(currentLine) {
// Check if cursor is just after opening parenthesis
if character > 0 && currentLine[character-1] == '(' {
functionName := shp.extractFunctionName(currentLine[:character-1])
if functionName != "" {
return &callContext{
functionName: functionName,
activeParam: 0,
paramStartPos: character,
}
}
}
// Check if cursor is right before opening parenthesis (at end of function name)
if character < len(currentLine) && currentLine[character] == '(' {
functionName := shp.extractFunctionName(currentLine[:character])
if functionName != "" {
return &callContext{
functionName: functionName,
activeParam: 0,
paramStartPos: character + 1,
}
}
}
}
// Scan backwards from cursor position
for i := character - 1; i >= 0; i-- {
ch := currentLine[i]
if ch == ')' {
parenDepth++
} else if ch == '(' {
if parenDepth == 0 {
// Found unmatched opening parenthesis
functionName := shp.extractFunctionName(currentLine[:i])
if functionName != "" {
// Count commas from opening paren to cursor
commaCount = 0
for j := i + 1; j < character; j++ {
if currentLine[j] == ',' {
commaCount++
}
}
return &callContext{
functionName: functionName,
activeParam: commaCount,
paramStartPos: i + 1,
}
}
} else {
parenDepth--
}
}
}
return nil
}
// extractFunctionName extracts the function/task name before the opening parenthesis
func (shp *SignatureHelpProvider) extractFunctionName(text string) string {
// Trim whitespace from the end
text = strings.TrimRight(text, " \t")
if text == "" {
return ""
}
// Find the start of the identifier
end := len(text)
start := end - 1
for start >= 0 {
ch := text[start]
if (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') ||
(ch >= '0' && ch <= '9') || ch == '_' || ch == '$' {
start--
} else {
break
}
}
start++
if start < end {
return text[start:end]
}
return ""
}
// getSignatureHelp gets signature help for the given call context
func (shp *SignatureHelpProvider) getSignatureHelp(callInfo *callContext, uri string) *lsp.SignatureHelp {
// First try to find in symbol table
doc, _ := shp.documentManager.GetDocument(uri)
if doc == nil {
return nil
}
// Look for function/task in current document
var signatureInfo *lsp.SignatureInformation
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, sym := range allSymbols {
if sym.Name == callInfo.functionName &&
(sym.Type == symbols.SymbolTypeFunction || sym.Type == symbols.SymbolTypeTask) {
signatureInfo = shp.createSignatureInfo(sym)
break
}
}
// If not found in current document, search all documents
if signatureInfo == nil {
allDocs := shp.documentManager.GetAllDocuments()
for docURI, otherDoc := range allDocs {
if docURI == uri {
continue
}
otherSymbols := otherDoc.SymbolTable.GetAllSymbols()
for _, sym := range otherSymbols {
if sym.Name == callInfo.functionName &&
(sym.Type == symbols.SymbolTypeFunction || sym.Type == symbols.SymbolTypeTask) {
signatureInfo = shp.createSignatureInfo(sym)
break
}
}
if signatureInfo != nil {
break
}
}
}
if signatureInfo == nil {
return nil
}
activeParam := callInfo.activeParam
return &lsp.SignatureHelp{
Signatures: []lsp.SignatureInformation{*signatureInfo},
ActiveSignature: 0,
ActiveParameter: activeParam,
}
}
// createSignatureInfo creates signature information from a symbol
func (shp *SignatureHelpProvider) createSignatureInfo(sym *symbols.Symbol) *lsp.SignatureInformation {
// Build the signature string
var signature strings.Builder
if sym.Type == symbols.SymbolTypeFunction {
// Function signature
if returnType, ok := sym.Attributes["returnType"].(string); ok && returnType != "" {
signature.WriteString(fmt.Sprintf("function %s %s(", returnType, sym.Name))
} else {
signature.WriteString(fmt.Sprintf("function %s(", sym.Name))
}
} else {
// Task signature
signature.WriteString(fmt.Sprintf("task %s(", sym.Name))
}
// Get parameters
var paramLabels []lsp.ParameterInformation
var paramStrings []string
if params, ok := sym.Attributes["parameters"].([]interface{}); ok {
for _, param := range params {
if paramMap, ok := param.(map[string]interface{}); ok {
paramStr := shp.formatParameter(paramMap)
paramStrings = append(paramStrings, paramStr)
// Create parameter label with documentation
paramLabel := lsp.ParameterInformation{
Label: paramStr,
}
// Add parameter documentation if available
if doc, ok := paramMap["doc"].(string); ok && doc != "" {
paramLabel.Documentation = doc
}
paramLabels = append(paramLabels, paramLabel)
}
}
}
signature.WriteString(strings.Join(paramStrings, ", "))
signature.WriteString(")")
// Create signature information
sigInfo := &lsp.SignatureInformation{
Label: signature.String(),
Parameters: paramLabels,
}
// Add function/task documentation if available
if doc, ok := sym.Attributes["documentation"].(string); ok && doc != "" {
sigInfo.Documentation = doc
}
return sigInfo
}
// formatParameter formats a parameter for display
func (shp *SignatureHelpProvider) formatParameter(param map[string]interface{}) string {
var result strings.Builder
// Add direction if present
if direction, ok := param["direction"].(string); ok && direction != "" {
result.WriteString(direction)
result.WriteString(" ")
}
// Add type
if dataType, ok := param["type"].(string); ok && dataType != "" {
result.WriteString(dataType)
result.WriteString(" ")
}
// Add name
if name, ok := param["name"].(string); ok {
result.WriteString(name)
}
// Add default value if present
if defaultVal, ok := param["default"].(string); ok && defaultVal != "" {
result.WriteString(" = ")
result.WriteString(defaultVal)
}
return result.String()
}
package lsp
import (
"fmt"
"github.com/adicens/systemverilog-lsp/internal/symbols"
"github.com/sourcegraph/go-lsp"
)
// TypeHierarchyProvider provides type hierarchy functionality for classes and interfaces
type TypeHierarchyProvider struct {
documents *DocumentManager
}
// NewTypeHierarchyProvider creates a new type hierarchy provider
func NewTypeHierarchyProvider(documents *DocumentManager) *TypeHierarchyProvider {
return &TypeHierarchyProvider{
documents: documents,
}
}
// PrepareTypeHierarchy prepares the type hierarchy for a given position
func (p *TypeHierarchyProvider) PrepareTypeHierarchy(params *lsp.TextDocumentPositionParams) ([]TypeHierarchyItem, error) {
doc, exists := p.documents.GetDocument(string(params.TextDocument.URI))
if !exists {
return nil, fmt.Errorf("document not found: %s", params.TextDocument.URI)
}
// Get the word at the position
content := doc.GetContent()
word := getWordAtPosition(content, params.Position)
if word == "" {
return nil, nil
}
// Find the symbol at the position
symbol := p.findSymbolAtPosition(doc, params.Position, word)
if symbol == nil {
return nil, nil
}
// Only classes and interfaces can have type hierarchies
if symbol.Type != symbols.SymbolTypeClass && symbol.Type != symbols.SymbolTypeInterface {
return nil, nil
}
// Create type hierarchy item
item := p.symbolToTypeHierarchyItem(symbol, string(params.TextDocument.URI))
return []TypeHierarchyItem{item}, nil
}
// Supertypes finds all supertypes (parent classes/interfaces) of the given type hierarchy item
func (p *TypeHierarchyProvider) Supertypes(params *TypeHierarchySupertypesParams) ([]TypeHierarchyItem, error) {
// Get the document containing the type
doc, exists := p.documents.GetDocument(string(params.Item.URI))
if !exists {
return nil, fmt.Errorf("document not found: %s", params.Item.URI)
}
// Find the type symbol
symbol := p.findSymbolByName(doc, params.Item.Name)
if symbol == nil {
return nil, nil
}
var supertypes []TypeHierarchyItem
// Follow the inheritance chain upward
current := symbol
visited := make(map[string]bool) // Prevent infinite loops
for current != nil {
// Get the extends attribute
extendsValue, hasExtends := current.GetAttribute("extends")
if !hasExtends || extendsValue == nil {
break
}
parentName := fmt.Sprintf("%v", extendsValue)
if parentName == "" || visited[parentName] {
break
}
visited[parentName] = true
// Find the parent class/interface in the workspace
parentSymbol := p.findSymbolInWorkspace(parentName)
if parentSymbol == nil {
break
}
// Only include classes and interfaces
if parentSymbol.Type == symbols.SymbolTypeClass || parentSymbol.Type == symbols.SymbolTypeInterface {
parentURI := p.findSymbolDocument(parentSymbol)
if parentURI != "" {
supertypeItem := p.symbolToTypeHierarchyItem(parentSymbol, parentURI)
supertypes = append(supertypes, supertypeItem)
current = parentSymbol
} else {
break
}
} else {
break
}
}
return supertypes, nil
}
// Subtypes finds all subtypes (child classes/interfaces) of the given type hierarchy item
func (p *TypeHierarchyProvider) Subtypes(params *TypeHierarchySubtypesParams) ([]TypeHierarchyItem, error) {
targetName := params.Item.Name
var subtypes []TypeHierarchyItem
// Search all documents for classes/interfaces that extend the target
docs := p.documents.GetAllDocuments()
for _, doc := range docs {
if doc.SymbolTable == nil {
continue
}
// Get all symbols in the document
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, symbol := range allSymbols {
// Only check classes and interfaces
if symbol.Type != symbols.SymbolTypeClass && symbol.Type != symbols.SymbolTypeInterface {
continue
}
// Check if this symbol extends the target
extendsValue, hasExtends := symbol.GetAttribute("extends")
if !hasExtends || extendsValue == nil {
continue
}
parentName := fmt.Sprintf("%v", extendsValue)
if parentName == targetName {
// This symbol extends our target
subtypeItem := p.symbolToTypeHierarchyItem(symbol, doc.URI)
subtypes = append(subtypes, subtypeItem)
}
}
}
return subtypes, nil
}
// Helper functions
func (p *TypeHierarchyProvider) findSymbolAtPosition(doc *Document, pos lsp.Position, word string) *symbols.Symbol {
if doc.SymbolTable == nil {
return nil
}
// Search through all symbols to find one with matching name
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, symbol := range allSymbols {
if symbol.Name == word {
// For now, return the first match
// In a real implementation, we would check position to find the exact symbol
return symbol
}
}
return nil
}
func (p *TypeHierarchyProvider) findSymbolByName(doc *Document, name string) *symbols.Symbol {
if doc.SymbolTable == nil {
return nil
}
// Search through all symbols to find one with matching name
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, symbol := range allSymbols {
if symbol.Name == name {
return symbol
}
}
return nil
}
func (p *TypeHierarchyProvider) findSymbolInWorkspace(name string) *symbols.Symbol {
docs := p.documents.GetAllDocuments()
for _, doc := range docs {
if doc.SymbolTable != nil {
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, symbol := range allSymbols {
if symbol.Name == name {
return symbol
}
}
}
}
return nil
}
func (p *TypeHierarchyProvider) findSymbolDocument(symbol *symbols.Symbol) string {
docs := p.documents.GetAllDocuments()
for _, doc := range docs {
if doc.SymbolTable != nil {
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, s := range allSymbols {
if s == symbol {
return doc.URI
}
}
}
}
return ""
}
func (p *TypeHierarchyProvider) symbolToTypeHierarchyItem(symbol *symbols.Symbol, uri string) TypeHierarchyItem {
detail := ""
if symbol.Type == symbols.SymbolTypeClass {
detail = "class"
// Add inheritance information to detail
if extendsValue, hasExtends := symbol.GetAttribute("extends"); hasExtends && extendsValue != nil {
parentName := fmt.Sprintf("%v", extendsValue)
if parentName != "" {
detail = fmt.Sprintf("class extends %s", parentName)
}
}
} else if symbol.Type == symbols.SymbolTypeInterface {
detail = "interface"
// Add inheritance information to detail for interfaces
if extendsValue, hasExtends := symbol.GetAttribute("extends"); hasExtends && extendsValue != nil {
parentName := fmt.Sprintf("%v", extendsValue)
if parentName != "" {
detail = fmt.Sprintf("interface extends %s", parentName)
}
}
}
return TypeHierarchyItem{
Name: symbol.Name,
Kind: symbolTypeToLSPKind(symbol.Type),
Detail: detail,
URI: lsp.DocumentURI(uri),
Range: p.symbolPositionToRange(symbol.Position),
SelectionRange: p.symbolPositionToRange(symbol.Position),
}
}
// symbolPositionToRange converts internal position to LSP range
func (p *TypeHierarchyProvider) symbolPositionToRange(position symbols.Position) lsp.Range {
// For now, create a range that starts and ends at the symbol position
// In a more sophisticated implementation, we would track the full range of the symbol
lspPos := lsp.Position{
Line: position.Line,
Character: position.Column,
}
return lsp.Range{
Start: lspPos,
End: lspPos,
}
}
package lsp
import (
"sort"
"strings"
"github.com/adicens/systemverilog-lsp/internal/symbols"
)
// WorkspaceSymbolProvider provides workspace-wide symbol search functionality for the LSP.
// Enables users to quickly find and navigate to any symbol across the entire workspace.
type WorkspaceSymbolProvider struct {
documentManager *DocumentManager
}
// NewWorkspaceSymbolProvider creates a new WorkspaceSymbolProvider instance.
func NewWorkspaceSymbolProvider(documentManager *DocumentManager) *WorkspaceSymbolProvider {
return &WorkspaceSymbolProvider{
documentManager: documentManager,
}
}
// LSPSymbolKind represents LSP SymbolKind constants
type LSPSymbolKind int
const (
SymbolKindFile LSPSymbolKind = 1
SymbolKindModule LSPSymbolKind = 2
SymbolKindNamespace LSPSymbolKind = 3
SymbolKindPackage LSPSymbolKind = 4
SymbolKindClass LSPSymbolKind = 5
SymbolKindMethod LSPSymbolKind = 6
SymbolKindProperty LSPSymbolKind = 7
SymbolKindField LSPSymbolKind = 8
SymbolKindConstructor LSPSymbolKind = 9
SymbolKindEnum LSPSymbolKind = 10
SymbolKindInterface LSPSymbolKind = 11
SymbolKindFunction LSPSymbolKind = 12
SymbolKindVariable LSPSymbolKind = 13
SymbolKindConstant LSPSymbolKind = 14
SymbolKindString LSPSymbolKind = 15
SymbolKindNumber LSPSymbolKind = 16
SymbolKindBoolean LSPSymbolKind = 17
SymbolKindArray LSPSymbolKind = 18
SymbolKindObject LSPSymbolKind = 19
SymbolKindKey LSPSymbolKind = 20
SymbolKindNull LSPSymbolKind = 21
SymbolKindEnumMember LSPSymbolKind = 22
SymbolKindStruct LSPSymbolKind = 23
SymbolKindEvent LSPSymbolKind = 24
SymbolKindOperator LSPSymbolKind = 25
SymbolKindTypeParameter LSPSymbolKind = 26
)
// WorkspaceSymbol represents a symbol in the workspace
type WorkspaceSymbol struct {
Name string `json:"name"`
Kind LSPSymbolKind `json:"kind"`
Location Location `json:"location"`
ContainerName string `json:"containerName,omitempty"`
}
// Location represents a location in a document
type Location struct {
URI string `json:"uri"`
Range *LSPRange `json:"range"`
}
// LSPRange represents a range in a document
type LSPRange struct {
Start LSPPosition `json:"start"`
End LSPPosition `json:"end"`
}
// LSPPosition represents a position in a document
type LSPPosition struct {
Line int `json:"line"`
Character int `json:"character"`
}
// ProvideWorkspaceSymbols provides workspace symbols matching the given query.
// Implements fuzzy matching for flexible symbol search.
func (p *WorkspaceSymbolProvider) ProvideWorkspaceSymbols(query string) []WorkspaceSymbol {
// Return empty array for empty query to avoid overwhelming results
if query == "" || strings.TrimSpace(query) == "" {
return []WorkspaceSymbol{}
}
normalizedQuery := strings.ToLower(strings.TrimSpace(query))
var workspaceSymbols []WorkspaceSymbol
// Get all symbols from all documents
documents := p.documentManager.GetAllDocuments()
for _, doc := range documents {
if doc.SymbolTable == nil {
continue
}
// Get all symbols from this document
allSymbols := doc.SymbolTable.GetAllSymbols()
for _, symbol := range allSymbols {
// Perform fuzzy matching
if p.fuzzyMatch(symbol.Name, normalizedQuery) {
workspaceSymbol := WorkspaceSymbol{
Name: symbol.Name,
Kind: p.mapToSymbolKind(symbol.Type),
Location: Location{
URI: doc.URI,
Range: &LSPRange{
Start: LSPPosition{
Line: symbol.Position.Line,
Character: symbol.Position.Column,
},
End: LSPPosition{
Line: symbol.Position.Line,
Character: symbol.Position.Column + len(symbol.Name),
},
},
},
ContainerName: p.getContainerName(symbol),
}
workspaceSymbols = append(workspaceSymbols, workspaceSymbol)
}
}
}
// Sort results by relevance (exact matches first, then fuzzy matches)
p.sortWorkspaceSymbols(workspaceSymbols, normalizedQuery)
// Limit results to prevent overwhelming the client
const maxResults = 1000
if len(workspaceSymbols) > maxResults {
workspaceSymbols = workspaceSymbols[:maxResults]
}
return workspaceSymbols
}
// fuzzyMatch performs fuzzy matching between a symbol name and a query.
// Supports:
// - Case-insensitive matching
// - Partial matches
// - Non-contiguous character matching (e.g., "uc" matches "uart_controller")
func (p *WorkspaceSymbolProvider) fuzzyMatch(symbolName, query string) bool {
name := strings.ToLower(symbolName)
queryLower := strings.ToLower(query)
// Exact substring match
if strings.Contains(name, queryLower) {
return true
}
// Fuzzy match: all query characters must appear in order
queryIndex := 0
for i := 0; i < len(name) && queryIndex < len(queryLower); i++ {
if name[i] == queryLower[queryIndex] {
queryIndex++
}
}
return queryIndex == len(queryLower)
}
// mapToSymbolKind maps internal symbol types to LSP SymbolKind enum.
func (p *WorkspaceSymbolProvider) mapToSymbolKind(symbolType symbols.SymbolType) LSPSymbolKind {
switch symbolType {
case symbols.SymbolTypeModule:
return SymbolKindModule
case symbols.SymbolTypeInterface:
return SymbolKindInterface
case symbols.SymbolTypePackage:
return SymbolKindPackage
case symbols.SymbolTypeClass:
return SymbolKindClass
case symbols.SymbolTypeFunction:
return SymbolKindFunction
case symbols.SymbolTypeTask:
return SymbolKindFunction
case symbols.SymbolTypeParameter:
return SymbolKindConstant
case symbols.SymbolTypeVariable:
return SymbolKindVariable
case symbols.SymbolTypePort:
return SymbolKindVariable
case symbols.SymbolTypeTypedef:
return SymbolKindTypeParameter
case symbols.SymbolTypeEnum:
return SymbolKindEnum
case symbols.SymbolTypeStruct:
return SymbolKindStruct
case symbols.SymbolTypeUnion:
return SymbolKindStruct
case symbols.SymbolTypeInstance:
return SymbolKindObject
case symbols.SymbolTypeGenerate:
return SymbolKindNamespace
case symbols.SymbolTypeAlways:
return SymbolKindMethod
case symbols.SymbolTypeInitial:
return SymbolKindMethod
case symbols.SymbolTypeConstraint:
return SymbolKindProperty
case symbols.SymbolTypeProperty:
return SymbolKindProperty
case symbols.SymbolTypeSequence:
return SymbolKindMethod
case symbols.SymbolTypeAssertion:
return SymbolKindEvent
case symbols.SymbolTypeCoverage:
return SymbolKindObject
default:
return SymbolKindVariable
}
}
// getContainerName gets container name for a symbol to provide context.
func (p *WorkspaceSymbolProvider) getContainerName(symbol *symbols.Symbol) string {
// If symbol has a parent scope, use it as container
if symbol.Scope != nil && symbol.Scope.Name != "" && symbol.Scope.Name != "global" {
return symbol.Scope.Name
}
// Return the symbol type as additional context if no scope
return strings.ToLower(symbol.Type.String())
}
// sortWorkspaceSymbols sorts results by relevance (exact matches first, then fuzzy matches)
func (p *WorkspaceSymbolProvider) sortWorkspaceSymbols(symbols []WorkspaceSymbol, query string) {
sort.Slice(symbols, func(i, j int) bool {
a, b := symbols[i], symbols[j]
// Exact matches first
aExact := strings.ToLower(a.Name) == query
bExact := strings.ToLower(b.Name) == query
if aExact && !bExact {
return true
}
if !aExact && bExact {
return false
}
// Secondary sort by whether query appears at the start
aStarts := strings.HasPrefix(strings.ToLower(a.Name), query)
bStarts := strings.HasPrefix(strings.ToLower(b.Name), query)
if aStarts && !bStarts {
return true
}
if !aStarts && bStarts {
return false
}
// Finally sort alphabetically
return strings.Compare(a.Name, b.Name) < 0
})
}
package parser
// Position represents a position in the source code
type Position struct {
Line int
Column int
Offset int
}
// Range represents a range in the source code
type Range struct {
Start Position
End Position
}
// Node is the base interface for all AST nodes
type Node interface {
Type() NodeType
Range() Range
Children() []Node
}
// NodeType represents the type of an AST node
type NodeType int
const (
NodeTypeFile NodeType = iota
NodeTypeModule
NodeTypeInterface
NodeTypeClass
NodeTypeFunction
NodeTypeTask
NodeTypePort
NodeTypeParameter
NodeTypeVariable
NodeTypeAssignment
NodeTypeInstance
NodeTypeAlways
NodeTypeInitial
NodeTypeGenerate
NodeTypeIf
NodeTypeCase
NodeTypeFor
NodeTypeWhile
// SVA node types
NodeTypeProperty
NodeTypeSequence
NodeTypeAssertion
NodeTypeClockingEvent
NodeTypeCovergroup
NodeTypeCoverpoint
NodeTypeCross
NodeTypeBin
NodeTypeConstraint
// SVA expression node types
NodeTypePropertyExpr
NodeTypeSequenceExpr
NodeTypeBinaryOp
NodeTypeUnaryOp
NodeTypeRepetition
NodeTypeDelay
NodeTypePropertyRef
NodeTypeSequenceRef
NodeTypeExpression
// Constraint node types
NodeTypeSolveBefore
NodeTypeIfConstraint
NodeTypeForeachConstraint
NodeTypeDisableSoft
NodeTypeInsideExpr
NodeTypeDistExpr
NodeTypeRangeExpr
NodeTypeDistItem
NodeTypeIdentifier
NodeTypeNumber
NodeTypeFunctionCall
NodeTypeArrayAccess
NodeTypeArrayLiteral
NodeTypeSystemFunctionCall
)
// BaseNode provides common functionality for all nodes
type BaseNode struct {
NodeType NodeType
Pos Range
Childs []Node
}
func (n *BaseNode) Type() NodeType { return n.NodeType }
func (n *BaseNode) Range() Range { return n.Pos }
func (n *BaseNode) Children() []Node { return n.Childs }
// FileNode represents the root node containing all top-level constructs
type FileNode struct {
BaseNode
Items []Node
}
// ModuleNode represents a SystemVerilog module
type ModuleNode struct {
BaseNode
Name string
Parameters []*ParameterNode
Ports []*PortNode
Items []Node
}
// InterfaceNode represents a SystemVerilog interface
type InterfaceNode struct {
BaseNode
Name string
Extends string
Parameters []*ParameterNode
Ports []*PortNode
Items []Node
}
// ClassNode represents a SystemVerilog class
type ClassNode struct {
BaseNode
Name string
Extends string
Parameters []*ParameterNode
Items []Node
}
// PortNode represents a module/interface port
type PortNode struct {
BaseNode
Direction PortDirection
Name string
DataType string
Width *RangeNode
IsInterface bool
Modport string
}
// PortDirection represents the direction of a port
type PortDirection int
const (
PortDirectionInput PortDirection = iota
PortDirectionOutput
PortDirectionInout
PortDirectionInterface
)
// ParameterNode represents a parameter or localparam
type ParameterNode struct {
BaseNode
Name string
DataType string
DefaultValue string
IsLocal bool
IsType bool
}
// VariableNode represents a variable declaration
type VariableNode struct {
BaseNode
Name string
DataType string
Width *RangeNode
InitValue string
IsRand bool
IsRandc bool
}
// RangeNode represents a range specification [high:low]
type RangeNode struct {
BaseNode
High string
Low string
}
// FunctionNode represents a function declaration
type FunctionNode struct {
BaseNode
Name string
ReturnType string
Parameters []*ParameterNode
Body []Node
}
// TaskNode represents a task declaration
type TaskNode struct {
BaseNode
Name string
Parameters []*ParameterNode
Body []Node
}
// AlwaysNode represents an always block
type AlwaysNode struct {
BaseNode
AlwaysType AlwaysType
Sensitivity []string
Body []Node
}
// AlwaysType represents the type of always block
type AlwaysType int
const (
AlwaysTypeBasic AlwaysType = iota
AlwaysTypeComb
AlwaysTypeLatch
AlwaysTypeFF
)
// AssignmentNode represents an assignment statement
type AssignmentNode struct {
BaseNode
LHS string
RHS string
IsContinuous bool
IsBlocking bool
}
// InstanceNode represents a module/interface instantiation
type InstanceNode struct {
BaseNode
ModuleName string
InstanceName string
Parameters map[string]string
Connections map[string]string
}
// IfNode represents an if statement
type IfNode struct {
BaseNode
Condition string
ThenBranch Node
ElseBranch Node
}
// GenerateNode represents a generate block
type GenerateNode struct {
BaseNode
GenerateType GenerateType
Condition string
Body []Node
}
// PropertyNode represents an SVA property declaration
type PropertyNode struct {
BaseNode
Name string
Parameters []*ParameterNode
ClockingEvent *ClockingEventNode
DisableIff string
Body Node // PropertyExpression
}
// SequenceNode represents an SVA sequence declaration
type SequenceNode struct {
BaseNode
Name string
Parameters []*ParameterNode
Body Node // SequenceExpression
}
// AssertionNode represents an SVA assertion statement
type AssertionNode struct {
BaseNode
AssertionType AssertionType
Label string
Property Node // PropertyExpression or property name
ActionBlock Node // optional else clause
IsConcurrent bool
}
// ClockingEventNode represents a clocking event @(edge signal)
type ClockingEventNode struct {
BaseNode
Edge EdgeType
Expression string
}
// CovergroupNode represents a coverage group
type CovergroupNode struct {
BaseNode
Name string
Parameters []*ParameterNode
Event *ClockingEventNode
Coverpoints []*CoverpointNode
Crosses []*CrossNode
}
// CoverpointNode represents a coverpoint in a covergroup
type CoverpointNode struct {
BaseNode
Name string
Expression string
Bins []BinNode
Options map[string]string
}
// CrossNode represents a cross coverage in a covergroup
type CrossNode struct {
BaseNode
Name string
Coverpoints []string
Bins []BinNode
Options map[string]string
}
// BinNode represents a bin specification
type BinNode struct {
BaseNode
Name string
BinType BinType
ArraySize string
Values []string
Options map[string]string
}
// ConstraintNode represents a constraint block
type ConstraintNode struct {
BaseNode
Name string
Body []Node // constraint expressions
IsStatic bool
IsSoft bool
}
// AssertionType represents the type of assertion
type AssertionType int
const (
AssertionTypeAssert AssertionType = iota
AssertionTypeAssume
AssertionTypeCover
AssertionTypeRestrict
AssertionTypeExpect
)
// EdgeType represents clock edge types
type EdgeType int
const (
EdgeTypePosedge EdgeType = iota
EdgeTypeNegedge
EdgeTypeEdge
EdgeTypeNone
)
// BinType represents the type of coverage bin
type BinType int
const (
BinTypeDefault BinType = iota
BinTypeBins
BinTypeIllegal
BinTypeIgnore
BinTypeWildcard
)
// PropertyExprNode represents a property expression in SVA
type PropertyExprNode struct {
BaseNode
ExprType PropertyExprType
Operator string
Left Node
Right Node
Expr Node // for unary expressions
}
// SequenceExprNode represents a sequence expression in SVA
type SequenceExprNode struct {
BaseNode
ExprType SequenceExprType
Operator string
Left Node
Right Node
Expr Node // for unary expressions
}
// BinaryOpNode represents a binary operation in SVA
type BinaryOpNode struct {
BaseNode
Operator string
Left Node
Right Node
}
// UnaryOpNode represents a unary operation in SVA
type UnaryOpNode struct {
BaseNode
Operator string
Expr Node
}
// RepetitionNode represents repetition operators [*], [+], [=]
type RepetitionNode struct {
BaseNode
RepType RepetitionType
Expr Node
MinCount string
MaxCount string
}
// DelayNode represents delay operators ##
type DelayNode struct {
BaseNode
DelayType DelayType
Count string
MinCount string
MaxCount string
}
// PropertyRefNode represents a reference to a property
type PropertyRefNode struct {
BaseNode
Name string
}
// SequenceRefNode represents a reference to a sequence
type SequenceRefNode struct {
BaseNode
Name string
}
// ExpressionNode represents a simple expression
type ExpressionNode struct {
BaseNode
Text string
}
// PropertyExprType represents types of property expressions
type PropertyExprType int
const (
PropertyExprTypeSimple PropertyExprType = iota
PropertyExprTypeNot
PropertyExprTypeAnd
PropertyExprTypeOr
PropertyExprTypeImplication
PropertyExprTypeNonOverlapImplication
PropertyExprTypeOverlapImplication
PropertyExprTypeThroughout
PropertyExprTypeWithin
PropertyExprTypeIntersect
PropertyExprTypeIff
PropertyExprTypeSequence
)
// SequenceExprType represents types of sequence expressions
type SequenceExprType int
const (
SequenceExprTypeSimple SequenceExprType = iota
SequenceExprTypeAnd
SequenceExprTypeOr
SequenceExprTypeIntersect
SequenceExprTypeThroughout
SequenceExprTypeWithin
SequenceExprTypeFirstMatch
SequenceExprTypeConcat
SequenceExprTypeRepetition
SequenceExprTypeDelay
)
// RepetitionType represents types of repetition operators
type RepetitionType int
const (
RepetitionTypeConsecutive RepetitionType = iota // [*]
RepetitionTypeNonConsecutive // [=]
RepetitionTypeGoto // [->]
)
// DelayType represents types of delay operators
type DelayType int
const (
DelayTypeFixed DelayType = iota // ##n
DelayTypeRange // ##[n:m]
)
// GenerateType represents the type of generate construct
// FileNode Children method
func (n *FileNode) Children() []Node {
return n.Items
}
// PropertyNode Children method
func (n *PropertyNode) Children() []Node {
children := make([]Node, 0)
if n.ClockingEvent != nil {
children = append(children, n.ClockingEvent)
}
if n.Body != nil {
children = append(children, n.Body)
}
return children
}
// SequenceNode Children method
func (n *SequenceNode) Children() []Node {
if n.Body != nil {
return []Node{n.Body}
}
return []Node{}
}
// AssertionNode Children method
func (n *AssertionNode) Children() []Node {
children := make([]Node, 0)
if n.Property != nil {
children = append(children, n.Property)
}
if n.ActionBlock != nil {
children = append(children, n.ActionBlock)
}
return children
}
// ClockingEventNode Children method
func (n *ClockingEventNode) Children() []Node {
return []Node{}
}
// CovergroupNode Children method
func (n *CovergroupNode) Children() []Node {
children := make([]Node, 0)
if n.Event != nil {
children = append(children, n.Event)
}
for _, cp := range n.Coverpoints {
children = append(children, cp)
}
for _, cr := range n.Crosses {
children = append(children, cr)
}
return children
}
// CoverpointNode Children method
func (n *CoverpointNode) Children() []Node {
children := make([]Node, 0)
for i := range n.Bins {
children = append(children, &n.Bins[i])
}
return children
}
// CrossNode Children method
func (n *CrossNode) Children() []Node {
children := make([]Node, 0)
for i := range n.Bins {
children = append(children, &n.Bins[i])
}
return children
}
// BinNode Children method
func (n *BinNode) Children() []Node {
return []Node{}
}
// ConstraintNode Children method
func (n *ConstraintNode) Children() []Node {
return n.Body
}
type GenerateType int
const (
GenerateTypeFor GenerateType = iota
GenerateTypeIf
GenerateTypeCase
)
// AST represents the complete Abstract Syntax Tree
type AST struct {
Root Node
Errors []ParseError
FileName string
}
// ParseError represents a parsing error
type ParseError struct {
Message string
Position Position
}
// PropertyExprNode Children method
func (n *PropertyExprNode) Children() []Node {
children := make([]Node, 0)
if n.Left != nil {
children = append(children, n.Left)
}
if n.Right != nil {
children = append(children, n.Right)
}
if n.Expr != nil {
children = append(children, n.Expr)
}
return children
}
// SequenceExprNode Children method
func (n *SequenceExprNode) Children() []Node {
children := make([]Node, 0)
if n.Left != nil {
children = append(children, n.Left)
}
if n.Right != nil {
children = append(children, n.Right)
}
if n.Expr != nil {
children = append(children, n.Expr)
}
return children
}
// BinaryOpNode Children method
func (n *BinaryOpNode) Children() []Node {
children := make([]Node, 0, 2)
if n.Left != nil {
children = append(children, n.Left)
}
if n.Right != nil {
children = append(children, n.Right)
}
return children
}
// UnaryOpNode Children method
func (n *UnaryOpNode) Children() []Node {
if n.Expr != nil {
return []Node{n.Expr}
}
return []Node{}
}
// RepetitionNode Children method
func (n *RepetitionNode) Children() []Node {
if n.Expr != nil {
return []Node{n.Expr}
}
return []Node{}
}
// DelayNode Children method
func (n *DelayNode) Children() []Node {
return []Node{}
}
// PropertyRefNode Children method
func (n *PropertyRefNode) Children() []Node {
return []Node{}
}
// SequenceRefNode Children method
func (n *SequenceRefNode) Children() []Node {
return []Node{}
}
// ExpressionNode Children method
func (n *ExpressionNode) Children() []Node {
return []Node{}
}
// Constraint-specific nodes
// SolveBeforeNode represents solve...before constraint
type SolveBeforeNode struct {
BaseNode
SolveList []string
BeforeList []string
}
// IfConstraintNode represents if-else constraint
type IfConstraintNode struct {
BaseNode
Condition Node
ThenConstraint Node
ElseConstraint Node
}
// ForeachConstraintNode represents foreach constraint loop
type ForeachConstraintNode struct {
BaseNode
ArrayName string
LoopVars []string
Body Node
}
// DisableSoftNode represents disable soft constraint
type DisableSoftNode struct {
BaseNode
Constraint Node
}
// InsideExprNode represents inside operator expression
type InsideExprNode struct {
BaseNode
Expression Node
RangeList []Node
}
// DistExprNode represents dist operator expression
type DistExprNode struct {
BaseNode
Expression Node
Items []Node
}
// RangeExprNode represents a range expression (start:end)
type RangeExprNode struct {
BaseNode
Start Node
End Node
}
// DistItemNode represents a distribution item
type DistItemNode struct {
BaseNode
Value Node
Weight Node
WeightType string // ":=" or ":/"
}
// IdentifierNode represents an identifier
type IdentifierNode struct {
BaseNode
Name string
}
// NumberNode represents a numeric literal
type NumberNode struct {
BaseNode
Value string
}
// FunctionCallNode represents a function call
type FunctionCallNode struct {
BaseNode
Name string
Arguments []Node
}
// ArrayAccessNode represents array access
type ArrayAccessNode struct {
BaseNode
Array Node
Index Node
}
// ArrayLiteralNode represents array literal
type ArrayLiteralNode struct {
BaseNode
Elements []Node
}
// SystemFunctionCallNode represents system function call like $random
type SystemFunctionCallNode struct {
BaseNode
Name string
Arguments []Node
}
// Children methods for constraint nodes
func (n *SolveBeforeNode) Children() []Node {
return []Node{}
}
func (n *IfConstraintNode) Children() []Node {
children := make([]Node, 0, 3)
if n.Condition != nil {
children = append(children, n.Condition)
}
if n.ThenConstraint != nil {
children = append(children, n.ThenConstraint)
}
if n.ElseConstraint != nil {
children = append(children, n.ElseConstraint)
}
return children
}
func (n *ForeachConstraintNode) Children() []Node {
if n.Body != nil {
return []Node{n.Body}
}
return []Node{}
}
func (n *DisableSoftNode) Children() []Node {
if n.Constraint != nil {
return []Node{n.Constraint}
}
return []Node{}
}
func (n *InsideExprNode) Children() []Node {
children := make([]Node, 0, 1+len(n.RangeList))
if n.Expression != nil {
children = append(children, n.Expression)
}
children = append(children, n.RangeList...)
return children
}
func (n *DistExprNode) Children() []Node {
children := make([]Node, 0, 1+len(n.Items))
if n.Expression != nil {
children = append(children, n.Expression)
}
children = append(children, n.Items...)
return children
}
func (n *RangeExprNode) Children() []Node {
children := make([]Node, 0, 2)
if n.Start != nil {
children = append(children, n.Start)
}
if n.End != nil {
children = append(children, n.End)
}
return children
}
func (n *DistItemNode) Children() []Node {
children := make([]Node, 0, 2)
if n.Value != nil {
children = append(children, n.Value)
}
if n.Weight != nil {
children = append(children, n.Weight)
}
return children
}
func (n *IdentifierNode) Children() []Node {
return []Node{}
}
func (n *NumberNode) Children() []Node {
return []Node{}
}
func (n *FunctionCallNode) Children() []Node {
return n.Arguments
}
func (n *ArrayAccessNode) Children() []Node {
children := make([]Node, 0, 2)
if n.Array != nil {
children = append(children, n.Array)
}
if n.Index != nil {
children = append(children, n.Index)
}
return children
}
func (n *ArrayLiteralNode) Children() []Node {
return n.Elements
}
func (n *SystemFunctionCallNode) Children() []Node {
return n.Arguments
}
package parser
import (
"fmt"
)
// ConstraintParser handles parsing of SystemVerilog constraint expressions
type ConstraintParser struct {
lexer *Lexer
current int
tokens []Token
errorList []error
}
// NewConstraintParser creates a new constraint parser
func NewConstraintParser(lexer *Lexer) *ConstraintParser {
return &ConstraintParser{
lexer: lexer,
current: 0,
tokens: make([]Token, 0),
errorList: make([]error, 0),
}
}
// ParseConstraintBody parses the body of a constraint block
func (p *ConstraintParser) ParseConstraintBody(tokens []Token, startIdx int) ([]Node, int, error) {
p.tokens = tokens
p.current = startIdx
p.errorList = make([]error, 0)
body := make([]Node, 0)
// Parse constraint items until we hit }
for p.current < len(p.tokens) && p.currentToken().Type != TokenRightBrace {
item := p.parseConstraintItem()
if item != nil {
body = append(body, item)
}
// Skip semicolon if present
if p.currentToken().Type == TokenSemicolon {
p.advance()
}
}
return body, p.current, nil
}
// parseConstraintItem parses a single constraint item
func (p *ConstraintParser) parseConstraintItem() Node {
// Handle solve...before
if p.currentToken().Type == TokenSolve {
return p.parseSolveBeforeConstraint()
}
// Handle if-else constraints
if p.currentToken().Type == TokenIf {
return p.parseIfConstraint()
}
// Handle foreach constraints
if p.currentToken().Type == TokenForeach {
return p.parseForeachConstraint()
}
// Handle disable soft
if p.currentToken().Type == TokenDisable && p.peekToken().Type == TokenSoft {
return p.parseDisableSoftConstraint()
}
// Otherwise parse as expression constraint
return p.parseExpressionConstraint()
}
// parseSolveBeforeConstraint parses solve...before statements
func (p *ConstraintParser) parseSolveBeforeConstraint() Node {
node := &SolveBeforeNode{
BaseNode: BaseNode{
NodeType: NodeTypeSolveBefore,
Pos: Range{
Start: p.currentToken().Position,
},
},
}
p.advance() // skip 'solve'
// Parse solve list (variables to solve first)
node.SolveList = p.parseIdentifierList()
// Expect 'before'
if p.currentToken().Type != TokenBefore {
p.addError("expected 'before' in solve constraint")
return nil
}
p.advance()
// Parse before list (variables to solve after)
node.BeforeList = p.parseIdentifierList()
node.Pos.End = p.previousToken().Position
return node
}
// parseIfConstraint parses if-else constraint expressions
func (p *ConstraintParser) parseIfConstraint() Node {
node := &IfConstraintNode{
BaseNode: BaseNode{
NodeType: NodeTypeIfConstraint,
Pos: Range{
Start: p.currentToken().Position,
},
},
}
p.advance() // skip 'if'
// Expect (
if p.currentToken().Type != TokenLeftParen {
p.addError("expected '(' after 'if'")
return nil
}
p.advance()
// Parse condition
node.Condition = p.parseExpression()
// Expect )
if p.currentToken().Type != TokenRightParen {
p.addError("expected ')' after condition")
return nil
}
p.advance()
// Parse then constraint
node.ThenConstraint = p.parseConstraintItem()
// Check for semicolon before else (handle both syntaxes)
if p.currentToken().Type == TokenSemicolon && p.peekToken().Type == TokenElse {
p.advance() // skip semicolon
}
// Check for else
if p.currentToken().Type == TokenElse {
p.advance()
node.ElseConstraint = p.parseConstraintItem()
}
node.Pos.End = p.previousToken().Position
return node
}
// parseForeachConstraint parses foreach constraint loops
func (p *ConstraintParser) parseForeachConstraint() Node {
node := &ForeachConstraintNode{
BaseNode: BaseNode{
NodeType: NodeTypeForeachConstraint,
Pos: Range{
Start: p.currentToken().Position,
},
},
}
p.advance() // skip 'foreach'
// Expect (
if p.currentToken().Type != TokenLeftParen {
p.addError("expected '(' after 'foreach'")
return nil
}
p.advance()
// Parse array name
if p.currentToken().Type != TokenIdentifier {
p.addError("expected array identifier in foreach")
return nil
}
node.ArrayName = p.currentToken().Text
p.advance()
// Parse loop variables [i], [i,j], etc.
if p.currentToken().Type == TokenLeftBracket {
p.advance()
node.LoopVars = p.parseIdentifierList()
if p.currentToken().Type != TokenRightBracket {
p.addError("expected ']' after loop variables")
return nil
}
p.advance()
}
// Expect )
if p.currentToken().Type != TokenRightParen {
p.addError("expected ')' after foreach specification")
return nil
}
p.advance()
// Parse body constraint
node.Body = p.parseConstraintItem()
node.Pos.End = p.previousToken().Position
return node
}
// parseDisableSoftConstraint parses disable soft constraints
func (p *ConstraintParser) parseDisableSoftConstraint() Node {
node := &DisableSoftNode{
BaseNode: BaseNode{
NodeType: NodeTypeDisableSoft,
Pos: Range{
Start: p.currentToken().Position,
},
},
}
p.advance() // skip 'disable'
p.advance() // skip 'soft'
// Parse the constraint expression to disable
node.Constraint = p.parseExpression()
node.Pos.End = p.previousToken().Position
return node
}
// parseExpressionConstraint parses general expression constraints
func (p *ConstraintParser) parseExpressionConstraint() Node {
// This handles dist, inside, and general expressions
expr := p.parseConstraintExpression()
if expr == nil {
// Skip to next semicolon or closing brace
for p.current < len(p.tokens) &&
p.currentToken().Type != TokenSemicolon &&
p.currentToken().Type != TokenRightBrace {
p.advance()
}
}
return expr
}
// parseConstraintExpression parses constraint-specific expressions
func (p *ConstraintParser) parseConstraintExpression() Node {
return p.parseImplicationExpression()
}
// Helper function to create binary operator nodes with nil checks
func (p *ConstraintParser) createBinaryOp(left Node, op Token, right Node) *BinaryOpNode {
startPos := op.Position
endPos := op.Position
if left != nil {
startPos = left.Range().Start
}
if right != nil {
endPos = right.Range().End
}
return &BinaryOpNode{
BaseNode: BaseNode{
NodeType: NodeTypeBinaryOp,
Pos: Range{
Start: startPos,
End: endPos,
},
},
Operator: op.Text,
Left: left,
Right: right,
}
}
// parseImplicationExpression handles -> operator
func (p *ConstraintParser) parseImplicationExpression() Node {
left := p.parseOrExpression()
if p.currentToken().Type == TokenImplication {
op := p.currentToken()
p.advance()
right := p.parseImplicationExpression()
return p.createBinaryOp(left, op, right)
}
return left
}
// parseOrExpression handles || operator
func (p *ConstraintParser) parseOrExpression() Node {
left := p.parseAndExpression()
for p.currentToken().Type == TokenLogicalOr {
op := p.currentToken()
p.advance()
right := p.parseAndExpression()
left = p.createBinaryOp(left, op, right)
}
return left
}
// parseAndExpression handles && operator
func (p *ConstraintParser) parseAndExpression() Node {
left := p.parseRelationalExpression()
for p.currentToken().Type == TokenLogicalAnd {
op := p.currentToken()
p.advance()
right := p.parseRelationalExpression()
left = p.createBinaryOp(left, op, right)
}
return left
}
// parseRelationalExpression handles ==, !=, <, >, <=, >=
func (p *ConstraintParser) parseRelationalExpression() Node {
left := p.parseInsideExpression()
for {
switch p.currentToken().Type {
case TokenEqual, TokenNotEqual, TokenLess, TokenGreater,
TokenLessEqual, TokenGreaterEqual:
op := p.currentToken()
p.advance()
right := p.parseInsideExpression()
left = p.createBinaryOp(left, op, right)
default:
return left
}
}
}
// parseInsideExpression handles 'inside' operator
func (p *ConstraintParser) parseInsideExpression() Node {
left := p.parseDistExpression()
if p.currentToken().Type == TokenInside {
node := &InsideExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeInsideExpr,
Pos: Range{
Start: left.Range().Start,
},
},
Expression: left,
}
p.advance() // skip 'inside'
// Expect {
if p.currentToken().Type != TokenLeftBrace {
p.addError("expected '{' after 'inside'")
return left
}
p.advance()
// Parse range list
node.RangeList = p.parseInsideRangeList()
// Expect }
if p.currentToken().Type != TokenRightBrace {
p.addError("expected '}' after inside range list")
return node
}
p.advance()
node.Pos.End = p.previousToken().Position
return node
}
return left
}
// parseDistExpression handles 'dist' operator
func (p *ConstraintParser) parseDistExpression() Node {
left := p.parseAdditiveExpression()
if p.currentToken().Type == TokenDist {
node := &DistExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeDistExpr,
Pos: Range{
Start: left.Range().Start,
},
},
Expression: left,
}
p.advance() // skip 'dist'
// Expect {
if p.currentToken().Type != TokenLeftBrace {
p.addError("expected '{' after 'dist'")
return left
}
p.advance()
// Parse distribution items
node.Items = p.parseDistItems()
// Expect }
if p.currentToken().Type != TokenRightBrace {
p.addError("expected '}' after distribution items")
return node
}
p.advance()
node.Pos.End = p.previousToken().Position
return node
}
return left
}
// parseAdditiveExpression handles +, - operators
func (p *ConstraintParser) parseAdditiveExpression() Node {
left := p.parseMultiplicativeExpression()
for {
switch p.currentToken().Type {
case TokenPlus, TokenMinus:
op := p.currentToken()
p.advance()
right := p.parseMultiplicativeExpression()
left = p.createBinaryOp(left, op, right)
default:
return left
}
}
}
// parseMultiplicativeExpression handles *, /, % operators
func (p *ConstraintParser) parseMultiplicativeExpression() Node {
left := p.parseUnaryExpression()
for {
switch p.currentToken().Type {
case TokenStar, TokenSlash, TokenPercent:
op := p.currentToken()
p.advance()
right := p.parseUnaryExpression()
left = p.createBinaryOp(left, op, right)
default:
return left
}
}
}
// parseUnaryExpression handles !, ~, +, - unary operators
func (p *ConstraintParser) parseUnaryExpression() Node {
switch p.currentToken().Type {
case TokenLogicalNot, TokenBitwiseNot, TokenPlus, TokenMinus:
op := p.currentToken()
p.advance()
expr := p.parseUnaryExpression()
return &UnaryOpNode{
BaseNode: BaseNode{
NodeType: NodeTypeUnaryOp,
Pos: Range{
Start: op.Position,
End: expr.Range().End,
},
},
Operator: op.Text,
Expr: expr,
}
}
return p.parsePrimaryExpression()
}
// parsePrimaryExpression handles primary expressions
func (p *ConstraintParser) parsePrimaryExpression() Node {
switch p.currentToken().Type {
case TokenIdentifier:
return p.parseIdentifierOrCall()
case TokenNumber:
return p.parseNumber()
case TokenLeftParen:
return p.parseParenExpression()
case TokenLeftBrace:
return p.parseArrayLiteral()
case TokenDollar:
return p.parseSystemFunction()
default:
p.addError(fmt.Sprintf("unexpected token in expression: %v", p.currentToken().Type))
p.advance()
return nil
}
}
// parseExpression is the main expression parser entry point
func (p *ConstraintParser) parseExpression() Node {
return p.parseConstraintExpression()
}
// Helper methods
func (p *ConstraintParser) currentToken() Token {
if p.current >= len(p.tokens) {
return Token{Type: TokenEOF}
}
return p.tokens[p.current]
}
func (p *ConstraintParser) previousToken() Token {
if p.current > 0 && p.current-1 < len(p.tokens) {
return p.tokens[p.current-1]
}
return Token{Type: TokenEOF}
}
func (p *ConstraintParser) peekToken() Token {
if p.current+1 >= len(p.tokens) {
return Token{Type: TokenEOF}
}
return p.tokens[p.current+1]
}
func (p *ConstraintParser) advance() {
if p.current < len(p.tokens) {
p.current++
}
}
func (p *ConstraintParser) addError(msg string) {
p.errorList = append(p.errorList, fmt.Errorf("%s at %v", msg, p.currentToken().Position))
}
// parseIdentifierList parses comma-separated identifiers
func (p *ConstraintParser) parseIdentifierList() []string {
list := make([]string, 0)
if p.currentToken().Type == TokenIdentifier {
list = append(list, p.currentToken().Text)
p.advance()
for p.currentToken().Type == TokenComma {
p.advance()
if p.currentToken().Type == TokenIdentifier {
list = append(list, p.currentToken().Text)
p.advance()
}
}
}
return list
}
// parseInsideRangeList parses the range list for inside operator
func (p *ConstraintParser) parseInsideRangeList() []Node {
ranges := make([]Node, 0)
for p.currentToken().Type != TokenRightBrace && p.currentToken().Type != TokenEOF {
// Check for range syntax [start:end]
if p.currentToken().Type == TokenLeftBracket {
p.advance() // skip [
start := p.parseExpression()
if p.currentToken().Type == TokenColon {
p.advance() // skip :
end := p.parseExpression()
if p.currentToken().Type == TokenRightBracket {
p.advance() // skip ]
}
startPos := Position{}
endPos := Position{}
if start != nil {
startPos = start.Range().Start
}
if end != nil {
endPos = end.Range().End
}
ranges = append(ranges, &RangeExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeRangeExpr,
Pos: Range{
Start: startPos,
End: endPos,
},
},
Start: start,
End: end,
})
} else {
// Error: expected : in range
p.addError("expected ':' in range expression")
}
} else {
// Parse value or range
start := p.parseExpression()
if p.currentToken().Type == TokenColon {
// It's a range without brackets
p.advance() // skip :
end := p.parseExpression()
startPos := Position{}
endPos := Position{}
if start != nil {
startPos = start.Range().Start
}
if end != nil {
endPos = end.Range().End
}
ranges = append(ranges, &RangeExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeRangeExpr,
Pos: Range{
Start: startPos,
End: endPos,
},
},
Start: start,
End: end,
})
} else {
// Single value
ranges = append(ranges, start)
}
}
// Check for comma
if p.currentToken().Type == TokenComma {
p.advance()
} else if p.currentToken().Type != TokenRightBrace {
break
}
}
return ranges
}
// parseDistItems parses distribution items for dist operator
func (p *ConstraintParser) parseDistItems() []Node {
items := make([]Node, 0)
for p.currentToken().Type != TokenRightBrace && p.currentToken().Type != TokenEOF {
var value Node
// Check for range syntax [start:end]
if p.currentToken().Type == TokenLeftBracket {
p.advance() // skip [
start := p.parseExpression()
if p.currentToken().Type == TokenColon {
p.advance() // skip :
end := p.parseExpression()
if p.currentToken().Type == TokenRightBracket {
p.advance() // skip ]
}
startPos := Position{}
endPos := Position{}
if start != nil {
startPos = start.Range().Start
}
if end != nil {
endPos = end.Range().End
}
value = &RangeExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeRangeExpr,
Pos: Range{
Start: startPos,
End: endPos,
},
},
Start: start,
End: end,
}
} else {
// Error: expected : in range
p.addError("expected ':' in range expression")
value = start
}
} else {
// Parse single value
value = p.parseExpression()
}
// Check for := or :/
var weight Node
var weightType string
if p.currentToken().Type == TokenColonEqual {
weightType = ":="
p.advance()
weight = p.parseExpression()
} else if p.currentToken().Type == TokenColonSlash {
weightType = ":/"
p.advance()
weight = p.parseExpression()
}
startPos := Position{}
endPos := Position{}
if value != nil {
startPos = value.Range().Start
endPos = value.Range().End
}
item := &DistItemNode{
BaseNode: BaseNode{
NodeType: NodeTypeDistItem,
Pos: Range{
Start: startPos,
End: endPos,
},
},
Value: value,
Weight: weight,
WeightType: weightType,
}
if weight != nil {
item.Pos.End = weight.Range().End
}
items = append(items, item)
// Check for comma
if p.currentToken().Type == TokenComma {
p.advance()
} else if p.currentToken().Type != TokenRightBrace {
break
}
}
return items
}
// parseIdentifierOrCall parses identifier or function call
func (p *ConstraintParser) parseIdentifierOrCall() Node {
name := p.currentToken().Text
pos := p.currentToken().Position
p.advance()
// Check for function call
if p.currentToken().Type == TokenLeftParen {
p.advance()
args := make([]Node, 0)
for p.currentToken().Type != TokenRightParen && p.currentToken().Type != TokenEOF {
args = append(args, p.parseExpression())
if p.currentToken().Type == TokenComma {
p.advance()
} else if p.currentToken().Type != TokenRightParen {
break
}
}
if p.currentToken().Type == TokenRightParen {
p.advance()
}
return &FunctionCallNode{
BaseNode: BaseNode{
NodeType: NodeTypeFunctionCall,
Pos: Range{
Start: pos,
End: p.previousToken().Position,
},
},
Name: name,
Arguments: args,
}
}
// Check for array access
if p.currentToken().Type == TokenLeftBracket {
var node Node = &IdentifierNode{
BaseNode: BaseNode{
NodeType: NodeTypeIdentifier,
Pos: Range{
Start: pos,
End: pos,
},
},
Name: name,
}
for p.currentToken().Type == TokenLeftBracket {
p.advance()
index := p.parseExpression()
if p.currentToken().Type == TokenRightBracket {
p.advance()
}
node = &ArrayAccessNode{
BaseNode: BaseNode{
NodeType: NodeTypeArrayAccess,
Pos: Range{
Start: node.Range().Start,
End: p.previousToken().Position,
},
},
Array: node,
Index: index,
}
}
return node
}
// Just an identifier
return &IdentifierNode{
BaseNode: BaseNode{
NodeType: NodeTypeIdentifier,
Pos: Range{
Start: pos,
End: pos,
},
},
Name: name,
}
}
// parseNumber parses numeric literals
func (p *ConstraintParser) parseNumber() Node {
node := &NumberNode{
BaseNode: BaseNode{
NodeType: NodeTypeNumber,
Pos: Range{
Start: p.currentToken().Position,
End: p.currentToken().Position,
},
},
Value: p.currentToken().Text,
}
p.advance()
return node
}
// parseParenExpression parses parenthesized expressions
func (p *ConstraintParser) parseParenExpression() Node {
p.advance() // skip (
expr := p.parseExpression()
if p.currentToken().Type == TokenRightParen {
p.advance()
}
return expr
}
// parseArrayLiteral parses array literals like '{1, 2, 3}
func (p *ConstraintParser) parseArrayLiteral() Node {
node := &ArrayLiteralNode{
BaseNode: BaseNode{
NodeType: NodeTypeArrayLiteral,
Pos: Range{
Start: p.currentToken().Position,
},
},
}
p.advance() // skip '{'
// Handle concatenation operator '{
if p.currentToken().Type == TokenLeftBrace {
// This is a concatenation, not array literal
return p.parseConcatenation()
}
// Parse elements
for p.currentToken().Type != TokenRightBrace && p.currentToken().Type != TokenEOF {
node.Elements = append(node.Elements, p.parseExpression())
if p.currentToken().Type == TokenComma {
p.advance()
} else if p.currentToken().Type != TokenRightBrace {
break
}
}
if p.currentToken().Type == TokenRightBrace {
node.Pos.End = p.currentToken().Position
p.advance()
}
return node
}
// parseConcatenation parses concatenation expressions
func (p *ConstraintParser) parseConcatenation() Node {
// TODO: Implement concatenation parsing
return nil
}
// parseSystemFunction parses $random, $urandom, etc.
func (p *ConstraintParser) parseSystemFunction() Node {
start := p.currentToken().Position
p.advance() // skip $
if p.currentToken().Type != TokenIdentifier {
p.addError("expected system function name after $")
return nil
}
name := "$" + p.currentToken().Text
p.advance()
// Check for function call
var args []Node
if p.currentToken().Type == TokenLeftParen {
p.advance()
for p.currentToken().Type != TokenRightParen && p.currentToken().Type != TokenEOF {
args = append(args, p.parseExpression())
if p.currentToken().Type == TokenComma {
p.advance()
} else if p.currentToken().Type != TokenRightParen {
break
}
}
if p.currentToken().Type == TokenRightParen {
p.advance()
}
}
return &SystemFunctionCallNode{
BaseNode: BaseNode{
NodeType: NodeTypeSystemFunctionCall,
Pos: Range{
Start: start,
End: p.previousToken().Position,
},
},
Name: name,
Arguments: args,
}
}
package parser
import (
"strings"
"unicode"
)
// TokenType represents the type of a token
type TokenType int
const (
// Special tokens
TokenEOF TokenType = iota
TokenError
TokenWhitespace
TokenComment
// Literals
TokenIdentifier
TokenNumber
TokenString
// Keywords
TokenModule
TokenEndModule
TokenInterface
TokenEndInterface
TokenClass
TokenEndClass
TokenFunction
TokenEndFunction
TokenTask
TokenEndTask
TokenAlways
TokenAlwaysComb
TokenAlwaysFF
TokenAlwaysLatch
TokenInitial
TokenBegin
TokenEnd
TokenIf
TokenElse
TokenCase
TokenEndCase
TokenFor
TokenWhile
TokenGenerate
TokenEndGenerate
TokenGenvar
TokenParameter
TokenLocalparam
TokenInput
TokenOutput
TokenInout
TokenWire
TokenReg
TokenLogic
TokenBit
TokenByte
TokenInt
TokenInteger
TokenReal
TokenTime
TokenRand
TokenRandc
TokenVirtual
TokenExtends
TokenImplements
TokenImport
TokenExport
TokenReturn
TokenBreak
TokenContinue
TokenAssign
// Operators
TokenPlus
TokenMinus
TokenStar
TokenSlash
TokenPercent
TokenEqual
TokenNotEqual
TokenLess
TokenLessEqual
TokenGreater
TokenGreaterEqual
TokenLogicalAnd
TokenLogicalOr
TokenBitwiseAnd
TokenBitwiseOr
TokenBitwiseXor
TokenBitwiseNot
TokenLogicalNot
TokenLeftShift
TokenRightShift
TokenAssignOp
TokenNonBlockingAssign
TokenDoubleEqual // == (alias for TokenEqual)
TokenBang // ! (alias for TokenLogicalNot)
TokenTilde // ~ (alias for TokenBitwiseNot)
TokenDoubleBar // || (alias for TokenLogicalOr)
TokenDoubleAmpersand // && (alias for TokenLogicalAnd)
TokenArrow // -> (alias for TokenImplication)
TokenColonEqual // :=
TokenColonSlash // :/
// Delimiters
TokenSemicolon
TokenComma
TokenDot
TokenColon
TokenDoubleColon
TokenHash
TokenDoubleHash
TokenAt
TokenDollar
TokenLeftParen
TokenRightParen
TokenLeftBracket
TokenRightBracket
TokenLeftBrace
TokenRightBrace
// Edge sensitivity
TokenPosedge
TokenNegedge
// Special keywords
TokenCasex
TokenCasez
// SystemVerilog Assertions (SVA) keywords
TokenAssert
TokenAssume
TokenCover
TokenRestrict
TokenProperty
TokenEndProperty
TokenSequence
TokenEndSequence
TokenExpect
TokenDisable
TokenIff
TokenImplication // ->
TokenNonOverlapImplication // |->
TokenOverlapImplication // |=>
TokenThroughout
TokenWithin
TokenMatched
TokenFirstMatch
TokenIntersect
TokenAnd
TokenOr
TokenNot
TokenUntil
TokenSRand
TokenEventually
TokenNexttime
TokenSNexttime
TokenAlways_s
TokenUntilWith
TokenImplies
TokenAcceptOn
TokenRejectOn
TokenSyncAcceptOn
TokenSyncRejectOn
TokenStrong
TokenWeak
// Constraint keywords
TokenConstraint
TokenSolve
TokenBefore
TokenDist
TokenInside
TokenWith
TokenForeach
TokenUnique
TokenSoft
TokenPure
TokenContext
TokenIllegalBins
TokenIgnoreBins
TokenDefaultSequence
// Coverage keywords
TokenCovergroup
TokenEndgroup
TokenCoverpoint
TokenCross
TokenBins
TokenWildcard
TokenOption
TokenTypeOption
TokenIffCover
TokenBinsof
TokenIntersectCover
// Randomization keywords
TokenRandomize
TokenPreRandomize
TokenPostRandomize
TokenStdRandomize
// Other missing SystemVerilog keywords
TokenModport
TokenClocking
TokenEndClocking
TokenDefault
TokenProgram
TokenEndProgram
TokenPackage
TokenEndPackage
TokenTypedef
TokenEnum
TokenStruct
TokenUnion
TokenTagged
TokenPacked
TokenChecker
TokenEndChecker
TokenLet
TokenBind
TokenAlias
TokenJoin
TokenJoinAny
TokenJoinNone
TokenFork
TokenWait
TokenWaitOrder
TokenTriggered
TokenRandcase
TokenRandsequence
TokenPriority
TokenUnique0
TokenPriorityIf
TokenUniqueIf
TokenMatchesOp // matches operator
TokenDistOp // dist operator
TokenInsideOp // inside operator
TokenGlobal
TokenStatic
TokenAutomatic
TokenConst
TokenVar
TokenNew
TokenNull
TokenThis
TokenSuper
TokenLocal
TokenProtected
TokenPulse
TokenEdge
TokenRef
)
// Token represents a lexical token
type Token struct {
Type TokenType
Text string
Position Position
}
// Lexer performs lexical analysis on SystemVerilog source code
type Lexer struct {
input []byte
position int
line int
column int
tokens []Token
}
// NewLexer creates a new lexer for the given input
func NewLexer(input string) *Lexer {
return &Lexer{
input: []byte(input),
line: 1,
column: 1,
}
}
// NextToken returns the next token from the input
func (l *Lexer) NextToken() Token {
l.skipWhitespace()
if l.position >= len(l.input) {
return Token{Type: TokenEOF, Position: l.currentPosition()}
}
// Check for comments
if token := l.scanComment(); token.Type != TokenError {
return token
}
// Check for keywords and identifiers
if l.isLetter(l.current()) || l.current() == '_' {
return l.scanIdentifierOrKeyword()
}
// Check for numbers
if l.isDigit(l.current()) {
return l.scanNumber()
}
// Check for strings
if l.current() == '"' {
return l.scanString()
}
// Check for operators and delimiters
return l.scanOperatorOrDelimiter()
}
// Tokenize tokenizes the entire input and returns all tokens
func (l *Lexer) Tokenize() []Token {
var tokens []Token
for {
token := l.NextToken()
if token.Type == TokenEOF {
tokens = append(tokens, token)
break
}
// Skip whitespace and comments in tokenize mode
if token.Type != TokenWhitespace && token.Type != TokenComment {
tokens = append(tokens, token)
}
}
return tokens
}
func (l *Lexer) current() byte {
if l.position >= len(l.input) {
return 0
}
return l.input[l.position]
}
func (l *Lexer) peek(offset int) byte {
pos := l.position + offset
if pos >= len(l.input) {
return 0
}
return l.input[pos]
}
func (l *Lexer) advance() {
if l.position < len(l.input) {
if l.input[l.position] == '\n' {
l.line++
l.column = 1
} else {
l.column++
}
l.position++
}
}
func (l *Lexer) currentPosition() Position {
return Position{
Line: l.line,
Column: l.column,
Offset: l.position,
}
}
func (l *Lexer) skipWhitespace() {
for l.position < len(l.input) && unicode.IsSpace(rune(l.current())) {
l.advance()
}
}
func (l *Lexer) scanComment() Token {
start := l.currentPosition()
// Single-line comment
if l.current() == '/' && l.peek(1) == '/' {
l.advance() // /
l.advance() // /
for l.current() != '\n' && l.position < len(l.input) {
l.advance()
}
return Token{
Type: TokenComment,
Text: string(l.input[start.Offset:l.position]),
Position: start,
}
}
// Multi-line comment
if l.current() == '/' && l.peek(1) == '*' {
l.advance() // /
l.advance() // *
for l.position < len(l.input)-1 {
if l.current() == '*' && l.peek(1) == '/' {
l.advance() // *
l.advance() // /
break
}
l.advance()
}
return Token{
Type: TokenComment,
Text: string(l.input[start.Offset:l.position]),
Position: start,
}
}
return Token{Type: TokenError}
}
func (l *Lexer) scanIdentifierOrKeyword() Token {
start := l.currentPosition()
startPos := l.position
for l.isLetter(l.current()) || l.isDigit(l.current()) || l.current() == '_' {
l.advance()
}
text := string(l.input[startPos:l.position])
tokenType := l.getKeywordType(text)
return Token{
Type: tokenType,
Text: text,
Position: start,
}
}
func (l *Lexer) scanNumber() Token {
start := l.currentPosition()
startPos := l.position
// Handle different number formats
// First scan the size part (before quote)
for l.isDigit(l.current()) || l.current() == '_' {
l.advance()
}
// Check for sized literal format like 4'b1010
if l.current() == '\'' {
l.advance() // Skip quote
if l.current() == 'b' || l.current() == 'h' || l.current() == 'd' || l.current() == 'o' {
l.advance() // Skip base indicator
for l.isHexDigit(l.current()) || l.current() == '_' {
l.advance()
}
}
} else {
// Handle decimal point for real numbers
if l.current() == '.' && l.isDigit(l.peek(1)) {
l.advance()
for l.isDigit(l.current()) || l.current() == '_' {
l.advance()
}
}
}
return Token{
Type: TokenNumber,
Text: string(l.input[startPos:l.position]),
Position: start,
}
}
func (l *Lexer) scanString() Token {
start := l.currentPosition()
startPos := l.position
l.advance() // Skip opening quote
for l.current() != '"' && l.position < len(l.input) {
if l.current() == '\\' {
l.advance() // Skip escape character
}
l.advance()
}
if l.current() == '"' {
l.advance() // Skip closing quote
}
return Token{
Type: TokenString,
Text: string(l.input[startPos:l.position]),
Position: start,
}
}
func (l *Lexer) scanOperatorOrDelimiter() Token {
start := l.currentPosition()
switch l.current() {
case '+':
l.advance()
return Token{Type: TokenPlus, Text: "+", Position: start}
case '-':
l.advance()
if l.current() == '>' {
l.advance()
return Token{Type: TokenImplication, Text: "->", Position: start}
}
return Token{Type: TokenMinus, Text: "-", Position: start}
case '*':
l.advance()
return Token{Type: TokenStar, Text: "*", Position: start}
case '/':
l.advance()
return Token{Type: TokenSlash, Text: "/", Position: start}
case '%':
l.advance()
return Token{Type: TokenPercent, Text: "%", Position: start}
case '=':
l.advance()
if l.current() == '=' {
l.advance()
return Token{Type: TokenEqual, Text: "==", Position: start}
}
return Token{Type: TokenAssignOp, Text: "=", Position: start}
case '!':
l.advance()
if l.current() == '=' {
l.advance()
return Token{Type: TokenNotEqual, Text: "!=", Position: start}
}
return Token{Type: TokenLogicalNot, Text: "!", Position: start}
case '<':
l.advance()
if l.current() == '=' {
l.advance()
return Token{Type: TokenLessEqual, Text: "<=", Position: start}
} else if l.current() == '<' {
l.advance()
return Token{Type: TokenLeftShift, Text: "<<", Position: start}
}
return Token{Type: TokenLess, Text: "<", Position: start}
case '>':
l.advance()
if l.current() == '=' {
l.advance()
return Token{Type: TokenGreaterEqual, Text: ">=", Position: start}
} else if l.current() == '>' {
l.advance()
return Token{Type: TokenRightShift, Text: ">>", Position: start}
}
return Token{Type: TokenGreater, Text: ">", Position: start}
case '&':
l.advance()
if l.current() == '&' {
l.advance()
return Token{Type: TokenLogicalAnd, Text: "&&", Position: start}
}
return Token{Type: TokenBitwiseAnd, Text: "&", Position: start}
case '|':
l.advance()
if l.current() == '|' {
l.advance()
return Token{Type: TokenLogicalOr, Text: "||", Position: start}
} else if l.current() == '-' && l.peek(1) == '>' {
l.advance() // -
l.advance() // >
return Token{Type: TokenNonOverlapImplication, Text: "|->", Position: start}
} else if l.current() == '=' && l.peek(1) == '>' {
l.advance() // =
l.advance() // >
return Token{Type: TokenOverlapImplication, Text: "|=>", Position: start}
}
return Token{Type: TokenBitwiseOr, Text: "|", Position: start}
case '^':
l.advance()
return Token{Type: TokenBitwiseXor, Text: "^", Position: start}
case '~':
l.advance()
return Token{Type: TokenBitwiseNot, Text: "~", Position: start}
case ';':
l.advance()
return Token{Type: TokenSemicolon, Text: ";", Position: start}
case ',':
l.advance()
return Token{Type: TokenComma, Text: ",", Position: start}
case '.':
l.advance()
return Token{Type: TokenDot, Text: ".", Position: start}
case ':':
l.advance()
if l.current() == ':' {
l.advance()
return Token{Type: TokenDoubleColon, Text: "::", Position: start}
} else if l.current() == '=' {
l.advance()
return Token{Type: TokenColonEqual, Text: ":=", Position: start}
} else if l.current() == '/' {
l.advance()
return Token{Type: TokenColonSlash, Text: ":/", Position: start}
}
return Token{Type: TokenColon, Text: ":", Position: start}
case '#':
l.advance()
if l.current() == '#' {
l.advance()
return Token{Type: TokenDoubleHash, Text: "##", Position: start}
}
return Token{Type: TokenHash, Text: "#", Position: start}
case '@':
l.advance()
return Token{Type: TokenAt, Text: "@", Position: start}
case '$':
l.advance()
return Token{Type: TokenDollar, Text: "$", Position: start}
case '(':
l.advance()
return Token{Type: TokenLeftParen, Text: "(", Position: start}
case ')':
l.advance()
return Token{Type: TokenRightParen, Text: ")", Position: start}
case '[':
l.advance()
return Token{Type: TokenLeftBracket, Text: "[", Position: start}
case ']':
l.advance()
return Token{Type: TokenRightBracket, Text: "]", Position: start}
case '{':
l.advance()
return Token{Type: TokenLeftBrace, Text: "{", Position: start}
case '}':
l.advance()
return Token{Type: TokenRightBrace, Text: "}", Position: start}
default:
l.advance()
return Token{Type: TokenError, Text: string(l.input[start.Offset:l.position]), Position: start}
}
}
func (l *Lexer) isLetter(ch byte) bool {
return (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z')
}
func (l *Lexer) isDigit(ch byte) bool {
return ch >= '0' && ch <= '9'
}
func (l *Lexer) isHexDigit(ch byte) bool {
return l.isDigit(ch) || (ch >= 'a' && ch <= 'f') || (ch >= 'A' && ch <= 'F')
}
func (l *Lexer) getKeywordType(text string) TokenType {
keywords := map[string]TokenType{
"module": TokenModule,
"endmodule": TokenEndModule,
"interface": TokenInterface,
"endinterface": TokenEndInterface,
"class": TokenClass,
"endclass": TokenEndClass,
"function": TokenFunction,
"endfunction": TokenEndFunction,
"task": TokenTask,
"endtask": TokenEndTask,
"always": TokenAlways,
"always_comb": TokenAlwaysComb,
"always_ff": TokenAlwaysFF,
"always_latch": TokenAlwaysLatch,
"initial": TokenInitial,
"begin": TokenBegin,
"end": TokenEnd,
"if": TokenIf,
"else": TokenElse,
"case": TokenCase,
"endcase": TokenEndCase,
"for": TokenFor,
"while": TokenWhile,
"generate": TokenGenerate,
"endgenerate": TokenEndGenerate,
"genvar": TokenGenvar,
"parameter": TokenParameter,
"localparam": TokenLocalparam,
"input": TokenInput,
"output": TokenOutput,
"inout": TokenInout,
"wire": TokenWire,
"reg": TokenReg,
"logic": TokenLogic,
"bit": TokenBit,
"byte": TokenByte,
"int": TokenInt,
"integer": TokenInteger,
"real": TokenReal,
"time": TokenTime,
"rand": TokenRand,
"randc": TokenRandc,
"virtual": TokenVirtual,
"extends": TokenExtends,
"implements": TokenImplements,
"import": TokenImport,
"export": TokenExport,
"return": TokenReturn,
"break": TokenBreak,
"continue": TokenContinue,
"assign": TokenAssign,
"posedge": TokenPosedge,
"negedge": TokenNegedge,
"casex": TokenCasex,
"casez": TokenCasez,
// SystemVerilog Assertions (SVA) keywords
"assert": TokenAssert,
"assume": TokenAssume,
"cover": TokenCover,
"restrict": TokenRestrict,
"property": TokenProperty,
"endproperty": TokenEndProperty,
"sequence": TokenSequence,
"endsequence": TokenEndSequence,
"expect": TokenExpect,
"disable": TokenDisable,
"iff": TokenIff,
"throughout": TokenThroughout,
"within": TokenWithin,
"matched": TokenMatched,
"first_match": TokenFirstMatch,
"intersect": TokenIntersect,
"and": TokenAnd,
"or": TokenOr,
"not": TokenNot,
"until": TokenUntil,
"s_rand": TokenSRand,
"eventually": TokenEventually,
"nexttime": TokenNexttime,
"s_nexttime": TokenSNexttime,
"s_always": TokenAlways_s,
"s_until_with": TokenUntilWith,
"implies": TokenImplies,
"accept_on": TokenAcceptOn,
"reject_on": TokenRejectOn,
"sync_accept_on": TokenSyncAcceptOn,
"sync_reject_on": TokenSyncRejectOn,
"strong": TokenStrong,
"weak": TokenWeak,
// Constraint keywords
"constraint": TokenConstraint,
"solve": TokenSolve,
"before": TokenBefore,
"dist": TokenDist,
"inside": TokenInside,
"with": TokenWith,
"foreach": TokenForeach,
"unique": TokenUnique,
"soft": TokenSoft,
"pure": TokenPure,
"context": TokenContext,
"illegal_bins": TokenIllegalBins,
"ignore_bins": TokenIgnoreBins,
"default_sequence": TokenDefaultSequence,
// Coverage keywords
"covergroup": TokenCovergroup,
"endgroup": TokenEndgroup,
"coverpoint": TokenCoverpoint,
"cross": TokenCross,
"bins": TokenBins,
"wildcard": TokenWildcard,
"option": TokenOption,
"type_option": TokenTypeOption,
"iff_cover": TokenIffCover,
"binsof": TokenBinsof,
"intersect_cover": TokenIntersectCover,
// Randomization keywords
"randomize": TokenRandomize,
"pre_randomize": TokenPreRandomize,
"post_randomize": TokenPostRandomize,
"std_randomize": TokenStdRandomize,
// Other missing SystemVerilog keywords
"modport": TokenModport,
"clocking": TokenClocking,
"endclocking": TokenEndClocking,
"default": TokenDefault,
"program": TokenProgram,
"endprogram": TokenEndProgram,
"package": TokenPackage,
"endpackage": TokenEndPackage,
"typedef": TokenTypedef,
"enum": TokenEnum,
"struct": TokenStruct,
"union": TokenUnion,
"tagged": TokenTagged,
"packed": TokenPacked,
"checker": TokenChecker,
"endchecker": TokenEndChecker,
"let": TokenLet,
"bind": TokenBind,
"alias": TokenAlias,
"join": TokenJoin,
"join_any": TokenJoinAny,
"join_none": TokenJoinNone,
"fork": TokenFork,
"wait": TokenWait,
"wait_order": TokenWaitOrder,
"triggered": TokenTriggered,
"randcase": TokenRandcase,
"randsequence": TokenRandsequence,
"priority": TokenPriority,
"unique0": TokenUnique0,
"priority_if": TokenPriorityIf,
"unique_if": TokenUniqueIf,
"global": TokenGlobal,
"static": TokenStatic,
"automatic": TokenAutomatic,
"const": TokenConst,
"var": TokenVar,
"new": TokenNew,
"null": TokenNull,
"this": TokenThis,
"super": TokenSuper,
"local": TokenLocal,
"protected": TokenProtected,
"pulse": TokenPulse,
"edge": TokenEdge,
"ref": TokenRef,
}
if tokenType, found := keywords[strings.ToLower(text)]; found {
return tokenType
}
return TokenIdentifier
}
// PeekToken returns the next token without consuming it
func (l *Lexer) PeekToken() Token {
savedPos := l.position
savedLine := l.line
savedColumn := l.column
token := l.NextToken()
l.position = savedPos
l.line = savedLine
l.column = savedColumn
return token
}
// Reset resets the lexer to the beginning of the input
func (l *Lexer) Reset() {
l.position = 0
l.line = 1
l.column = 1
}
package parser
import (
"fmt"
"strings"
)
// Parser performs parsing on SystemVerilog source code
type Parser struct {
lexer *Lexer
tokens []Token
position int
errors []ParseError
}
// NewParser creates a new parser for the given input
func NewParser(input string) *Parser {
lexer := NewLexer(input)
tokens := lexer.Tokenize()
return &Parser{
lexer: lexer,
tokens: tokens,
position: 0,
errors: make([]ParseError, 0),
}
}
// Parse parses the entire input and returns an AST
func (p *Parser) Parse() *AST {
fileNode := &FileNode{
BaseNode: BaseNode{
NodeType: NodeTypeFile,
Pos: Range{
Start: Position{Line: 1, Column: 1, Offset: 0},
End: Position{Line: 1, Column: 1, Offset: 0},
},
},
Items: make([]Node, 0),
}
// Parse multiple top-level constructs
for p.current().Type != TokenEOF {
var item Node
var err error
switch p.current().Type {
case TokenModule:
item, err = p.parseModule()
case TokenInterface:
item, err = p.parseInterface()
case TokenClass:
item, err = p.parseClass()
case TokenProperty:
svaParser := NewSVAParser(p.tokens, p.position)
var propNode *PropertyNode
propNode, p.position = svaParser.ParseProperty()
item = propNode
case TokenSequence:
svaParser := NewSVAParser(p.tokens, p.position)
var seqNode *SequenceNode
seqNode, p.position = svaParser.ParseSequence()
item = seqNode
case TokenAssert, TokenAssume, TokenCover, TokenRestrict, TokenExpect:
svaParser := NewSVAParser(p.tokens, p.position)
var assertNode *AssertionNode
assertNode, p.position = svaParser.ParseAssertion()
item = assertNode
case TokenCovergroup:
item, err = p.parseCovergroup()
case TokenConstraint:
item, err = p.parseConstraint()
default:
// Check for labeled assertions
if p.peek(1).Type == TokenColon &&
(p.peek(2).Type == TokenAssert || p.peek(2).Type == TokenAssume ||
p.peek(2).Type == TokenCover || p.peek(2).Type == TokenRestrict) {
svaParser := NewSVAParser(p.tokens, p.position)
var assertNode *AssertionNode
assertNode, p.position = svaParser.ParseAssertion()
item = assertNode
} else {
err = fmt.Errorf("unexpected token at top level: %s", p.current().Text)
p.advance() // Skip the unexpected token to avoid infinite loop
}
}
if err != nil {
p.addError(err.Error(), p.current().Position)
} else if item != nil {
fileNode.Items = append(fileNode.Items, item)
}
// Skip any remaining tokens if we're stuck
if p.current().Type != TokenEOF && err != nil {
p.advance()
}
}
// Update the file node's end position
if len(fileNode.Items) > 0 {
lastItem := fileNode.Items[len(fileNode.Items)-1]
fileNode.Pos.End = lastItem.Range().End
}
return &AST{
Root: fileNode,
Errors: p.errors,
FileName: "",
}
}
// current returns the current token
func (p *Parser) current() Token {
if p.position >= len(p.tokens) {
return Token{Type: TokenEOF}
}
return p.tokens[p.position]
}
// peek returns a token at the given offset from current position
func (p *Parser) peek(offset int) Token {
pos := p.position + offset
if pos >= len(p.tokens) {
return Token{Type: TokenEOF}
}
return p.tokens[pos]
}
// previous returns the previous token
func (p *Parser) previous() Token {
if p.position > 0 {
return p.tokens[p.position-1]
}
return Token{Type: TokenEOF}
}
// advance moves to the next token
func (p *Parser) advance() {
if p.position < len(p.tokens) {
p.position++
}
}
// expect checks if the current token matches the expected type and advances
func (p *Parser) expect(tokenType TokenType) (Token, error) {
if p.current().Type != tokenType {
return Token{}, fmt.Errorf("expected %d, got %s", tokenType, p.current().Text)
}
token := p.current()
p.advance()
return token, nil
}
// addError adds a parsing error
func (p *Parser) addError(message string, position Position) {
p.errors = append(p.errors, ParseError{
Message: message,
Position: position,
})
}
// parseModule parses a module declaration
func (p *Parser) parseModule() (*ModuleNode, error) {
startPos := p.current().Position
// module keyword
if _, err := p.expect(TokenModule); err != nil {
return nil, err
}
// module name
nameToken, err := p.expect(TokenIdentifier)
if err != nil {
return nil, err
}
module := &ModuleNode{
BaseNode: BaseNode{
NodeType: NodeTypeModule,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
Name: nameToken.Text,
Parameters: make([]*ParameterNode, 0),
Ports: make([]*PortNode, 0),
Items: make([]Node, 0),
}
// Check for parameter list
if p.current().Type == TokenHash {
p.advance() // skip #
if _, err := p.expect(TokenLeftParen); err != nil {
return nil, err
}
module.Parameters, err = p.parseParameterList()
if err != nil {
return nil, err
}
if _, err := p.expect(TokenRightParen); err != nil {
return nil, err
}
}
// Check for port list
if p.current().Type == TokenLeftParen {
p.advance() // skip (
module.Ports, err = p.parsePortList()
if err != nil {
return nil, err
}
if _, err := p.expect(TokenRightParen); err != nil {
return nil, err
}
}
// Semicolon (required)
if p.current().Type == TokenSemicolon {
p.advance()
} else {
return nil, fmt.Errorf("expected semicolon after module declaration at line %d, got %s", p.current().Position.Line, p.current().Text)
}
// Parse module items until endmodule
for p.current().Type != TokenEndModule && p.current().Type != TokenEOF {
// Debug: print current token
// fmt.Printf("parseModule: current token type=%v text=%s\n", p.current().Type, p.current().Text)
item, err := p.parseModuleItem()
if err != nil {
// fmt.Printf("parseModule: parseModuleItem returned error: %v\n", err)
p.addError(err.Error(), p.current().Position)
// Skip to next token to continue parsing
p.advance()
continue
}
if item != nil {
// fmt.Printf("parseModule: adding item type=%v to module\n", item.Type())
module.Items = append(module.Items, item)
} else {
// fmt.Printf("parseModule: parseModuleItem returned nil\n")
}
}
// endmodule
if _, err := p.expect(TokenEndModule); err != nil {
return nil, err
}
module.Pos.End = p.current().Position
return module, nil
}
// parseInterface parses an interface declaration
func (p *Parser) parseInterface() (*InterfaceNode, error) {
startPos := p.current().Position
// interface keyword
if _, err := p.expect(TokenInterface); err != nil {
return nil, err
}
// interface name
nameToken, err := p.expect(TokenIdentifier)
if err != nil {
return nil, err
}
iface := &InterfaceNode{
BaseNode: BaseNode{
NodeType: NodeTypeInterface,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
Name: nameToken.Text,
Parameters: make([]*ParameterNode, 0),
Ports: make([]*PortNode, 0),
Items: make([]Node, 0),
}
// Check for parameter list
if p.current().Type == TokenHash {
p.advance() // skip #
if _, err := p.expect(TokenLeftParen); err != nil {
return nil, err
}
iface.Parameters, err = p.parseParameterList()
if err != nil {
return nil, err
}
if _, err := p.expect(TokenRightParen); err != nil {
return nil, err
}
}
// Check for extends
if p.current().Type == TokenExtends {
p.advance()
extendsToken, err := p.expect(TokenIdentifier)
if err != nil {
return nil, err
}
iface.Extends = extendsToken.Text
}
// Semicolon
if p.current().Type == TokenSemicolon {
p.advance()
}
// Parse interface items until endinterface
for p.current().Type != TokenEndInterface && p.current().Type != TokenEOF {
item, err := p.parseInterfaceItem()
if err != nil {
p.addError(err.Error(), p.current().Position)
// Skip to next token to continue parsing
p.advance()
continue
}
if item != nil {
iface.Items = append(iface.Items, item)
}
}
// endinterface
if _, err := p.expect(TokenEndInterface); err != nil {
return nil, err
}
iface.Pos.End = p.current().Position
return iface, nil
}
// parseClass parses a class declaration (basic implementation)
func (p *Parser) parseClass() (*ClassNode, error) {
startPos := p.current().Position
// class keyword
if _, err := p.expect(TokenClass); err != nil {
return nil, err
}
// class name
nameToken, err := p.expect(TokenIdentifier)
if err != nil {
return nil, err
}
class := &ClassNode{
BaseNode: BaseNode{
NodeType: NodeTypeClass,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
Name: nameToken.Text,
Parameters: make([]*ParameterNode, 0),
Items: make([]Node, 0),
}
// Check for extends
if p.current().Type == TokenExtends {
p.advance()
extendsToken, err := p.expect(TokenIdentifier)
if err != nil {
return nil, err
}
class.Extends = extendsToken.Text
}
// Semicolon
if p.current().Type == TokenSemicolon {
p.advance()
}
// Parse class items until endclass
for p.current().Type != TokenEndClass && p.current().Type != TokenEOF {
item, err := p.parseClassItem()
if err != nil {
p.addError(err.Error(), p.current().Position)
// Skip to next token to continue parsing
p.advance()
continue
}
if item != nil {
class.Items = append(class.Items, item)
}
}
// endclass
if _, err := p.expect(TokenEndClass); err != nil {
return nil, err
}
class.Pos.End = p.current().Position
return class, nil
}
// parseParameterList parses a parameter list
func (p *Parser) parseParameterList() ([]*ParameterNode, error) {
var parameters []*ParameterNode
if p.current().Type == TokenRightParen {
return parameters, nil
}
for {
param, err := p.parseParameter()
if err != nil {
return nil, err
}
parameters = append(parameters, param)
if p.current().Type != TokenComma {
break
}
p.advance() // skip comma
}
return parameters, nil
}
// parseParameter parses a single parameter
func (p *Parser) parseParameter() (*ParameterNode, error) {
startPos := p.current().Position
isLocal := false
// Check if this is a parameter declaration (with parameter/localparam keyword)
hasParameterKeyword := false
if p.current().Type == TokenLocalparam {
isLocal = true
hasParameterKeyword = true
p.advance()
} else if p.current().Type == TokenParameter {
hasParameterKeyword = true
p.advance()
}
// Data type (optional)
dataType := ""
// If this is a module parameter declaration (with parameter keyword),
// the type is optional and defaults to logic
if hasParameterKeyword {
// Check if next token is a data type
if p.isDataType(p.current().Type) {
dataType = p.current().Text
p.advance()
}
} else {
// For function/task parameters, we need to handle type vs name
if p.isDataType(p.current().Type) || p.current().Type == TokenIdentifier {
// Save the token in case it's the name
savedToken := p.current().Text
p.advance()
// Check if there's another identifier (the actual parameter name)
if p.current().Type == TokenIdentifier {
// Previous token was the type
dataType = savedToken
} else {
// Previous token was the name, no explicit type
name := savedToken
return &ParameterNode{
BaseNode: BaseNode{
NodeType: NodeTypeParameter,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
Name: name,
DataType: dataType,
DefaultValue: "",
IsLocal: isLocal,
}, nil
}
}
}
// Parameter name
nameToken, err := p.expect(TokenIdentifier)
if err != nil {
return nil, err
}
// Default value (optional)
defaultValue := ""
if p.current().Type == TokenAssignOp {
p.advance()
// Simple expression parsing - just grab the next token(s)
for p.current().Type != TokenComma && p.current().Type != TokenRightParen && p.current().Type != TokenSemicolon {
defaultValue += p.current().Text
p.advance()
}
}
return &ParameterNode{
BaseNode: BaseNode{
NodeType: NodeTypeParameter,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
Name: nameToken.Text,
DataType: dataType,
DefaultValue: strings.TrimSpace(defaultValue),
IsLocal: isLocal,
}, nil
}
// parsePortList parses a port list
func (p *Parser) parsePortList() ([]*PortNode, error) {
var ports []*PortNode
if p.current().Type == TokenRightParen {
return ports, nil
}
for {
port, err := p.parsePort()
if err != nil {
return nil, err
}
ports = append(ports, port)
if p.current().Type != TokenComma {
break
}
p.advance() // skip comma
}
return ports, nil
}
// parsePort parses a single port
func (p *Parser) parsePort() (*PortNode, error) {
startPos := p.current().Position
// Port direction
direction := PortDirectionInput
switch p.current().Type {
case TokenInput:
direction = PortDirectionInput
p.advance()
case TokenOutput:
direction = PortDirectionOutput
p.advance()
case TokenInout:
direction = PortDirectionInout
p.advance()
default:
// Could be interface port or just signal name
if p.peek(1).Type == TokenDot {
direction = PortDirectionInterface
}
}
// Data type
dataType := ""
if p.isDataType(p.current().Type) {
dataType = p.current().Text
p.advance()
} else if p.current().Type == TokenIdentifier && p.peek(1).Type == TokenIdentifier {
// Custom data type followed by port name
dataType = p.current().Text
p.advance()
}
// Width specification
var width *RangeNode
if p.current().Type == TokenLeftBracket {
var err error
width, err = p.parseRange()
if err != nil {
return nil, err
}
}
// Port name
nameToken, err := p.expect(TokenIdentifier)
if err != nil {
return nil, err
}
return &PortNode{
BaseNode: BaseNode{
NodeType: NodeTypePort,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
Direction: direction,
Name: nameToken.Text,
DataType: dataType,
Width: width,
}, nil
}
// parseRange parses a range specification [high:low]
func (p *Parser) parseRange() (*RangeNode, error) {
startPos := p.current().Position
if _, err := p.expect(TokenLeftBracket); err != nil {
return nil, err
}
// High value
high := ""
for p.current().Type != TokenColon && p.current().Type != TokenRightBracket {
high += p.current().Text
p.advance()
}
low := "0" // default
if p.current().Type == TokenColon {
p.advance()
low = ""
for p.current().Type != TokenRightBracket {
low += p.current().Text
p.advance()
}
}
if _, err := p.expect(TokenRightBracket); err != nil {
return nil, err
}
return &RangeNode{
BaseNode: BaseNode{
NodeType: NodeTypeModule, // TODO: Add proper range node type
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
High: strings.TrimSpace(high),
Low: strings.TrimSpace(low),
}, nil
}
// parseModuleItem parses a module item
func (p *Parser) parseModuleItem() (Node, error) {
switch p.current().Type {
case TokenAlways, TokenAlwaysComb, TokenAlwaysFF, TokenAlwaysLatch:
return p.parseAlways()
case TokenAssign:
return p.parseAssignment()
case TokenParameter, TokenLocalparam:
return p.parseParameter()
case TokenFunction:
return p.parseFunction()
case TokenTask:
return p.parseTask()
case TokenProperty:
svaParser := NewSVAParser(p.tokens, p.position)
propNode, newPos := svaParser.ParseProperty()
p.position = newPos
return propNode, nil
case TokenSequence:
svaParser := NewSVAParser(p.tokens, p.position)
seqNode, newPos := svaParser.ParseSequence()
p.position = newPos
return seqNode, nil
case TokenAssert, TokenAssume, TokenCover, TokenRestrict, TokenExpect:
svaParser := NewSVAParser(p.tokens, p.position)
assertNode, newPos := svaParser.ParseAssertion()
p.position = newPos
return assertNode, nil
case TokenCovergroup:
return p.parseCovergroup()
case TokenConstraint:
return p.parseConstraint()
default:
// Check for labeled assertions
if p.peek(1).Type == TokenColon &&
(p.peek(2).Type == TokenAssert || p.peek(2).Type == TokenAssume ||
p.peek(2).Type == TokenCover || p.peek(2).Type == TokenRestrict) {
svaParser := NewSVAParser(p.tokens, p.position)
assertNode, newPos := svaParser.ParseAssertion()
p.position = newPos
return assertNode, nil
}
// Try to parse as variable declaration or module instantiation
return p.parseVariableOrInstance()
}
}
// parseInterfaceItem parses an interface item
func (p *Parser) parseInterfaceItem() (Node, error) {
switch p.current().Type {
case TokenParameter, TokenLocalparam:
return p.parseParameter()
default:
// Try to parse as variable declaration
return p.parseVariable()
}
}
// parseClassItem parses a class item
func (p *Parser) parseClassItem() (Node, error) {
switch p.current().Type {
case TokenFunction:
return p.parseFunction()
case TokenTask:
return p.parseTask()
case TokenProperty:
svaParser := NewSVAParser(p.tokens, p.position)
propNode, newPos := svaParser.ParseProperty()
p.position = newPos
return propNode, nil
case TokenSequence:
svaParser := NewSVAParser(p.tokens, p.position)
seqNode, newPos := svaParser.ParseSequence()
p.position = newPos
return seqNode, nil
case TokenAssert, TokenAssume, TokenCover, TokenRestrict, TokenExpect:
svaParser := NewSVAParser(p.tokens, p.position)
assertNode, newPos := svaParser.ParseAssertion()
p.position = newPos
return assertNode, nil
case TokenCovergroup:
return p.parseCovergroup()
case TokenConstraint:
return p.parseConstraint()
default:
// Try to parse as variable declaration
return p.parseVariable()
}
}
// parseCovergroup parses a covergroup declaration with full coverage specification support
func (p *Parser) parseCovergroup() (*CovergroupNode, error) {
node := &CovergroupNode{
BaseNode: BaseNode{
NodeType: NodeTypeCovergroup,
Pos: Range{
Start: p.current().Position,
},
},
Coverpoints: make([]*CoverpointNode, 0),
Crosses: make([]*CrossNode, 0),
}
// Skip 'covergroup'
p.advance()
// Parse name
if p.current().Type != TokenIdentifier {
return nil, fmt.Errorf("expected covergroup name")
}
node.Name = p.current().Text
p.advance()
// Parse optional clocking event (@(posedge clk))
// TODO: Implement clocking event parsing
if p.current().Type == TokenAt {
// Skip clocking event for now
p.advance() // @
if p.current().Type == TokenLeftParen {
p.advance() // (
for p.current().Type != TokenRightParen && p.current().Type != TokenEOF {
p.advance()
}
if p.current().Type == TokenRightParen {
p.advance() // )
}
}
}
// Parse optional semicolon
if p.current().Type == TokenSemicolon {
p.advance()
}
// Parse covergroup body
for p.current().Type != TokenEndgroup && p.current().Type != TokenEOF {
switch p.current().Type {
case TokenCoverpoint:
coverpoint, err := p.parseCoverpoint()
if err != nil {
return nil, err
}
node.Coverpoints = append(node.Coverpoints, coverpoint)
case TokenCross:
cross, err := p.parseCross()
if err != nil {
return nil, err
}
node.Crosses = append(node.Crosses, cross)
case TokenIdentifier:
// Check if this is a named coverpoint or cross
if p.peek(1).Type == TokenColon {
if p.peek(2).Type == TokenCoverpoint {
coverpoint, err := p.parseCoverpoint()
if err != nil {
return nil, err
}
node.Coverpoints = append(node.Coverpoints, coverpoint)
} else if p.peek(2).Type == TokenCross {
cross, err := p.parseCross()
if err != nil {
return nil, err
}
node.Crosses = append(node.Crosses, cross)
} else {
p.advance() // Skip unknown identifier
}
} else {
p.advance() // Skip identifier
}
case TokenOption:
// Skip option parsing for now (basic implementation)
p.advance()
default:
p.advance()
}
}
if p.current().Type == TokenEndgroup {
p.advance()
}
node.Pos.End = p.previous().Position
return node, nil
}
// parseCoverpoint parses a coverpoint declaration with bins and options
func (p *Parser) parseCoverpoint() (*CoverpointNode, error) {
node := &CoverpointNode{
BaseNode: BaseNode{
NodeType: NodeTypeCoverpoint,
Pos: Range{
Start: p.current().Position,
},
},
Bins: make([]BinNode, 0),
Options: make(map[string]string),
}
// Skip 'coverpoint'
p.advance()
// Parse optional name (name : coverpoint)
if p.current().Type == TokenIdentifier && p.peek(1).Type == TokenColon {
node.Name = p.current().Text
p.advance() // Skip name
p.advance() // Skip colon
if p.current().Type == TokenCoverpoint {
p.advance() // Skip second 'coverpoint'
}
}
// Parse expression
expressionStart := p.position
for p.current().Type != TokenIff && p.current().Type != TokenLeftBrace &&
p.current().Type != TokenSemicolon && p.current().Type != TokenEOF {
p.advance()
}
if p.position > expressionStart {
// Build expression string from tokens
expr := ""
for i := expressionStart; i < p.position; i++ {
if i > expressionStart {
expr += " "
}
expr += p.tokens[i].Text
}
node.Expression = expr
}
// Parse optional iff condition
if p.current().Type == TokenIff {
p.advance() // Skip 'iff'
for p.current().Type != TokenLeftBrace && p.current().Type != TokenSemicolon && p.current().Type != TokenEOF {
p.advance()
}
// Build iff condition string (simplified for now)
// In a full implementation, this would parse the expression properly
}
// Parse bins and options
hasBraceBlock := false
if p.current().Type == TokenLeftBrace {
hasBraceBlock = true
p.advance() // Skip '{'
for p.current().Type != TokenRightBrace && p.current().Type != TokenEOF {
switch p.current().Type {
case TokenBins, TokenIllegalBins, TokenIgnoreBins:
bin, err := p.parseBin()
if err != nil {
return nil, err
}
node.Bins = append(node.Bins, *bin)
case TokenOption:
// Skip option parsing for now (basic implementation)
p.advance()
default:
p.advance()
}
}
if p.current().Type == TokenRightBrace {
p.advance() // Skip '}'
}
}
// Only look for semicolon if there's no brace block
if !hasBraceBlock {
// Skip to semicolon for simple coverpoint declarations
for p.current().Type != TokenSemicolon && p.current().Type != TokenEOF {
p.advance()
}
if p.current().Type == TokenSemicolon {
p.advance()
}
}
node.Pos.End = p.previous().Position
return node, nil
}
// parseCross parses a cross coverage declaration
func (p *Parser) parseCross() (*CrossNode, error) {
node := &CrossNode{
BaseNode: BaseNode{
NodeType: NodeTypeCross,
Pos: Range{
Start: p.current().Position,
},
},
Coverpoints: make([]string, 0),
Bins: make([]BinNode, 0),
Options: make(map[string]string),
}
// Parse optional name (name : cross) or just cross
if p.current().Type == TokenIdentifier && p.peek(1).Type == TokenColon {
// Named cross: "name: cross ..."
node.Name = p.current().Text
p.advance() // Skip name
p.advance() // Skip colon
// Expect 'cross'
if p.current().Type == TokenCross {
p.advance() // Skip 'cross'
}
} else {
// Unnamed cross: "cross ..."
p.advance() // Skip 'cross'
}
// Parse coverpoint list
for p.current().Type != TokenLeftBrace && p.current().Type != TokenSemicolon && p.current().Type != TokenEOF {
if p.current().Type == TokenIdentifier {
node.Coverpoints = append(node.Coverpoints, p.current().Text)
}
p.advance()
}
// Parse bins and options
if p.current().Type == TokenLeftBrace {
p.advance() // Skip '{'
for p.current().Type != TokenRightBrace && p.current().Type != TokenEOF {
switch p.current().Type {
case TokenBins, TokenIllegalBins, TokenIgnoreBins:
bin, err := p.parseBin()
if err != nil {
return nil, err
}
node.Bins = append(node.Bins, *bin)
case TokenOption:
// Skip option parsing for now (basic implementation)
p.advance()
default:
p.advance()
}
}
if p.current().Type == TokenRightBrace {
p.advance() // Skip '}'
}
}
// Skip to semicolon
for p.current().Type != TokenSemicolon && p.current().Type != TokenEOF {
p.advance()
}
if p.current().Type == TokenSemicolon {
p.advance()
}
node.Pos.End = p.previous().Position
return node, nil
}
// parseBin parses a bin specification (bins, illegal_bins, ignore_bins)
func (p *Parser) parseBin() (*BinNode, error) {
node := &BinNode{
BaseNode: BaseNode{
NodeType: NodeTypeBin,
Pos: Range{
Start: p.current().Position,
},
},
Values: make([]string, 0),
}
// Parse bin type
switch p.current().Type {
case TokenBins:
node.BinType = BinTypeBins
case TokenIllegalBins:
node.BinType = BinTypeIllegal
case TokenIgnoreBins:
node.BinType = BinTypeIgnore
default:
return nil, fmt.Errorf("expected bin type, got %s", p.current().Text)
}
p.advance()
// Parse bin name
if p.current().Type != TokenIdentifier {
return nil, fmt.Errorf("expected bin name, got %s", p.current().Text)
}
node.Name = p.current().Text
p.advance()
// Parse optional array size [N]
if p.current().Type == TokenLeftBracket {
p.advance() // Skip '['
arraySizeStart := p.position
for p.current().Type != TokenRightBracket && p.current().Type != TokenEOF {
p.advance()
}
if p.position > arraySizeStart {
// Build array size string from tokens
arraySize := ""
for i := arraySizeStart; i < p.position; i++ {
if i > arraySizeStart {
arraySize += " "
}
arraySize += p.tokens[i].Text
}
node.ArraySize = arraySize
}
if p.current().Type == TokenRightBracket {
p.advance() // Skip ']'
}
}
// Parse '='
if p.current().Type == TokenAssignOp {
p.advance()
} else {
return nil, fmt.Errorf("expected '=', got %s (type %d)", p.current().Text, int(p.current().Type))
}
// Parse bin values
if p.current().Type == TokenLeftBrace {
p.advance() // Skip '{'
for p.current().Type != TokenRightBrace && p.current().Type != TokenEOF {
value, err := p.parseBinValue()
if err != nil {
return nil, fmt.Errorf("parseBinValue error: %v", err)
}
node.Values = append(node.Values, value)
// Skip commas
if p.current().Type == TokenComma {
p.advance()
}
}
if p.current().Type == TokenRightBrace {
p.advance() // Skip '}'
}
}
// Skip to semicolon
for p.current().Type != TokenSemicolon && p.current().Type != TokenEOF {
p.advance()
}
if p.current().Type == TokenSemicolon {
p.advance()
}
node.Pos.End = p.previous().Position
return node, nil
}
// parseBinValue parses a bin value (range, single value, or list)
func (p *Parser) parseBinValue() (string, error) {
if p.current().Type == TokenLeftBracket {
// Range: [start:end]
p.advance() // Skip '['
valueStart := p.position
for p.current().Type != TokenRightBracket && p.current().Type != TokenEOF {
p.advance()
}
value := ""
if p.position > valueStart {
// Build range string from tokens
for i := valueStart; i < p.position; i++ {
if i > valueStart {
value += " "
}
value += p.tokens[i].Text
}
}
if p.current().Type == TokenRightBracket {
p.advance() // Skip ']'
}
return "[" + value + "]", nil
} else {
// Single value or identifier
if p.current().Type == TokenIdentifier || p.current().Type == TokenNumber {
value := p.current().Text
p.advance()
return value, nil
}
}
return "", fmt.Errorf("expected bin value")
}
// parseConstraint parses a constraint block
// TODO: Implement full constraint parsing
func (p *Parser) parseConstraint() (*ConstraintNode, error) {
node := &ConstraintNode{
BaseNode: BaseNode{
NodeType: NodeTypeConstraint,
Pos: Range{
Start: p.current().Position,
},
},
Body: make([]Node, 0),
}
// Check for static/soft modifiers
if p.current().Type == TokenStatic {
node.IsStatic = true
p.advance()
}
if p.current().Type == TokenSoft {
node.IsSoft = true
p.advance()
}
// Skip 'constraint'
if p.current().Type != TokenConstraint {
return nil, fmt.Errorf("expected 'constraint' keyword")
}
p.advance()
// Parse name
if p.current().Type != TokenIdentifier {
return nil, fmt.Errorf("expected constraint name")
}
node.Name = p.current().Text
p.advance()
// Expect {
if p.current().Type != TokenLeftBrace {
return nil, fmt.Errorf("expected '{' after constraint name")
}
p.advance()
// Parse constraint expressions using the constraint parser
constraintParser := NewConstraintParser(p.lexer)
body, endIdx, err := constraintParser.ParseConstraintBody(p.tokens, p.position)
if err != nil {
return nil, err
}
node.Body = body
p.position = endIdx
// Expect closing }
if p.current().Type != TokenRightBrace {
return nil, fmt.Errorf("expected '}' to close constraint block")
}
p.advance()
node.Pos.End = p.previous().Position
return node, nil
}
// parseAlways parses an always block
func (p *Parser) parseAlways() (Node, error) {
startPos := p.current().Position
// Determine always type
alwaysType := AlwaysTypeBasic
switch p.current().Type {
case TokenAlways:
alwaysType = AlwaysTypeBasic
case TokenAlwaysComb:
alwaysType = AlwaysTypeComb
case TokenAlwaysFF:
alwaysType = AlwaysTypeFF
case TokenAlwaysLatch:
alwaysType = AlwaysTypeLatch
}
p.advance() // consume always keyword
always := &AlwaysNode{
BaseNode: BaseNode{
NodeType: NodeTypeAlways,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
AlwaysType: alwaysType,
Sensitivity: make([]string, 0),
Body: make([]Node, 0),
}
// Parse sensitivity list for basic always
if alwaysType == AlwaysTypeBasic && p.current().Type == TokenAt {
p.advance() // consume @
if p.current().Type == TokenLeftParen {
p.advance() // consume (
// Parse sensitivity items
for p.current().Type != TokenRightParen && p.current().Type != TokenEOF {
if p.current().Type == TokenStar || p.current().Text == "*" {
always.Sensitivity = append(always.Sensitivity, "*")
p.advance()
} else if p.current().Type == TokenIdentifier {
always.Sensitivity = append(always.Sensitivity, p.current().Text)
p.advance()
} else if p.current().Type == TokenPosedge || p.current().Type == TokenNegedge {
edge := p.current().Text
p.advance()
if p.current().Type == TokenIdentifier {
always.Sensitivity = append(always.Sensitivity, edge+" "+p.current().Text)
p.advance()
}
}
if p.current().Type == TokenComma {
p.advance()
}
}
if _, err := p.expect(TokenRightParen); err != nil {
return nil, err
}
}
}
// Parse body (single statement or begin/end block)
body, err := p.parseStatement()
if err != nil {
return nil, err
}
if body != nil {
always.Body = append(always.Body, body)
}
always.Pos.End = p.current().Position
return always, nil
}
// parseAssignment parses a continuous assignment
func (p *Parser) parseAssignment() (Node, error) {
startPos := p.current().Position
if _, err := p.expect(TokenAssign); err != nil {
return nil, err
}
// Parse left-hand side
lhs := ""
for p.current().Type != TokenAssignOp && p.current().Type != TokenEOF {
lhs += p.current().Text
p.advance()
}
if _, err := p.expect(TokenAssignOp); err != nil {
return nil, err
}
// Parse right-hand side
rhs := ""
for p.current().Type != TokenSemicolon && p.current().Type != TokenEOF {
rhs += p.current().Text + " "
p.advance()
}
if _, err := p.expect(TokenSemicolon); err != nil {
return nil, err
}
return &AssignmentNode{
BaseNode: BaseNode{
NodeType: NodeTypeAssignment,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
LHS: strings.TrimSpace(lhs),
RHS: strings.TrimSpace(rhs),
IsContinuous: true,
}, nil
}
// parseVariableOrInstance parses either a variable declaration or module instantiation
func (p *Parser) parseVariableOrInstance() (Node, error) {
// Save position to potentially backtrack
savedPos := p.position
// Try to determine if this is a variable or instance
// Look for patterns like: <type> <name> or <module> #() <instance>()
p.advance()
// Check if next token helps identify the construct
if p.current().Type == TokenHash ||
(p.current().Type == TokenIdentifier && p.peek(1).Type == TokenLeftParen) {
// Likely a module instantiation
p.position = savedPos
return p.parseInstance()
} else if p.current().Type == TokenLeftBracket ||
p.current().Type == TokenIdentifier ||
p.current().Type == TokenAssignOp ||
p.current().Type == TokenSemicolon {
// Likely a variable declaration
p.position = savedPos
return p.parseVariable()
}
// Default to variable
p.position = savedPos
return p.parseVariable()
}
// parseVariable parses a variable declaration
func (p *Parser) parseVariable() (Node, error) {
startPos := p.current().Position
// Parse data type
dataType := ""
isRand := false
isRandc := false
// Check for rand/randc
if p.current().Type == TokenRand {
isRand = true
p.advance()
} else if p.current().Type == TokenRandc {
isRandc = true
p.advance()
}
// Get data type
if p.isDataType(p.current().Type) || p.current().Type == TokenIdentifier {
dataType = p.current().Text
p.advance()
} else {
return nil, fmt.Errorf("expected data type")
}
// Parse width if present
var width *RangeNode
if p.current().Type == TokenLeftBracket {
var err error
width, err = p.parseRange()
if err != nil {
return nil, err
}
}
// Parse variable name
if p.current().Type != TokenIdentifier {
return nil, fmt.Errorf("expected variable name")
}
name := p.current().Text
p.advance()
// Parse array dimensions if present
for p.current().Type == TokenLeftBracket {
// Skip array dimensions for now
for p.current().Type != TokenRightBracket && p.current().Type != TokenEOF {
p.advance()
}
if p.current().Type == TokenRightBracket {
p.advance()
}
}
// Parse initial value if present
initValue := ""
if p.current().Type == TokenAssignOp {
p.advance()
for p.current().Type != TokenSemicolon && p.current().Type != TokenComma && p.current().Type != TokenEOF {
initValue += p.current().Text + " "
p.advance()
}
}
// Handle comma-separated declarations
vars := []*VariableNode{
&VariableNode{
BaseNode: BaseNode{
NodeType: NodeTypeVariable,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
Name: name,
DataType: dataType,
Width: width,
InitValue: strings.TrimSpace(initValue),
IsRand: isRand,
IsRandc: isRandc,
},
}
// Parse additional variables in same declaration
for p.current().Type == TokenComma {
p.advance()
if p.current().Type != TokenIdentifier {
break
}
varName := p.current().Text
p.advance()
// Parse initial value if present
varInitValue := ""
if p.current().Type == TokenAssignOp {
p.advance()
for p.current().Type != TokenSemicolon && p.current().Type != TokenComma && p.current().Type != TokenEOF {
varInitValue += p.current().Text + " "
p.advance()
}
}
vars = append(vars, &VariableNode{
BaseNode: BaseNode{
NodeType: NodeTypeVariable,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
Name: varName,
DataType: dataType,
Width: width,
InitValue: strings.TrimSpace(varInitValue),
IsRand: isRand,
IsRandc: isRandc,
})
}
if _, err := p.expect(TokenSemicolon); err != nil {
return nil, err
}
// Return first variable if only one, otherwise create a group node
if len(vars) == 1 {
return vars[0], nil
}
// Create a container node for multiple variables
// For now, just return the first one
return vars[0], nil
}
func (p *Parser) parseFunction() (Node, error) {
startPos := p.current().Position
// Consume 'function' keyword
if _, err := p.expect(TokenFunction); err != nil {
return nil, err
}
// Parse return type (optional)
returnType := ""
if p.isDataType(p.current().Type) || p.current().Type == TokenIdentifier {
returnType = p.current().Text
p.advance()
}
// Parse function name
if p.current().Type != TokenIdentifier {
return nil, fmt.Errorf("expected function name")
}
name := p.current().Text
p.advance()
// Parse parameters
var parameters []*ParameterNode
if p.current().Type == TokenLeftParen {
p.advance() // consume '('
for p.current().Type != TokenRightParen && p.current().Type != TokenEOF {
param, err := p.parseParameter()
if err != nil {
return nil, err
}
if param != nil {
parameters = append(parameters, param)
}
if p.current().Type == TokenComma {
p.advance()
} else if p.current().Type != TokenRightParen {
return nil, fmt.Errorf("expected ',' or ')' in parameter list")
}
}
if _, err := p.expect(TokenRightParen); err != nil {
return nil, err
}
}
// Skip semicolon if present
if p.current().Type == TokenSemicolon {
p.advance()
}
// Parse function body (skip for now)
var body []Node
// TODO: Parse function body properly
for p.current().Type != TokenEndFunction && p.current().Type != TokenEOF {
p.advance()
}
// Consume 'endfunction'
if _, err := p.expect(TokenEndFunction); err != nil {
return nil, err
}
endPos := p.tokens[p.position-1].Position
result := &FunctionNode{
BaseNode: BaseNode{
NodeType: NodeTypeFunction,
Pos: Range{
Start: startPos,
End: endPos,
},
},
Name: name,
ReturnType: returnType,
Parameters: parameters,
Body: body,
}
return result, nil
}
func (p *Parser) parseTask() (Node, error) {
startPos := p.current().Position
// Consume 'task' keyword
if _, err := p.expect(TokenTask); err != nil {
return nil, err
}
// Parse task name
if p.current().Type != TokenIdentifier {
return nil, fmt.Errorf("expected task name")
}
name := p.current().Text
p.advance()
// Parse parameters
var parameters []*ParameterNode
if p.current().Type == TokenLeftParen {
p.advance() // consume '('
for p.current().Type != TokenRightParen && p.current().Type != TokenEOF {
param, err := p.parseParameter()
if err != nil {
return nil, err
}
if param != nil {
parameters = append(parameters, param)
}
if p.current().Type == TokenComma {
p.advance()
} else if p.current().Type != TokenRightParen {
return nil, fmt.Errorf("expected ',' or ')' in parameter list")
}
}
if _, err := p.expect(TokenRightParen); err != nil {
return nil, err
}
}
// Skip semicolon if present
if p.current().Type == TokenSemicolon {
p.advance()
}
// Parse task body (skip for now)
var body []Node
// TODO: Parse task body properly
for p.current().Type != TokenEndTask && p.current().Type != TokenEOF {
p.advance()
}
// Consume 'endtask'
if _, err := p.expect(TokenEndTask); err != nil {
return nil, err
}
endPos := p.tokens[p.position-1].Position
return &TaskNode{
BaseNode: BaseNode{
NodeType: NodeTypeTask,
Pos: Range{
Start: startPos,
End: endPos,
},
},
Name: name,
Parameters: parameters,
Body: body,
}, nil
}
// parseInstance parses a module/interface instantiation
func (p *Parser) parseInstance() (Node, error) {
startPos := p.current().Position
// Module name
if p.current().Type != TokenIdentifier {
return nil, fmt.Errorf("expected module name")
}
moduleName := p.current().Text
p.advance()
// Parse parameters if present
params := make(map[string]string)
if p.current().Type == TokenHash {
p.advance()
if p.current().Type == TokenLeftParen {
p.advance()
// Parse parameter assignments
for p.current().Type != TokenRightParen && p.current().Type != TokenEOF {
if p.current().Type == TokenDot {
p.advance()
if p.current().Type == TokenIdentifier {
paramName := p.current().Text
p.advance()
if p.current().Type == TokenLeftParen {
p.advance()
value := ""
for p.current().Type != TokenRightParen && p.current().Type != TokenEOF {
value += p.current().Text
p.advance()
}
params[paramName] = value
if p.current().Type == TokenRightParen {
p.advance()
}
}
}
}
if p.current().Type == TokenComma {
p.advance()
}
}
if p.current().Type == TokenRightParen {
p.advance()
}
}
}
// Instance name
if p.current().Type != TokenIdentifier {
return nil, fmt.Errorf("expected instance name")
}
instanceName := p.current().Text
p.advance()
// Parse connections
connections := make(map[string]string)
if p.current().Type == TokenLeftParen {
p.advance()
// Parse port connections
for p.current().Type != TokenRightParen && p.current().Type != TokenEOF {
if p.current().Type == TokenDot {
p.advance()
if p.current().Type == TokenIdentifier {
portName := p.current().Text
p.advance()
if p.current().Type == TokenLeftParen {
p.advance()
connection := ""
for p.current().Type != TokenRightParen && p.current().Type != TokenEOF {
connection += p.current().Text
p.advance()
}
connections[portName] = connection
if p.current().Type == TokenRightParen {
p.advance()
}
}
}
}
if p.current().Type == TokenComma {
p.advance()
}
}
if p.current().Type == TokenRightParen {
p.advance()
}
}
if _, err := p.expect(TokenSemicolon); err != nil {
return nil, err
}
return &InstanceNode{
BaseNode: BaseNode{
NodeType: NodeTypeInstance,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
ModuleName: moduleName,
InstanceName: instanceName,
Parameters: params,
Connections: connections,
}, nil
}
// parseStatement parses a single statement or block
func (p *Parser) parseStatement() (Node, error) {
switch p.current().Type {
case TokenBegin:
return p.parseBeginEndBlock()
case TokenIf:
return p.parseIfStatement()
case TokenCase, TokenCasex, TokenCasez:
return p.parseCaseStatement()
case TokenFor:
return p.parseForLoop()
case TokenWhile:
return p.parseWhileLoop()
default:
// Parse simple statement
return p.parseSimpleStatement()
}
}
// parseBeginEndBlock parses a begin/end block
func (p *Parser) parseBeginEndBlock() (Node, error) {
if _, err := p.expect(TokenBegin); err != nil {
return nil, err
}
// Parse optional block name
if p.current().Type == TokenColon {
p.advance()
if p.current().Type == TokenIdentifier {
// Skip block name for now
p.advance()
}
}
// Parse statements until end
var statements []Node
for p.current().Type != TokenEnd && p.current().Type != TokenEOF {
stmt, err := p.parseStatement()
if err != nil {
p.addError(err.Error(), p.current().Position)
p.advance()
continue
}
if stmt != nil {
statements = append(statements, stmt)
}
}
if _, err := p.expect(TokenEnd); err != nil {
return nil, err
}
// For now, return the first statement or nil
if len(statements) > 0 {
return statements[0], nil
}
return nil, nil
}
// parseIfStatement parses an if statement
func (p *Parser) parseIfStatement() (Node, error) {
startPos := p.current().Position
if _, err := p.expect(TokenIf); err != nil {
return nil, err
}
// Parse condition
if _, err := p.expect(TokenLeftParen); err != nil {
return nil, err
}
// Skip condition parsing for now
parenCount := 1
for parenCount > 0 && p.current().Type != TokenEOF {
if p.current().Type == TokenLeftParen {
parenCount++
} else if p.current().Type == TokenRightParen {
parenCount--
}
p.advance()
}
// Parse then branch
_, err := p.parseStatement()
if err != nil {
return nil, err
}
// Check for else
if p.current().Type == TokenElse {
p.advance()
_, err := p.parseStatement()
if err != nil {
return nil, err
}
}
return &IfNode{
BaseNode: BaseNode{
NodeType: NodeTypeIf,
Pos: Range{
Start: startPos,
End: p.current().Position,
},
},
}, nil
}
// parseCaseStatement parses a case statement
func (p *Parser) parseCaseStatement() (Node, error) {
// Skip implementation for now
p.advance()
return nil, nil
}
// parseForLoop parses a for loop
func (p *Parser) parseForLoop() (Node, error) {
// Skip implementation for now
p.advance()
return nil, nil
}
// parseWhileLoop parses a while loop
func (p *Parser) parseWhileLoop() (Node, error) {
// Skip implementation for now
p.advance()
return nil, nil
}
// parseSimpleStatement parses a simple statement
func (p *Parser) parseSimpleStatement() (Node, error) {
// Skip to semicolon for now
for p.current().Type != TokenSemicolon && p.current().Type != TokenEOF {
p.advance()
}
if p.current().Type == TokenSemicolon {
p.advance()
}
return nil, nil
}
// isDataType checks if a token type represents a data type
func (p *Parser) isDataType(tokenType TokenType) bool {
switch tokenType {
case TokenLogic, TokenBit, TokenByte, TokenInt, TokenInteger, TokenReal, TokenTime, TokenWire, TokenReg:
return true
default:
return false
}
}
package parser
import "strings"
// SVAParser handles parsing of SystemVerilog Assertions
type SVAParser struct {
tokens []Token
position int
}
// NewSVAParser creates a new SVA parser
func NewSVAParser(tokens []Token, position int) *SVAParser {
return &SVAParser{
tokens: tokens,
position: position,
}
}
// ParseProperty parses a property declaration
// property name [(params)] [clocking_event] [disable iff (expr)]
//
// property_body
//
// endproperty
func (p *SVAParser) ParseProperty() (*PropertyNode, int) {
node := &PropertyNode{
BaseNode: BaseNode{
NodeType: NodeTypeProperty,
Pos: Range{
Start: p.currentToken().Position,
},
},
}
// Skip 'property' keyword
p.advance()
// Parse property name
if p.currentToken().Type != TokenIdentifier {
return nil, p.position
}
node.Name = p.currentToken().Text
p.advance()
// Parse parameters
if p.currentToken().Type == TokenLeftParen {
p.advance() // (
node.Parameters = p.parsePropertyParameters()
if p.currentToken().Type == TokenRightParen {
p.advance()
}
}
// Parse optional clocking event
if p.currentToken().Type == TokenAt {
node.ClockingEvent = p.parseClockingEvent()
}
// Parse optional disable iff
if p.currentToken().Type == TokenDisable && p.peekToken(1).Type == TokenIff {
p.advance() // disable
p.advance() // iff
if p.currentToken().Type == TokenLeftParen {
p.advance()
node.DisableIff = p.parseExpression(TokenRightParen)
p.advance() // skip )
}
}
// Parse property body
node.Body = p.parsePropertyExpression()
// Skip to endproperty
p.skipUntil(TokenEndProperty)
if p.currentToken().Type == TokenEndProperty {
p.advance()
}
node.Pos.End = p.previousToken().Position
return node, p.position
}
// ParseSequence parses a sequence declaration
// sequence name [(params)]
//
// sequence_body
//
// endsequence
func (p *SVAParser) ParseSequence() (*SequenceNode, int) {
node := &SequenceNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequence,
Pos: Range{
Start: p.currentToken().Position,
},
},
}
// Skip 'sequence' keyword
p.advance()
// Parse sequence name
if p.currentToken().Type != TokenIdentifier {
return nil, p.position
}
node.Name = p.currentToken().Text
p.advance()
// Parse parameters
if p.currentToken().Type == TokenLeftParen {
p.advance() // (
node.Parameters = p.parseSequenceParameters()
if p.currentToken().Type == TokenRightParen {
p.advance()
}
}
// Parse sequence body
node.Body = p.parseSequenceExpression()
// Skip to endsequence
p.skipUntil(TokenEndSequence)
if p.currentToken().Type == TokenEndSequence {
p.advance()
}
node.Pos.End = p.previousToken().Position
return node, p.position
}
// ParseAssertion parses an assertion statement
// [label:] assert|assume|cover|restrict property (property_spec) [action_block];
func (p *SVAParser) ParseAssertion() (*AssertionNode, int) {
node := &AssertionNode{
BaseNode: BaseNode{
NodeType: NodeTypeAssertion,
Pos: Range{
Start: p.currentToken().Position,
},
},
}
// Check for optional label
if p.peekToken(1).Type == TokenColon {
node.Label = p.currentToken().Text
p.advance() // label
p.advance() // :
}
// Determine assertion type
switch p.currentToken().Type {
case TokenAssert:
node.AssertionType = AssertionTypeAssert
case TokenAssume:
node.AssertionType = AssertionTypeAssume
case TokenCover:
node.AssertionType = AssertionTypeCover
case TokenRestrict:
node.AssertionType = AssertionTypeRestrict
case TokenExpect:
node.AssertionType = AssertionTypeExpect
default:
return nil, p.position
}
p.advance()
// Check if this is a concurrent assertion (has 'property' keyword)
if p.currentToken().Type == TokenProperty {
node.IsConcurrent = true
p.advance()
}
// Parse property specification
if p.currentToken().Type == TokenLeftParen {
p.advance()
// Parse the property expression inside parentheses
node.Property = p.parsePropertyExpression()
if p.currentToken().Type == TokenRightParen {
p.advance()
}
} else {
// No parentheses - parse property expression directly
node.Property = p.parsePropertyExpression()
}
// Parse optional action block (else clause)
node.ActionBlock = p.parseActionBlock()
// Advance past semicolon if action block didn't consume it
if p.currentToken().Type == TokenSemicolon {
p.advance()
}
node.Pos.End = p.previousToken().Position
return node, p.position
}
// parseClockingEvent parses a clocking event @(edge signal)
func (p *SVAParser) parseClockingEvent() *ClockingEventNode {
node := &ClockingEventNode{
BaseNode: BaseNode{
NodeType: NodeTypeClockingEvent,
Pos: Range{
Start: p.currentToken().Position,
},
},
}
// Skip @ token
p.advance()
// Expect (
if p.currentToken().Type != TokenLeftParen {
return nil
}
p.advance()
// Check for edge
switch p.currentToken().Type {
case TokenPosedge:
node.Edge = EdgeTypePosedge
p.advance()
case TokenNegedge:
node.Edge = EdgeTypeNegedge
p.advance()
case TokenEdge:
node.Edge = EdgeTypeEdge
p.advance()
default:
node.Edge = EdgeTypeNone
}
// Parse signal expression
node.Expression = p.parseExpression(TokenRightParen)
// Skip )
if p.currentToken().Type == TokenRightParen {
p.advance()
}
node.Pos.End = p.previousToken().Position
return node
}
// parsePropertyExpression parses a property expression
// This handles all property operators including implication, throughout, within, etc.
func (p *SVAParser) parsePropertyExpression() Node {
return p.parsePropertyIff()
}
// parsePropertyIff handles iff expressions (lowest precedence)
func (p *SVAParser) parsePropertyIff() Node {
left := p.parsePropertyImplication()
if p.currentToken().Type == TokenIff {
p.advance() // iff
right := p.parsePropertyImplication()
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: PropertyExprTypeIff,
Operator: "iff",
Left: left,
Right: right,
}
}
return left
}
// parsePropertyImplication handles implication operators
func (p *SVAParser) parsePropertyImplication() Node {
left := p.parsePropertyOr()
switch p.currentToken().Type {
case TokenImplication: // ->
p.advance()
right := p.parsePropertyExpression()
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: PropertyExprTypeImplication,
Operator: "->",
Left: left,
Right: right,
}
case TokenNonOverlapImplication: // |->
p.advance()
right := p.parsePropertyExpression()
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: PropertyExprTypeNonOverlapImplication,
Operator: "|->",
Left: left,
Right: right,
}
case TokenOverlapImplication: // |=>
p.advance()
right := p.parsePropertyExpression()
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: PropertyExprTypeOverlapImplication,
Operator: "|=>",
Left: left,
Right: right,
}
}
return left
}
// parsePropertyOr handles or operations
func (p *SVAParser) parsePropertyOr() Node {
left := p.parsePropertyAnd()
for p.currentToken().Type == TokenOr {
p.advance()
right := p.parsePropertyAnd()
left = &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: PropertyExprTypeOr,
Operator: "or",
Left: left,
Right: right,
}
}
return left
}
// parsePropertyAnd handles and operations
func (p *SVAParser) parsePropertyAnd() Node {
left := p.parsePropertyThroughout()
for p.currentToken().Type == TokenAnd {
p.advance()
right := p.parsePropertyThroughout()
left = &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: PropertyExprTypeAnd,
Operator: "and",
Left: left,
Right: right,
}
}
return left
}
// parsePropertyThroughout handles throughout operations
func (p *SVAParser) parsePropertyThroughout() Node {
left := p.parsePropertyWithin()
// Check if current token is throughout after parsing left side
if p.currentToken().Type == TokenThroughout {
p.advance()
right := p.parsePropertyWithin()
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: PropertyExprTypeThroughout,
Operator: "throughout",
Left: left,
Right: right,
}
}
return left
}
// parsePropertyWithin handles within operations
func (p *SVAParser) parsePropertyWithin() Node {
left := p.parsePropertyIntersect()
if p.currentToken().Type == TokenWithin {
p.advance()
right := p.parsePropertyIntersect()
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: PropertyExprTypeWithin,
Operator: "within",
Left: left,
Right: right,
}
}
return left
}
// parsePropertyIntersect handles intersect operations
func (p *SVAParser) parsePropertyIntersect() Node {
left := p.parsePropertyUnary()
if p.currentToken().Type == TokenIntersect {
p.advance()
right := p.parsePropertyUnary()
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: PropertyExprTypeIntersect,
Operator: "intersect",
Left: left,
Right: right,
}
}
return left
}
// parsePropertyUnary handles unary operations (not)
func (p *SVAParser) parsePropertyUnary() Node {
if p.currentToken().Type == TokenNot {
start := p.currentToken().Position
p.advance()
expr := p.parsePropertyPrimary()
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: start,
End: expr.Range().End,
},
},
ExprType: PropertyExprTypeNot,
Operator: "not",
Expr: expr,
}
}
return p.parsePropertyPrimary()
}
// parsePropertyPrimary handles primary property expressions
func (p *SVAParser) parsePropertyPrimary() Node {
start := p.currentToken().Position
// Clocking event
if p.currentToken().Type == TokenAt {
clockingEvent := p.parseClockingEvent()
// After clocking event, parse the property expression that follows
propExpr := p.parsePropertyExpression()
// Wrap in a property expression that includes the clocking event
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: start,
End: propExpr.Range().End,
},
},
ExprType: PropertyExprTypeSimple,
Operator: "clocked",
Left: clockingEvent,
Right: propExpr,
}
}
// Parenthesized expression
if p.currentToken().Type == TokenLeftParen {
p.advance()
expr := p.parsePropertyExpression()
if p.currentToken().Type == TokenRightParen {
p.advance()
}
return expr
}
// Sequence expression
if p.isSequenceStart() {
seq := p.parseSequenceExpression()
// Wrap sequence in property expression
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: seq.Range(),
},
ExprType: PropertyExprTypeSequence,
Operator: "sequence",
Expr: seq,
}
}
// Property reference or simple expression
if p.currentToken().Type == TokenIdentifier {
name := p.currentToken().Text
end := p.currentToken().Position
// Look ahead to see if it's a property call
if p.peekToken(1).Type == TokenLeftParen {
p.advance() // identifier
p.advance() // (
// Skip instantiation parameters for now
p.skipUntil(TokenRightParen)
if p.currentToken().Type == TokenRightParen {
p.advance()
}
return &PropertyRefNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyRef,
Pos: Range{
Start: start,
End: p.previousToken().Position,
},
},
Name: name,
}
} else {
// Simple identifier - wrap in property expression
p.advance()
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: Range{
Start: start,
End: end,
},
},
ExprType: PropertyExprTypeSimple,
Operator: "simple",
Expr: &ExpressionNode{
BaseNode: BaseNode{
NodeType: NodeTypeExpression,
Pos: Range{
Start: start,
End: end,
},
},
Text: name,
},
}
}
}
// Other simple expressions
expr := p.parseBasicExpression()
return &PropertyExprNode{
BaseNode: BaseNode{
NodeType: NodeTypePropertyExpr,
Pos: expr.Range(),
},
ExprType: PropertyExprTypeSimple,
Operator: "simple",
Expr: expr,
}
}
// isSequenceStart checks if current position starts a sequence
func (p *SVAParser) isSequenceStart() bool {
// Look for sequence-specific operators or patterns
switch p.currentToken().Type {
case TokenDoubleHash:
return true
case TokenLeftBracket:
// Check if it's a repetition operator [*, [+, [=, [->
if p.position+1 < len(p.tokens) {
next := p.peekToken(1).Type
if next == TokenStar || next == TokenPlus || next == TokenEqual || next == TokenMinus {
return true
}
}
return false
}
// Look ahead for sequence-specific operators
if p.position+1 < len(p.tokens) {
next := p.peekToken(1).Type
// Check for sequence concatenation (##) or repetition on identifier (a[*])
if next == TokenDoubleHash || next == TokenLeftBracket {
return true
}
}
return false
}
// isExpressionStart checks if current position starts a basic expression
func (p *SVAParser) isExpressionStart() bool {
switch p.currentToken().Type {
case TokenIdentifier, TokenLeftParen, TokenNot, TokenLogicalNot:
return true
}
return false
}
// parseBasicExpression parses a basic expression (identifier, literal, etc.)
func (p *SVAParser) parseBasicExpression() Node {
start := p.currentToken().Position
expr := p.parseExpression(TokenSemicolon)
return &ExpressionNode{
BaseNode: BaseNode{
NodeType: NodeTypeExpression,
Pos: Range{
Start: start,
End: p.previousToken().Position,
},
},
Text: expr,
}
}
// parseSequenceExpression parses a sequence expression
// This handles all sequence operators including ##, [*], [+], [=], and, or, intersect
func (p *SVAParser) parseSequenceExpression() Node {
return p.parseSequenceOr()
}
// parseSequenceOr handles or operations
func (p *SVAParser) parseSequenceOr() Node {
left := p.parseSequenceAnd()
for p.currentToken().Type == TokenOr {
p.advance()
right := p.parseSequenceAnd()
left = &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: SequenceExprTypeOr,
Operator: "or",
Left: left,
Right: right,
}
}
return left
}
// parseSequenceAnd handles and operations
func (p *SVAParser) parseSequenceAnd() Node {
left := p.parseSequenceIntersect()
for p.currentToken().Type == TokenAnd {
p.advance()
right := p.parseSequenceIntersect()
left = &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: SequenceExprTypeAnd,
Operator: "and",
Left: left,
Right: right,
}
}
return left
}
// parseSequenceIntersect handles intersect operations
func (p *SVAParser) parseSequenceIntersect() Node {
left := p.parseSequenceThroughout()
if p.currentToken().Type == TokenIntersect {
p.advance()
right := p.parseSequenceThroughout()
return &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: SequenceExprTypeIntersect,
Operator: "intersect",
Left: left,
Right: right,
}
}
return left
}
// parseSequenceThroughout handles throughout operations
func (p *SVAParser) parseSequenceThroughout() Node {
left := p.parseSequenceWithin()
// Check if current token is throughout after parsing left side
if p.currentToken().Type == TokenThroughout {
p.advance()
right := p.parseSequenceWithin()
return &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: SequenceExprTypeThroughout,
Operator: "throughout",
Left: left,
Right: right,
}
}
return left
}
// parseSequenceWithin handles within operations
func (p *SVAParser) parseSequenceWithin() Node {
left := p.parseSequenceFirstMatch()
if p.currentToken().Type == TokenWithin {
p.advance()
right := p.parseSequenceFirstMatch()
return &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: SequenceExprTypeWithin,
Operator: "within",
Left: left,
Right: right,
}
}
return left
}
// parseSequenceFirstMatch handles first_match operations
func (p *SVAParser) parseSequenceFirstMatch() Node {
if p.currentToken().Type == TokenFirstMatch {
start := p.currentToken().Position
p.advance()
if p.currentToken().Type == TokenLeftParen {
p.advance()
expr := p.parseSequenceExpression()
if p.currentToken().Type == TokenRightParen {
p.advance()
}
return &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: Range{
Start: start,
End: p.previousToken().Position,
},
},
ExprType: SequenceExprTypeFirstMatch,
Operator: "first_match",
Expr: expr,
}
}
}
return p.parseSequenceConcat()
}
// parseSequenceConcat handles concatenation (implicit)
func (p *SVAParser) parseSequenceConcat() Node {
left := p.parseSequenceDelay()
// Check for implicit concatenation (two sequences next to each other)
for p.isSequencePrimaryStart() || p.currentToken().Type == TokenDoubleHash {
right := p.parseSequenceDelay()
left = &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: Range{
Start: left.Range().Start,
End: right.Range().End,
},
},
ExprType: SequenceExprTypeConcat,
Operator: "concat",
Left: left,
Right: right,
}
}
return left
}
// parseSequenceDelay handles delay operators ##
func (p *SVAParser) parseSequenceDelay() Node {
if p.currentToken().Type == TokenDoubleHash {
start := p.currentToken().Position
p.advance()
// Parse delay amount
delayNode := &DelayNode{
BaseNode: BaseNode{
NodeType: NodeTypeDelay,
Pos: Range{
Start: start,
},
},
}
// Check for range [n:m]
if p.currentToken().Type == TokenLeftBracket {
p.advance()
delayNode.MinCount = p.parseExpression(TokenColon)
if p.currentToken().Type == TokenColon {
p.advance()
delayNode.MaxCount = p.parseExpression(TokenRightBracket)
delayNode.DelayType = DelayTypeRange
} else {
delayNode.Count = delayNode.MinCount
delayNode.DelayType = DelayTypeFixed
}
if p.currentToken().Type == TokenRightBracket {
p.advance()
}
} else if p.currentToken().Type == TokenNumber {
delayNode.Count = p.currentToken().Text
delayNode.DelayType = DelayTypeFixed
p.advance()
} else {
// ##1 is implicit
delayNode.Count = "1"
delayNode.DelayType = DelayTypeFixed
}
delayNode.Pos.End = p.previousToken().Position
// Parse the sequence after delay
seq := p.parseSequencePrimary()
return &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: Range{
Start: start,
End: seq.Range().End,
},
},
ExprType: SequenceExprTypeDelay,
Operator: "##",
Left: delayNode,
Right: seq,
}
}
return p.parseSequencePrimary()
}
// parseSequencePrimary handles primary sequence expressions
func (p *SVAParser) parseSequencePrimary() Node {
// Parenthesized expression
if p.currentToken().Type == TokenLeftParen {
p.advance()
expr := p.parseSequenceExpression()
if p.currentToken().Type == TokenRightParen {
p.advance()
}
return expr
}
// Expression with repetition
expr := p.parseSequenceAtom()
// Check for repetition operators
if p.currentToken().Type == TokenLeftBracket {
return p.parseRepetition(expr)
}
return expr
}
// parseSequenceAtom parses atomic sequence expressions
func (p *SVAParser) parseSequenceAtom() Node {
start := p.currentToken().Position
// Sequence reference or simple expression
if p.currentToken().Type == TokenIdentifier {
name := p.currentToken().Text
end := p.currentToken().Position
// Look ahead to see if it's a sequence call
if p.peekToken(1).Type == TokenLeftParen {
p.advance() // identifier
p.advance() // (
// Skip parameters for now
p.skipUntil(TokenRightParen)
if p.currentToken().Type == TokenRightParen {
p.advance()
}
return &SequenceRefNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceRef,
Pos: Range{
Start: start,
End: p.previousToken().Position,
},
},
Name: name,
}
} else {
// Simple identifier - wrap in sequence expression
p.advance()
return &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: Range{
Start: start,
End: end,
},
},
ExprType: SequenceExprTypeSimple,
Operator: "simple",
Expr: &ExpressionNode{
BaseNode: BaseNode{
NodeType: NodeTypeExpression,
Pos: Range{
Start: start,
End: end,
},
},
Text: name,
},
}
}
}
// Other simple expressions
expr := p.parseBasicExpression()
return &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: expr.Range(),
},
ExprType: SequenceExprTypeSimple,
Operator: "simple",
Expr: expr,
}
}
// parseRepetition parses repetition operators [*], [+], [=], [->]
func (p *SVAParser) parseRepetition(expr Node) Node {
if p.currentToken().Type != TokenLeftBracket {
return expr
}
start := expr.Range().Start
p.advance() // [
repNode := &RepetitionNode{
BaseNode: BaseNode{
NodeType: NodeTypeRepetition,
Pos: Range{
Start: start,
},
},
Expr: expr,
}
// Check repetition type
switch p.currentToken().Type {
case TokenStar: // [*]
p.advance()
repNode.RepType = RepetitionTypeConsecutive
// Check for range
if p.currentToken().Type == TokenNumber {
repNode.MinCount = p.currentToken().Text
p.advance()
if p.currentToken().Type == TokenColon {
p.advance()
if p.currentToken().Type == TokenNumber {
repNode.MaxCount = p.currentToken().Text
p.advance()
} else if p.currentToken().Type == TokenDollar {
repNode.MaxCount = "$"
p.advance()
}
} else {
repNode.MaxCount = repNode.MinCount
}
}
case TokenPlus: // [+]
p.advance()
repNode.RepType = RepetitionTypeConsecutive
repNode.MinCount = "1"
repNode.MaxCount = "$"
case TokenEqual: // [=]
p.advance()
repNode.RepType = RepetitionTypeNonConsecutive
// Parse range like [*]
if p.currentToken().Type == TokenNumber {
repNode.MinCount = p.currentToken().Text
p.advance()
if p.currentToken().Type == TokenColon {
p.advance()
if p.currentToken().Type == TokenNumber {
repNode.MaxCount = p.currentToken().Text
p.advance()
} else if p.currentToken().Type == TokenDollar {
repNode.MaxCount = "$"
p.advance()
}
} else {
repNode.MaxCount = repNode.MinCount
}
}
case TokenMinus: // [->]
p.advance()
if p.currentToken().Type == TokenGreater {
p.advance()
repNode.RepType = RepetitionTypeGoto
// Parse range
if p.currentToken().Type == TokenNumber {
repNode.MinCount = p.currentToken().Text
p.advance()
if p.currentToken().Type == TokenColon {
p.advance()
if p.currentToken().Type == TokenNumber {
repNode.MaxCount = p.currentToken().Text
p.advance()
}
} else {
repNode.MaxCount = repNode.MinCount
}
}
}
}
if p.currentToken().Type == TokenRightBracket {
p.advance()
}
repNode.Pos.End = p.previousToken().Position
return &SequenceExprNode{
BaseNode: BaseNode{
NodeType: NodeTypeSequenceExpr,
Pos: Range{
Start: start,
End: repNode.Pos.End,
},
},
ExprType: SequenceExprTypeRepetition,
Operator: "repetition",
Expr: repNode,
}
}
// isSequencePrimaryStart checks if current position can start a sequence primary
func (p *SVAParser) isSequencePrimaryStart() bool {
switch p.currentToken().Type {
case TokenIdentifier, TokenLeftParen, TokenNot, TokenLogicalNot:
return true
}
return false
}
// parseExpression parses a general expression until the given end token
func (p *SVAParser) parseExpression(endToken TokenType) string {
expr := ""
depth := 0
for p.position < len(p.tokens) && p.currentToken().Type != TokenEOF {
if p.currentToken().Type == endToken && depth == 0 {
break
}
switch p.currentToken().Type {
case TokenLeftParen, TokenLeftBracket, TokenLeftBrace:
depth++
case TokenRightParen, TokenRightBracket, TokenRightBrace:
depth--
if depth < 0 {
break
}
}
expr += p.currentToken().Text + " "
p.advance()
}
return expr
}
// parsePropertyParameters parses property parameters
func (p *SVAParser) parsePropertyParameters() []*ParameterNode {
var params []*ParameterNode
for p.position < len(p.tokens) && p.currentToken().Type != TokenRightParen && p.currentToken().Type != TokenEOF {
param := p.parseParameter()
if param != nil {
params = append(params, param)
}
if p.currentToken().Type == TokenComma {
p.advance()
} else {
break
}
}
return params
}
// parseSequenceParameters parses sequence parameters
func (p *SVAParser) parseSequenceParameters() []*ParameterNode {
// Same as property parameters
return p.parsePropertyParameters()
}
// parseParameter parses a single parameter
func (p *SVAParser) parseParameter() *ParameterNode {
start := p.currentToken().Position
param := &ParameterNode{
BaseNode: BaseNode{
NodeType: NodeTypeParameter,
Pos: Range{
Start: start,
},
},
}
// Check for parameter keyword
if p.currentToken().Type == TokenParameter {
p.advance()
}
// Check for data type
if p.isDataType() {
param.DataType = p.currentToken().Text
p.advance()
// Check for parameterized type
if p.currentToken().Type == TokenLeftBracket {
p.advance()
width := p.parseExpression(TokenRightBracket)
param.DataType += "[" + width + "]"
if p.currentToken().Type == TokenRightBracket {
p.advance()
}
}
}
// Parse parameter name
if p.currentToken().Type == TokenIdentifier {
param.Name = p.currentToken().Text
p.advance()
} else {
// Invalid parameter
return nil
}
// Parse default value
if p.currentToken().Type == TokenAssignOp {
p.advance()
param.DefaultValue = p.parseExpression(TokenComma)
}
param.Pos.End = p.previousToken().Position
return param
}
// isDataType checks if current token is a data type
func (p *SVAParser) isDataType() bool {
switch p.currentToken().Type {
case TokenLogic, TokenBit, TokenReg, TokenWire, TokenInt, TokenInteger,
TokenByte, TokenReal, TokenTime, TokenString:
return true
case TokenIdentifier:
// Could be a user-defined type
return true
}
return false
}
// parseActionBlock parses assertion action blocks (success and failure actions)
func (p *SVAParser) parseActionBlock() Node {
// Action blocks can be:
// 1. Single statement followed by semicolon
// 2. else statement
// 3. begin/end block
// Skip whitespace/comments
for p.currentToken().Type == TokenWhitespace || p.currentToken().Type == TokenComment {
p.advance()
}
// Check for else clause (failure action)
if p.currentToken().Type == TokenElse {
start := p.currentToken().Position
p.advance()
// Parse the else action
var elseAction Node
if p.currentToken().Type == TokenBegin {
// begin/end block
elseAction = p.parseBeginEndBlock()
} else {
// Single statement
elseAction = p.parseStatement()
}
// Create an action block node
return &BaseNode{
NodeType: NodeTypeSequence, // Using Sequence as a placeholder for action block
Pos: Range{
Start: start,
End: p.previousToken().Position,
},
Childs: []Node{elseAction},
}
}
// No action block
return nil
}
// parseBeginEndBlock parses a begin/end block
func (p *SVAParser) parseBeginEndBlock() Node {
start := p.currentToken().Position
if p.currentToken().Type != TokenBegin {
return nil
}
p.advance()
var statements []Node
// Parse statements until 'end'
for p.currentToken().Type != TokenEnd && p.currentToken().Type != TokenEOF {
stmt := p.parseStatement()
if stmt != nil {
statements = append(statements, stmt)
}
// Skip semicolons between statements
if p.currentToken().Type == TokenSemicolon {
p.advance()
}
}
if p.currentToken().Type == TokenEnd {
p.advance()
}
return &BaseNode{
NodeType: NodeTypeSequence, // Using Sequence as a placeholder for block
Pos: Range{
Start: start,
End: p.previousToken().Position,
},
Childs: statements,
}
}
// parseStatement parses a single statement
func (p *SVAParser) parseStatement() Node {
start := p.currentToken().Position
// For now, just capture the statement as text until semicolon
var stmt string
depth := 0
for p.position < len(p.tokens) && p.currentToken().Type != TokenEOF {
// Track nested constructs
switch p.currentToken().Type {
case TokenBegin, TokenLeftParen, TokenLeftBrace:
depth++
case TokenEnd, TokenRightParen, TokenRightBrace:
depth--
case TokenSemicolon:
if depth == 0 {
// Don't include the semicolon in the statement
break
}
}
if p.currentToken().Type == TokenSemicolon && depth == 0 {
break
}
stmt += p.currentToken().Text + " "
p.advance()
}
if stmt == "" {
return nil
}
return &ExpressionNode{
BaseNode: BaseNode{
NodeType: NodeTypeExpression,
Pos: Range{
Start: start,
End: p.previousToken().Position,
},
},
Text: strings.TrimSpace(stmt),
}
}
// Helper methods
func (p *SVAParser) currentToken() Token {
if p.position >= len(p.tokens) {
return Token{Type: TokenEOF}
}
return p.tokens[p.position]
}
func (p *SVAParser) peekToken(offset int) Token {
pos := p.position + offset
if pos >= len(p.tokens) {
return Token{Type: TokenEOF}
}
return p.tokens[pos]
}
func (p *SVAParser) previousToken() Token {
if p.position > 0 {
return p.tokens[p.position-1]
}
return Token{Type: TokenEOF}
}
func (p *SVAParser) advance() {
if p.position < len(p.tokens) {
p.position++
}
}
func (p *SVAParser) skipUntil(tokenType TokenType) {
for p.position < len(p.tokens) && p.currentToken().Type != tokenType {
p.advance()
}
}
// GetPosition returns the current parser position
func (p *SVAParser) GetPosition() int {
return p.position
}
package symbols
import (
"fmt"
"github.com/adicens/systemverilog-lsp/internal/parser"
)
// SymbolBuilder builds symbol tables from AST nodes
type SymbolBuilder struct {
table *SymbolTable
document string
idCounter int
}
// NewSymbolBuilder creates a new symbol builder
func NewSymbolBuilder(document string) *SymbolBuilder {
return &SymbolBuilder{
table: NewSymbolTable(),
document: document,
idCounter: 0,
}
}
// Build builds a symbol table from an AST
func (sb *SymbolBuilder) Build(ast *parser.AST) (*SymbolTable, error) {
if ast.Root == nil {
return sb.table, fmt.Errorf("AST root is nil")
}
err := sb.visitNode(ast.Root)
if err != nil {
return sb.table, err
}
// Create document symbols structure for this document
sb.createDocumentSymbols()
return sb.table, nil
}
// GetTable returns the symbol table
func (sb *SymbolBuilder) GetTable() *SymbolTable {
return sb.table
}
// generateID generates a unique ID
func (sb *SymbolBuilder) generateID() string {
sb.idCounter++
return fmt.Sprintf("%s_%d", sb.document, sb.idCounter)
}
// createDocumentSymbols creates document symbols for the current document
func (sb *SymbolBuilder) createDocumentSymbols() {
// Get all symbols that belong to this document
allSymbols := sb.table.GetAllSymbols()
documentSymbols := make(map[string]*Symbol)
// Filter symbols for this document
for id, symbol := range allSymbols {
if symbol.Position.File == sb.document {
documentSymbols[id] = symbol
}
}
// Create DocumentSymbols structure
docSymbols := &DocumentSymbols{
URI: sb.document,
Version: 1, // Default version
RootScope: sb.table.GetRootScope(),
Symbols: documentSymbols,
}
// Add to symbol table
sb.table.AddDocumentSymbols(sb.document, 1, docSymbols)
}
// visitNode visits an AST node and builds symbols
func (sb *SymbolBuilder) visitNode(node parser.Node) error {
if node == nil {
return nil
}
switch n := node.(type) {
case *parser.ModuleNode:
return sb.visitModule(n)
case *parser.InterfaceNode:
return sb.visitInterface(n)
case *parser.ClassNode:
return sb.visitClass(n)
case *parser.FunctionNode:
return sb.visitFunction(n)
case *parser.TaskNode:
return sb.visitTask(n)
case *parser.ParameterNode:
return sb.visitParameter(n)
case *parser.PortNode:
return sb.visitPort(n)
case *parser.VariableNode:
return sb.visitVariable(n)
case *parser.AlwaysNode:
return sb.visitAlways(n)
case *parser.InstanceNode:
return sb.visitInstance(n)
case *parser.GenerateNode:
return sb.visitGenerate(n)
case *parser.PropertyNode:
return sb.visitProperty(n)
case *parser.SequenceNode:
return sb.visitSequence(n)
case *parser.AssertionNode:
return sb.visitAssertion(n)
case *parser.CovergroupNode:
return sb.visitCovergroup(n)
case *parser.ConstraintNode:
return sb.visitConstraint(n)
// Constraint expression nodes
case *parser.SolveBeforeNode:
// Track solve dependencies for potential analysis
return sb.visitSolveBefore(n)
case *parser.IfConstraintNode:
// Visit condition and both branches
if err := sb.visitNode(n.Condition); err != nil {
return err
}
if err := sb.visitNode(n.ThenConstraint); err != nil {
return err
}
if n.ElseConstraint != nil {
return sb.visitNode(n.ElseConstraint)
}
return nil
case *parser.ForeachConstraintNode:
// Visit body
return sb.visitNode(n.Body)
default:
// For unknown node types, try to visit children
for _, child := range node.Children() {
if err := sb.visitNode(child); err != nil {
return err
}
}
}
return nil
}
// visitModule visits a module node
func (sb *SymbolBuilder) visitModule(node *parser.ModuleNode) error {
if node == nil {
return fmt.Errorf("module node is nil")
}
// Create module symbol
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeModule, "module", pos)
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Enter module scope
scope := sb.table.EnterScope(sb.generateID(), node.Name, SymbolTypeModule, pos)
scope.Position = pos
// Visit parameters
for _, param := range node.Parameters {
if err := sb.visitParameter(param); err != nil {
return err
}
}
// Visit ports
for _, port := range node.Ports {
if err := sb.visitPort(port); err != nil {
return err
}
}
// Visit module items
for _, item := range node.Items {
if err := sb.visitNode(item); err != nil {
return err
}
}
// Exit module scope
sb.table.ExitScope()
return nil
}
// visitInterface visits an interface node
func (sb *SymbolBuilder) visitInterface(node *parser.InterfaceNode) error {
if node == nil {
return fmt.Errorf("interface node is nil")
}
// Create interface symbol
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeInterface, "interface", pos)
// Add inheritance information
if node.Extends != "" {
symbol.SetAttribute("extends", node.Extends)
}
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Enter interface scope
scope := sb.table.EnterScope(sb.generateID(), node.Name, SymbolTypeInterface, pos)
scope.Position = pos
// Visit parameters
for _, param := range node.Parameters {
if err := sb.visitParameter(param); err != nil {
return err
}
}
// Visit ports
for _, port := range node.Ports {
if err := sb.visitPort(port); err != nil {
return err
}
}
// Visit interface items
for _, item := range node.Items {
if err := sb.visitNode(item); err != nil {
return err
}
}
// Exit interface scope
sb.table.ExitScope()
return nil
}
// visitClass visits a class node
func (sb *SymbolBuilder) visitClass(node *parser.ClassNode) error {
if node == nil {
return fmt.Errorf("class node is nil")
}
// Create class symbol
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeClass, "class", pos)
// Add inheritance information
if node.Extends != "" {
symbol.SetAttribute("extends", node.Extends)
}
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Enter class scope
scope := sb.table.EnterScope(sb.generateID(), node.Name, SymbolTypeClass, pos)
scope.Position = pos
// Visit parameters
for _, param := range node.Parameters {
if err := sb.visitParameter(param); err != nil {
return err
}
}
// Visit class items
for _, item := range node.Items {
if err := sb.visitNode(item); err != nil {
return err
}
}
// Exit class scope
sb.table.ExitScope()
return nil
}
// visitFunction visits a function node
func (sb *SymbolBuilder) visitFunction(node *parser.FunctionNode) error {
// Create function symbol
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeFunction, node.ReturnType, pos)
// Add attributes for signature help
if symbol.Attributes == nil {
symbol.Attributes = make(map[string]interface{})
}
symbol.Attributes["returnType"] = node.ReturnType
// Extract parameter information for signature help
var paramInfo []interface{}
for _, param := range node.Parameters {
paramMap := map[string]interface{}{
"name": param.Name,
"type": param.DataType,
}
if param.DefaultValue != "" {
paramMap["default"] = param.DefaultValue
}
paramInfo = append(paramInfo, paramMap)
}
symbol.Attributes["parameters"] = paramInfo
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Enter function scope
scope := sb.table.EnterScope(sb.generateID(), node.Name, SymbolTypeFunction, pos)
scope.Position = pos
// Visit parameters
for _, param := range node.Parameters {
if err := sb.visitParameter(param); err != nil {
return err
}
}
// Visit function body
for _, stmt := range node.Body {
if err := sb.visitNode(stmt); err != nil {
return err
}
}
// Exit function scope
sb.table.ExitScope()
return nil
}
// visitTask visits a task node
func (sb *SymbolBuilder) visitTask(node *parser.TaskNode) error {
// Create task symbol
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeTask, "task", pos)
// Add attributes for signature help
if symbol.Attributes == nil {
symbol.Attributes = make(map[string]interface{})
}
// Extract parameter information for signature help
var paramInfo []interface{}
for _, param := range node.Parameters {
paramMap := map[string]interface{}{
"name": param.Name,
"type": param.DataType,
}
if param.DefaultValue != "" {
paramMap["default"] = param.DefaultValue
}
paramInfo = append(paramInfo, paramMap)
}
symbol.Attributes["parameters"] = paramInfo
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Enter task scope
scope := sb.table.EnterScope(sb.generateID(), node.Name, SymbolTypeTask, pos)
scope.Position = pos
// Visit parameters
for _, param := range node.Parameters {
if err := sb.visitParameter(param); err != nil {
return err
}
}
// Visit task body
for _, stmt := range node.Body {
if err := sb.visitNode(stmt); err != nil {
return err
}
}
// Exit task scope
sb.table.ExitScope()
return nil
}
// visitParameter visits a parameter node
func (sb *SymbolBuilder) visitParameter(node *parser.ParameterNode) error {
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeParameter, node.DataType, pos)
// Add parameter attributes
symbol.SetAttribute("default_value", node.DefaultValue)
symbol.SetAttribute("is_local", node.IsLocal)
symbol.SetAttribute("is_type", node.IsType)
return sb.table.AddSymbol(symbol)
}
// visitPort visits a port node
func (sb *SymbolBuilder) visitPort(node *parser.PortNode) error {
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypePort, node.DataType, pos)
// Add port attributes
symbol.SetAttribute("direction", node.Direction)
symbol.SetAttribute("is_interface", node.IsInterface)
if node.Modport != "" {
symbol.SetAttribute("modport", node.Modport)
}
if node.Width != nil {
symbol.SetAttribute("width_high", node.Width.High)
symbol.SetAttribute("width_low", node.Width.Low)
}
return sb.table.AddSymbol(symbol)
}
// visitVariable visits a variable node
func (sb *SymbolBuilder) visitVariable(node *parser.VariableNode) error {
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeVariable, node.DataType, pos)
// Add variable attributes
if node.InitValue != "" {
symbol.SetAttribute("init_value", node.InitValue)
}
symbol.SetAttribute("is_rand", node.IsRand)
symbol.SetAttribute("is_randc", node.IsRandc)
if node.Width != nil {
symbol.SetAttribute("width_high", node.Width.High)
symbol.SetAttribute("width_low", node.Width.Low)
}
return sb.table.AddSymbol(symbol)
}
// visitAlways visits an always node
func (sb *SymbolBuilder) visitAlways(node *parser.AlwaysNode) error {
pos := sb.convertPosition(node.Range().Start)
// Generate a name for the always block
alwaysName := fmt.Sprintf("always_%d", sb.idCounter)
symbol := NewSymbol(sb.generateID(), alwaysName, SymbolTypeAlways, "always", pos)
// Add always block attributes
symbol.SetAttribute("always_type", node.AlwaysType)
symbol.SetAttribute("sensitivity", node.Sensitivity)
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Enter always scope
scope := sb.table.EnterScope(sb.generateID(), alwaysName, SymbolTypeAlways, pos)
scope.Position = pos
// Visit always body
for _, stmt := range node.Body {
if err := sb.visitNode(stmt); err != nil {
return err
}
}
// Exit always scope
sb.table.ExitScope()
return nil
}
// visitInstance visits an instance node
func (sb *SymbolBuilder) visitInstance(node *parser.InstanceNode) error {
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.InstanceName, SymbolTypeInstance, node.ModuleName, pos)
// Add instance attributes
symbol.SetAttribute("module_name", node.ModuleName)
symbol.SetAttribute("parameters", node.Parameters)
symbol.SetAttribute("connections", node.Connections)
return sb.table.AddSymbol(symbol)
}
// visitGenerate visits a generate node
func (sb *SymbolBuilder) visitGenerate(node *parser.GenerateNode) error {
pos := sb.convertPosition(node.Range().Start)
// Generate a name for the generate block
generateName := fmt.Sprintf("generate_%d", sb.idCounter)
symbol := NewSymbol(sb.generateID(), generateName, SymbolTypeGenerate, "generate", pos)
// Add generate attributes
symbol.SetAttribute("generate_type", node.GenerateType)
symbol.SetAttribute("condition", node.Condition)
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Enter generate scope
scope := sb.table.EnterScope(sb.generateID(), generateName, SymbolTypeGenerate, pos)
scope.Position = pos
// Visit generate body
for _, item := range node.Body {
if err := sb.visitNode(item); err != nil {
return err
}
}
// Exit generate scope
sb.table.ExitScope()
return nil
}
// visitProperty visits a property node
func (sb *SymbolBuilder) visitProperty(node *parser.PropertyNode) error {
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeProperty, "property", pos)
// Add property attributes
if node.ClockingEvent != nil {
symbol.SetAttribute("clocking_event", true)
}
if node.DisableIff != "" {
symbol.SetAttribute("disable_iff", node.DisableIff)
}
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Enter property scope
scope := sb.table.EnterScope(sb.generateID(), node.Name, SymbolTypeProperty, pos)
scope.Position = pos
// Visit parameters
for _, param := range node.Parameters {
if err := sb.visitNode(param); err != nil {
return err
}
}
// Visit property body
if node.Body != nil {
if err := sb.visitNode(node.Body); err != nil {
return err
}
}
// Exit property scope
sb.table.ExitScope()
return nil
}
// visitSequence visits a sequence node
func (sb *SymbolBuilder) visitSequence(node *parser.SequenceNode) error {
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeSequence, "sequence", pos)
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Enter sequence scope
scope := sb.table.EnterScope(sb.generateID(), node.Name, SymbolTypeSequence, pos)
scope.Position = pos
// Visit parameters
for _, param := range node.Parameters {
if err := sb.visitNode(param); err != nil {
return err
}
}
// Visit sequence body
if node.Body != nil {
if err := sb.visitNode(node.Body); err != nil {
return err
}
}
// Exit sequence scope
sb.table.ExitScope()
return nil
}
// visitAssertion visits an assertion node
func (sb *SymbolBuilder) visitAssertion(node *parser.AssertionNode) error {
pos := sb.convertPosition(node.Range().Start)
// Generate a name for the assertion if it doesn't have a label
assertionName := node.Label
if assertionName == "" {
assertionName = fmt.Sprintf("assertion_%d", sb.idCounter)
}
symbol := NewSymbol(sb.generateID(), assertionName, SymbolTypeAssertion, "assertion", pos)
// Add assertion attributes
symbol.SetAttribute("assertion_type", node.AssertionType)
symbol.SetAttribute("is_concurrent", node.IsConcurrent)
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Visit property expression
if node.Property != nil {
if err := sb.visitNode(node.Property); err != nil {
return err
}
}
// Visit action block
if node.ActionBlock != nil {
if err := sb.visitNode(node.ActionBlock); err != nil {
return err
}
}
return nil
}
// visitSolveBefore visits a solve-before constraint node
func (sb *SymbolBuilder) visitSolveBefore(node *parser.SolveBeforeNode) error {
// We could track solve dependencies for advanced analysis
// For now, just ensure the referenced variables are in scope
// Check solve list variables
for _, varName := range node.SolveList {
if _, found := sb.table.FindSymbol(varName); !found {
// Variable might be defined later or in parent scope
// Don't treat as error for now
}
}
// Check before list variables
for _, varName := range node.BeforeList {
if _, found := sb.table.FindSymbol(varName); !found {
// Variable might be defined later or in parent scope
// Don't treat as error for now
}
}
return nil
}
// visitCovergroup visits a covergroup node
func (sb *SymbolBuilder) visitCovergroup(node *parser.CovergroupNode) error {
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeCoverage, "covergroup", pos)
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Enter covergroup scope
scope := sb.table.EnterScope(sb.generateID(), node.Name, SymbolTypeCoverage, pos)
scope.Position = pos
// Visit parameters
for _, param := range node.Parameters {
if err := sb.visitNode(param); err != nil {
return err
}
}
// Visit clocking event
if node.Event != nil {
if err := sb.visitNode(node.Event); err != nil {
return err
}
}
// Visit coverpoints
for _, coverpoint := range node.Coverpoints {
if err := sb.visitNode(coverpoint); err != nil {
return err
}
}
// Visit crosses
for _, cross := range node.Crosses {
if err := sb.visitNode(cross); err != nil {
return err
}
}
// Exit covergroup scope
sb.table.ExitScope()
return nil
}
// visitConstraint visits a constraint node
func (sb *SymbolBuilder) visitConstraint(node *parser.ConstraintNode) error {
pos := sb.convertPosition(node.Range().Start)
symbol := NewSymbol(sb.generateID(), node.Name, SymbolTypeConstraint, "constraint", pos)
// Add constraint attributes
symbol.SetAttribute("is_static", node.IsStatic)
symbol.SetAttribute("is_soft", node.IsSoft)
if err := sb.table.AddSymbol(symbol); err != nil {
return err
}
// Visit constraint body
for _, expr := range node.Body {
if err := sb.visitNode(expr); err != nil {
return err
}
}
return nil
}
// convertPosition converts parser position to symbol position
func (sb *SymbolBuilder) convertPosition(pos parser.Position) Position {
return Position{
File: sb.document,
Line: pos.Line,
Column: pos.Column,
Offset: pos.Offset,
}
}
package symbols
import (
"fmt"
"sync"
"time"
)
// SymbolTable represents a symbol table for SystemVerilog code
type SymbolTable struct {
rootScope *Scope
currentScope *Scope
allSymbols map[string]*Symbol
allScopes map[string]*Scope
documents map[string]*DocumentSymbols
mutex sync.RWMutex
lastUpdated time.Time
}
// DocumentSymbols represents symbols for a single document
type DocumentSymbols struct {
URI string
Version int
RootScope *Scope
Symbols map[string]*Symbol
LastUpdated time.Time
}
// NewSymbolTable creates a new symbol table
func NewSymbolTable() *SymbolTable {
rootScope := NewScope("root", "root", SymbolTypeModule, nil)
return &SymbolTable{
rootScope: rootScope,
currentScope: rootScope,
allSymbols: make(map[string]*Symbol),
allScopes: make(map[string]*Scope),
documents: make(map[string]*DocumentSymbols),
lastUpdated: time.Now(),
}
}
// EnterScope enters a new scope
func (st *SymbolTable) EnterScope(id, name string, scopeType SymbolType, position Position) *Scope {
st.mutex.Lock()
defer st.mutex.Unlock()
scope := NewScope(id, name, scopeType, st.currentScope)
scope.Position = position
st.allScopes[id] = scope
st.currentScope = scope
st.lastUpdated = time.Now()
return scope
}
// ExitScope exits the current scope
func (st *SymbolTable) ExitScope() {
st.mutex.Lock()
defer st.mutex.Unlock()
if st.currentScope.Parent != nil {
st.currentScope = st.currentScope.Parent
}
st.lastUpdated = time.Now()
}
// GetCurrentScope returns the current scope
func (st *SymbolTable) GetCurrentScope() *Scope {
st.mutex.RLock()
defer st.mutex.RUnlock()
return st.currentScope
}
// GetRootScope returns the root scope
func (st *SymbolTable) GetRootScope() *Scope {
st.mutex.RLock()
defer st.mutex.RUnlock()
return st.rootScope
}
// AddSymbol adds a symbol to the current scope
func (st *SymbolTable) AddSymbol(symbol *Symbol) error {
st.mutex.Lock()
defer st.mutex.Unlock()
// Check for duplicate symbols in current scope
if existing, exists := st.currentScope.GetSymbol(symbol.Name); exists {
return fmt.Errorf("symbol '%s' already exists in current scope at %s:%d:%d",
symbol.Name, existing.Position.File, existing.Position.Line, existing.Position.Column)
}
st.currentScope.AddSymbol(symbol)
st.allSymbols[symbol.ID] = symbol
st.lastUpdated = time.Now()
return nil
}
// FindSymbol finds a symbol by name in the current scope or parent scopes
func (st *SymbolTable) FindSymbol(name string) (*Symbol, bool) {
st.mutex.RLock()
defer st.mutex.RUnlock()
return st.currentScope.FindSymbol(name)
}
// GetSymbolByID gets a symbol by its ID
func (st *SymbolTable) GetSymbolByID(id string) (*Symbol, bool) {
st.mutex.RLock()
defer st.mutex.RUnlock()
symbol, exists := st.allSymbols[id]
return symbol, exists
}
// GetScopeByID gets a scope by its ID
func (st *SymbolTable) GetScopeByID(id string) (*Scope, bool) {
st.mutex.RLock()
defer st.mutex.RUnlock()
scope, exists := st.allScopes[id]
return scope, exists
}
// GetAllSymbols returns all symbols in the symbol table
func (st *SymbolTable) GetAllSymbols() map[string]*Symbol {
st.mutex.RLock()
defer st.mutex.RUnlock()
symbols := make(map[string]*Symbol)
for id, symbol := range st.allSymbols {
symbols[id] = symbol
}
return symbols
}
// GetSymbolsByType returns all symbols of a specific type
func (st *SymbolTable) GetSymbolsByType(symbolType SymbolType) []*Symbol {
st.mutex.RLock()
defer st.mutex.RUnlock()
var symbols []*Symbol
for _, symbol := range st.allSymbols {
if symbol.Type == symbolType {
symbols = append(symbols, symbol)
}
}
return symbols
}
// GetSymbolsInScope returns all symbols in a specific scope
func (st *SymbolTable) GetSymbolsInScope(scopeID string) map[string]*Symbol {
st.mutex.RLock()
defer st.mutex.RUnlock()
if scope, exists := st.allScopes[scopeID]; exists {
return scope.GetAllSymbols()
}
return make(map[string]*Symbol)
}
// AddDocumentSymbols adds symbols for a document
func (st *SymbolTable) AddDocumentSymbols(uri string, version int, symbols *DocumentSymbols) {
st.mutex.Lock()
defer st.mutex.Unlock()
symbols.URI = uri
symbols.Version = version
symbols.LastUpdated = time.Now()
st.documents[uri] = symbols
st.lastUpdated = time.Now()
}
// GetDocumentSymbols gets symbols for a document
func (st *SymbolTable) GetDocumentSymbols(uri string) (*DocumentSymbols, bool) {
st.mutex.RLock()
defer st.mutex.RUnlock()
symbols, exists := st.documents[uri]
return symbols, exists
}
// RemoveDocumentSymbols removes symbols for a document
func (st *SymbolTable) RemoveDocumentSymbols(uri string) {
st.mutex.Lock()
defer st.mutex.Unlock()
delete(st.documents, uri)
st.lastUpdated = time.Now()
}
// GetDocumentURIs returns all document URIs
func (st *SymbolTable) GetDocumentURIs() []string {
st.mutex.RLock()
defer st.mutex.RUnlock()
uris := make([]string, 0, len(st.documents))
for uri := range st.documents {
uris = append(uris, uri)
}
return uris
}
// FindSymbolsMatching finds symbols matching a pattern
func (st *SymbolTable) FindSymbolsMatching(pattern string) []*Symbol {
st.mutex.RLock()
defer st.mutex.RUnlock()
var matches []*Symbol
for _, symbol := range st.allSymbols {
// Simple pattern matching - could be enhanced with regex
if len(pattern) == 0 || symbol.Name == pattern {
matches = append(matches, symbol)
}
}
return matches
}
// GetSymbolReferences returns all references to a symbol
func (st *SymbolTable) GetSymbolReferences(symbolID string) []Position {
st.mutex.RLock()
defer st.mutex.RUnlock()
if symbol, exists := st.allSymbols[symbolID]; exists {
return symbol.GetReferences()
}
return []Position{}
}
// AddSymbolReference adds a reference to a symbol
func (st *SymbolTable) AddSymbolReference(symbolID string, position Position) error {
st.mutex.Lock()
defer st.mutex.Unlock()
if symbol, exists := st.allSymbols[symbolID]; exists {
symbol.AddReference(position)
st.lastUpdated = time.Now()
return nil
}
return fmt.Errorf("symbol with ID '%s' not found", symbolID)
}
// Clear clears the symbol table
func (st *SymbolTable) Clear() {
st.mutex.Lock()
defer st.mutex.Unlock()
st.rootScope = NewScope("root", "root", SymbolTypeModule, nil)
st.currentScope = st.rootScope
st.allSymbols = make(map[string]*Symbol)
st.allScopes = make(map[string]*Scope)
st.documents = make(map[string]*DocumentSymbols)
st.lastUpdated = time.Now()
}
// GetStats returns statistics about the symbol table
func (st *SymbolTable) GetStats() map[string]interface{} {
st.mutex.RLock()
defer st.mutex.RUnlock()
stats := make(map[string]interface{})
stats["total_symbols"] = len(st.allSymbols)
stats["total_scopes"] = len(st.allScopes)
stats["total_documents"] = len(st.documents)
stats["last_updated"] = st.lastUpdated
// Count symbols by type
symbolCounts := make(map[string]int)
for _, symbol := range st.allSymbols {
symbolCounts[symbol.Type.String()]++
}
stats["symbol_counts"] = symbolCounts
return stats
}
// Validate validates the symbol table for consistency
func (st *SymbolTable) Validate() []string {
st.mutex.RLock()
defer st.mutex.RUnlock()
var errors []string
// Check that all symbols have valid scopes
for _, symbol := range st.allSymbols {
if symbol.Scope == nil {
errors = append(errors, fmt.Sprintf("symbol '%s' has no scope", symbol.Name))
}
}
// Check that all scopes are reachable from root
visited := make(map[string]bool)
st.validateScopeReachability(st.rootScope, visited)
for id := range st.allScopes {
if !visited[id] {
errors = append(errors, fmt.Sprintf("scope '%s' is not reachable from root", id))
}
}
return errors
}
// validateScopeReachability recursively validates scope reachability
func (st *SymbolTable) validateScopeReachability(scope *Scope, visited map[string]bool) {
if scope == nil {
return
}
visited[scope.ID] = true
for _, child := range scope.Children {
st.validateScopeReachability(child, visited)
}
}
package symbols
import (
"sync"
"time"
)
// SymbolType represents the type of a symbol
type SymbolType int
const (
SymbolTypeModule SymbolType = iota
SymbolTypeInterface
SymbolTypeClass
SymbolTypeFunction
SymbolTypeTask
SymbolTypeVariable
SymbolTypeParameter
SymbolTypePort
SymbolTypeInstance
SymbolTypeGenerate
SymbolTypeAlways
SymbolTypeInitial
SymbolTypeConstraint
SymbolTypeTypedef
SymbolTypeEnum
SymbolTypeStruct
SymbolTypeUnion
SymbolTypePackage
SymbolTypeProperty
SymbolTypeSequence
SymbolTypeAssertion
SymbolTypeCoverage
)
// Scope represents a lexical scope in SystemVerilog
type Scope struct {
ID string
Name string
ScopeType SymbolType
Parent *Scope
Children []*Scope
Symbols map[string]*Symbol
Position Position
mutex sync.RWMutex
LastUpdated time.Time
}
// Symbol represents a symbol in the symbol table
type Symbol struct {
ID string
Name string
Type SymbolType
DataType string
Scope *Scope
Position Position
References []Position
Value interface{}
Attributes map[string]interface{}
mutex sync.RWMutex
LastUpdated time.Time
}
// Position represents a position in source code
type Position struct {
File string
Line int
Column int
Offset int
}
// Range represents a range in source code
type Range struct {
Start Position
End Position
}
// NewScope creates a new scope
func NewScope(id, name string, scopeType SymbolType, parent *Scope) *Scope {
scope := &Scope{
ID: id,
Name: name,
ScopeType: scopeType,
Parent: parent,
Children: make([]*Scope, 0),
Symbols: make(map[string]*Symbol),
LastUpdated: time.Now(),
}
if parent != nil {
parent.AddChild(scope)
}
return scope
}
// AddChild adds a child scope
func (s *Scope) AddChild(child *Scope) {
s.mutex.Lock()
defer s.mutex.Unlock()
s.Children = append(s.Children, child)
child.Parent = s
s.LastUpdated = time.Now()
}
// AddSymbol adds a symbol to the scope
func (s *Scope) AddSymbol(symbol *Symbol) {
s.mutex.Lock()
defer s.mutex.Unlock()
symbol.Scope = s
s.Symbols[symbol.Name] = symbol
s.LastUpdated = time.Now()
}
// GetSymbol retrieves a symbol by name from this scope only
func (s *Scope) GetSymbol(name string) (*Symbol, bool) {
s.mutex.RLock()
defer s.mutex.RUnlock()
symbol, exists := s.Symbols[name]
return symbol, exists
}
// FindSymbol searches for a symbol by name, walking up the scope chain
func (s *Scope) FindSymbol(name string) (*Symbol, bool) {
current := s
for current != nil {
if symbol, exists := current.GetSymbol(name); exists {
return symbol, true
}
current = current.Parent
}
return nil, false
}
// GetAllSymbols returns all symbols in this scope
func (s *Scope) GetAllSymbols() map[string]*Symbol {
s.mutex.RLock()
defer s.mutex.RUnlock()
// Create a copy to avoid concurrent access issues
symbols := make(map[string]*Symbol)
for name, symbol := range s.Symbols {
symbols[name] = symbol
}
return symbols
}
// NewSymbol creates a new symbol
func NewSymbol(id, name string, symbolType SymbolType, dataType string, position Position) *Symbol {
return &Symbol{
ID: id,
Name: name,
Type: symbolType,
DataType: dataType,
Position: position,
References: make([]Position, 0),
Attributes: make(map[string]interface{}),
LastUpdated: time.Now(),
}
}
// AddReference adds a reference to this symbol
func (s *Symbol) AddReference(position Position) {
s.mutex.Lock()
defer s.mutex.Unlock()
s.References = append(s.References, position)
s.LastUpdated = time.Now()
}
// GetReferences returns all references to this symbol
func (s *Symbol) GetReferences() []Position {
s.mutex.RLock()
defer s.mutex.RUnlock()
// Create a copy to avoid concurrent access issues
refs := make([]Position, len(s.References))
copy(refs, s.References)
return refs
}
// SetAttribute sets an attribute on the symbol
func (s *Symbol) SetAttribute(key string, value interface{}) {
s.mutex.Lock()
defer s.mutex.Unlock()
s.Attributes[key] = value
s.LastUpdated = time.Now()
}
// GetAttribute gets an attribute from the symbol
func (s *Symbol) GetAttribute(key string) (interface{}, bool) {
s.mutex.RLock()
defer s.mutex.RUnlock()
value, exists := s.Attributes[key]
return value, exists
}
// String returns a string representation of the symbol type
func (st SymbolType) String() string {
switch st {
case SymbolTypeModule:
return "module"
case SymbolTypeInterface:
return "interface"
case SymbolTypeClass:
return "class"
case SymbolTypeFunction:
return "function"
case SymbolTypeTask:
return "task"
case SymbolTypeVariable:
return "variable"
case SymbolTypeParameter:
return "parameter"
case SymbolTypePort:
return "port"
case SymbolTypeInstance:
return "instance"
case SymbolTypeGenerate:
return "generate"
case SymbolTypeAlways:
return "always"
case SymbolTypeInitial:
return "initial"
case SymbolTypeConstraint:
return "constraint"
case SymbolTypeTypedef:
return "typedef"
case SymbolTypeEnum:
return "enum"
case SymbolTypeStruct:
return "struct"
case SymbolTypeUnion:
return "union"
case SymbolTypePackage:
return "package"
case SymbolTypeProperty:
return "property"
case SymbolTypeSequence:
return "sequence"
case SymbolTypeAssertion:
return "assertion"
case SymbolTypeCoverage:
return "coverage"
default:
return "unknown"
}
}
// IsContainer returns true if this symbol type can contain other symbols
func (st SymbolType) IsContainer() bool {
switch st {
case SymbolTypeModule, SymbolTypeInterface, SymbolTypeClass, SymbolTypeFunction,
SymbolTypeTask, SymbolTypeGenerate, SymbolTypeAlways, SymbolTypeInitial,
SymbolTypePackage, SymbolTypeProperty, SymbolTypeSequence:
return true
default:
return false
}
}
// IsCallable returns true if this symbol type can be called
func (st SymbolType) IsCallable() bool {
switch st {
case SymbolTypeFunction, SymbolTypeTask:
return true
default:
return false
}
}
// IsDeclaration returns true if this symbol type is a declaration
func (st SymbolType) IsDeclaration() bool {
switch st {
case SymbolTypeVariable, SymbolTypeParameter, SymbolTypePort, SymbolTypeTypedef:
return true
default:
return false
}
}