Skip to content

Commit

Permalink
More Bugfixes and changed a lot of grammar to have less ambiguous par…
Browse files Browse the repository at this point in the history
…sing behavior in edge cases. As a result, Contexts, Data and Workflows now require { } brackets. This also adds support for more BPMN-style notations.
  • Loading branch information
torbenschinke committed Jun 30, 2023
1 parent 5cd04c5 commit 8c8d736
Show file tree
Hide file tree
Showing 21 changed files with 675 additions and 440 deletions.
9 changes: 5 additions & 4 deletions cmd/dddw/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,16 +48,17 @@ func main() {
return nil
})

parse := editor.Parser(func(text string) (*parser.Doc, error) {
doc, err := parser.ParseText("???", text)
parse := editor.Parser(func(text string) (*parser.Workspace, error) {
fname := "???"
doc, err := parser.ParseText(fname, text)
if err != nil {
return nil, fmt.Errorf("cannot parse model: %w", err)
}

return doc, nil
return &parser.Workspace{Documents: map[string]*parser.Doc{fname: doc}}, nil
})

linter := editor.Linter(func(doc *parser.Doc) []linter.Hint {
linter := editor.Linter(func(doc *parser.Workspace) []linter.Hint {
return linter.Lint(doc)
})

Expand Down
56 changes: 20 additions & 36 deletions lsp/server.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package lsp

import (
"encoding/json"
"fmt"
"github.com/alecthomas/participle/v2"
"github.com/worldiety/dddl/linter"
Expand Down Expand Up @@ -67,7 +68,8 @@ func (s *Server) reloadFiles() {

// Handle a client's request to initialize and respond with our capabilities.
func (s *Server) Initialize(params *protocol.InitializeParams) protocol.InitializeResult {
log.Printf("%+v", params)
buf, _ := json.Marshal(params)
log.Printf("%+v", string(buf))
s.rootPath = params.RootPath
s.reloadFiles()

Expand Down Expand Up @@ -229,13 +231,13 @@ func (s *Server) FullSemanticTokens(params *protocol.SemanticTokensParams) proto

func (s *Server) AsciiDoc(filename protocol.DocumentURI) string {
var out strings.Builder
doc, err := s.parseSuperDoc()
doc, err := s.parseWorkspace()
if doc == nil {
return err.Error()
}

out.WriteString("= Implement me\n\n")
for _, context := range doc.Contexts {
for _, context := range doc.Contexts() {
out.WriteString("== ")
out.WriteString(context.Name.Value)
out.WriteString("\n")
Expand Down Expand Up @@ -268,10 +270,16 @@ func (s *Server) sendPreviewHtml() {
}

// sendSemanticTokenRefresh notifies the client that it should re-request
// semantic tokens and code lenses for the workspace.
func (s *Server) sendSemanticTokenRefresh() {
	err := SendNotification("workspace/semanticTokens/refresh", nil)
	if err != nil {
		log.Printf("cannot send semanticTokens/refresh: %v", err)
	}

	// BUGFIX: this branch previously logged "sendSemanticTokenRefresh"
	// even though it is the codeLens refresh that failed (copy-paste error).
	err = SendNotification("workspace/codeLens/refresh", nil)
	if err != nil {
		log.Printf("cannot send codeLens/refresh: %v", err)
	}
}

// sendDiagnostics sends any parser errors.
Expand Down Expand Up @@ -317,9 +325,9 @@ func (s *Server) sendDiagnostics() {

if len(diagnostics) == 0 {
// we have no errors, so its worth to lint the entire thing
doc, err := s.parseSuperDoc()
doc, err := s.parseWorkspace()
if err != nil {
log.Println("unexpected superdoc parser error", err)
log.Println("unexpected workspace parser error", err)
}

if doc != nil {
Expand Down Expand Up @@ -373,37 +381,13 @@ type PreviewHtmlParams struct {
TailwindUri protocol.DocumentURI
}

func (s *Server) parseSuperDoc() (*parser.Doc, error) {
var superErr error
superDoc := &parser.Doc{}
func (s *Server) parseWorkspace() (*parser.Workspace, error) {
tmp := map[string]string{}
for _, file := range s.files {
doc, err := parser.ParseText(string(file.Uri), file.Content)
if err != nil {
if superErr == nil {
superErr = err
} else {
superErr = fmt.Errorf("also occurred: %w and %w", superErr, err)
}
continue
}

for _, context := range doc.Contexts {
ctx := superDoc.ContextByName(context.Name.Value)
if ctx == nil {
superDoc.Contexts = append(superDoc.Contexts, context)
} else {
for _, element := range context.Elements {
ctx.Elements = append(ctx.Elements, element) // just append it
}

if ctx.Definition == nil {
ctx.Definition = context.Definition // maybe non-nil, TODO double definition is a lint-error
}
}
}
tmp[string(file.Uri)] = file.Content
}

return superDoc, superErr
return parser.ParseWorkspaceText(tmp)
}

func (s *Server) RenderPreviewHtml(params PreviewHtmlParams) string {
Expand All @@ -413,7 +397,7 @@ func (s *Server) RenderPreviewHtml(params PreviewHtmlParams) string {
var model editor.EditorPreview
model.VSCode.ScriptUris = append(model.VSCode.ScriptUris, string(s.lastPreviewParams.TailwindUri))

doc, err := s.parseSuperDoc()
doc, err := s.parseWorkspace()
if doc == nil {
return err.Error()
}
Expand All @@ -422,7 +406,7 @@ func (s *Server) RenderPreviewHtml(params PreviewHtmlParams) string {
model.Error = err.Error()
}

linter := editor.Linter(func(doc *parser.Doc) []linter.Hint {
linter := editor.Linter(func(doc *parser.Workspace) []linter.Hint {
return linter.Lint(doc)
})

Expand Down
20 changes: 18 additions & 2 deletions lsp/tokens.go
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,7 @@ func getTokenType(node parser.Node) int {
*parser.KeywordContext,
*parser.KeywordActor,
*parser.KeywordEvent,
*parser.KeywordEventSent,
*parser.KeywordData,
*parser.KeywordActivity,
*parser.KeywordIf,
Expand All @@ -100,11 +101,17 @@ func getTokenType(node parser.Node) int {
*parser.KeywordDecision,
*parser.KeywordReturn,
*parser.KeywordReturnError,
*parser.KeywordWhile,
*parser.KeywordView,
*parser.KeywordInput,
*parser.KeywordOutput,
*parser.KeywordWorkflow:
return TokenKeyword

case *parser.Literal, *parser.Definition:
return TokenString
case *parser.ToDoText:
return TokenComment
default:
return TokenComment
}
Expand All @@ -115,6 +122,7 @@ func isSemanticToken(n parser.Node) bool {
case *parser.KeywordTodo,
*parser.KeywordActor,
*parser.KeywordEvent,
*parser.KeywordEventSent,
*parser.KeywordData,
*parser.KeywordContext,
*parser.KeywordActivity,
Expand All @@ -124,9 +132,13 @@ func isSemanticToken(n parser.Node) bool {
*parser.KeywordDecision,
*parser.KeywordReturn,
*parser.KeywordReturnError,
*parser.KeywordWhile,
*parser.KeywordView,
*parser.KeywordInput,
*parser.KeywordOutput,
*parser.KeywordWorkflow:
return true
case *parser.Ident, *parser.Literal, *parser.Definition:
case *parser.Ident, *parser.Literal, *parser.Definition, *parser.ToDoText:
return true
}

Expand All @@ -146,10 +158,12 @@ func IntoTokens(doc *parser.Doc) VSCTokens {
start := n.Position()
end := n.EndPosition()
if start == end {
//log.Printf("token %T has invalid start/end: %+v->%+v\n", n, start, end)
log.Printf("token %T has invalid start/end: %+v->%+v\n", n, start, end)
return nil // the token has not a useful token info attached
}

log.Printf("%T->%d:%d bis %d:%d\n", n, n.Position().Line, n.Position().Column, n.EndPosition().Line, n.EndPosition().Column)

if start.Line == end.Line {
tokens = append(tokens, VSCToken{
Node: n,
Expand Down Expand Up @@ -219,5 +233,7 @@ func IntoTokens(doc *parser.Doc) VSCTokens {
log.Println(err)
}

log.Println(tokens)

return tokens
}
13 changes: 13 additions & 0 deletions parser/ast.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,19 @@ func (n *node) EndPosition() lexer.Position {
return n.EndPos
}

// relocateEndPos recomputes the node's end position from the raw token
// slice. Participle's own EndPos frequently overshoots because it absorbs
// all trailing whitespace up to the next token, so the end is derived from
// the node's start position plus the width of the final token instead.
func (n *node) relocateEndPos(tokens []lexer.Token) lexer.Position {
	last := len(tokens) - 1
	if last < 0 {
		// No tokens to measure; fall back to the parser-reported end.
		return n.EndPos
	}

	pos := n.Position()
	pos.Column += len(tokens[last].Value)
	return pos
}

// Children returns this node's child nodes. The embedded base node has
// none; concrete AST types override this where they carry sub-nodes.
func (n *node) Children() []Node {
	return nil
}
Expand Down
103 changes: 97 additions & 6 deletions parser/common.go
Original file line number Diff line number Diff line change
@@ -1,15 +1,32 @@
package parser

import (
"github.com/alecthomas/participle/v2/lexer"
"strings"
)

// Literal refers to the rules of quoted Text by the Lexer. Tokens keeps the
// raw lexer tokens so the end position can be corrected.
//
// NOTE(review): the old single-field form of this struct was left
// interleaved here by the diff rendering; this is the current declaration.
type Literal struct {
	node
	Tokens []lexer.Token
	Value  string `@Text`
}

// EndPosition reports the literal's end, corrected for participle's
// trailing-whitespace bug and for the surrounding quote characters.
func (n *Literal) EndPosition() lexer.Position {
	pos := n.relocateEndPos(n.Tokens)
	pos.Column += 2 // fix leading and appended "
	return pos
}

// Ident refers to the rules of an Identifier used by the Lexer. Tokens
// keeps the raw lexer tokens so the end position can be corrected.
//
// NOTE(review): the old single-field form of this struct was left
// interleaved here by the diff rendering; this is the current declaration.
type Ident struct {
	node
	Tokens []lexer.Token
	Value  string `@Name`
}

// EndPosition reports the identifier's end, corrected for participle's
// trailing-whitespace bug via the raw token slice.
func (n *Ident) EndPosition() lexer.Position {
	return n.relocateEndPos(n.Tokens)
}

func (n *Ident) IsUniverse() bool {
Expand All @@ -27,7 +44,39 @@ func (n *Ident) IsUniverse() bool {

// Definition is a quoted free-text description attached to a declaration.
// Tokens keeps the raw lexer tokens so the end position can be corrected.
//
// NOTE(review): the old single-field form of this struct was left
// interleaved here by the diff rendering; this is the current declaration.
type Definition struct {
	node
	Tokens []lexer.Token
	Text   string `@Text`
}

// EndPosition reports the definition's end, corrected for participle's
// trailing-whitespace bug and for the surrounding quote characters.
func (n *Definition) EndPosition() lexer.Position {
	pos := n.relocateEndPos(n.Tokens)
	pos.Column += 2 // fix leading and appended "
	return pos
}

// Empty reports whether the definition carries no usable text. A nil
// receiver, whitespace-only text, and the "???" placeholder all count
// as empty.
func (n *Definition) Empty() bool {
	if n == nil {
		return true
	}

	switch strings.TrimSpace(n.Text) {
	case "", "???":
		return true
	default:
		return false
	}
}

// NeedsRevise reports whether the definition has real text that still
// contains the "???" placeholder and therefore should be reworked.
// An empty definition never needs revising.
func (n *Definition) NeedsRevise() bool {
	return !n.Empty() && strings.Contains(n.Text, "???")
}

type IdentOrLiteral struct {
Expand All @@ -46,8 +95,50 @@ func (n *IdentOrLiteral) Value() string {
}

func (n *IdentOrLiteral) Children() []Node {
if n.Name != nil {
return []Node{n.Name}
return sliceOf(n.Name, n.Literal)
}

// ToDo represents a todo annotation in the grammar: the todo keyword,
// a ":" separator, and the quoted todo text.
type ToDo struct {
	node
	KeywordTodo *KeywordTodo `@@ ":"`
	Text        *ToDoText    `@@`
}

// Children returns the keyword and text child nodes.
func (n *ToDo) Children() []Node {
	return sliceOf(n.KeywordTodo, n.Text)
}

// offsetPosText advances pos over the given text: each newline increments
// the line counter, and the column ends up just past the last line's
// characters. Single-line text extends the current column; multi-line text
// resets the column to the length of the final line.
//
// BUGFIX(review): a stray diff line ("return []Node{n.Literal}") from the
// removed IdentOrLiteral.Children body had been spliced into this function,
// making everything after the length guard unreachable; it is removed here.
func offsetPosText(pos lexer.Position, text string) lexer.Position {
	if len(text) == 0 {
		return pos
	}

	lines := strings.Split(text, "\n")
	var lastLineLen int
	if len(lines) == 1 {
		lastLineLen = pos.Column + len(lines[0])
	} else {
		lastLineLen = len(lines[len(lines)-1])
	}

	pos.Line += len(lines) - 1
	pos.Column = lastLineLen

	return pos
}

// ToDoText is the quoted text payload of a ToDo annotation. Tokens keeps
// the raw lexer tokens so the end position can be corrected.
type ToDoText struct {
	node
	Tokens []lexer.Token
	Text   string `@Text`
}

// Children returns nil; the text node is a leaf.
func (n *ToDoText) Children() []Node {
	return nil
}

// EndPosition reports the text's end, corrected for participle's
// trailing-whitespace bug and for the surrounding quote characters.
func (n *ToDoText) EndPosition() lexer.Position {
	pos := n.relocateEndPos(n.Tokens)
	pos.Column += 2 // fix leading and appended "
	return pos
}
Loading

0 comments on commit 8c8d736

Please sign in to comment.