From ffc0436aedac14d798e4dede069412a950b137ba Mon Sep 17 00:00:00 2001 From: Arpad Ryszka Date: Tue, 26 Aug 2025 03:37:55 +0200 Subject: [PATCH] store generated files in git to allow go get --- .gitignore | 2 - docreflect.gen.go | 62 +++ iniparser.gen.go | 1050 +++++++++++++++++++++++++++++++++++++++++++++ tools/tools.go | 22 +- 4 files changed, 1130 insertions(+), 6 deletions(-) create mode 100644 docreflect.gen.go create mode 100644 iniparser.gen.go diff --git a/.gitignore b/.gitignore index 9bc182d..24e5b0a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1 @@ -iniparser.gen.go -docreflect.gen.go .build diff --git a/docreflect.gen.go b/docreflect.gen.go new file mode 100644 index 0000000..12f9364 --- /dev/null +++ b/docreflect.gen.go @@ -0,0 +1,62 @@ +package wand +import "code.squareroundforest.org/arpio/docreflect" +func init() { +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate", "Package generate provides a generator to generate go code from go docs that registers doc entries\nfor use with the docreflect package.\n") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.GenerateRegistry", "GenerateRegistry generates a Go code file to the output, including a package init function that\nwill register the documentation of the declarations specified by their gopath.\n\nThe gopath argument accepts any number of package, package level symbol, or struct field paths.\nIt is recommended to use package paths unless there are special circumstances.\n\nSome important gotchas to keep in mind: GenerateRegistry does not resolve type references like\ntype aliases, or type definitions based on named types, and it doesn't follow import paths.\n\nfunc(w, outputPackageName, gopath)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.cleanPaths", "\nfunc(gopath)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.collectGoDirs", "\nfunc(o)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.findFieldDocs", "\nfunc(str, fieldPath)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.findGoMod", "\nfunc(dir)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.fixDocPackage", "\nfunc(p)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.format", "\nfunc(w, pname, docs)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.funcDocs", "\nfunc(f)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.funcParams", "\nfunc(f)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.generate", "\nfunc(o, gopaths)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.getGoroot", "\nfunc()") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.importPackages", "\nfunc(o, godirs, paths)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.initOptions", "\nfunc()") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.merge", "\nfunc(m)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.methodDocs", "\nfunc(importPath, t)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.modCache", "\nfunc()") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.options", "") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.options.gomod", "")
+docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.options.goroot", "") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.options.modules", "") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.options.wd", "") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.packageDocs", "\nfunc(pkg)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.packageFuncDocs", "\nfunc(importPath, funcs)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.packagePaths", "\nfunc(p)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.parsePackages", "\nfunc(pkgs)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.parserInclude", "\nfunc(pkg)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.readGomod", "\nfunc(wd)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.set", "\nfunc(m, key, value)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.splitGopath", "\nfunc(p)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.structFieldDocs", "\nfunc(t, fieldPath)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.structFieldsDocs", "\nfunc(importPath, t)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.symbolDocs", "\nfunc(pkg, gopath)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.symbolPath", "\nfunc(packagePath, name)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.takeDocs", "\nfunc(pkgs, gopaths)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.takeFieldDocs", "\nfunc(packagePath, prefix, f)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.typeDocs", "\nfunc(importPath, types)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.typeMethodDocs", "\nfunc(t, name)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.unpack", "\nfunc(e)") +docreflect.Register("code.squareroundforest.org/arpio/docreflect/generate.valueDocs", "\nfunc(packagePath, v)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools", "") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.Docreflect", "\nfunc(out, packageName, gopaths)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.Exec", "\nfunc(o, function, args)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.ExecOptions", "") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.ExecOptions.CacheDir", "") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.ExecOptions.NoCache", "") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.ExecOptions.PurgeCache", "") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.Man", "\nfunc(out, commandDir)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.Markdown", "\nfunc(out, commandDir)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.copyGomod", "\nfunc(fn, dst, src)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.execCommandDir", "\nfunc(out, commandDir, env)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.execInternal", "\nfunc(command, args)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.execTransparent", "\nfunc(command, 
args)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.execc", "\nfunc(stdin, stdout, stderr, command, args, env)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.findGomod", "\nfunc(wd)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.functionHash", "\nfunc(function)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.printFile", "\nfunc(fn, pkg, expression)") +docreflect.Register("code.squareroundforest.org/arpio/wand/tools.splitFunction", "\nfunc(function)") +} \ No newline at end of file diff --git a/iniparser.gen.go b/iniparser.gen.go new file mode 100644 index 0000000..7023607 --- /dev/null +++ b/iniparser.gen.go @@ -0,0 +1,1050 @@ +/* +This file was generated with treerack (https://code.squareroundforest.org/arpio/treerack). + +The contents of this file fall under different licenses. + +The code between the "// head" and "// eo head" lines falls under the same +license as the source code of treerack (https://code.squareroundforest.org/arpio/treerack), +unless explicitly stated otherwise, if treerack's license allows changing the +license of this source code. + +Treerack's license: MIT https://opensource.org/licenses/MIT +where YEAR=2017, COPYRIGHT HOLDER=Arpad Ryszka (arpad.ryszka@gmail.com) + +The rest of the content of this file falls under the same license as the one +that the user of treerack generating this file declares for it, or it is +unlicensed. +*/ + +package wand + +// head +import ( + "bufio" + "errors" + "fmt" + "io" + "strconv" + "strings" + "unicode" +) + +type charParser struct { + name string + id int + not bool + chars []rune + ranges [][]rune +} +type charBuilder struct { + name string + id int +} + +func (p *charParser) nodeName() string { + return p.name +} +func (p *charParser) nodeID() int { + return p.id +} +func (p *charParser) commitType() commitType { + return alias +} +func matchChar(chars []rune, ranges [][]rune, not bool, char rune) bool { + for _, ci := range chars { + if ci == char { + return !not + } + } + for _, ri := range ranges { + if char >= ri[0] && char <= ri[1] { + return !not + } + } + return not +} +func (p *charParser) match(t rune) bool { + return matchChar(p.chars, p.ranges, p.not, t) +} +func (p *charParser) parse(c *context) { + if tok, ok := c.token(); !ok || !p.match(tok) { + if c.offset > c.failOffset { + c.failOffset = c.offset + c.failingParser = nil + } + c.fail(c.offset) + return + } + c.success(c.offset + 1) +} +func (b *charBuilder) nodeName() string { + return b.name +} +func (b *charBuilder) nodeID() int { + return b.id +} +func (b *charBuilder) build(c *context) ([]*node, bool) { + return nil, false +} + +type sequenceParser struct { + name string + id int + commit commitType + items []parser + ranges [][]int + generalizations []int + allChars bool +} +type sequenceBuilder struct { + name string + id int + commit commitType + items []builder + ranges [][]int + generalizations []int + allChars bool +} + +func (p *sequenceParser) nodeName() string { + return p.name +} +func (p *sequenceParser) nodeID() int { + return p.id +} +func (p *sequenceParser) commitType() commitType { + return p.commit +} +func (p *sequenceParser) parse(c *context) { + if !p.allChars { + if c.results.pending(c.offset, p.id) { + c.fail(c.offset) + return + } + c.results.markPending(c.offset, p.id) + } + var ( + currentCount int + parsed bool + ) + itemIndex := 0 + from := c.offset + to := c.offset + for itemIndex < len(p.items) { + p.items[itemIndex].parse(c) + if !c.matchLast { 
+ if currentCount >= p.ranges[itemIndex][0] { + itemIndex++ + currentCount = 0 + continue + } + c.offset = from + if c.fromResults(p) { + if to > c.failOffset { + c.failOffset = -1 + c.failingParser = nil + } + if !p.allChars { + c.results.unmarkPending(from, p.id) + } + return + } + if c.failingParser == nil && p.commit&userDefined != 0 && p.commit&whitespace == 0 && p.commit&failPass == 0 { + c.failingParser = p + } + c.fail(from) + if !p.allChars { + c.results.unmarkPending(from, p.id) + } + return + } + parsed = c.offset > to + if parsed { + currentCount++ + } + to = c.offset + if !parsed || p.ranges[itemIndex][1] > 0 && currentCount == p.ranges[itemIndex][1] { + itemIndex++ + currentCount = 0 + } + } + if p.commit&noKeyword != 0 && c.isKeyword(from, to) { + if c.failingParser == nil && p.commit&userDefined != 0 && p.commit&whitespace == 0 && p.commit&failPass == 0 { + c.failingParser = p + } + c.fail(from) + if !p.allChars { + c.results.unmarkPending(from, p.id) + } + return + } + for _, g := range p.generalizations { + if c.results.pending(from, g) { + c.results.setMatch(from, g, to) + } + } + if to > c.failOffset { + c.failOffset = -1 + c.failingParser = nil + } + c.results.setMatch(from, p.id, to) + c.success(to) + if !p.allChars { + c.results.unmarkPending(from, p.id) + } +} +func (b *sequenceBuilder) nodeName() string { + return b.name +} +func (b *sequenceBuilder) nodeID() int { + return b.id +} +func (b *sequenceBuilder) build(c *context) ([]*node, bool) { + to, ok := c.results.longestMatch(c.offset, b.id) + if !ok { + return nil, false + } + from := c.offset + parsed := to > from + if b.allChars { + c.offset = to + if b.commit&alias != 0 { + return nil, true + } + return []*node{{Name: b.name, From: from, To: to, tokens: c.tokens}}, true + } else if parsed { + c.results.dropMatchTo(c.offset, b.id, to) + for _, g := range b.generalizations { + c.results.dropMatchTo(c.offset, g, to) + } + } else { + if c.results.pending(c.offset, b.id) { + return nil, false + } + c.results.markPending(c.offset, b.id) + for _, g := range b.generalizations { + c.results.markPending(c.offset, g) + } + } + var ( + itemIndex int + currentCount int + nodes []*node + ) + for itemIndex < len(b.items) { + itemFrom := c.offset + n, ok := b.items[itemIndex].build(c) + if !ok { + itemIndex++ + currentCount = 0 + continue + } + if c.offset > itemFrom { + nodes = append(nodes, n...) + currentCount++ + if b.ranges[itemIndex][1] > 0 && currentCount == b.ranges[itemIndex][1] { + itemIndex++ + currentCount = 0 + } + continue + } + if currentCount < b.ranges[itemIndex][0] { + for i := 0; i < b.ranges[itemIndex][0]-currentCount; i++ { + nodes = append(nodes, n...) 
+ } + } + itemIndex++ + currentCount = 0 + } + if !parsed { + c.results.unmarkPending(from, b.id) + for _, g := range b.generalizations { + c.results.unmarkPending(from, g) + } + } + if b.commit&alias != 0 { + return nodes, true + } + return []*node{{Name: b.name, From: from, To: to, Nodes: nodes, tokens: c.tokens}}, true +} + +type choiceParser struct { + name string + id int + commit commitType + options []parser + generalizations []int +} +type choiceBuilder struct { + name string + id int + commit commitType + options []builder + generalizations []int +} + +func (p *choiceParser) nodeName() string { + return p.name +} +func (p *choiceParser) nodeID() int { + return p.id +} +func (p *choiceParser) commitType() commitType { + return p.commit +} +func (p *choiceParser) parse(c *context) { + if c.fromResults(p) { + return + } + if c.results.pending(c.offset, p.id) { + c.fail(c.offset) + return + } + c.results.markPending(c.offset, p.id) + var ( + match bool + optionIndex int + foundMatch bool + failingParser parser + ) + from := c.offset + to := c.offset + initialFailOffset := c.failOffset + initialFailingParser := c.failingParser + failOffset := initialFailOffset + for { + foundMatch = false + optionIndex = 0 + for optionIndex < len(p.options) { + p.options[optionIndex].parse(c) + optionIndex++ + if !c.matchLast { + if c.failOffset > failOffset { + failOffset = c.failOffset + failingParser = c.failingParser + } + } + if !c.matchLast || match && c.offset <= to { + c.offset = from + continue + } + match = true + foundMatch = true + to = c.offset + c.offset = from + c.results.setMatch(from, p.id, to) + } + if !foundMatch { + break + } + } + if match { + if p.commit&noKeyword != 0 && c.isKeyword(from, to) { + if c.failingParser == nil && p.commit&userDefined != 0 && p.commit&whitespace == 0 && p.commit&failPass == 0 { + c.failingParser = p + } + c.fail(from) + c.results.unmarkPending(from, p.id) + return + } + if failOffset > to { + c.failOffset = failOffset + c.failingParser = failingParser + } else if to > initialFailOffset { + c.failOffset = -1 + c.failingParser = nil + } else { + c.failOffset = initialFailOffset + c.failingParser = initialFailingParser + } + c.success(to) + c.results.unmarkPending(from, p.id) + return + } + if failOffset > initialFailOffset { + c.failOffset = failOffset + c.failingParser = failingParser + if c.failingParser == nil && p.commitType()&userDefined != 0 && p.commitType()&whitespace == 0 && p.commitType()&failPass == 0 { + c.failingParser = p + } + } + c.results.setNoMatch(from, p.id) + c.fail(from) + c.results.unmarkPending(from, p.id) +} +func (b *choiceBuilder) nodeName() string { + return b.name +} +func (b *choiceBuilder) nodeID() int { + return b.id +} +func (b *choiceBuilder) build(c *context) ([]*node, bool) { + to, ok := c.results.longestMatch(c.offset, b.id) + if !ok { + return nil, false + } + from := c.offset + parsed := to > from + if parsed { + c.results.dropMatchTo(c.offset, b.id, to) + for _, g := range b.generalizations { + c.results.dropMatchTo(c.offset, g, to) + } + } else { + if c.results.pending(c.offset, b.id) { + return nil, false + } + c.results.markPending(c.offset, b.id) + for _, g := range b.generalizations { + c.results.markPending(c.offset, g) + } + } + var option builder + for _, o := range b.options { + if c.results.hasMatchTo(c.offset, o.nodeID(), to) { + option = o + break + } + } + n, _ := option.build(c) + if !parsed { + c.results.unmarkPending(from, b.id) + for _, g := range b.generalizations { + c.results.unmarkPending(from, 
g) + } + } + if b.commit&alias != 0 { + return n, true + } + return []*node{{Name: b.name, From: from, To: to, Nodes: n, tokens: c.tokens}}, true +} + +type idSet struct{ ids []uint } + +func divModBits(id int) (int, int) { + return id / strconv.IntSize, id % strconv.IntSize +} +func (s *idSet) set(id int) { + d, m := divModBits(id) + if d >= len(s.ids) { + if d < cap(s.ids) { + s.ids = s.ids[:d+1] + } else { + s.ids = s.ids[:cap(s.ids)] + for i := cap(s.ids); i <= d; i++ { + s.ids = append(s.ids, 0) + } + } + } + s.ids[d] |= 1 << uint(m) +} +func (s *idSet) unset(id int) { + d, m := divModBits(id) + if d >= len(s.ids) { + return + } + s.ids[d] &^= 1 << uint(m) +} +func (s *idSet) has(id int) bool { + d, m := divModBits(id) + if d >= len(s.ids) { + return false + } + return s.ids[d]&(1< offset { + return ints + } + if cap(ints) > offset { + ints = ints[:offset+1] + return ints + } + ints = ints[:cap(ints)] + for i := len(ints); i <= offset; i++ { + ints = append(ints, nil) + } + return ints +} +func ensureOffsetIDs(ids []*idSet, offset int) []*idSet { + if len(ids) > offset { + return ids + } + if cap(ids) > offset { + ids = ids[:offset+1] + return ids + } + ids = ids[:cap(ids)] + for i := len(ids); i <= offset; i++ { + ids = append(ids, nil) + } + return ids +} +func (r *results) setMatch(offset, id, to int) { + r.match = ensureOffsetInts(r.match, offset) + for i := 0; i < len(r.match[offset]); i += 2 { + if r.match[offset][i] != id || r.match[offset][i+1] != to { + continue + } + return + } + r.match[offset] = append(r.match[offset], id, to) +} +func (r *results) setNoMatch(offset, id int) { + if len(r.match) > offset { + for i := 0; i < len(r.match[offset]); i += 2 { + if r.match[offset][i] != id { + continue + } + return + } + } + r.noMatch = ensureOffsetIDs(r.noMatch, offset) + if r.noMatch[offset] == nil { + r.noMatch[offset] = &idSet{} + } + r.noMatch[offset].set(id) +} +func (r *results) hasMatchTo(offset, id, to int) bool { + if len(r.match) <= offset { + return false + } + for i := 0; i < len(r.match[offset]); i += 2 { + if r.match[offset][i] != id { + continue + } + if r.match[offset][i+1] == to { + return true + } + } + return false +} +func (r *results) longestMatch(offset, id int) (int, bool) { + if len(r.match) <= offset { + return 0, false + } + var found bool + to := -1 + for i := 0; i < len(r.match[offset]); i += 2 { + if r.match[offset][i] != id { + continue + } + if r.match[offset][i+1] > to { + to = r.match[offset][i+1] + } + found = true + } + return to, found +} +func (r *results) longestResult(offset, id int) (int, bool, bool) { + if len(r.noMatch) > offset && r.noMatch[offset] != nil && r.noMatch[offset].has(id) { + return 0, false, true + } + to, ok := r.longestMatch(offset, id) + return to, ok, ok +} +func (r *results) dropMatchTo(offset, id, to int) { + for i := 0; i < len(r.match[offset]); i += 2 { + if r.match[offset][i] != id { + continue + } + if r.match[offset][i+1] == to { + r.match[offset][i] = -1 + return + } + } +} +func (r *results) resetPending() { + r.isPending = nil +} +func (r *results) pending(offset, id int) bool { + if len(r.isPending) <= id { + return false + } + for i := range r.isPending[id] { + if r.isPending[id][i] == offset { + return true + } + } + return false +} +func (r *results) markPending(offset, id int) { + r.isPending = ensureOffsetInts(r.isPending, id) + for i := range r.isPending[id] { + if r.isPending[id][i] == -1 { + r.isPending[id][i] = offset + return + } + } + r.isPending[id] = append(r.isPending[id], offset) +} +func (r 
*results) unmarkPending(offset, id int) { + for i := range r.isPending[id] { + if r.isPending[id][i] == offset { + r.isPending[id][i] = -1 + break + } + } +} + +type context struct { + reader io.RuneReader + keywords []parser + offset int + readOffset int + consumed int + offsetLimit int + failOffset int + failingParser parser + readErr error + eof bool + results *results + tokens []rune + matchLast bool +} + +func newContext(r io.RuneReader, keywords []parser) *context { + return &context{reader: r, keywords: keywords, results: &results{}, offsetLimit: -1, failOffset: -1} +} +func (c *context) read() bool { + if c.eof || c.readErr != nil { + return false + } + token, n, err := c.reader.ReadRune() + if err != nil { + if err == io.EOF { + if n == 0 { + c.eof = true + return false + } + } else { + c.readErr = err + return false + } + } + c.readOffset++ + if token == unicode.ReplacementChar { + c.readErr = errInvalidUnicodeCharacter + return false + } + c.tokens = append(c.tokens, token) + return true +} +func (c *context) token() (rune, bool) { + if c.offset == c.offsetLimit { + return 0, false + } + if c.offset == c.readOffset { + if !c.read() { + return 0, false + } + } + return c.tokens[c.offset], true +} +func (c *context) fromResults(p parser) bool { + to, m, ok := c.results.longestResult(c.offset, p.nodeID()) + if !ok { + return false + } + if m { + c.success(to) + } else { + c.fail(c.offset) + } + return true +} +func (c *context) isKeyword(from, to int) bool { + ol := c.offsetLimit + c.offsetLimit = to + defer func() { + c.offsetLimit = ol + }() + for _, kw := range c.keywords { + c.offset = from + kw.parse(c) + if c.matchLast && c.offset == to { + return true + } + } + return false +} +func (c *context) success(to int) { + c.offset = to + c.matchLast = true + if to > c.consumed { + c.consumed = to + } +} +func (c *context) fail(offset int) { + c.offset = offset + c.matchLast = false +} +func findLine(tokens []rune, offset int) (line, column int) { + tokens = tokens[:offset] + for i := range tokens { + column++ + if tokens[i] == '\n' { + column = 0 + line++ + } + } + return +} +func (c *context) parseError(p parser) error { + definition := p.nodeName() + flagIndex := strings.Index(definition, ":") + if flagIndex > 0 { + definition = definition[:flagIndex] + } + if c.failingParser == nil { + c.failOffset = c.consumed + } + line, col := findLine(c.tokens, c.failOffset) + return &parseError{Offset: c.failOffset, Line: line, Column: col, Definition: definition} +} +func (c *context) finalizeParse(root parser) error { + fp := c.failingParser + if fp == nil { + fp = root + } + to, match, found := c.results.longestResult(0, root.nodeID()) + if !found || !match || found && match && to < c.readOffset { + return c.parseError(fp) + } + c.read() + if c.eof { + return nil + } + if c.readErr != nil { + return c.readErr + } + return c.parseError(root) +} + +type node struct { + Name string + Nodes []*node + From, To int + tokens []rune +} + +func (n *node) Tokens() []rune { + return n.tokens +} +func (n *node) String() string { + return fmt.Sprintf("%s:%d:%d:%s", n.Name, n.From, n.To, n.Text()) +} +func (n *node) Text() string { + return string(n.Tokens()[n.From:n.To]) +} + +type commitType int + +const ( + none commitType = 0 + alias commitType = 1 << iota + whitespace + noWhitespace + keyword + noKeyword + failPass + root + userDefined +) + +type formatFlags int + +const ( + formatNone formatFlags = 0 + formatPretty formatFlags = 1 << iota + formatIncludeComments +) + +type parseError struct { + 
Input string + Offset int + Line int + Column int + Definition string +} +type parser interface { + nodeName() string + nodeID() int + commitType() commitType + parse(*context) +} +type builder interface { + nodeName() string + nodeID() int + build(*context) ([]*node, bool) +} + +var errInvalidUnicodeCharacter = errors.New("invalid unicode character") + +func (pe *parseError) Error() string { + return fmt.Sprintf("%s:%d:%d:parse failed, parsing: %s", pe.Input, pe.Line+1, pe.Column+1, pe.Definition) +} +func parseInput(r io.Reader, p parser, b builder, kw []parser) (*node, error) { + c := newContext(bufio.NewReader(r), kw) + p.parse(c) + if c.readErr != nil { + return nil, c.readErr + } + if err := c.finalizeParse(p); err != nil { + if perr, ok := err.(*parseError); ok { + perr.Input = "" + } + return nil, err + } + c.offset = 0 + c.results.resetPending() + n, _ := b.build(c) + return n[0], nil +} + +// eo head + +func parse(r io.Reader) (*node, error) { + + var p67 = sequenceParser{id: 67, commit: 128, ranges: [][]int{{0, -1}, {1, 1}, {0, -1}}} + var p65 = choiceParser{id: 65, commit: 2} + var p64 = sequenceParser{id: 64, commit: 262, name: "whitespace", allChars: true, ranges: [][]int{{1, 1}, {1, 1}}, generalizations: []int{65}} + var p1 = charParser{id: 1, chars: []rune{32, 8, 12, 13, 9, 11}} + p64.items = []parser{&p1} + p65.options = []parser{&p64} + var p66 = sequenceParser{id: 66, commit: 258, name: "doc:wsroot", ranges: [][]int{{0, 1}}} + var p63 = sequenceParser{id: 63, commit: 2, ranges: [][]int{{1, 1}, {0, -1}}} + var p61 = choiceParser{id: 61, commit: 2} + var p58 = sequenceParser{id: 58, commit: 256, name: "key-val", ranges: [][]int{{0, 1}, {0, -1}, {1, 1}, {0, -1}, {0, 1}, {0, -1}, {0, 1}}, generalizations: []int{61}} + var p54 = sequenceParser{id: 54, commit: 2, ranges: [][]int{{1, 1}, {0, -1}, {1, 1}}} + var p14 = sequenceParser{id: 14, commit: 256, name: "comment", ranges: [][]int{{1, 1}, {0, 1}}} + var p8 = sequenceParser{id: 8, commit: 258, name: "comment-line", ranges: [][]int{{1, 1}, {0, 1}}, generalizations: []int{61}} + var p3 = sequenceParser{id: 3, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p2 = charParser{id: 2, chars: []rune{35}} + p3.items = []parser{&p2} + var p7 = sequenceParser{id: 7, commit: 2, ranges: [][]int{{0, -1}, {1, 1}, {0, -1}}} + var p5 = sequenceParser{id: 5, commit: 2, allChars: true, ranges: [][]int{{1, 1}}} + var p4 = charParser{id: 4, not: true, chars: []rune{10}} + p5.items = []parser{&p4} + var p6 = sequenceParser{id: 6, commit: 2, ranges: [][]int{{0, -1}, {1, 1}}} + p6.items = []parser{&p65, &p5} + p7.items = []parser{&p65, &p5, &p6} + p8.items = []parser{&p3, &p7} + var p13 = sequenceParser{id: 13, commit: 2, ranges: [][]int{{0, -1}, {1, 1}, {0, -1}}} + var p11 = sequenceParser{id: 11, commit: 2, ranges: [][]int{{1, 1}, {0, -1}, {1, 1}}} + var p10 = sequenceParser{id: 10, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p9 = charParser{id: 9, chars: []rune{10}} + p10.items = []parser{&p9} + p11.items = []parser{&p10, &p65, &p8} + var p12 = sequenceParser{id: 12, commit: 2, ranges: [][]int{{0, -1}, {1, 1}}} + p12.items = []parser{&p65, &p11} + p13.items = []parser{&p65, &p11, &p12} + p14.items = []parser{&p8, &p13} + var p53 = sequenceParser{id: 53, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p52 = charParser{id: 52, chars: []rune{10}} + p53.items = []parser{&p52} + p54.items = []parser{&p14, &p65, &p53} + var p39 = choiceParser{id: 39, commit: 256, name: "key"} + var p38 = 
sequenceParser{id: 38, commit: 266, name: "word", ranges: [][]int{{1, 1}, {0, -1}, {1, 1}, {0, -1}}, generalizations: []int{39}} + var p29 = sequenceParser{id: 29, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p28 = charParser{id: 28, chars: []rune{95}, ranges: [][]rune{{97, 122}, {65, 90}}} + p29.items = []parser{&p28} + var p37 = choiceParser{id: 37, commit: 10} + var p31 = sequenceParser{id: 31, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}, generalizations: []int{37}} + var p30 = charParser{id: 30, chars: []rune{95, 45}, ranges: [][]rune{{97, 122}, {65, 90}, {48, 57}}} + p31.items = []parser{&p30} + var p36 = sequenceParser{id: 36, commit: 10, ranges: [][]int{{1, 1}, {1, 1}, {1, 1}, {1, 1}}, generalizations: []int{37}} + var p33 = sequenceParser{id: 33, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p32 = charParser{id: 32, chars: []rune{92}} + p33.items = []parser{&p32} + var p35 = sequenceParser{id: 35, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p34 = charParser{id: 34, not: true} + p35.items = []parser{&p34} + p36.items = []parser{&p33, &p35} + p37.options = []parser{&p31, &p36} + p38.items = []parser{&p29, &p37} + var p27 = sequenceParser{id: 27, commit: 266, name: "quoted", ranges: [][]int{{1, 1}, {0, -1}, {1, 1}, {1, 1}, {0, -1}, {1, 1}}, generalizations: []int{39, 51}} + var p16 = sequenceParser{id: 16, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p15 = charParser{id: 15, chars: []rune{34}} + p16.items = []parser{&p15} + var p24 = choiceParser{id: 24, commit: 10} + var p18 = sequenceParser{id: 18, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}, generalizations: []int{24}} + var p17 = charParser{id: 17, not: true, chars: []rune{92, 34}} + p18.items = []parser{&p17} + var p23 = sequenceParser{id: 23, commit: 10, ranges: [][]int{{1, 1}, {1, 1}, {1, 1}, {1, 1}}, generalizations: []int{24}} + var p20 = sequenceParser{id: 20, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p19 = charParser{id: 19, chars: []rune{92}} + p20.items = []parser{&p19} + var p22 = sequenceParser{id: 22, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p21 = charParser{id: 21, not: true} + p22.items = []parser{&p21} + p23.items = []parser{&p20, &p22} + p24.options = []parser{&p18, &p23} + var p26 = sequenceParser{id: 26, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p25 = charParser{id: 25, chars: []rune{34}} + p26.items = []parser{&p25} + p27.items = []parser{&p16, &p24, &p26} + p39.options = []parser{&p38, &p27} + var p57 = sequenceParser{id: 57, commit: 2, ranges: [][]int{{1, 1}, {0, -1}, {0, 1}}} + var p56 = sequenceParser{id: 56, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p55 = charParser{id: 55, chars: []rune{61}} + p56.items = []parser{&p55} + var p51 = choiceParser{id: 51, commit: 256, name: "value"} + var p50 = sequenceParser{id: 50, commit: 2, ranges: [][]int{{1, 1}, {0, -1}}, generalizations: []int{51}} + var p48 = sequenceParser{id: 48, commit: 266, name: "value-chars", ranges: [][]int{{1, -1}, {1, -1}}} + var p47 = choiceParser{id: 47, commit: 10} + var p41 = sequenceParser{id: 41, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}, generalizations: []int{47}} + var p40 = charParser{id: 40, not: true, chars: []rune{92, 34, 10, 61, 35, 32, 8, 12, 13, 9, 11}} + p41.items = []parser{&p40} + var p46 = sequenceParser{id: 46, commit: 10, ranges: [][]int{{1, 1}, {1, 1}, {1, 1}, {1, 1}}, generalizations: 
[]int{47}} + var p43 = sequenceParser{id: 43, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p42 = charParser{id: 42, chars: []rune{92}} + p43.items = []parser{&p42} + var p45 = sequenceParser{id: 45, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var p44 = charParser{id: 44, not: true} + p45.items = []parser{&p44} + p46.items = []parser{&p43, &p45} + p47.options = []parser{&p41, &p46} + p48.items = []parser{&p47} + var p49 = sequenceParser{id: 49, commit: 2, ranges: [][]int{{0, -1}, {1, 1}}} + p49.items = []parser{&p65, &p48} + p50.items = []parser{&p48, &p49} + p51.options = []parser{&p50, &p27} + p57.items = []parser{&p56, &p65, &p51} + p58.items = []parser{&p54, &p65, &p39, &p65, &p57, &p65, &p8} + var p60 = sequenceParser{id: 60, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}, generalizations: []int{61}} + var p59 = charParser{id: 59, chars: []rune{10}} + p60.items = []parser{&p59} + p61.options = []parser{&p58, &p8, &p60} + var p62 = sequenceParser{id: 62, commit: 2, ranges: [][]int{{0, -1}, {1, 1}}} + p62.items = []parser{&p65, &p61} + p63.items = []parser{&p61, &p62} + p66.items = []parser{&p63} + p67.items = []parser{&p65, &p66, &p65} + var b67 = sequenceBuilder{id: 67, commit: 128, name: "doc", ranges: [][]int{{0, -1}, {1, 1}, {0, -1}}} + var b65 = choiceBuilder{id: 65, commit: 2} + var b64 = sequenceBuilder{id: 64, commit: 262, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}, generalizations: []int{65}} + var b1 = charBuilder{} + b64.items = []builder{&b1} + b65.options = []builder{&b64} + var b66 = sequenceBuilder{id: 66, commit: 258, ranges: [][]int{{0, 1}}} + var b63 = sequenceBuilder{id: 63, commit: 2, ranges: [][]int{{1, 1}, {0, -1}}} + var b61 = choiceBuilder{id: 61, commit: 2} + var b58 = sequenceBuilder{id: 58, commit: 256, name: "key-val", ranges: [][]int{{0, 1}, {0, -1}, {1, 1}, {0, -1}, {0, 1}, {0, -1}, {0, 1}}, generalizations: []int{61}} + var b54 = sequenceBuilder{id: 54, commit: 2, ranges: [][]int{{1, 1}, {0, -1}, {1, 1}}} + var b14 = sequenceBuilder{id: 14, commit: 256, name: "comment", ranges: [][]int{{1, 1}, {0, 1}}} + var b8 = sequenceBuilder{id: 8, commit: 258, ranges: [][]int{{1, 1}, {0, 1}}, generalizations: []int{61}} + var b3 = sequenceBuilder{id: 3, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b2 = charBuilder{} + b3.items = []builder{&b2} + var b7 = sequenceBuilder{id: 7, commit: 2, ranges: [][]int{{0, -1}, {1, 1}, {0, -1}}} + var b5 = sequenceBuilder{id: 5, commit: 2, allChars: true, ranges: [][]int{{1, 1}}} + var b4 = charBuilder{} + b5.items = []builder{&b4} + var b6 = sequenceBuilder{id: 6, commit: 2, ranges: [][]int{{0, -1}, {1, 1}}} + b6.items = []builder{&b65, &b5} + b7.items = []builder{&b65, &b5, &b6} + b8.items = []builder{&b3, &b7} + var b13 = sequenceBuilder{id: 13, commit: 2, ranges: [][]int{{0, -1}, {1, 1}, {0, -1}}} + var b11 = sequenceBuilder{id: 11, commit: 2, ranges: [][]int{{1, 1}, {0, -1}, {1, 1}}} + var b10 = sequenceBuilder{id: 10, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b9 = charBuilder{} + b10.items = []builder{&b9} + b11.items = []builder{&b10, &b65, &b8} + var b12 = sequenceBuilder{id: 12, commit: 2, ranges: [][]int{{0, -1}, {1, 1}}} + b12.items = []builder{&b65, &b11} + b13.items = []builder{&b65, &b11, &b12} + b14.items = []builder{&b8, &b13} + var b53 = sequenceBuilder{id: 53, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b52 = charBuilder{} + b53.items = []builder{&b52} + b54.items = []builder{&b14, &b65, 
&b53} + var b39 = choiceBuilder{id: 39, commit: 256, name: "key"} + var b38 = sequenceBuilder{id: 38, commit: 266, ranges: [][]int{{1, 1}, {0, -1}, {1, 1}, {0, -1}}, generalizations: []int{39}} + var b29 = sequenceBuilder{id: 29, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b28 = charBuilder{} + b29.items = []builder{&b28} + var b37 = choiceBuilder{id: 37, commit: 10} + var b31 = sequenceBuilder{id: 31, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}, generalizations: []int{37}} + var b30 = charBuilder{} + b31.items = []builder{&b30} + var b36 = sequenceBuilder{id: 36, commit: 10, ranges: [][]int{{1, 1}, {1, 1}, {1, 1}, {1, 1}}, generalizations: []int{37}} + var b33 = sequenceBuilder{id: 33, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b32 = charBuilder{} + b33.items = []builder{&b32} + var b35 = sequenceBuilder{id: 35, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b34 = charBuilder{} + b35.items = []builder{&b34} + b36.items = []builder{&b33, &b35} + b37.options = []builder{&b31, &b36} + b38.items = []builder{&b29, &b37} + var b27 = sequenceBuilder{id: 27, commit: 266, ranges: [][]int{{1, 1}, {0, -1}, {1, 1}, {1, 1}, {0, -1}, {1, 1}}, generalizations: []int{39, 51}} + var b16 = sequenceBuilder{id: 16, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b15 = charBuilder{} + b16.items = []builder{&b15} + var b24 = choiceBuilder{id: 24, commit: 10} + var b18 = sequenceBuilder{id: 18, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}, generalizations: []int{24}} + var b17 = charBuilder{} + b18.items = []builder{&b17} + var b23 = sequenceBuilder{id: 23, commit: 10, ranges: [][]int{{1, 1}, {1, 1}, {1, 1}, {1, 1}}, generalizations: []int{24}} + var b20 = sequenceBuilder{id: 20, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b19 = charBuilder{} + b20.items = []builder{&b19} + var b22 = sequenceBuilder{id: 22, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b21 = charBuilder{} + b22.items = []builder{&b21} + b23.items = []builder{&b20, &b22} + b24.options = []builder{&b18, &b23} + var b26 = sequenceBuilder{id: 26, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b25 = charBuilder{} + b26.items = []builder{&b25} + b27.items = []builder{&b16, &b24, &b26} + b39.options = []builder{&b38, &b27} + var b57 = sequenceBuilder{id: 57, commit: 2, ranges: [][]int{{1, 1}, {0, -1}, {0, 1}}} + var b56 = sequenceBuilder{id: 56, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b55 = charBuilder{} + b56.items = []builder{&b55} + var b51 = choiceBuilder{id: 51, commit: 256, name: "value"} + var b50 = sequenceBuilder{id: 50, commit: 2, ranges: [][]int{{1, 1}, {0, -1}}, generalizations: []int{51}} + var b48 = sequenceBuilder{id: 48, commit: 266, ranges: [][]int{{1, -1}, {1, -1}}} + var b47 = choiceBuilder{id: 47, commit: 10} + var b41 = sequenceBuilder{id: 41, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}, generalizations: []int{47}} + var b40 = charBuilder{} + b41.items = []builder{&b40} + var b46 = sequenceBuilder{id: 46, commit: 10, ranges: [][]int{{1, 1}, {1, 1}, {1, 1}, {1, 1}}, generalizations: []int{47}} + var b43 = sequenceBuilder{id: 43, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b42 = charBuilder{} + b43.items = []builder{&b42} + var b45 = sequenceBuilder{id: 45, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}} + var b44 = charBuilder{} + b45.items = []builder{&b44} + b46.items = 
[]builder{&b43, &b45} + b47.options = []builder{&b41, &b46} + b48.items = []builder{&b47} + var b49 = sequenceBuilder{id: 49, commit: 2, ranges: [][]int{{0, -1}, {1, 1}}} + b49.items = []builder{&b65, &b48} + b50.items = []builder{&b48, &b49} + b51.options = []builder{&b50, &b27} + b57.items = []builder{&b56, &b65, &b51} + b58.items = []builder{&b54, &b65, &b39, &b65, &b57, &b65, &b8} + var b60 = sequenceBuilder{id: 60, commit: 10, allChars: true, ranges: [][]int{{1, 1}, {1, 1}}, generalizations: []int{61}} + var b59 = charBuilder{} + b60.items = []builder{&b59} + b61.options = []builder{&b58, &b8, &b60} + var b62 = sequenceBuilder{id: 62, commit: 2, ranges: [][]int{{0, -1}, {1, 1}}} + b62.items = []builder{&b65, &b61} + b63.items = []builder{&b61, &b62} + b66.items = []builder{&b63} + b67.items = []builder{&b65, &b66, &b65} + + var keywords = []parser{} + + return parseInput(r, &p67, &b67, keywords) +} diff --git a/tools/tools.go b/tools/tools.go index e7cc796..cf4647a 100644 --- a/tools/tools.go +++ b/tools/tools.go @@ -120,7 +120,7 @@ func findGomod(wd string) (string, bool) { } } -func copyFile(dst, src string) error { +func copyGomod(fn, dst, src string) error { srcf, err := os.Open(src) if err != nil { return fmt.Errorf("failed to open file: %s; %w", src, err) @@ -133,8 +133,22 @@ func copyFile(dst, src string) error { } defer dstf.Close() - if _, err := io.Copy(dstf, srcf); err != nil { - return fmt.Errorf("failed to copy file %s to %s; %w", src, dst, err) + b, err := io.ReadAll(srcf) + if err != nil { + return fmt.Errorf("failed to read go.mod file %s: %w", src, err) + } + + s := string(b) + ss := strings.Split(s, "\n") + for i := range ss { + if strings.HasPrefix(ss[i], "module ") { + ss[i] = fmt.Sprintf("module %s", fn) + break + } + } + + if _, err := dstf.Write([]byte(strings.Join(ss, "\n"))); err != nil { + return fmt.Errorf("failed to write go.mod file %s: %w", dst, err) } return nil @@ -219,7 +233,7 @@ func Exec(o ExecOptions, function string, args ...string) error { defer os.Chdir(wd) gomodPath, hasGomod := findGomod(wd) if hasGomod { - if err := copyFile(path.Join(functionDir, "go.mod"), gomodPath); err != nil { + if err := copyGomod(expression, path.Join(functionDir, "go.mod"), gomodPath); err != nil { return err } } else {
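
For reference, a minimal standalone sketch of the go.mod handling that the new copyGomod performs: it replaces the module directive of the copied go.mod with the function's module path and leaves the rest of the file untouched. The helper name rewriteModulePath and the module paths used below are illustrative only and are not part of the patch.

package main

import (
	"fmt"
	"strings"
)

// rewriteModulePath mirrors the logic added in copyGomod: split the go.mod
// contents into lines, replace the first "module ..." directive with the
// given module path, and rejoin the lines otherwise unchanged.
func rewriteModulePath(gomod, modulePath string) string {
	lines := strings.Split(gomod, "\n")
	for i := range lines {
		if strings.HasPrefix(lines[i], "module ") {
			lines[i] = fmt.Sprintf("module %s", modulePath)
			break
		}
	}
	return strings.Join(lines, "\n")
}

func main() {
	// Hypothetical go.mod contents and target module path, for illustration only.
	src := "module example.com/original\n\ngo 1.22\n"
	fmt.Print(rewriteModulePath(src, "example.com/generated/function"))
}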