Includes mk now.

This commit is contained in:
Andrey Parhomenko 2023-01-27 22:05:46 +05:00
parent 82ccc253d2
commit 708052ec0a
9 changed files with 2340 additions and 0 deletions

6
readme
View file

@ -11,3 +11,9 @@ Inspired by Plan9, Suckless and cat-v software.
Not Posix compatible since it's implemented thousand times. Not Posix compatible since it's implemented thousand times.
Lack of a few features is a feature too. Lack of a few features is a feature too.
Since Golang is so good at static binaries it makes sense to
put many programs into one, so it now includes ports of
various suckless-style software, including:
mk

View file

@ -26,6 +26,7 @@ import(
"github.com/surdeus/goblin/src/tool/in" "github.com/surdeus/goblin/src/tool/in"
"github.com/surdeus/goblin/src/tool/useprog" "github.com/surdeus/goblin/src/tool/useprog"
"github.com/surdeus/goblin/src/tool/path" "github.com/surdeus/goblin/src/tool/path"
"github.com/surdeus/goblin/src/tool/mk"
) )
func main() { func main() {
@ -54,6 +55,7 @@ func main() {
"in" : mtool.Tool{in.Run, "filter strings from stdin that aren not in arguments"}, "in" : mtool.Tool{in.Run, "filter strings from stdin that aren not in arguments"},
"useprog" : mtool.Tool{useprog.Run, "print the name of the first existing program in arg list"}, "useprog" : mtool.Tool{useprog.Run, "print the name of the first existing program in arg list"},
"path" : mtool.Tool{path.Run, "print cross platform path based on cmd arguments"}, "path" : mtool.Tool{path.Run, "print cross platform path based on cmd arguments"},
"mk" : mtool.Tool{mk.Run, "file dependency system"},
} }
mtool.Main("goblin", tools) mtool.Main("goblin", tools)

328
src/tool/mk/expand.go Normal file
View file

@ -0,0 +1,328 @@
// String substitution and expansion.
package mk
import (
"regexp"
"strings"
"unicode/utf8"
"os"
)
// Expand a word. This includes substituting variables and handling quotes.
//
// vars maps variable names to their (possibly multi-valued) values.
// expandBackticks controls whether `...` command substitution is performed.
// Returns the word split into one or more parts: a multi-valued variable
// splits the result into multiple words.
func expand(input string, vars map[string][]string, expandBackticks bool) []string {
	parts := make([]string, 0)
	expanded := ""
	var i, j int
	for i = 0; i < len(input); {
		// Find the next character that needs special handling.
		j = strings.IndexAny(input[i:], "\"'`$\\")
		if j < 0 {
			expanded += input[i:]
			break
		}
		j += i
		expanded += input[i:j]
		c, w := utf8.DecodeRuneInString(input[j:])
		i = j + w

		var off int
		var out string
		switch c {
		case '\\':
			out, off = expandEscape(input[i:])
			expanded += out

		case '"':
			out, off = expandDoubleQuoted(input[i:], vars, expandBackticks)
			expanded += out

		case '\'':
			out, off = expandSingleQuoted(input[i:])
			expanded += out

		case '`':
			if expandBackticks {
				var outparts []string
				outparts, off = expandBackQuoted(input[i:], vars)
				if len(outparts) > 0 {
					// Glue the first part onto pending text; the last part
					// stays pending so adjacent text attaches to it.
					outparts[0] = expanded + outparts[0]
					expanded = outparts[len(outparts)-1]
					parts = append(parts, outparts[:len(outparts)-1]...)
				}
			} else {
				// Backticks disabled: keep the text verbatim.
				// NOTE(review): this copies from the start of `input`
				// rather than from the backtick position — verify intent.
				out = input
				off = len(input)
				expanded += out
			}

		case '$':
			// Variable expansion; may produce several words.
			var outparts []string
			outparts, off = expandSigil(input[i:], vars)
			if len(outparts) > 0 {
				firstpart := expanded + outparts[0]
				if len(outparts) > 1 {
					parts = append(parts, firstpart)
					if len(outparts) > 2 {
						parts = append(parts, outparts[1:len(outparts)-1]...)
					}
					expanded = outparts[len(outparts)-1]
				} else {
					expanded = firstpart
				}
			}
		}

		i += off
	}

	if len(expanded) > 0 {
		parts = append(parts, expanded)
	}

	return parts
}
// Expand following a '\'.
//
// An escaped space or tab becomes the bare character; anything else keeps
// its backslash. Returns the replacement text and the bytes consumed.
func expandEscape(input string) (string, int) {
	r, n := utf8.DecodeRuneInString(input)
	switch r {
	case ' ', '\t':
		return string(r), n
	}
	return "\\" + string(r), n
}
// Expand a double quoted string starting after a '"'.
//
// The quoted body is itself expanded (variables, escapes, etc.) and joined
// with spaces into a single word. Returns the expanded text and the number
// of input bytes consumed, including the closing quote. If no closing quote
// exists, the input is returned unchanged.
//
// Fixes in this revision: the scan previously assigned the IndexAny result
// (relative to input[j:]) directly to the absolute position j, included the
// closing quote in the expanded body, and returned an offset one rune past
// the quote.
func expandDoubleQuoted(input string, vars map[string][]string, expandBackticks bool) (string, int) {
	// find the first non-escaped "
	j := 0
	for j < len(input) {
		off := strings.IndexAny(input[j:], "\"\\")
		if off < 0 {
			break
		}
		j += off

		c, w := utf8.DecodeRuneInString(input[j:])
		if c == '"' {
			// input[:j] is the quoted body; j+w also consumes the quote.
			return strings.Join(expand(input[:j], vars, expandBackticks), " "), j + w
		}

		// c == '\\': skip the backslash and the rune it escapes.
		j += w
		if j >= len(input) {
			break
		}
		_, w = utf8.DecodeRuneInString(input[j:])
		j += w
	}

	return input, len(input)
}
// Expand a single quoted string starting after a '\''.
//
// No substitution happens inside single quotes: everything up to the next
// quote is returned verbatim, along with the bytes consumed (including the
// closing quote). An unterminated quote yields the whole input.
func expandSingleQuoted(input string) (string, int) {
	if idx := strings.IndexByte(input, '\''); idx >= 0 {
		return input[:idx], idx + 1
	}
	return input, len(input)
}
// Expand something starting with at '$'.
//
// input begins with the character immediately after the '$'. Returns the
// resulting words (a multi-valued variable yields several) and the number
// of bytes of input consumed.
func expandSigil(input string, vars map[string][]string) ([]string, int) {
	c, w := utf8.DecodeRuneInString(input)
	var offset int
	var varname string

	// escaping of "$" with "$$"
	if c == '$' {
		// NOTE(review): returning offset 2 consumes one byte beyond the
		// second '$' (w would be 1 here) — looks off by one; verify.
		return []string{"$"}, 2

		// match bracketed expansions: ${foo}, or ${foo:a%b=c%d}
	} else if c == '{' {
		var namelist_pattern = regexp.MustCompile(
			`^\s*([^:]+)\s*:\s*([^%]*)%([^=]*)\s*=\s*([^%]*)%([^%]*)\s*`)
		j := strings.IndexRune(input[w:], '}')
		if j < 0 {
			// unterminated '{': keep the text literal
			return []string{"$" + input}, len(input)
		}
		varname = input[w : w+j]
		offset = w + j + 1

		// is this a namelist?
		mat := namelist_pattern.FindStringSubmatch(varname)
		if mat != nil && isValidVarName(mat[1]) {
			// ${varname:a%b=c%d}
			varname = mat[1]
			a, b, c, d := mat[2], mat[3], mat[4], mat[5]
			values, ok := vars[varname]
			if !ok {
				return []string{}, offset
			}

			// rewrite each value matching "a<x>b" as "c<x>d"
			pat := regexp.MustCompile(strings.Join([]string{`^\Q`, a, `\E(.*)\Q`, b, `\E$`}, ""))
			expanded_values := make([]string, len(values))
			for i, value := range values {
				value_match := pat.FindStringSubmatch(value)
				if value_match != nil {
					expanded_values[i] = strings.Join([]string{c, value_match[1], d}, "")
				} else {
					expanded_values[i] = value
				}
			}

			return expanded_values, offset
		}

		// bare variables: $foo
	} else if c == '(' { // Environment variables.
		j := strings.IndexRune(input[w:], ')')
		if j < 0 {
			return []string{"$" + input}, len(input)
		}
		varname = input[w : w+j]
		offset = w + j + 1
		return []string{os.Getenv(varname)}, offset
	} else {
		// try to match a variable name: [A-Za-z_][A-Za-z_0-9]*
		i := 0
		j := i
		for j < len(input) {
			c, w = utf8.DecodeRuneInString(input[j:])
			if !(isalpha(c) || c == '_' || (j > i && isdigit(c))) {
				break
			}
			j += w
		}

		if j > i {
			varname = input[i:j]
			offset = j
		} else {
			// '$' followed by nothing variable-like: keep it literal
			return []string{"$" + input}, len(input)
		}
	}

	if isValidVarName(varname) {
		varvals, ok := vars[varname]
		if ok {
			return varvals, offset
		} else {
			// valid name but unset: leave the sigil text in place
			return []string{"$" + input[:offset]}, offset
		}
	}

	return []string{"$" + input}, len(input)
}
// Find and expand all sigils.
//
// Scans input for '$'-sigils, expanding each via expandSigil. Literal text
// between sigils is glued onto the neighbouring expansion; multi-valued
// variables split the result into multiple words.
//
// Fixes in this revision: the original dropped the literal text before
// each '$' (never appended input[i:i+j]), called expandSigil with
// input[j+1:] where j was relative to input[i:], and reset i = k instead
// of advancing past the consumed sigil.
func expandSigils(input string, vars map[string][]string) []string {
	parts := make([]string, 0)
	expanded := ""
	for i := 0; i < len(input); {
		off := strings.IndexRune(input[i:], '$')
		if off < 0 {
			expanded += input[i:]
			break
		}

		// Keep the literal text preceding the sigil.
		expanded += input[i : i+off]
		i += off + 1 // step past the '$' (one byte)

		ex, k := expandSigil(input[i:], vars)
		if len(ex) > 0 {
			// First word attaches to pending text; last word stays
			// pending so following text can attach to it.
			ex[0] = expanded + ex[0]
			expanded = ex[len(ex)-1]
			parts = append(parts, ex[:len(ex)-1]...)
		}
		i += k
	}

	if len(expanded) > 0 {
		parts = append(parts, expanded)
	}

	return parts
}
// Find and expand all sigils in a recipe, producing a flat string.
//
// "\$" escapes to a literal '$'; any other backslash sequence is kept
// as-is. Multi-valued expansions are joined with single spaces.
func expandRecipeSigils(input string, vars map[string][]string) string {
	var out strings.Builder
	i := 0
	for i < len(input) {
		off := strings.IndexAny(input[i:], "$\\")
		if off < 0 {
			out.WriteString(input[i:])
			break
		}
		out.WriteString(input[i : i+off])
		i += off

		r, w := utf8.DecodeRuneInString(input[i:])
		i += w
		if r == '$' {
			ex, k := expandSigil(input[i:], vars)
			out.WriteString(strings.Join(ex, " "))
			i += k
			continue
		}
		// r == '\\': check what it escapes.
		r2, w2 := utf8.DecodeRuneInString(input[i:])
		if r2 == '$' {
			out.WriteByte('$')
		} else {
			out.WriteString("\\")
			out.WriteString(string(r2))
		}
		i += w2
	}
	return out.String()
}
// Expand all unescaped '%' characters, replacing each with the stem.
//
// "\%" produces a literal '%'; any other backslash sequence is kept
// verbatim. Fixes in this revision: the original used offsets relative to
// input[i:] as absolute positions (corrupting output after the first
// match) and never advanced i for a non-'%' escape (an infinite loop).
func expandSuffixes(input string, stem string) string {
	expanded := make([]byte, 0, len(input))
	for i := 0; i < len(input); {
		off := strings.IndexAny(input[i:], "\\%")
		if off < 0 {
			expanded = append(expanded, input[i:]...)
			break
		}
		j := i + off
		expanded = append(expanded, input[i:j]...)

		c, w := utf8.DecodeRuneInString(input[j:])
		if c == '%' {
			expanded = append(expanded, stem...)
			i = j + w
			continue
		}

		// c is '\\': examine the escaped rune.
		j += w
		if j >= len(input) {
			// trailing backslash: keep it as-is
			expanded = append(expanded, input[j-w:]...)
			break
		}
		c2, w2 := utf8.DecodeRuneInString(input[j:])
		if c2 == '%' {
			expanded = append(expanded, '%')
		} else {
			// unrecognized escape: keep the backslash and the rune
			expanded = append(expanded, input[j-w:j+w2]...)
		}
		i = j + w2
	}

	return string(expanded)
}
// Expand a backtick quoted string, by executing the contents.
//
// input begins just after the opening '`'. The body is run via "sh" and
// the command's output is split into words. Returns the words and the
// bytes consumed, including the closing backtick; with no closing
// backtick the whole input is returned as a single word.
func expandBackQuoted(input string, vars map[string][]string) ([]string, int) {
	// TODO: expand sigils?
	j := strings.Index(input, "`")
	if j < 0 {
		return []string{input}, len(input)
	}

	// TODO: handle errors
	output, _ := subprocess("sh", nil, input[:j], true)

	parts := make([]string, 0)
	_, tokens := lexWords(output)
	for t := range tokens {
		parts = append(parts, t.val)
	}

	return parts, (j + 1)
}

371
src/tool/mk/graph.go Normal file
View file

@ -0,0 +1,371 @@
package mk
import (
"fmt"
"io"
"os"
"sync"
"time"
)
// A dependency graph
type graph struct {
	root  *node            // the initial target's node
	nodes map[string]*node // map targets to their nodes
}

// An edge in the graph.
type edge struct {
	v       *node    // node this edge directs to
	stem    string   // stem matched for meta-rule applications
	matches []string // regular expression matches
	togo    bool     // this edge is going to be pruned
	r       *rule    // rule that generated this edge
}

// Current status of a node in the build.
type nodeStatus int

const (
	nodeStatusReady   nodeStatus = iota // not yet claimed by a builder
	nodeStatusStarted                   // a goroutine is building it
	nodeStatusNop                       // visited; nothing needed doing
	nodeStatusDone                      // built successfully
	nodeStatusFailed                    // recipe failed
)

// Bit flags used while massaging the graph.
type nodeFlag int

const (
	nodeFlagCycle    nodeFlag = 0x0002 // on the DFS stack (cycle detection)
	nodeFlagReady             = 0x0004 // already visited by vacuous()
	nodeFlagProbable          = 0x0100 // likely to be (re)built
	nodeFlagVacuous           = 0x0200 // nothing will cause it to be built
)

// A node in the dependency graph
type node struct {
	r         *rule             // rule to be applied
	name      string            // target name
	prog      string            // custom program to compare times
	t         time.Time         // file modification time
	exists    bool              // does a non-virtual target exist
	prereqs   []*edge           // prerequisite rules
	status    nodeStatus        // current state of the node in the build
	mutex     sync.Mutex        // exclusivity for the status variable
	listeners []chan nodeStatus // channels to notify of completion
	flags     nodeFlag          // bitwise combination of node flags
}
// Update a node's timestamp and 'exists' flag.
//
// A stat failure other than a path error is fatal. A missing file gets the
// zero Unix time, so any existing prerequisite will appear newer.
func (u *node) updateTimestamp() {
	info, err := os.Stat(u.name)
	if err == nil {
		u.t = info.ModTime()
		u.exists = true
		u.flags |= nodeFlagProbable
	} else {
		_, ok := err.(*os.PathError)
		if ok {
			// Path error: treat the target as nonexistent.
			u.t = time.Unix(0, 0)
			u.exists = false
		} else {
			mkError(err.Error())
		}
	}

	if rebuildall {
		// Forced rebuild: make every node look buildable.
		u.flags |= nodeFlagProbable
	}
}
// Create a new node, stat its target, and register it in the graph.
func (g *graph) newnode(name string) *node {
	u := &node{name: name}
	u.updateTimestamp()
	g.nodes[name] = u
	return u
}
// Print a graph in graphviz format.
//
// Emits one directed edge per prerequisite; edges without a target node
// (prerequisite-less rules) are omitted.
func (g *graph) visualize(w io.Writer) {
	fmt.Fprintln(w, "digraph mk {")
	for name, u := range g.nodes {
		for _, e := range u.prereqs {
			if e.v == nil {
				continue
			}
			fmt.Fprintf(w, " \"%s\" -> \"%s\";\n", name, e.v.name)
		}
	}
	fmt.Fprintln(w, "}")
}
// Create a new arc.
//
// Appends an edge from u to v (v may be nil for a prerequisite-less rule)
// and returns it so the caller can fill in stem/match data.
func (u *node) newedge(v *node, r *rule) *edge {
	e := &edge{v: v, r: r}
	u.prereqs = append(u.prereqs, e)
	return e
}
// Create a dependency graph for the given target.
//
// Applies the rule set recursively, then checks for cycles, prunes
// vacuous nodes, and resolves ambiguous rules.
func buildgraph(rs *ruleSet, target string) *graph {
	g := &graph{nil, make(map[string]*node)}

	// keep track of how many times each rule is visited, to avoid cycles.
	rulecnt := make([]int, len(rs.rules))
	g.root = applyrules(rs, g, target, rulecnt)
	g.cyclecheck(g.root)
	g.root.flags |= nodeFlagProbable
	g.vacuous(g.root)
	g.ambiguous(g.root)

	return g
}
// Recursively match the given target to a rule in the rule set to construct the
// full graph.
//
// rulecnt counts how many times each rule is active on the current path,
// bounding recursion through rules (see maxRuleCnt).
func applyrules(rs *ruleSet, g *graph, target string, rulecnt []int) *node {
	u, ok := g.nodes[target]
	if ok {
		return u
	}
	u = g.newnode(target)

	// does the target match a concrete rule?
	ks, ok := rs.targetrules[target]
	if ok {
		for ki := range ks {
			k := ks[ki]
			if rulecnt[k] > maxRuleCnt {
				continue
			}

			r := &rs.rules[k]

			// skip meta-rules
			if r.ismeta {
				continue
			}

			// skip rules that have no effect
			if r.recipe == "" && len(r.prereqs) == 0 {
				continue
			}

			u.flags |= nodeFlagProbable
			rulecnt[k] += 1
			if len(r.prereqs) == 0 {
				u.newedge(nil, r)
			} else {
				for i := range r.prereqs {
					u.newedge(applyrules(rs, g, r.prereqs[i], rulecnt), r)
				}
			}
			rulecnt[k] -= 1
		}
	}

	// find applicable metarules
	for k := range rs.rules {
		if rulecnt[k] >= maxRuleCnt {
			continue
		}

		r := &rs.rules[k]

		if !r.ismeta {
			continue
		}

		// skip rules that have no effect
		if r.recipe == "" && len(r.prereqs) == 0 {
			continue
		}

		for j := range r.targets {
			mat := r.targets[j].match(target)
			if mat == nil {
				continue
			}

			var stem string
			var matches []string
			var match_vars = make(map[string][]string)

			if r.attributes.regex {
				// regex meta-rule: expose submatches as $stem0..$stemN
				matches = mat
				for i := range matches {
					key := fmt.Sprintf("stem%d", i)
					match_vars[key] = matches[i : i+1]
				}
			} else {
				// percent meta-rule: mat[1] is the matched stem
				stem = mat[1]
			}

			rulecnt[k] += 1
			if len(r.prereqs) == 0 {
				e := u.newedge(nil, r)
				e.stem = stem
				e.matches = matches
			} else {
				// expand the stem/submatches into each prerequisite name
				for i := range r.prereqs {
					var prereq string
					if r.attributes.regex {
						prereq = expandRecipeSigils(r.prereqs[i], match_vars)
					} else {
						prereq = expandSuffixes(r.prereqs[i], stem)
					}

					e := u.newedge(applyrules(rs, g, prereq, rulecnt), r)
					e.stem = stem
					e.matches = matches
				}
			}
			rulecnt[k] -= 1
		}
	}

	return u
}
// Remove edges marked as togo.
func (g *graph) togo(u *node) {
	// Single pass: collect the surviving edges.
	kept := make([]*edge, 0, len(u.prereqs))
	for _, e := range u.prereqs {
		if !e.togo {
			kept = append(kept, e)
		}
	}
	// TODO: We may have to delete nodes from g.nodes, right?
	u.prereqs = kept
}
// Remove vacuous children of u.
//
// A node is vacuous when nothing marks it probable (nodeFlagProbable)
// anywhere below it. Edges from meta-rules to vacuous children are pruned.
// nodeFlagReady marks nodes already visited. Returns whether u itself is
// vacuous.
func (g *graph) vacuous(u *node) bool {
	vac := u.flags&nodeFlagProbable == 0
	if u.flags&nodeFlagReady != 0 {
		return vac
	}
	u.flags |= nodeFlagReady

	for i := range u.prereqs {
		e := u.prereqs[i]
		if e.v != nil && g.vacuous(e.v) && e.r.ismeta {
			e.togo = true
		} else {
			vac = false
		}
	}

	// if a rule generated edges that are not togo, keep all of its edges
	for i := range u.prereqs {
		e := u.prereqs[i]
		if !e.togo {
			for j := range u.prereqs {
				f := u.prereqs[j]
				if e.r == f.r {
					f.togo = false
				}
			}
		}
	}

	g.togo(u)
	if vac {
		u.flags |= nodeFlagVacuous
	}
	return vac
}
// Check for cycles, aborting with an error if one is found.
//
// nodeFlagCycle acts as an "on the DFS stack" marker: it is set on the way
// down and cleared on the way back up.
func (g *graph) cyclecheck(u *node) {
	if u.flags&nodeFlagCycle != 0 && len(u.prereqs) > 0 {
		mkError(fmt.Sprintf("cycle in the graph detected at target %s", u.name))
	}
	u.flags |= nodeFlagCycle
	for i := range u.prereqs {
		if u.prereqs[i].v != nil {
			g.cyclecheck(u.prereqs[i].v)
		}
	}
	u.flags &= ^nodeFlagCycle
}
// Deal with ambiguous rules.
//
// When two non-equivalent recipes apply to the same target, a concrete
// rule wins over a meta-rule (the loser's edge is marked togo); if neither
// wins, the ambiguity is reported and the build aborts.
func (g *graph) ambiguous(u *node) {
	bad := 0
	var le *edge
	for i := range u.prereqs {
		e := u.prereqs[i]

		if e.v != nil {
			g.ambiguous(e.v)
		}
		if e.r.recipe == "" {
			continue
		}
		if le == nil || le.r == nil {
			le = e
		} else {
			if !le.r.equivRecipe(e.r) {
				if le.r.ismeta && !e.r.ismeta {
					// concrete rule beats the earlier meta-rule
					mkPrintRecipe(u.name, le.r.recipe, false)
					le.togo = true
					le = e
				} else if !le.r.ismeta && e.r.ismeta {
					// earlier concrete rule beats this meta-rule
					mkPrintRecipe(u.name, e.r.recipe, false)
					e.togo = true
					continue
				}
			}
			if !le.r.equivRecipe(e.r) {
				if bad == 0 {
					mkPrintError(fmt.Sprintf("mk: ambiguous recipes for %s\n", u.name))
					bad = 1
					g.trace(u.name, le)
				}
				g.trace(u.name, e)
			}
		}
	}
	if bad > 0 {
		mkError("")
	}
	g.togo(u)
}
// Print a trace of the rule chain leading to edge e, for diagnostics.
func (g *graph) trace(name string, e *edge) {
	fmt.Fprintf(os.Stderr, "\t%s", name)
	for true {
		prereqname := ""
		if e.v != nil {
			prereqname = e.v.name
		}
		fmt.Fprintf(os.Stderr, " <-(%s:%d)- %s", e.r.file, e.r.line, prereqname)
		if e.v != nil {
			for i := range e.v.prereqs {
				if e.v.prereqs[i].r.recipe != "" {
					e = e.v.prereqs[i]
					// NOTE(review): this continue restarts only the inner
					// loop, so the outer loop always breaks after one
					// level; a labeled continue on the outer loop looks
					// intended — verify.
					continue
				}
			}
			break
		} else {
			break
		}
	}
}

409
src/tool/mk/lex.go Normal file
View file

@ -0,0 +1,409 @@
package mk
import (
"fmt"
"strings"
"unicode/utf8"
)
// Kind of lexeme produced by the lexer (see the const block below).
type tokenType int

// Sentinel rune returned at end of input.
const eof rune = '\000'

// Runes that cannot be part of a bare (unquoted) string.
const nonBareRunes = " \t\n\r\\=:#'\"$"
// Return true if the string contains whitespace only (or is empty).
//
// Fix: the previous expression (strings.IndexAny(s, " \t\r\n") < 0) tested
// the opposite condition — "contains no whitespace at all" — contradicting
// both this comment and its use in lexRecipe, which suppresses recipes
// that are nothing but whitespace.
func onlyWhitespace(s string) bool {
	// No non-whitespace rune found means the string is whitespace-only.
	return strings.IndexFunc(s, func(r rune) bool {
		return !strings.ContainsRune(" \t\r\n", r)
	}) < 0
}
// Token types produced by the lexer.
const (
	tokenError tokenType = iota // lexing failed; val carries the message
	tokenNewline
	tokenWord
	tokenPipeInclude  // "<|" include the output of a command
	tokenRedirInclude // "<" include a file
	tokenColon
	tokenAssign
	tokenRecipe
)
// String returns a bracketed name for the token type, for diagnostics.
func (typ tokenType) String() string {
	names := map[tokenType]string{
		tokenError:        "[Error]",
		tokenNewline:      "[Newline]",
		tokenWord:         "[Word]",
		tokenPipeInclude:  "[PipeInclude]",
		tokenRedirInclude: "[RedirInclude]",
		tokenColon:        "[Colon]",
		tokenAssign:       "[Assign]",
		tokenRecipe:       "[Recipe]",
	}
	if s, ok := names[typ]; ok {
		return s
	}
	return "[MysteryToken]"
}
// A single lexed token.
type token struct {
	typ  tokenType // token type
	val  string    // token string
	line int       // line where it was found
	col  int       // column on which the token began
}
// String renders the token for diagnostics: newlines print as "\n",
// everything else (including errors) prints its raw value.
func (t *token) String() string {
	if t.typ == tokenNewline {
		return "\\n"
	}
	return t.val
}
// Lexer state, shared with the goroutine started by run.
type lexer struct {
	input     string     // input string to be lexed
	output    chan token // channel on which tokens are sent
	start     int        // token beginning
	startcol  int        // column on which the token begins
	pos       int        // position within input
	line      int        // line within input
	col       int        // column within input
	errmsg    string     // set to an appropriate error message when necessary
	indented  bool       // true if the only whitespace so far on this line
	barewords bool       // lex only a sequence of words
}
// A lexerStateFun is simultaneously the state of the lexer and the next
// action the lexer will perform.
type lexerStateFun func(*lexer) lexerStateFun

// Record an error and emit an error token. Only the first message is
// preserved in errmsg.
func (l *lexer) lexerror(what string) {
	if l.errmsg == "" {
		l.errmsg = what
	}
	l.emit(tokenError)
}
// Return the nth character without advancing.
// Returns eof when fewer than n+1 characters remain.
func (l *lexer) peekN(n int) (c rune) {
	pos := l.pos
	var width int
	i := 0
	for ; i <= n && pos < len(l.input); i++ {
		c, width = utf8.DecodeRuneInString(l.input[pos:])
		pos += width
	}
	if i <= n {
		return eof
	}
	return
}

// Return the next character without advancing.
func (l *lexer) peek() rune {
	return l.peekN(0)
}
// Consume and return the next character in the lexer input.
// Tracks line/column counters and whether the current line so far is
// only whitespace (indented).
func (l *lexer) next() rune {
	if l.pos >= len(l.input) {
		return eof
	}
	c, width := utf8.DecodeRuneInString(l.input[l.pos:])
	l.pos += width

	if c == '\n' {
		l.col = 0
		l.line += 1
		l.indented = true
	} else {
		l.col += 1
		if strings.IndexRune(" \t", c) < 0 {
			l.indented = false
		}
	}

	return c
}

// Skip the next character: consume it without including it in the
// pending token.
func (l *lexer) skip() {
	l.next()
	l.start = l.pos
	l.startcol = l.col
}

// Send the text accumulated since start as a token of the given type.
func (l *lexer) emit(typ tokenType) {
	l.output <- token{typ, l.input[l.start:l.pos], l.line, l.startcol}
	l.start = l.pos
	l.startcol = 0
}
// Consume the next rune if it is in the given string. Returns true if it
// was consumed.
func (l *lexer) accept(valid string) bool {
	if !strings.ContainsRune(valid, l.peek()) {
		return false
	}
	l.next()
	return true
}
// Skip the next rune if it is in the valid string. Returns true if it was
// skipped.
func (l *lexer) ignore(valid string) bool {
	if !strings.ContainsRune(valid, l.peek()) {
		return false
	}
	l.skip()
	return true
}
// Consume characters from the valid string until the next is not.
// Returns the number of bytes consumed.
func (l *lexer) acceptRun(valid string) int {
	prevpos := l.pos
	for strings.IndexRune(valid, l.peek()) >= 0 {
		l.next()
	}
	return l.pos - prevpos
}

// Accept until something from the given string is encountered.
// Reaching end of input first is reported as a lex error.
func (l *lexer) acceptUntil(invalid string) {
	for l.pos < len(l.input) && strings.IndexRune(invalid, l.peek()) < 0 {
		l.next()
	}

	if l.peek() == eof {
		l.lexerror(fmt.Sprintf("end of file encountered while looking for one of: %s", invalid))
	}
}

// Accept until something from the given string is encountered, or the end
// of the file.
func (l *lexer) acceptUntilOrEof(invalid string) {
	for l.pos < len(l.input) && strings.IndexRune(invalid, l.peek()) < 0 {
		l.next()
	}
}

// Skip characters from the valid string until the next is not.
// Returns the number of bytes skipped.
func (l *lexer) skipRun(valid string) int {
	prevpos := l.pos
	for strings.IndexRune(valid, l.peek()) >= 0 {
		l.skip()
	}
	return l.pos - prevpos
}

// Skip until something from the given string is encountered.
// Reaching end of input first is reported as a lex error.
func (l *lexer) skipUntil(invalid string) {
	for l.pos < len(l.input) && strings.IndexRune(invalid, l.peek()) < 0 {
		l.skip()
	}

	if l.peek() == eof {
		l.lexerror(fmt.Sprintf("end of file encountered while looking for one of: %s", invalid))
	}
}
// Start a new lexer to lex the given input.
// Tokens arrive on the returned channel; the lexer runs in its own goroutine.
func lex(input string) (*lexer, chan token) {
	l := &lexer{input: input, output: make(chan token), line: 1, col: 0, indented: true}
	go l.run()
	return l, l.output
}

// Like lex, but treats the input as a flat sequence of words (used for
// backtick command output), stopping at the first newline.
func lexWords(input string) (*lexer, chan token) {
	l := &lexer{input: input, output: make(chan token), line: 1, col: 0, indented: true, barewords: true}
	go l.run()
	return l, l.output
}

// Drive the state machine until a state returns nil, then close output.
func (l *lexer) run() {
	for state := lexTopLevel; state != nil; {
		state = state(l)
	}
	close(l.output)
}
// Top level lexing state: between statements or at the start of a line.
// Dispatches to the specific state function based on the next rune.
func lexTopLevel(l *lexer) lexerStateFun {
	for {
		l.skipRun(" \t\r")
		// emit a newline token if we are ending a non-empty line.
		if l.peek() == '\n' && !l.indented {
			l.next()
			if l.barewords {
				// word-only lexing stops at the first newline
				return nil
			} else {
				l.emit(tokenNewline)
			}
		}
		l.skipRun(" \t\r\n")

		// line continuation: backslash followed by newline
		if l.peek() == '\\' && l.peekN(1) == '\n' {
			l.next()
			l.next()
			l.indented = false
		} else {
			break
		}
	}

	// indented text that is not at column 0 begins a recipe
	if l.indented && l.col > 0 {
		return lexRecipe
	}

	c := l.peek()
	switch c {
	case eof:
		return nil
	case '#':
		return lexComment
	case '<':
		return lexInclude
	case ':':
		return lexColon
	case '=':
		return lexAssign
	case '"':
		return lexDoubleQuotedWord
	case '\'':
		return lexSingleQuotedWord
	case '`':
		return lexBackQuotedWord
	}

	return lexBareWord
}
// Lex a ':' rule separator.
func lexColon(l *lexer) lexerStateFun {
	l.next()
	l.emit(tokenColon)
	return lexTopLevel
}

// Lex an '=' assignment operator.
func lexAssign(l *lexer) lexerStateFun {
	l.next()
	l.emit(tokenAssign)
	return lexTopLevel
}

// Discard a '#' comment through the end of the line.
func lexComment(l *lexer) lexerStateFun {
	l.skip() // '#'
	l.skipUntil("\n")
	return lexTopLevel
}

// Lex an include: "<|" pipes a command's output, bare "<" reads a file.
func lexInclude(l *lexer) lexerStateFun {
	l.next() // '<'
	if l.accept("|") {
		l.emit(tokenPipeInclude)
	} else {
		l.emit(tokenRedirInclude)
	}
	return lexTopLevel
}
// Lex a double quoted word starting at '"'. Backslash-escaped quotes are
// skipped; an unterminated string is an error.
func lexDoubleQuotedWord(l *lexer) lexerStateFun {
	l.next() // '"'
	for l.peek() != '"' && l.peek() != eof {
		l.acceptUntil("\\\"")
		if l.accept("\\") {
			l.accept("\"")
		}
	}

	if l.peek() == eof {
		l.lexerror("end of file encountered while parsing a quoted string.")
	}

	l.next() // '"'
	return lexBareWord
}

// Lex a back quoted word starting at '`'. An unterminated backquote is
// reported by acceptUntil.
func lexBackQuotedWord(l *lexer) lexerStateFun {
	l.next() // '`'
	l.acceptUntil("`")
	l.next() // '`'
	return lexBareWord
}

// Lex a single quoted word starting at '\''. No escapes are recognized;
// an unterminated quote is reported by acceptUntil.
func lexSingleQuotedWord(l *lexer) lexerStateFun {
	l.next() // '\''
	l.acceptUntil("'")
	l.next() // '\''
	return lexBareWord
}
// Lex a recipe: consume whole lines for as long as they stay indented,
// emitting the accumulated text unless it is judged whitespace-only.
func lexRecipe(l *lexer) lexerStateFun {
	for {
		l.acceptUntilOrEof("\n")
		l.acceptRun(" \t\n\r")
		if !l.indented || l.col == 0 {
			break
		}
	}

	if !onlyWhitespace(l.input[l.start:l.pos]) {
		l.emit(tokenRecipe)
	}

	return lexTopLevel
}
// Lex a bare (unquoted) word, possibly continuing into quoted fragments
// or "${...}" expansions that are glued onto the same word.
func lexBareWord(l *lexer) lexerStateFun {
	l.acceptUntil(nonBareRunes)
	c := l.peek()
	if c == '"' {
		return lexDoubleQuotedWord
	} else if c == '\'' {
		return lexSingleQuotedWord
	} else if c == '`' {
		return lexBackQuotedWord
	} else if c == '\\' {
		c1 := l.peekN(1)
		if c1 == '\n' || c1 == '\r' {
			// line continuation ends the word
			if l.start < l.pos {
				l.emit(tokenWord)
			}
			l.skip()
			l.skip()
			return lexTopLevel
		} else {
			// keep the backslash and the escaped character in the word
			l.next()
			l.next()
			return lexBareWord
		}
	} else if c == '$' {
		c1 := l.peekN(1)
		if c1 == '{' {
			return lexBracketExpansion
		} else {
			l.next()
			return lexBareWord
		}
	}

	if l.start < l.pos {
		l.emit(tokenWord)
	}

	return lexTopLevel
}

// Lex a "${...}" expansion embedded in a word.
func lexBracketExpansion(l *lexer) lexerStateFun {
	l.next() // '$'
	l.next() // '{'
	l.acceptUntil("}")
	l.next() // '}'
	return lexBareWord
}

417
src/tool/mk/main.go Normal file
View file

@ -0,0 +1,417 @@
package mk
import (
"bufio"
"flag"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"sync"
)
// True if messages should be printed without fancy colors.
var nocolor bool = false

// True if we are ignoring timestamps and rebuilding everything.
var rebuildall bool = false

// Set of targets for which we are forcing rebuild.
var rebuildtargets map[string]bool = make(map[string]bool)

// Lock on standard out, so messages don't get interleaved too much.
var mkMsgMutex sync.Mutex

// The maximum number of times a rule may be applied.
const maxRuleCnt = 1

// Limit the number of recipes executed simultaneously.
var subprocsAllowed int

// Current subprocesses being executed.
var subprocsRunning int

// Wakeup on a free subprocess slot.
var subprocsRunningCond *sync.Cond = sync.NewCond(&sync.Mutex{})

// Prevent more than one recipe at a time from trying to take over all
// subprocess slots (see reserveExclusiveSubproc).
var exclusiveSubproc = sync.Mutex{}
// Wait until there is an available subprocess slot.
func reserveSubproc() {
	subprocsRunningCond.L.Lock()
	for subprocsRunning >= subprocsAllowed {
		subprocsRunningCond.Wait()
	}
	subprocsRunning++
	subprocsRunningCond.L.Unlock()
}

// Free up another subprocess to run.
func finishSubproc() {
	subprocsRunningCond.L.Lock()
	subprocsRunning--
	subprocsRunningCond.Signal()
	subprocsRunningCond.L.Unlock()
}

// Make everyone wait while we run an exclusive recipe: steal every
// subprocess slot, waiting for currently running recipes to finish.
func reserveExclusiveSubproc() {
	exclusiveSubproc.Lock()
	// Wait until everything is done running
	stolen_subprocs := 0
	subprocsRunningCond.L.Lock()
	stolen_subprocs = subprocsAllowed - subprocsRunning
	subprocsRunning = subprocsAllowed
	for stolen_subprocs < subprocsAllowed {
		subprocsRunningCond.Wait()
		stolen_subprocs += subprocsAllowed - subprocsRunning
		subprocsRunning = subprocsAllowed
	}
	// NOTE: the condition lock stays held until finishExclusiveSubproc.
}

// Release the slots taken by reserveExclusiveSubproc and wake waiters.
func finishExclusiveSubproc() {
	subprocsRunning = 0
	subprocsRunningCond.Broadcast()
	subprocsRunningCond.L.Unlock()
	exclusiveSubproc.Unlock()
}
// Ansi color codes, written around messages when color output is enabled
// (see nocolor).
const (
	ansiTermDefault   = "\033[0m"
	ansiTermBlack     = "\033[30m"
	ansiTermRed       = "\033[31m"
	ansiTermGreen     = "\033[32m"
	ansiTermYellow    = "\033[33m"
	ansiTermBlue      = "\033[34m"
	ansiTermMagenta   = "\033[35m"
	ansiTermBright    = "\033[1m"
	ansiTermUnderline = "\033[4m"
)
// Build a node's prereqs. Block until completed.
//
// Spawns mkNode for each prerequisite that still needs building and waits
// for all of them; returns nodeStatusFailed if any failed, nodeStatusDone
// otherwise.
// NOTE(review): parameters u and e are not used in this body — verify
// whether they are kept only for signature symmetry with mkNode.
func mkNodePrereqs(g *graph, u *node, e *edge, prereqs []*node, dryrun bool,
	required bool) nodeStatus {
	prereqstat := make(chan nodeStatus)
	pending := 0

	// build prereqs that need building
	for i := range prereqs {
		prereqs[i].mutex.Lock()
		switch prereqs[i].status {
		case nodeStatusReady, nodeStatusNop:
			go mkNode(g, prereqs[i], dryrun, required)
			fallthrough
		case nodeStatusStarted:
			prereqs[i].listeners = append(prereqs[i].listeners, prereqstat)
			pending++
		}
		prereqs[i].mutex.Unlock()
	}

	// wait until all the prereqs are built
	status := nodeStatusDone
	for pending > 0 {
		s := <-prereqstat
		pending--
		if s == nodeStatusFailed {
			status = nodeStatusFailed
		}
	}
	return status
}
// Build a target in the graph.
//
// This selects an appropriate rule (edge) and builds all prerequisites
// concurrently.
//
// Args:
//
//	g: Graph in which the node lives.
//	u: Node to (possibly) build.
//	dryrun: Don't actually build anything, just pretend.
//	required: Avoid building this node, unless its prereqs are out of date.
func mkNode(g *graph, u *node, dryrun bool, required bool) {
	// try to claim on this node
	u.mutex.Lock()
	if u.status != nodeStatusReady && u.status != nodeStatusNop {
		u.mutex.Unlock()
		return
	} else {
		u.status = nodeStatusStarted
	}
	u.mutex.Unlock()

	// when finished, notify the listeners
	finalstatus := nodeStatusDone
	defer func() {
		u.mutex.Lock()
		u.status = finalstatus
		for i := range u.listeners {
			u.listeners[i] <- u.status
		}
		u.listeners = u.listeners[0:0]
		u.mutex.Unlock()
	}()

	// no rule generates this node: it must already exist (or be virtual)
	if len(u.prereqs) == 0 {
		if !(u.r != nil && u.r.attributes.virtual) && !u.exists {
			wd, _ := os.Getwd()
			mkError(fmt.Sprintf("don't know how to make %s in %s\n", u.name, wd))
		}
		finalstatus = nodeStatusNop
		return
	}

	// there should otherwise be exactly one edge with an associated rule
	prereqs := make([]*node, 0)
	var e *edge = nil
	for i := range u.prereqs {
		if u.prereqs[i].r != nil {
			e = u.prereqs[i]
		}
		if u.prereqs[i].v != nil {
			prereqs = append(prereqs, u.prereqs[i].v)
		}
	}

	// this should have been caught during graph building
	if e == nil {
		wd, _ := os.Getwd()
		mkError(fmt.Sprintf("don't know how to make %s in %s", u.name, wd))
	}

	// first pass: build prereqs only if strictly needed
	prereqs_required := required && (e.r.attributes.virtual || !u.exists)
	mkNodePrereqs(g, u, e, prereqs, dryrun, prereqs_required)

	// decide whether u is out of date relative to its prereqs
	uptodate := true
	if !e.r.attributes.virtual {
		u.updateTimestamp()
		if !u.exists && required {
			uptodate = false
		} else if u.exists || required {
			for i := range prereqs {
				if u.t.Before(prereqs[i].t) || prereqs[i].status == nodeStatusDone {
					uptodate = false
				}
			}
		} else if required {
			uptodate = false
		}
	} else {
		// virtual targets are never up to date
		uptodate = false
	}

	_, isrebuildtarget := rebuildtargets[u.name]
	if isrebuildtarget || rebuildall {
		uptodate = false
	}

	// make another pass on the prereqs, since we know we need them now
	if !uptodate {
		mkNodePrereqs(g, u, e, prereqs, dryrun, true)
	}

	// execute the recipe, unless the prereqs failed
	if !uptodate && finalstatus != nodeStatusFailed && len(e.r.recipe) > 0 {
		if e.r.attributes.exclusive {
			reserveExclusiveSubproc()
		} else {
			reserveSubproc()
		}

		if !dorecipe(u.name, u, e, dryrun) {
			finalstatus = nodeStatusFailed
		}
		u.updateTimestamp()

		if e.r.attributes.exclusive {
			finishExclusiveSubproc()
		} else {
			finishSubproc()
		}
	} else if finalstatus != nodeStatusFailed {
		finalstatus = nodeStatusNop
	}
}
// Report a non-fatal warning to stderr.
func mkWarn(msg string) {
	mkPrintWarn(msg)
}
// Print a warning message to stderr, in yellow when color is enabled.
func mkPrintWarn(msg string) {
	if nocolor {
		fmt.Fprintf(os.Stderr, "%s\n", msg)
		return
	}
	os.Stderr.WriteString(ansiTermYellow)
	fmt.Fprintf(os.Stderr, "%s\n", msg)
	os.Stderr.WriteString(ansiTermDefault)
}
// Print an error message and abort the whole program.
func mkError(msg string) {
	mkPrintError(msg)
	os.Exit(1)
}
// Print an error message to stderr, in red when color is enabled.
func mkPrintError(msg string) {
	if nocolor {
		fmt.Fprintf(os.Stderr, "%s\n", msg)
		return
	}
	os.Stderr.WriteString(ansiTermRed)
	fmt.Fprintf(os.Stderr, "%s\n", msg)
	os.Stderr.WriteString(ansiTermDefault)
}
// Print a success message to stdout, in green when color is enabled.
func mkPrintSuccess(msg string) {
	if !nocolor {
		fmt.Printf("%s%s%s\n", ansiTermGreen, msg, ansiTermDefault)
		return
	}
	fmt.Println(msg)
}
// Print an informational message (blue when color is enabled), holding
// the message mutex so output is not interleaved.
func mkPrintMessage(msg string) {
	mkMsgMutex.Lock()
	if nocolor {
		fmt.Println(msg)
	} else {
		fmt.Printf("%s%s%s\n", ansiTermBlue, msg, ansiTermDefault)
	}
	mkMsgMutex.Unlock()
}
// Print a recipe about to be executed, prefixed with its target.
// quiet replaces the recipe body with an ellipsis.
func mkPrintRecipe(target string, recipe string, quiet bool) {
	mkMsgMutex.Lock()
	if nocolor {
		fmt.Printf("%s: ", target)
	} else {
		fmt.Printf("%s%s%s → %s",
			ansiTermBlue+ansiTermBright+ansiTermUnderline, target,
			ansiTermDefault, ansiTermBlue)
	}
	if quiet {
		if nocolor {
			fmt.Println("...")
		} else {
			fmt.Println("…")
		}
	} else {
		printIndented(os.Stdout, recipe, len(target)+3)
		if len(recipe) == 0 {
			os.Stdout.WriteString("\n")
		}
	}
	if !nocolor {
		os.Stdout.WriteString(ansiTermDefault)
	}
	mkMsgMutex.Unlock()
}
// Run is the mk entry point: parse flags, read the mkfile, build the
// dependency graph, and build the requested targets.
//
// args is the full argv; args[0] is used as the program name for flags.
func Run(args []string) {
	var mkfilepath string
	var interactive bool
	var dryrun bool
	var shallowrebuild bool
	var quiet bool

	arg0 := args[0]
	args = args[1:]

	// Default the include directory when MKINCDIR is unset.
	// NOTE(review): the os.UserHomeDir error is ignored — an empty home
	// yields "/app/mk/inc"; verify this is acceptable.
	if mkincdir := os.Getenv("MKINCDIR"); mkincdir == "" {
		homeDir, _ := os.UserHomeDir()
		os.Setenv("MKINCDIR", homeDir+"/app/mk/inc")
	}

	flags := flag.NewFlagSet(arg0, flag.ExitOnError)
	flags.StringVar(&mkfilepath, "f", "mkfile", "use the given file as mkfile")
	flags.BoolVar(&dryrun, "n", false, "print commands without actually executing")
	flags.BoolVar(&shallowrebuild, "r", false, "force building of just targets")
	flags.BoolVar(&rebuildall, "a", false, "force building of all dependencies")
	flags.IntVar(&subprocsAllowed, "p", 4, "maximum number of jobs to execute in parallel")
	flags.BoolVar(&interactive, "i", false, "prompt before executing rules")
	flags.BoolVar(&quiet, "q", false, "don't print recipes before executing them")
	flags.Parse(args)

	mkfile, err := os.Open(mkfilepath)
	if err != nil {
		mkError("no mkfile found")
	}
	input, _ := ioutil.ReadAll(mkfile) // NOTE(review): read error ignored
	mkfile.Close()

	abspath, err := filepath.Abs(mkfilepath)
	if err != nil {
		mkError("unable to find mkfile's absolute path")
	}

	rs := parse(string(input), mkfilepath, abspath)
	if quiet {
		for i := range rs.rules {
			rs.rules[i].attributes.quiet = true
		}
	}

	targets := flags.Args()

	// build the first non-meta rule in the makefile, if none are given explicitly
	if len(targets) == 0 {
		for i := range rs.rules {
			if !rs.rules[i].ismeta {
				for j := range rs.rules[i].targets {
					targets = append(targets, rs.rules[i].targets[j].spat)
				}
				break
			}
		}
	}

	if len(targets) == 0 {
		fmt.Println("mk: nothing to mk")
		return
	}

	if shallowrebuild {
		for i := range targets {
			rebuildtargets[targets[i]] = true
		}
	}

	// Create a dummy virtual rule that depends on every target
	root := rule{}
	root.targets = []pattern{pattern{false, "", nil}}
	root.attributes = attribSet{false, false, false, false, false, false, false, true, false}
	root.prereqs = targets
	rs.add(root)

	if interactive {
		// dry-run first, then ask for confirmation
		g := buildgraph(rs, "")
		mkNode(g, g.root, true, true)
		fmt.Print("Proceed? ")
		in := bufio.NewReader(os.Stdin)
		for {
			c, _, err := in.ReadRune()
			if err != nil {
				return
			} else if strings.IndexRune(" \n\t\r", c) >= 0 {
				continue
			} else if c == 'y' {
				break
			} else {
				return
			}
		}
	}

	g := buildgraph(rs, "")
	mkNode(g, g.root, dryrun, true)
}

384
src/tool/mk/parse.go Normal file
View file

@ -0,0 +1,384 @@
// This is a mkfile parser. It executes assignments and includes as it goes, and
// collects a set of rules, which are returned as a ruleSet object.
package mk
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"strings"
)
// parser holds the state shared by the parser state functions while a
// single mkfile is consumed token by token.
type parser struct {
	l        *lexer   // underlying lexer
	name     string   // name of the file being parsed (used in diagnostics)
	path     string   // full path of the file being parsed
	tokenbuf []token  // tokens consumed on the current statement
	rules    *ruleSet // current ruleSet being populated
}
// Pretty errors: report a syntax error at the offending token in a
// "while X, expected Y but found Z" form, then abort via mkError.
func (p *parser) parseError(context string, expected string, found token) {
	prefix := fmt.Sprintf("%s:%d: syntax error: ", p.name, found.line)
	mkPrintError(prefix)
	detail := fmt.Sprintf("while %s, expected %s but found '%s'.\n",
		context, expected, found.String())
	mkPrintError(detail)
	mkError("")
}
// basicWarnAtToken emits a warning anchored at the given token's line.
func (p *parser) basicWarnAtToken(what string, found token) {
	p.basicWarnAtLine(what, found.line)
}
// basicWarnAtLine emits a "file:line: warning: ..." message via mkWarn.
func (p *parser) basicWarnAtLine(what string, line int) {
	msg := fmt.Sprintf("%s:%d: warning: %s\n", p.name, line, what)
	mkWarn(msg)
}
// More basic errors.
// basicErrorAtToken reports a fatal syntax error anchored at the token's line.
func (p *parser) basicErrorAtToken(what string, found token) {
	p.basicErrorAtLine(what, found.line)
}
// basicErrorAtLine reports a fatal "file:line: syntax error: ..." via mkError.
func (p *parser) basicErrorAtLine(what string, line int) {
	msg := fmt.Sprintf("%s:%d: syntax error: %s\n", p.name, line, what)
	mkError(msg)
}
// Accept a token for use in the current statement being parsed.
// Buffered tokens are consumed all at once when the statement completes.
func (p *parser) push(t token) {
	p.tokenbuf = append(p.tokenbuf, t)
}
// Clear all the accepted tokens. Called when a statement is finished.
// The backing array is kept so subsequent statements reuse its capacity.
func (p *parser) clear() {
	p.tokenbuf = p.tokenbuf[:0]
}
// A parser state function takes a parser and the next token and returns a new
// state function, or nil if there was a parse error. (NOTE(review): in the
// states below, errors appear to abort via mkError rather than return nil —
// confirm mkError does not return.)
type parserStateFun func(*parser, token) parserStateFun
// Parse a mkfile held in input, returning a new ruleSet.
// name is used in diagnostics; path is the file's absolute path.
func parse(input string, name string, path string) *ruleSet {
	rules := &ruleSet{
		vars:        make(map[string][]string),
		rules:       make([]rule, 0),
		targetrules: make(map[string][]int),
	}
	parseInto(input, name, rules, path)
	return rules
}
// Parse a mkfile inserting rules and variables into a given ruleSet.
// $mkfiledir is temporarily rebound to the directory of the file being
// parsed so relative includes resolve correctly, and restored afterwards
// (includes may nest).
func parseInto(input string, name string, rules *ruleSet, path string) {
	l, tokens := lex(input)
	p := &parser{l, name, path, []token{}, rules}
	oldmkfiledir := p.rules.vars["mkfiledir"]
	p.rules.vars["mkfiledir"] = []string{filepath.Dir(path)}
	state := parseTopLevel
	// Drive the state machine: each token yields the next state function.
	for t := range tokens {
		if t.typ == tokenError {
			p.basicErrorAtLine(l.errmsg, t.line)
			break
		}
		state = state(p, t)
	}
	// insert a dummy newline to allow parsing of any assignments or recipeless
	// rules to finish.
	state = state(p, token{tokenNewline, "\n", l.line, l.col})
	// restore the caller's $mkfiledir binding
	p.rules.vars["mkfiledir"] = oldmkfiledir
	// TODO: Error when state != parseTopLevel
}
// We are at the top level of a mkfile: dispatch on the token that begins
// the next statement (rule, assignment, or include).
func parseTopLevel(p *parser, t token) parserStateFun {
	switch t.typ {
	case tokenNewline:
		return parseTopLevel
	case tokenPipeInclude:
		return parsePipeInclude
	case tokenRedirInclude:
		return parseRedirInclude
	case tokenWord:
		return parseAssignmentOrTarget(p, t)
	}
	p.parseError("parsing mkfile",
		"a rule, include, or assignment", t)
	return parseTopLevel
}
// Consumed a '<|'. Buffer the shell command's tokens up to the newline,
// run them through sh, and parse the command's output as mkfile text.
func parsePipeInclude(p *parser, t token) parserStateFun {
	switch t.typ {
	case tokenNewline:
		if len(p.tokenbuf) == 0 {
			p.basicErrorAtToken("empty pipe include", t)
		}
		args := make([]string, len(p.tokenbuf))
		for i := 0; i < len(p.tokenbuf); i++ {
			args[i] = p.tokenbuf[i].val
		}
		// NOTE(review): the tokens are handed to sh as positional
		// arguments, not via "-c" — confirm this matches the intended
		// '<|' semantics.
		output, success := subprocess("sh", args, "", true)
		if !success {
			p.basicErrorAtToken("subprocess include failed", t)
		}
		parseInto(output, fmt.Sprintf("%s:sh", p.name), p.rules, p.path)
		p.clear()
		return parseTopLevel
	// Almost anything goes. Let the shell sort it out.
	case tokenPipeInclude, tokenRedirInclude, tokenColon, tokenAssign, tokenWord:
		p.push(t) // consistent with the other state functions
	default:
		p.parseError("parsing piped include", "a shell command", t)
	}
	return parsePipeInclude
}
// Consumed a '<'. Expand the buffered tokens into a file name, then parse
// that file's contents into the current rule set. A missing file is only
// a warning: the include is skipped.
func parseRedirInclude(p *parser, t token) parserStateFun {
	switch t.typ {
	case tokenNewline:
		filename := ""
		for i := range p.tokenbuf {
			filename += expand(p.tokenbuf[i].val, p.rules.vars, true)[0]
		}
		file, err := os.Open(filename)
		if err != nil {
			p.basicWarnAtToken(fmt.Sprintf("cannot open %s", filename), p.tokenbuf[0])
			// Previously we fell through and read from a nil file;
			// skip the include instead.
			p.clear()
			return parseTopLevel
		}
		input, _ := ioutil.ReadAll(file)
		file.Close() // was leaked previously
		path, err := filepath.Abs(filename)
		if err != nil {
			mkError("unable to find mkfile's absolute path")
		}
		parseInto(string(input), filename, p.rules, path)
		p.clear()
		return parseTopLevel
	case tokenWord:
		p.push(t)
	default:
		p.parseError("parsing include", "a file name", t)
	}
	return parseRedirInclude
}
// Encountered a bare string at the beginning of the line. It could begin
// either an assignment ("foo = ...") or a rule ("foo : ..."); buffer it
// and decide on the next token.
func parseAssignmentOrTarget(p *parser, t token) parserStateFun {
	p.push(t)
	return parseEqualsOrTarget
}
// Consumed one bare string at the beginning of the line; the next token
// disambiguates between an assignment and a rule.
func parseEqualsOrTarget(p *parser, t token) parserStateFun {
	if t.typ == tokenAssign {
		return parseAssignment
	}
	if t.typ == tokenWord {
		p.push(t)
		return parseTargets
	}
	if t.typ == tokenColon {
		p.push(t)
		return parseAttributesOrPrereqs
	}
	p.parseError("reading a target or assignment",
		"'=', ':', or another target", t)
	return parseTopLevel // unreachable: parseError aborts
}
// Consumed 'foo='. Everything up to the newline is a value being
// assigned to foo.
func parseAssignment(p *parser, t token) parserStateFun {
	if t.typ != tokenNewline {
		p.push(t)
		return parseAssignment
	}
	if err := p.rules.executeAssignment(p.tokenbuf); err != nil {
		p.basicErrorAtToken(err.what, err.where)
	}
	p.clear()
	return parseTopLevel
}
// Everything up to the first ':' must be a target.
func parseTargets(p *parser, t token) parserStateFun {
	if t.typ == tokenWord {
		p.push(t)
		return parseTargets
	}
	if t.typ == tokenColon {
		p.push(t)
		return parseAttributesOrPrereqs
	}
	p.parseError("reading a rule's targets",
		"filename or pattern", t)
	return parseTargets
}
// Consumed one or more strings followed by the first ':'. What follows is
// either the rule's attributes (terminated by a second ':') or its
// prerequisites (terminated by a newline).
func parseAttributesOrPrereqs(p *parser, t token) parserStateFun {
	if t.typ == tokenNewline {
		return parseRecipe
	}
	if t.typ == tokenColon {
		p.push(t)
		return parsePrereqs
	}
	if t.typ == tokenWord {
		p.push(t)
		return parseAttributesOrPrereqs
	}
	p.parseError("reading a rule's attributes or prerequisites",
		"an attribute, pattern, or filename", t)
	return parseAttributesOrPrereqs
}
// Targets and attributes and the second ':' have been consumed; the rest
// of the line is prerequisites.
func parsePrereqs(p *parser, t token) parserStateFun {
	if t.typ == tokenNewline {
		return parseRecipe
	}
	if t.typ == tokenWord {
		p.push(t)
		return parsePrereqs
	}
	p.parseError("reading a rule's prerequisites",
		"filename or pattern", t)
	return parsePrereqs
}
// An entire rule has been consumed. The buffered tokens are laid out as
//   targets... ':' [attributes... ':' ] prereqs...
// and t is either the rule's recipe or the first token of the next
// statement. Assemble the rule and add it to the rule set.
func parseRecipe(p *parser, t token) parserStateFun {
	// Assemble the rule!
	r := rule{}
	// find one or two colons: i is the first colon, j the second (or
	// len(p.tokenbuf) if there is none)
	i := 0
	for ; i < len(p.tokenbuf) && p.tokenbuf[i].typ != tokenColon; i++ {
	}
	j := i + 1
	for ; j < len(p.tokenbuf) && p.tokenbuf[j].typ != tokenColon; j++ {
	}
	// rule has attributes (tokens between the two colons)
	if j < len(p.tokenbuf) {
		attribs := make([]string, 0)
		for k := i + 1; k < j; k++ {
			exparts := expand(p.tokenbuf[k].val, p.rules.vars, true)
			attribs = append(attribs, exparts...)
		}
		err := r.parseAttribs(attribs)
		if err != nil {
			msg := fmt.Sprintf("while reading a rule's attributes expected an attribute but found \"%c\".", err.found)
			p.basicErrorAtToken(msg, p.tokenbuf[i+1])
		}
		if r.attributes.regex {
			r.ismeta = true
		}
	} else {
		// no attributes: prereqs start right after the first colon
		j = i
	}
	// targets: expand each token before the first colon
	r.targets = make([]pattern, 0)
	for k := 0; k < i; k++ {
		exparts := expand(p.tokenbuf[k].val, p.rules.vars, true)
		for i := range exparts { // NOTE: shadows the outer i
			targetstr := exparts[i]
			r.targets = append(r.targets, pattern{spat: targetstr})
			if r.attributes.regex {
				// 'R' attribute: the target is a regular expression
				rpat, err := regexp.Compile("^" + targetstr + "$")
				if err != nil {
					msg := fmt.Sprintf("invalid regular expression: %q", err)
					p.basicErrorAtToken(msg, p.tokenbuf[k])
				}
				r.targets[len(r.targets)-1].rpat = rpat
			} else {
				// a '%' makes this a suffix (meta) rule: build an
				// equivalent regexp with the stem captured
				idx := strings.IndexRune(targetstr, '%')
				if idx >= 0 {
					var left, right string
					if idx > 0 {
						left = regexp.QuoteMeta(targetstr[:idx])
					}
					if idx < len(targetstr)-1 {
						right = regexp.QuoteMeta(targetstr[idx+1:])
					}
					patstr := fmt.Sprintf("^%s(.*)%s$", left, right)
					rpat, err := regexp.Compile(patstr)
					if err != nil {
						msg := fmt.Sprintf("error compiling suffix rule. This is a bug. Error: %s", err)
						p.basicErrorAtToken(msg, p.tokenbuf[k])
					}
					r.targets[len(r.targets)-1].rpat = rpat
					r.targets[len(r.targets)-1].issuffix = true
					r.ismeta = true
				}
			}
		}
	}
	// prereqs: expand each token after the last colon
	r.prereqs = make([]string, 0)
	for k := j + 1; k < len(p.tokenbuf); k++ {
		exparts := expand(p.tokenbuf[k].val, p.rules.vars, true)
		r.prereqs = append(r.prereqs, exparts...)
	}
	if t.typ == tokenRecipe {
		r.recipe = expandRecipeSigils(stripIndentation(t.val, t.col), p.rules.vars)
	}
	p.rules.add(r)
	p.clear()
	// the current token doesn't belong to this rule
	if t.typ != tokenRecipe {
		return parseTopLevel(p, t)
	}
	return parseTopLevel
}

208
src/tool/mk/recipe.go Normal file
View file

@ -0,0 +1,208 @@
// Various function for dealing with recipes.
package mk
import (
"bufio"
"fmt"
"io"
"log"
"os"
"os/exec"
"strings"
"unicode/utf8"
)
// Try to unindent a recipe, so that it begins at column 0. (This is mainly
// for recipes in python, or other indentation-significant languages.)
// At most mincol leading whitespace characters (each counted as one
// column, tabs included) are removed from every line.
func stripIndentation(s string, mincol int) string {
	// Build into a strings.Builder instead of string concatenation,
	// which is quadratic for long recipes.
	var b strings.Builder
	b.Grow(len(s))
	reader := bufio.NewReader(strings.NewReader(s))
	for {
		line, err := reader.ReadString('\n')
		// advance past up to mincol columns of leading whitespace
		i, col := 0, 0
		for i < len(line) && col < mincol {
			c, w := utf8.DecodeRuneInString(line[i:])
			if c != ' ' && c != '\t' && c != '\n' {
				break
			}
			col++
			i += w
		}
		b.WriteString(line[i:])
		if err != nil { // io.EOF after the final (possibly partial) line
			break
		}
	}
	return b.String()
}
// Indent each line of a recipe.
func printIndented(out io.Writer, s string, ind int) {
indentation := strings.Repeat(" ", ind)
reader := bufio.NewReader(strings.NewReader(s))
firstline := true
for {
line, err := reader.ReadString('\n')
if len(line) > 0 {
if !firstline {
io.WriteString(out, indentation)
}
io.WriteString(out, line)
}
if err != nil {
break
}
firstline = false
}
}
// Execute a recipe.
// Expands the rule's recipe for the given target/edge, prints it, and
// (unless dryrun) runs it through the configured shell. Returns true on
// success; a dry run always succeeds.
func dorecipe(target string, u *node, e *edge, dryrun bool) bool {
	vars := make(map[string][]string)
	vars["target"] = []string{target}
	if e.r.ismeta {
		if e.r.attributes.regex {
			// expose each regexp submatch as $stem0, $stem1, ...
			for i := range e.matches {
				vars[fmt.Sprintf("stem%d", i)] = e.matches[i : i+1]
			}
		} else {
			vars["stem"] = []string{e.stem}
		}
	}
	// TODO: other variables to set: alltargets, newprereq
	// $prereq lists the prerequisites contributed by this rule.
	prereqs := make([]string, 0)
	for i := range u.prereqs {
		if u.prereqs[i].r == e.r && u.prereqs[i].v != nil {
			prereqs = append(prereqs, u.prereqs[i].v.name)
		}
	}
	vars["prereq"] = prereqs

	input := expandRecipeSigils(e.r.recipe, vars)

	sh, args := "sh", []string{}
	if len(e.r.shell) > 0 {
		sh, args = e.r.shell[0], e.r.shell[1:]
	}

	mkPrintRecipe(target, input, e.r.attributes.quiet)
	if dryrun {
		return true
	}

	_, ok := subprocess(sh, args, input, false)
	return ok
}
// Execute a subprocess (typically a recipe).
//
// Args:
//   program: Program path or name located in PATH
//   args: Arguments passed to the program (argv[1:])
//   input: String piped into the program's stdin
//   capture_out: If true, capture and return the program's stdout rather than echoing it.
//
// Returns
//   (output, success)
//   output is an empty string if capture_out is false, or the collected output from the program if true.
//
//   success is true if the exit code was 0 and false otherwise
//
func subprocess(program string,
	args []string,
	input string,
	capture_out bool) (string, bool) {
	program_path, err := exec.LookPath(program)
	if err != nil {
		log.Fatal(err)
	}
	proc_args := []string{program}
	proc_args = append(proc_args, args...)

	stdin_pipe_read, stdin_pipe_write, err := os.Pipe()
	if err != nil {
		log.Fatal(err)
	}
	attr := os.ProcAttr{Files: []*os.File{stdin_pipe_read, os.Stdout, os.Stderr}}

	output := make([]byte, 0)
	capture_done := make(chan bool)
	var stdout_pipe_read *os.File
	if capture_out {
		var stdout_pipe_write *os.File
		stdout_pipe_read, stdout_pipe_write, err = os.Pipe()
		if err != nil {
			log.Fatal(err)
		}
		attr.Files[1] = stdout_pipe_write
		// Drain the child's stdout until EOF; synchronized with the
		// parent through capture_done.
		go func() {
			buf := make([]byte, 1024)
			for {
				n, err := stdout_pipe_read.Read(buf)
				if err == io.EOF && n == 0 {
					break
				} else if err != nil {
					log.Fatal(err)
				}
				output = append(output, buf[:n]...)
			}
			capture_done <- true
		}()
	}

	proc, err := os.StartProcess(program_path, proc_args, &attr)
	if err != nil {
		log.Fatal(err)
	}
	// The child holds its own copy of the stdin read end; close ours so
	// the descriptor is not leaked (previously leaked on every call).
	stdin_pipe_read.Close()

	go func() {
		_, err := stdin_pipe_write.WriteString(input)
		if err != nil {
			log.Fatal(err)
		}
		// Close the write end so the child sees EOF on stdin.
		if err := stdin_pipe_write.Close(); err != nil {
			log.Fatal(err)
		}
	}()

	state, err := proc.Wait()
	// Close the write end of the capture pipe so the reader goroutine
	// gets EOF and terminates.
	if attr.Files[1] != os.Stdout {
		attr.Files[1].Close()
	}
	if err != nil {
		log.Fatal(err)
	}

	// wait until stdout copying is finished, then release the read end
	// (previously leaked on every capturing call)
	if capture_out {
		<-capture_done
		stdout_pipe_read.Close()
	}

	return string(output), state.Success()
}

215
src/tool/mk/rules.go Normal file
View file

@ -0,0 +1,215 @@
// Mkfiles are parsed into ruleSets, which as the name suggests, are sets of
// rules with accompanying recipes, as well as assigned variables which are
// expanding when evaluating rules and recipes.
package mk
import (
"fmt"
"regexp"
"unicode/utf8"
)
// attribSet records the single-letter attributes that may appear between
// the first and second colon of a rule's header.
type attribSet struct {
	delFailed       bool // delete targets when the recipe fails
	nonstop         bool // don't stop if the recipe fails
	forcedTimestamp bool // update timestamp whether the recipe does or not
	nonvirtual      bool // a meta-rule that will only match files
	quiet           bool // don't print the recipe
	regex           bool // regular expression meta-rule
	update          bool // treat the targets as if they were updated
	virtual         bool // rule is virtual (does not match files)
	exclusive       bool // don't execute concurrently with any other rule
}
// Error parsing an attribute: carries the unrecognized attribute rune.
type attribError struct {
	found rune // the offending character
}
// target and prereq patterns
type pattern struct {
	issuffix bool           // is a suffix '%' rule, so we should define $stem.
	spat     string         // simple string pattern
	rpat     *regexp.Regexp // non-nil if this is a regexp pattern
}
// Match a pattern against a target name. Returns the regexp submatches
// for regexp patterns, an empty (non-nil) slice for an exact string
// match, or nil if the target doesn't match.
func (p *pattern) match(target string) []string {
	if p.rpat != nil {
		return p.rpat.FindStringSubmatch(target)
	}
	if target != p.spat {
		return nil
	}
	return make([]string, 0)
}
// A single rule.
type rule struct {
	targets    []pattern // non-empty array of targets
	attributes attribSet // rule attributes
	prereqs    []string  // possibly empty prerequisites
	shell      []string  // command used to execute the recipe
	recipe     string    // recipe source
	command    []string  // command attribute ('P')
	ismeta     bool      // is this a meta rule (regexp or suffix targets)
	file       string    // file where the rule is defined
	line       int       // line number on which the rule is defined
}
// Equivalent recipes: true when two rules share the same recipe text and
// the same shell command used to run it.
func (r1 *rule) equivRecipe(r2 *rule) bool {
	if r1.recipe != r2.recipe || len(r1.shell) != len(r2.shell) {
		return false
	}
	for i, s := range r1.shell {
		if s != r2.shell[i] {
			return false
		}
	}
	return true
}
// A set of rules, plus the variable assignments made while parsing them.
type ruleSet struct {
	vars  map[string][]string // variable assignments currently in effect
	rules []rule              // all rules, in the order encountered
	// map a target to an array of indexes into rules
	targetrules map[string][]int
}
// Read attributes from an array of strings, updating the rule.
// 'P' and 'S' consume the rest of the input as a command/shell
// specification; any unrecognized rune produces an attribError.
func (r *rule) parseAttribs(inputs []string) *attribError {
	for i, input := range inputs {
		for pos, c := range input {
			switch c {
			case 'D':
				r.attributes.delFailed = true
			case 'E':
				r.attributes.nonstop = true
			case 'N':
				r.attributes.forcedTimestamp = true
			case 'n':
				r.attributes.nonvirtual = true
			case 'Q':
				r.attributes.quiet = true
			case 'R':
				r.attributes.regex = true
			case 'U':
				r.attributes.update = true
			case 'V':
				r.attributes.virtual = true
			case 'X':
				r.attributes.exclusive = true
			case 'P':
				// everything after 'P' is the command
				if rest := input[pos+utf8.RuneLen(c):]; len(rest) > 0 {
					r.command = append(r.command, rest)
				}
				r.command = append(r.command, inputs[i+1:]...)
				return nil
			case 'S':
				// everything after 'S' is the shell invocation
				if rest := input[pos+utf8.RuneLen(c):]; len(rest) > 0 {
					r.shell = append(r.shell, rest)
				}
				r.shell = append(r.shell, inputs[i+1:]...)
				return nil
			default:
				return &attribError{c}
			}
		}
	}
	return nil
}
// Add a rule to the rule set, indexing each of its literal (non-regexp)
// targets in targetrules.
func (rs *ruleSet) add(r rule) {
	rs.rules = append(rs.rules, r)
	k := len(rs.rules) - 1
	for _, tgt := range r.targets {
		if tgt.rpat != nil {
			continue
		}
		rs.targetrules[tgt.spat] = append(rs.targetrules[tgt.spat], k)
	}
}
// isValidVarName reports whether v is a legal mk variable name: a
// nonempty string starting with an ASCII letter or '_', followed only by
// ASCII letters, digits, or '_'.
func isValidVarName(v string) bool {
	// Previously the empty string was (vacuously) accepted; reject it.
	if len(v) == 0 {
		return false
	}
	for i := 0; i < len(v); {
		c, w := utf8.DecodeRuneInString(v[i:])
		if i == 0 && !(isalpha(c) || c == '_') {
			return false
		} else if !(isalnum(c) || c == '_') {
			return false
		}
		i += w
	}
	return true
}

// isdigit reports whether c is an ASCII digit.
func isdigit(c rune) bool {
	return '0' <= c && c <= '9'
}

// isalpha reports whether c is an ASCII letter.
func isalpha(c rune) bool {
	return ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z')
}

// isalnum reports whether c is an ASCII letter or digit.
func isalnum(c rune) bool {
	return isalpha(c) || isdigit(c)
}
// assignmentError describes a failed variable assignment and the token
// at which it occurred.
type assignmentError struct {
	what  string // description of the problem
	where token  // token at which the assignment failed
}
// Parse and execute assignment operation.
// ts[0] is the variable being assigned; the remaining tokens form the
// value, which is variable-expanded before being stored in rs.vars.
func (rs *ruleSet) executeAssignment(ts []token) *assignmentError {
	assignee := ts[0].val
	if !isValidVarName(assignee) {
		return &assignmentError{
			fmt.Sprintf("target of assignment is not a valid variable name: \"%s\"", assignee),
			ts[0]}
	}
	// interpret tokens in assignment context: a non-word token, or a word
	// that directly follows a non-word token, is glued onto the previous
	// value rather than starting a new one.
	// NOTE(review): presumably this merges quoted/adjacent fragments into
	// a single value — confirm against the lexer's token stream.
	input := make([]string, 0)
	for i := 1; i < len(ts); i++ {
		if ts[i].typ != tokenWord || (i > 1 && ts[i-1].typ != tokenWord) {
			if len(input) == 0 {
				input = append(input, ts[i].val)
			} else {
				input[len(input)-1] += ts[i].val
			}
		} else {
			input = append(input, ts[i].val)
		}
	}
	// expanded variables: each value may expand to several words
	vals := make([]string, 0)
	for i := 0; i < len(input); i++ {
		vals = append(vals, expand(input[i], rs.vars, true)...)
	}
	rs.vars[assignee] = vals
	return nil
}