deserialize.go

package unit

import (
	"bufio"
	"bytes"
	"errors"
	"fmt"
	"io"
	"strings"
	"unicode"
)

// Deserialize parses a systemd unit file into a list of UnitOption objects.
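//
// A minimal usage sketch (the unit text shown here is illustrative):
//
//	opts, err := unit.Deserialize(strings.NewReader("[Service]\nExecStart=/bin/true\n"))
//	if err != nil {
//		// handle the parse error
//	}
//	for _, opt := range opts {
//		fmt.Println(opt.Section, opt.Name, opt.Value)
//	}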
func Deserialize(f io.Reader) (opts []*UnitOption, err error) {
	lexer, optchan, errchan := newLexer(f)
	go lexer.lex()

	for opt := range optchan {
		opts = append(opts, opt)
	}

	err = <-errchan
	return opts, err
}
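
// newLexer wires a lexer to the supplied reader and returns the channels on
// which parsed options and the final error (or nil) are delivered.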
func newLexer(f io.Reader) (*lexer, <-chan *UnitOption, <-chan error) {
	optchan := make(chan *UnitOption)
	errchan := make(chan error, 1)
	buf := bufio.NewReader(f)

	return &lexer{buf, optchan, errchan, ""}, optchan, errchan
}
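
// lexer scans a unit file rune by rune, emitting each parsed option on
// optchan and at most one error on errchan.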
type lexer struct {
	buf     *bufio.Reader
	optchan chan *UnitOption
	errchan chan error
	section string
}
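
// lex drives the state machine until a step returns a nil successor or an
// error, then closes both channels so Deserialize can finish.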
func (l *lexer) lex() {
	var err error
	next := l.lexNextSection
	for next != nil {
		next, err = next()
		if err != nil {
			l.errchan <- err
			break
		}
	}

	close(l.optchan)
	close(l.errchan)
}
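
// lexStep is a single state of the lexer; it returns the next state, or nil
// when lexing is complete.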
type lexStep func() (lexStep, error)
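
// lexSectionName consumes a section header up to the closing ']' and passes
// the name on to the suffix check.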
func (l *lexer) lexSectionName() (lexStep, error) {
	sec, err := l.buf.ReadBytes(']')
	if err != nil {
		return nil, errors.New("unable to find end of section")
	}

	return l.lexSectionSuffixFunc(string(sec[:len(sec)-1])), nil
}
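
// lexSectionSuffixFunc verifies that nothing but whitespace follows a section
// header before moving on to the section body.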
func (l *lexer) lexSectionSuffixFunc(section string) lexStep {
	return func() (lexStep, error) {
		garbage, err := l.toEOL()
		if err != nil {
			return nil, err
		}

		garbage = bytes.TrimSpace(garbage)
		if len(garbage) > 0 {
			return nil, fmt.Errorf("found garbage after section name %s: %q", section, garbage)
		}

		return l.lexNextSectionOrOptionFunc(section), nil
	}
}
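
// ignoreLineFunc discards the current line, honoring trailing-backslash
// continuations, and then hands control to next.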
func (l *lexer) ignoreLineFunc(next lexStep) lexStep {
	return func() (lexStep, error) {
		for {
			line, err := l.toEOL()
			if err != nil {
				return nil, err
			}

			line = bytes.TrimSuffix(line, []byte{' '})

			// lack of continuation means this line has been exhausted
			if !bytes.HasSuffix(line, []byte{'\\'}) {
				break
			}
		}

		// the ignored line and any continuations have been consumed; move on
		return next, nil
	}
}
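
// lexNextSection scans forward to the next section header, ignoring comments
// and any other leading content.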
func (l *lexer) lexNextSection() (lexStep, error) {
	r, _, err := l.buf.ReadRune()
	if err != nil {
		if err == io.EOF {
			err = nil
		}
		return nil, err
	}

	if r == '[' {
		return l.lexSectionName, nil
	} else if isComment(r) {
		return l.ignoreLineFunc(l.lexNextSection), nil
	}

	return l.lexNextSection, nil
}
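
// lexNextSectionOrOptionFunc dispatches on the next rune: whitespace is
// skipped, '[' starts a new section, comments are ignored, and anything else
// begins an option name within the current section.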
func (l *lexer) lexNextSectionOrOptionFunc(section string) lexStep {
	return func() (lexStep, error) {
		r, _, err := l.buf.ReadRune()
		if err != nil {
			if err == io.EOF {
				err = nil
			}
			return nil, err
		}

		if unicode.IsSpace(r) {
			return l.lexNextSectionOrOptionFunc(section), nil
		} else if r == '[' {
			return l.lexSectionName, nil
		} else if isComment(r) {
			return l.ignoreLineFunc(l.lexNextSectionOrOptionFunc(section)), nil
		}

		l.buf.UnreadRune()
		return l.lexOptionNameFunc(section), nil
	}
}
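
// lexOptionNameFunc reads an option name up to the '=' separator.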
func (l *lexer) lexOptionNameFunc(section string) lexStep {
	return func() (lexStep, error) {
		var partial bytes.Buffer
		for {
			r, _, err := l.buf.ReadRune()
			if err != nil {
				return nil, err
			}

			if r == '\n' || r == '\r' {
				return nil, errors.New("unexpected newline encountered while parsing option name")
			}

			if r == '=' {
				break
			}

			partial.WriteRune(r)
		}

		name := strings.TrimSpace(partial.String())
		return l.lexOptionValueFunc(section, name), nil
	}
}
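
// lexOptionValueFunc reads an option value, joining backslash-continued
// lines with a space, and emits the completed UnitOption.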
func (l *lexer) lexOptionValueFunc(section, name string) lexStep {
	return func() (lexStep, error) {
		var partial bytes.Buffer

		for {
			line, err := l.toEOL()
			if err != nil {
				return nil, err
			}

			// lack of continuation means this value has been exhausted
			idx := bytes.LastIndex(line, []byte{'\\'})
			if idx == -1 || idx != (len(line)-1) {
				partial.Write(line)
				break
			}

			partial.Write(line[0:idx])
			partial.WriteRune(' ')
		}

		val := strings.TrimSpace(partial.String())
		l.optchan <- &UnitOption{Section: section, Name: name, Value: val}

		return l.lexNextSectionOrOptionFunc(section), nil
	}
}
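
// toEOL reads through the next newline, returning the line with any trailing
// '\n' and '\r' stripped.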
func (l *lexer) toEOL() ([]byte, error) {
	line, err := l.buf.ReadBytes('\n')
	// ignore EOF here since it's roughly equivalent to EOL
	if err != nil && err != io.EOF {
		return nil, err
	}

	// strip the newline first so a preceding '\r' (CRLF input) is also removed
	line = bytes.TrimSuffix(line, []byte{'\n'})
	line = bytes.TrimSuffix(line, []byte{'\r'})

	return line, nil
}
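
// isComment reports whether r introduces a comment line ('#' or ';').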
func isComment(r rune) bool {
	return r == '#' || r == ';'
}