mirror of
https://github.com/NullHypothesis/censorbib.git
synced 2025-07-22 06:09:10 -04:00
Add new build tool.
bibliogra.py requires Python 2, which is a pain to deal with. This commit adds a Go tool that compiles CensorBib from BibTeX to HTML. The tool does the bare minimum and is quite strict in the BibTeX format it expects.
This commit is contained in:
parent
d432134ed4
commit
4be5f7bfb2
21 changed files with 2381 additions and 0 deletions
55
src/decode.go
Normal file
55
src/decode.go
Normal file
|
@ -0,0 +1,55 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type conversion struct {
|
||||
from string
|
||||
to string
|
||||
}
|
||||
|
||||
func decodeTitle(title string) string {
|
||||
for _, convert := range []conversion{
|
||||
{`\#`, "#"},
|
||||
{`--`, `–`},
|
||||
{"``", "“"},
|
||||
{"''", "”"},
|
||||
{"'", "’"}, // U+2019
|
||||
{`$\cdot$`, `·`}, // U+00B7.
|
||||
} {
|
||||
title = strings.Replace(title, convert.from, convert.to, -1)
|
||||
}
|
||||
|
||||
// Get rid of all curly brackets. We're displaying titles without changing
|
||||
// their casing.
|
||||
title = strings.ReplaceAll(title, "{", "")
|
||||
title = strings.ReplaceAll(title, "}", "")
|
||||
|
||||
return title
|
||||
}
|
||||
|
||||
func decodeAuthors(authors string) string {
|
||||
for _, convert := range []conversion{
|
||||
{"'", "’"},
|
||||
} {
|
||||
authors = strings.Replace(authors, convert.from, convert.to, -1)
|
||||
}
|
||||
// For simplicity, we expect authors to be formatted as "John Doe" instead
|
||||
// of "Doe, John".
|
||||
if strings.Contains(authors, ",") {
|
||||
log.Fatalf("author %q contains a comma", authors)
|
||||
}
|
||||
authorSlice := strings.Split(authors, " and ")
|
||||
return strings.Join(authorSlice, ", ")
|
||||
}
|
||||
|
||||
func decodeProceedings(proceedings string) string {
|
||||
for _, convert := range []conversion{
|
||||
{`\&`, "&"},
|
||||
} {
|
||||
proceedings = strings.Replace(proceedings, convert.from, convert.to, -1)
|
||||
}
|
||||
return proceedings
|
||||
}
|
81
src/decode_test.go
Normal file
81
src/decode_test.go
Normal file
|
@ -0,0 +1,81 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestToString(t *testing.T) {
|
||||
testCases := []conversion{
|
||||
{
|
||||
from: "Title",
|
||||
to: "Title",
|
||||
},
|
||||
{
|
||||
from: "This is a {Title}",
|
||||
to: "This is a Title",
|
||||
},
|
||||
{
|
||||
from: "This is a {Title}",
|
||||
to: "This is a Title",
|
||||
},
|
||||
{
|
||||
from: `{\#h00t}: Censorship Resistant Microblogging`,
|
||||
to: `#h00t: Censorship Resistant Microblogging`,
|
||||
},
|
||||
{
|
||||
from: "``Good'' Worms and Human Rights",
|
||||
to: "“Good” Worms and Human Rights",
|
||||
},
|
||||
{
|
||||
from: "An Analysis of {China}'s ``{Great Cannon}''",
|
||||
to: "An Analysis of China’s “Great Cannon”",
|
||||
},
|
||||
{
|
||||
from: `lib$\cdot$erate, (n):`,
|
||||
to: `lib·erate, (n):`,
|
||||
},
|
||||
{
|
||||
from: "Well -- Exploring the {Great} {Firewall}'s Poisoned {DNS}",
|
||||
to: "Well – Exploring the Great Firewall’s Poisoned DNS",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
to := decodeTitle(test.from)
|
||||
if to != test.to {
|
||||
t.Errorf("Expected\n%s\ngot\n%s", test.to, to)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeAuthors(t *testing.T) {
|
||||
testCases := []conversion{
|
||||
{ // Multiple authors should be separated by commas.
|
||||
from: "John Doe and Jane Doe",
|
||||
to: "John Doe, Jane Doe",
|
||||
},
|
||||
{ // Single authors should remain as-is.
|
||||
from: "John Doe",
|
||||
to: "John Doe",
|
||||
},
|
||||
{ // Single-name authors should remain as-is.
|
||||
from: "John and Jane",
|
||||
to: "John, Jane",
|
||||
},
|
||||
{ // Non-ASCII characters should be unaffected.
|
||||
from: "Jóhn Doe",
|
||||
to: "Jóhn Doe",
|
||||
},
|
||||
{ // Apostrophes should be replaced with the right single quote.
|
||||
from: "John O'Brian",
|
||||
to: "John O’Brian",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
to := decodeAuthors(test.from)
|
||||
if to != test.to {
|
||||
t.Errorf("Expected\n%s\ngot\n%s", test.to, to)
|
||||
}
|
||||
}
|
||||
}
|
11
src/footer.go
Normal file
11
src/footer.go
Normal file
|
@ -0,0 +1,11 @@
|
|||
package main
|
||||
|
||||
func footer() string {
|
||||
return `<div id="footer">
|
||||
Icons taken without modification from
|
||||
<a href="https://fontawesome.com/license">Font Awesome</a>.
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>`
|
||||
}
|
5
src/go.mod
Normal file
5
src/go.mod
Normal file
|
@ -0,0 +1,5 @@
|
|||
module censorbib-go
|
||||
|
||||
go 1.21.3
|
||||
|
||||
require github.com/nickng/bibtex v1.3.0
|
2
src/go.sum
Normal file
2
src/go.sum
Normal file
|
@ -0,0 +1,2 @@
|
|||
github.com/nickng/bibtex v1.3.0 h1:iv0408z8Xe+FEVquJUo8eraXnhrAF0e+2/WayPcism8=
|
||||
github.com/nickng/bibtex v1.3.0/go.mod h1:4BJ3ka/ZjGVXcHOlkzlRonex6U17L3kW6ICEsygP2bg=
|
231
src/header.go
Normal file
231
src/header.go
Normal file
|
@ -0,0 +1,231 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"log"
|
||||
"text/template"
|
||||
"time"
|
||||
)
|
||||
|
||||
const headerTemplate = `
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
|
||||
<title>The Internet censorship bibliography</title>
|
||||
<link rel="icon" type="image/svg+xml" href="favicon.svg">
|
||||
<style>
|
||||
body {
|
||||
font-family: Roboto,Helvetica,sans-serif;
|
||||
background: #ddd;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
margin-top: 20px;
|
||||
max-width: 1000px;
|
||||
}
|
||||
li {
|
||||
margin-top: 1em;
|
||||
margin-bottom: 1em;
|
||||
margin-right: 1em;
|
||||
}
|
||||
h1 {
|
||||
font-size: 25px;
|
||||
color: #efefef;
|
||||
width: 80%;
|
||||
float: left;
|
||||
}
|
||||
ul {
|
||||
border-radius: 10px;
|
||||
border:1px solid #c0c0c0;
|
||||
background: #efefef;
|
||||
box-shadow: 2px 2px 5px #bbb;
|
||||
}
|
||||
a:link {
|
||||
color:#0b61a4;
|
||||
text-decoration:none;
|
||||
}
|
||||
a:visited {
|
||||
color:#033e6b;
|
||||
text-decoration:none;
|
||||
}
|
||||
a:hover {
|
||||
text-decoration:underline;
|
||||
}
|
||||
p {
|
||||
margin: 0px;
|
||||
}
|
||||
.author {
|
||||
color: #666;
|
||||
}
|
||||
.venue {
|
||||
font-style: italic;
|
||||
}
|
||||
.paper {
|
||||
font-weight: bold;
|
||||
}
|
||||
.other {
|
||||
color: #666;
|
||||
}
|
||||
#footer {
|
||||
text-align: center;
|
||||
line-height: 20px;
|
||||
}
|
||||
.icon {
|
||||
height: 1em;
|
||||
margin-right: 0.5em;
|
||||
}
|
||||
.icons {
|
||||
float: right;
|
||||
}
|
||||
.top-icon {
|
||||
height: 1em;
|
||||
width: 1em;
|
||||
position: relative;
|
||||
vertical-align: middle;
|
||||
margin-left: 1em;
|
||||
}
|
||||
.menu-item {
|
||||
padding-bottom: 5px;
|
||||
}
|
||||
.url {
|
||||
font-family: monospace;
|
||||
font-size: 12px;
|
||||
}
|
||||
:target {
|
||||
background-color: #f6ba81;
|
||||
}
|
||||
#left-header {
|
||||
flex: 4;
|
||||
background: #efefef;
|
||||
margin-right: 0.5em;
|
||||
border-radius: 10px;
|
||||
border: 1px solid #c0c0c0;
|
||||
box-shadow: 2px 2px 5px #bbb;
|
||||
overflow: hidden; /* For child elements to inherit rounded corners. */
|
||||
}
|
||||
#right-header {
|
||||
flex: 1;
|
||||
background: #efefef;
|
||||
margin-left: 0.5em;
|
||||
background: #333 url('assets/research-power-tools-cover.jpg') no-repeat;
|
||||
background-size: 100%;
|
||||
}
|
||||
.round-shadow {
|
||||
border-radius: 10px;
|
||||
border: 1px solid #c0c0c0;
|
||||
box-shadow: 2px 2px 5px #bbb;
|
||||
overflow: hidden; /* For child elements to inherit rounded corners. */
|
||||
}
|
||||
.flex-row {
|
||||
display: flex;
|
||||
}
|
||||
.flex-column {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
#title-box {
|
||||
text-align: center;
|
||||
background: #333 url('open-access.svg') right/25% no-repeat;
|
||||
}
|
||||
#censorbib-description {
|
||||
padding: 1em;
|
||||
flex: 5;
|
||||
}
|
||||
#censorbib-links {
|
||||
padding: 1em;
|
||||
flex: 2;
|
||||
font-size: 0.9em;
|
||||
}
|
||||
#book-info {
|
||||
text-align: center;
|
||||
padding: 0.5em;
|
||||
background: #333;
|
||||
color: #efefef;
|
||||
}
|
||||
#book-info > a:link {
|
||||
color: #d94b7b
|
||||
}
|
||||
#book-info > a:visited {
|
||||
color: #d94b7b
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<div class="flex-row">
|
||||
|
||||
<div id="left-header" class="flex-column round-shadow">
|
||||
|
||||
<div id="title-box">
|
||||
<h1>Selected Research Papers<br>in Internet Censorship</h1>
|
||||
</div>
|
||||
|
||||
<div class="flex-row">
|
||||
|
||||
<div id="censorbib-description">
|
||||
CensorBib is an online archive of selected research papers in the field
|
||||
of Internet censorship. Most papers on CensorBib approach the topic
|
||||
from a technical angle, by proposing designs that circumvent censorship
|
||||
systems, or by measuring how censorship works. The icons next to each
|
||||
paper make it easy to download, cite, and link to papers. If you think
|
||||
I missed a paper,
|
||||
<a href="https://nymity.ch/contact.txt">let me know</a>.
|
||||
You can sort papers by
|
||||
<a href="year.html">year</a>,
|
||||
<a href="year_reverse.html">reverse year</a> (default),
|
||||
<a href="author.html">author</a>, and
|
||||
<a href="author_reverse.html">reverse author</a>.
|
||||
Finally, the
|
||||
<a href="https://github.com/net4people/bbs/issues">net4people/bbs forum</a>
|
||||
has reading groups for many of the papers listed below.
|
||||
</div> <!-- censorbib-description -->
|
||||
|
||||
<div id="censorbib-links">
|
||||
<div class="menu-item">
|
||||
<img class="top-icon" src="assets/code-icon.svg" alt="source code icon">
|
||||
<a href="https://github.com/NullHypothesis/censorbib">CensorBib code</a>
|
||||
</div>
|
||||
<div class="menu-item">
|
||||
<img class="top-icon" src="assets/update-icon.svg" alt="update icon">
|
||||
<a href="https://github.com/NullHypothesis/censorbib/commits/master">Last update: {{.Date}}</a>
|
||||
</div>
|
||||
</div> <!-- censorbib-links -->
|
||||
|
||||
</div>
|
||||
|
||||
</div> <!-- left-header -->
|
||||
|
||||
<div id="right-header" class="round-shadow">
|
||||
|
||||
<div class="flex-column" style="height: 100%">
|
||||
<div style="flex: 1 1 auto">
|
||||
</div>
|
||||
|
||||
<div id="book-info" style="flex: 0 1 auto">
|
||||
Are you a researcher? If so, you may like my book
|
||||
<a href="http://research-power-tools.com">Research Power Tools</a>.
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div> <!-- right-header -->
|
||||
|
||||
</div>`
|
||||
|
||||
func header() string {
|
||||
tmpl, err := template.New("header").Parse(headerTemplate)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
i := struct {
|
||||
Date string
|
||||
}{
|
||||
Date: time.Now().UTC().Format(time.DateOnly),
|
||||
}
|
||||
buf := bytes.NewBufferString("")
|
||||
if err = tmpl.Execute(buf, i); err != nil {
|
||||
log.Fatalf("Error executing template: %v", err)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
130
src/html.go
Normal file
130
src/html.go
Normal file
|
@ -0,0 +1,130 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/nickng/bibtex"
|
||||
)
|
||||
|
||||
func sortByYear(yearToEntries map[string][]string) []string {
|
||||
keys := make([]string, 0, len(yearToEntries))
|
||||
for k := range yearToEntries {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
sort.Sort(sort.Reverse(sort.StringSlice(keys)))
|
||||
return keys
|
||||
}
|
||||
|
||||
func appendIfNotEmpty(slice []string, s string) []string {
|
||||
if s != "" {
|
||||
return append(slice, s)
|
||||
}
|
||||
return slice
|
||||
}
|
||||
|
||||
func makeBib(to io.Writer, bibEntries []bibEntry) {
|
||||
yearToEntries := make(map[string][]string)
|
||||
|
||||
for _, entry := range bibEntries {
|
||||
y := entry.Fields["year"].String()
|
||||
yearToEntries[y] = append(yearToEntries[y], makeBibEntry(&entry))
|
||||
}
|
||||
|
||||
sortedYears := sortByYear(yearToEntries)
|
||||
for _, year := range sortedYears {
|
||||
fmt.Fprintln(to, "<ul>")
|
||||
for _, entry := range yearToEntries[year] {
|
||||
fmt.Fprint(to, entry)
|
||||
}
|
||||
fmt.Fprintln(to, "</ul>")
|
||||
}
|
||||
}
|
||||
|
||||
func makeBibEntry(entry *bibEntry) string {
|
||||
s := []string{
|
||||
fmt.Sprintf("<li id='%s'>", entry.CiteName),
|
||||
`<div>`,
|
||||
makeBibEntryTitle(entry),
|
||||
`</div>`,
|
||||
`<div>`,
|
||||
makeBibEntryAuthors(entry),
|
||||
`</div>`,
|
||||
`<span class="other">`,
|
||||
makeBibEntryMisc(entry),
|
||||
`</span>`,
|
||||
`</li>`,
|
||||
}
|
||||
return strings.Join(s, "\n")
|
||||
}
|
||||
|
||||
func makeBibEntryTitle(entry *bibEntry) string {
|
||||
// Paper title is on the left side.
|
||||
title := []string{
|
||||
`<span class="paper">`,
|
||||
decodeTitle(entry.Fields["title"].String()),
|
||||
`</span>`,
|
||||
}
|
||||
// Icons are on the right side.
|
||||
icons := []string{
|
||||
`<span class="icons">`,
|
||||
fmt.Sprintf("<a href='%s'>", entry.Fields["url"].String()),
|
||||
`<img class="icon" title="Download paper" src="assets/pdf-icon.svg" alt="Download icon">`,
|
||||
`</a>`,
|
||||
fmt.Sprintf("<a href='https://censorbib.nymity.ch/pdf/%s.pdf'>", entry.CiteName),
|
||||
`<img class="icon" title="Download cached paper" src="assets/cache-icon.svg" alt="Cached download icon">`,
|
||||
`</a>`,
|
||||
fmt.Sprintf("<a href='https://github.com/NullHypothesis/censorbib/blob/master/references.bib#L%d'>", entry.lineNum),
|
||||
`<img class="icon" title="Download BibTeX" src="assets/bibtex-icon.svg" alt="BibTeX download icon">`,
|
||||
`</a>`,
|
||||
fmt.Sprintf("<a href='#%s'>", entry.CiteName),
|
||||
`<img class="icon" title="Link to paper" src="assets/link-icon.svg" alt="Paper link icon">`,
|
||||
`</a>`,
|
||||
`</span>`,
|
||||
}
|
||||
return strings.Join(append(title, icons...), "\n")
|
||||
}
|
||||
|
||||
func makeBibEntryAuthors(entry *bibEntry) string {
|
||||
s := []string{
|
||||
`<span class="author">`,
|
||||
decodeAuthors(entry.Fields["author"].String()),
|
||||
`</span>`,
|
||||
}
|
||||
return strings.Join(s, "\n")
|
||||
}
|
||||
|
||||
func makeBibEntryMisc(entry *bibEntry) string {
|
||||
s := []string{}
|
||||
s = appendIfNotEmpty(s, makeBibEntryVenue(entry))
|
||||
s = appendIfNotEmpty(s, toStr(entry.Fields["year"]))
|
||||
s = appendIfNotEmpty(s, toStr(entry.Fields["publisher"]))
|
||||
return strings.Join(s, ", ")
|
||||
}
|
||||
|
||||
func makeBibEntryVenue(entry *bibEntry) string {
|
||||
var (
|
||||
prefix string
|
||||
bs bibtex.BibString
|
||||
ok bool
|
||||
)
|
||||
|
||||
if bs, ok = entry.Fields["booktitle"]; ok {
|
||||
prefix = "In Proc. of: "
|
||||
} else if bs, ok = entry.Fields["journal"]; ok {
|
||||
prefix = "In: "
|
||||
} else {
|
||||
return "" // Some entries are self-published.
|
||||
}
|
||||
|
||||
s := []string{
|
||||
prefix,
|
||||
`<span class="venue">`,
|
||||
decodeProceedings(toStr(bs)),
|
||||
`</span>`,
|
||||
}
|
||||
|
||||
return strings.Join(s, "")
|
||||
}
|
113
src/main.go
Normal file
113
src/main.go
Normal file
|
@ -0,0 +1,113 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/nickng/bibtex"
|
||||
)
|
||||
|
||||
// Matches e.g.: @inproceedings{Doe2024a,
|
||||
var re = regexp.MustCompile(`@[a-z]*\{([A-Za-z\-]*[0-9]{4}[a-z]),`)
|
||||
|
||||
// Map a cite name (e.g., Doe2024a) to its line number in the .bib file. All
|
||||
// cite names are unique.
|
||||
type entryToLineFunc func(string) int
|
||||
|
||||
// Augment bibtex.BibEntry with the entry's line number in the .bib file.
|
||||
type bibEntry struct {
|
||||
bibtex.BibEntry
|
||||
lineNum int
|
||||
}
|
||||
|
||||
func toStr(b bibtex.BibString) string {
|
||||
if b == nil {
|
||||
return ""
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func parseBibFile(path string) []bibEntry {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
bib, err := bibtex.Parse(file)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Augment our BibTeX entries with their respective line numbers in the .bib
|
||||
// file. This is necessary to create the "Download BibTeX" links.
|
||||
lineOf := buildEntryToLineFunc(path)
|
||||
bibEntries := []bibEntry{}
|
||||
for _, entry := range bib.Entries {
|
||||
bibEntries = append(bibEntries, bibEntry{
|
||||
BibEntry: *entry,
|
||||
lineNum: lineOf(entry.CiteName),
|
||||
})
|
||||
}
|
||||
|
||||
return bibEntries
|
||||
}
|
||||
|
||||
func buildEntryToLineFunc(path string) entryToLineFunc {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
sc := bufio.NewScanner(file)
|
||||
entryToLine := make(map[string]int)
|
||||
line := 0
|
||||
for sc.Scan() {
|
||||
line++
|
||||
s := sc.Text()
|
||||
if !strings.HasPrefix(s, "@") {
|
||||
continue
|
||||
}
|
||||
entry := parseCiteName(s) // E.g., Doe2024a
|
||||
entryToLine[entry] = line
|
||||
}
|
||||
if err := sc.Err(); err != nil {
|
||||
log.Fatalf("scan file error: %v", err)
|
||||
}
|
||||
|
||||
return func(entry string) int {
|
||||
if line, ok := entryToLine[entry]; ok {
|
||||
return line
|
||||
}
|
||||
log.Fatalf("could not find line number for cite name: %s", entry)
|
||||
return -1
|
||||
}
|
||||
}
|
||||
|
||||
func parseCiteName(line string) string {
|
||||
matches := re.FindStringSubmatch(line)
|
||||
if len(matches) != 2 {
|
||||
log.Fatalf("failed to extract cite name of: %s", line)
|
||||
}
|
||||
return matches[1]
|
||||
}
|
||||
|
||||
func run(w io.Writer, bibEntries []bibEntry) {
|
||||
fmt.Fprint(w, header())
|
||||
makeBib(w, bibEntries)
|
||||
fmt.Fprint(w, footer())
|
||||
}
|
||||
|
||||
func main() {
|
||||
path := flag.String("path", "", "Path to .bib file.")
|
||||
flag.Parse()
|
||||
if *path == "" {
|
||||
log.Fatal("No path to .bib file provided.")
|
||||
}
|
||||
run(os.Stdout, parseBibFile(*path))
|
||||
log.Println("Successfully created bibliography.")
|
||||
}
|
43
src/main_test.go
Normal file
43
src/main_test.go
Normal file
|
@ -0,0 +1,43 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/nickng/bibtex"
|
||||
)
|
||||
|
||||
func mustParse(t *testing.T, s string) bibEntry {
|
||||
t.Helper()
|
||||
bib, err := bibtex.Parse(strings.NewReader(s))
|
||||
if err != nil {
|
||||
t.Fatalf("failed to parse bibtex: %v", err)
|
||||
}
|
||||
return bibEntry{
|
||||
BibEntry: *bib.Entries[0],
|
||||
lineNum: 0,
|
||||
}
|
||||
}
|
||||
|
||||
func TestRun(t *testing.T) {
|
||||
buf := bytes.NewBufferString("")
|
||||
entry := mustParse(t, `@inproceedings{Almutairi2024a,
|
||||
author = {Sultan Almutairi and Yogev Neumann and Khaled Harfoush},
|
||||
title = {Fingerprinting {VPNs} with Custom Router Firmware: A New Censorship Threat Model},
|
||||
booktitle = {Consumer Communications \& Networking Conference},
|
||||
publisher = {IEEE},
|
||||
year = {2024},
|
||||
url = {https://censorbib.nymity.ch/pdf/Almutairi2024a.pdf},
|
||||
}`)
|
||||
|
||||
makeBib(buf, []bibEntry{entry})
|
||||
|
||||
bufStr := buf.String()
|
||||
if !strings.HasPrefix(bufStr, "<ul>") {
|
||||
t.Errorf("expected <ul> but got %q...", bufStr[:10])
|
||||
}
|
||||
if !strings.HasSuffix(bufStr, "</ul>\n") {
|
||||
t.Errorf("expected </ul> but got %q", bufStr[len(bufStr)-10:])
|
||||
}
|
||||
}
|
24
src/vendor/github.com/nickng/bibtex/.gitignore
generated
vendored
Normal file
24
src/vendor/github.com/nickng/bibtex/.gitignore
generated
vendored
Normal file
|
@ -0,0 +1,24 @@
|
|||
# Compiled Object files, Static and Dynamic libs (Shared Objects)
|
||||
*.o
|
||||
*.a
|
||||
*.so
|
||||
|
||||
# Folders
|
||||
_obj
|
||||
_test
|
||||
|
||||
# Architecture specific extensions/prefixes
|
||||
*.[568vq]
|
||||
[568vq].out
|
||||
|
||||
*.cgo1.go
|
||||
*.cgo2.c
|
||||
_cgo_defun.c
|
||||
_cgo_gotypes.go
|
||||
_cgo_export.*
|
||||
|
||||
_testmain.go
|
||||
|
||||
*.exe
|
||||
*.test
|
||||
*.prof
|
201
src/vendor/github.com/nickng/bibtex/LICENSE
generated
vendored
Normal file
201
src/vendor/github.com/nickng/bibtex/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
17
src/vendor/github.com/nickng/bibtex/README.md
generated
vendored
Normal file
17
src/vendor/github.com/nickng/bibtex/README.md
generated
vendored
Normal file
|
@ -0,0 +1,17 @@
|
|||
# bibtex  [](https://pkg.go.dev/github.com/nickng/bibtex)
|
||||
|
||||
## `nickng/bibtex` is a bibtex parser and library for Go.
|
||||
|
||||
The bibtex format is not standardised, this parser follows the descriptions found
|
||||
[here](http://maverick.inria.fr/~Xavier.Decoret/resources/xdkbibtex/bibtex_summary.html).
|
||||
Please file any issues with a minimal working example.
|
||||
|
||||
To get:
|
||||
|
||||
go get -u github.com/nickng/bibtex/...
|
||||
|
||||
This will also install `prettybib`, a bibtex pretty printer.
|
||||
To parse and pretty print a bibtex file, for example:
|
||||
|
||||
cd $GOPATH/src/github.com/nickng/bibtex
|
||||
prettybib -in example/simple.bib
|
355
src/vendor/github.com/nickng/bibtex/bibtex.go
generated
vendored
Normal file
355
src/vendor/github.com/nickng/bibtex/bibtex.go
generated
vendored
Normal file
|
@ -0,0 +1,355 @@
|
|||
package bibtex
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
"time"
|
||||
)
|
||||
|
||||
// BibString is a segment of a bib string.
|
||||
type BibString interface {
|
||||
RawString() string // Internal representation.
|
||||
String() string // Displayed string.
|
||||
}
|
||||
|
||||
// BibVar is a string variable.
|
||||
type BibVar struct {
|
||||
Key string // Variable key.
|
||||
Value BibString // Variable actual value.
|
||||
}
|
||||
|
||||
// RawString is the internal representation of the variable.
|
||||
func (v *BibVar) RawString() string {
|
||||
return v.Key
|
||||
}
|
||||
|
||||
func (v *BibVar) String() string {
|
||||
return v.Value.String()
|
||||
}
|
||||
|
||||
// BibConst is a string constant.
|
||||
type BibConst string
|
||||
|
||||
// NewBibConst converts a constant string to BibConst.
|
||||
func NewBibConst(c string) BibConst {
|
||||
return BibConst(c)
|
||||
}
|
||||
|
||||
// RawString is the internal representation of the constant (i.e. the string).
|
||||
func (c BibConst) RawString() string {
|
||||
return fmt.Sprintf("{%s}", string(c))
|
||||
}
|
||||
|
||||
func (c BibConst) String() string {
|
||||
return string(c)
|
||||
}
|
||||
|
||||
// BibComposite is a composite string, may contain both variable and string.
|
||||
type BibComposite []BibString
|
||||
|
||||
// NewBibComposite creates a new composite with one element.
|
||||
func NewBibComposite(s BibString) *BibComposite {
|
||||
comp := &BibComposite{}
|
||||
return comp.Append(s)
|
||||
}
|
||||
|
||||
// Append adds a BibString to the composite
|
||||
func (c *BibComposite) Append(s BibString) *BibComposite {
|
||||
comp := append(*c, s)
|
||||
return &comp
|
||||
}
|
||||
|
||||
func (c *BibComposite) String() string {
|
||||
var buf bytes.Buffer
|
||||
for _, s := range *c {
|
||||
buf.WriteString(s.String())
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// RawString returns a raw (bibtex) representation of the composite string.
|
||||
func (c *BibComposite) RawString() string {
|
||||
var buf bytes.Buffer
|
||||
for i, comp := range *c {
|
||||
if i > 0 {
|
||||
buf.WriteString(" # ")
|
||||
}
|
||||
switch comp := comp.(type) {
|
||||
case *BibConst:
|
||||
buf.WriteString(comp.RawString())
|
||||
case *BibVar:
|
||||
buf.WriteString(comp.RawString())
|
||||
case *BibComposite:
|
||||
buf.WriteString(comp.RawString())
|
||||
}
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// BibEntry is a record of BibTeX record.
|
||||
type BibEntry struct {
|
||||
Type string
|
||||
CiteName string
|
||||
Fields map[string]BibString
|
||||
}
|
||||
|
||||
// NewBibEntry creates a new BibTeX entry.
|
||||
func NewBibEntry(entryType string, citeName string) *BibEntry {
|
||||
spaceStripper := strings.NewReplacer(" ", "")
|
||||
cleanedType := strings.ToLower(spaceStripper.Replace(entryType))
|
||||
cleanedName := spaceStripper.Replace(citeName)
|
||||
return &BibEntry{
|
||||
Type: cleanedType,
|
||||
CiteName: cleanedName,
|
||||
Fields: map[string]BibString{},
|
||||
}
|
||||
}
|
||||
|
||||
// AddField adds a field (key-value) to a BibTeX entry.
|
||||
func (entry *BibEntry) AddField(name string, value BibString) {
|
||||
entry.Fields[strings.TrimSpace(name)] = value
|
||||
}
|
||||
|
||||
// prettyStringConfig controls the formatting/printing behaviour of the BibTex's and BibEntry's PrettyPrint functions
|
||||
type prettyStringConfig struct {
|
||||
// priority controls the order in which fields are printed. Keys with lower values are printed earlier.
|
||||
//See keyOrderToPriorityMap
|
||||
priority map[string]int
|
||||
}
|
||||
|
||||
// keyOrderToPriorityMap is a helper function for WithKeyOrder, converting the user facing key order slice
|
||||
// into the map format that is internally used by the sort function
|
||||
func keyOrderToPriorityMap(keyOrder []string) map[string]int {
|
||||
priority := make(map[string]int)
|
||||
offset := len(keyOrder)
|
||||
for i, v := range keyOrder {
|
||||
priority[v] = i - offset
|
||||
}
|
||||
return priority
|
||||
}
|
||||
|
||||
var defaultPrettyStringConfig = prettyStringConfig{priority: keyOrderToPriorityMap([]string{"title", "author", "url"})}
|
||||
|
||||
// PrettyStringOpt allows to change the pretty print format for BibEntry and BibTex
|
||||
type PrettyStringOpt func(config *prettyStringConfig)
|
||||
|
||||
// WithKeyOrder changes the order in which BibEntry keys are printed to the order in which they appear in keyOrder
|
||||
func WithKeyOrder(keyOrder []string) PrettyStringOpt {
|
||||
return func(config *prettyStringConfig) {
|
||||
config.priority = make(map[string]int)
|
||||
offset := len(keyOrder)
|
||||
for i, v := range keyOrder {
|
||||
config.priority[v] = i - offset
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// prettyStringAppend appends the pretty print string for BibEntry using config to configure the formatting
|
||||
func (entry *BibEntry) prettyStringAppend(buf *bytes.Buffer, config prettyStringConfig) {
|
||||
fmt.Fprintf(buf, "@%s{%s,\n", entry.Type, entry.CiteName)
|
||||
|
||||
// Determine key order.
|
||||
keys := []string{}
|
||||
for key := range entry.Fields {
|
||||
keys = append(keys, key)
|
||||
}
|
||||
|
||||
sort.Slice(keys, func(i, j int) bool {
|
||||
pi, pj := config.priority[keys[i]], config.priority[keys[j]]
|
||||
return pi < pj || (pi == pj && keys[i] < keys[j])
|
||||
})
|
||||
|
||||
// Write fields.
|
||||
tw := tabwriter.NewWriter(buf, 1, 4, 1, ' ', 0)
|
||||
for _, key := range keys {
|
||||
value := entry.Fields[key].String()
|
||||
format := stringformat(value)
|
||||
fmt.Fprintf(tw, " %s\t=\t"+format+",\n", key, value)
|
||||
}
|
||||
tw.Flush()
|
||||
buf.WriteString("}\n")
|
||||
|
||||
}
|
||||
|
||||
// PrettyString pretty prints a BibEntry
|
||||
func (entry *BibEntry) PrettyString(options ...PrettyStringOpt) string {
|
||||
config := defaultPrettyStringConfig
|
||||
for _, option := range options {
|
||||
option(&config)
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
entry.prettyStringAppend(&buf, config)
|
||||
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// String returns a BibTex entry as a simplified BibTex string.
|
||||
func (entry *BibEntry) String() string {
|
||||
var bibtex bytes.Buffer
|
||||
bibtex.WriteString(fmt.Sprintf("@%s{%s,\n", entry.Type, entry.CiteName))
|
||||
for key, val := range entry.Fields {
|
||||
if i, err := strconv.Atoi(strings.TrimSpace(val.String())); err == nil {
|
||||
bibtex.WriteString(fmt.Sprintf(" %s = %d,\n", key, i))
|
||||
} else {
|
||||
bibtex.WriteString(fmt.Sprintf(" %s = {%s},\n", key, strings.TrimSpace(val.String())))
|
||||
}
|
||||
}
|
||||
bibtex.Truncate(bibtex.Len() - 2)
|
||||
bibtex.WriteString(fmt.Sprintf("\n}\n"))
|
||||
return bibtex.String()
|
||||
}
|
||||
|
||||
// RawString returns a BibTex entry data structure in its internal representation.
|
||||
func (entry *BibEntry) RawString() string {
|
||||
var bibtex bytes.Buffer
|
||||
bibtex.WriteString(fmt.Sprintf("@%s{%s,\n", entry.Type, entry.CiteName))
|
||||
for key, val := range entry.Fields {
|
||||
if i, err := strconv.Atoi(strings.TrimSpace(val.String())); err == nil {
|
||||
bibtex.WriteString(fmt.Sprintf(" %s = %d,\n", key, i))
|
||||
} else {
|
||||
bibtex.WriteString(fmt.Sprintf(" %s = %s,\n", key, val.RawString()))
|
||||
}
|
||||
}
|
||||
bibtex.Truncate(bibtex.Len() - 2)
|
||||
bibtex.WriteString(fmt.Sprintf("\n}\n"))
|
||||
return bibtex.String()
|
||||
}
|
||||
|
||||
// BibTex is a list of BibTeX entries.
|
||||
type BibTex struct {
|
||||
Preambles []BibString // List of Preambles
|
||||
Entries []*BibEntry // Items in a bibliography.
|
||||
StringVar map[string]*BibVar // Map from string variable to string.
|
||||
|
||||
// A list of default BibVars that are implicitly
|
||||
// defined and can be used without defining
|
||||
defaultVars map[string]string
|
||||
}
|
||||
|
||||
// NewBibTex creates a new BibTex data structure.
|
||||
func NewBibTex() *BibTex {
|
||||
// Sets up some default vars
|
||||
months := map[string]time.Month{
|
||||
"jan": 1, "feb": 2, "mar": 3,
|
||||
"apr": 4, "may": 5, "jun": 6,
|
||||
"jul": 7, "aug": 8, "sep": 9,
|
||||
"oct": 10, "nov": 11, "dec": 12,
|
||||
}
|
||||
|
||||
defaultVars := make(map[string]string)
|
||||
for mth, month := range months {
|
||||
// TODO(nickng): i10n of month name in user's local language
|
||||
defaultVars[mth] = month.String()
|
||||
}
|
||||
|
||||
return &BibTex{
|
||||
Preambles: []BibString{},
|
||||
Entries: []*BibEntry{},
|
||||
StringVar: make(map[string]*BibVar),
|
||||
|
||||
defaultVars: defaultVars,
|
||||
}
|
||||
}
|
||||
|
||||
// AddPreamble adds a preamble to a bibtex.
|
||||
func (bib *BibTex) AddPreamble(p BibString) {
|
||||
bib.Preambles = append(bib.Preambles, p)
|
||||
}
|
||||
|
||||
// AddEntry adds an entry to the BibTeX data structure.
|
||||
func (bib *BibTex) AddEntry(entry *BibEntry) {
|
||||
bib.Entries = append(bib.Entries, entry)
|
||||
}
|
||||
|
||||
// AddStringVar adds a new string var (if does not exist).
|
||||
func (bib *BibTex) AddStringVar(key string, val BibString) {
|
||||
bib.StringVar[key] = &BibVar{Key: key, Value: val}
|
||||
}
|
||||
|
||||
// GetStringVar looks up a string by its key.
|
||||
func (bib *BibTex) GetStringVar(key string) *BibVar {
|
||||
if bv, ok := bib.StringVar[key]; ok {
|
||||
return bv
|
||||
}
|
||||
if v, ok := bib.getDefaultVar(key); ok {
|
||||
return v
|
||||
}
|
||||
// This is undefined.
|
||||
log.Fatalf("%s: %s", ErrUnknownStringVar, key)
|
||||
return nil
|
||||
}
|
||||
|
||||
// getDefaultVar is a fallback for looking up keys (e.g. 3-character month)
|
||||
// and use them even though it hasn't been defined in the bib.
|
||||
func (bib *BibTex) getDefaultVar(key string) (*BibVar, bool) {
|
||||
if v, ok := bib.defaultVars[key]; ok {
|
||||
// if found, add this to the BibTex
|
||||
bib.StringVar[key] = &BibVar{Key: key, Value: NewBibConst(v)}
|
||||
return bib.StringVar[key], true
|
||||
}
|
||||
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// String returns a BibTex data structure as a simplified BibTex string.
|
||||
func (bib *BibTex) String() string {
|
||||
var bibtex bytes.Buffer
|
||||
for _, entry := range bib.Entries {
|
||||
bibtex.WriteString(entry.String())
|
||||
}
|
||||
return bibtex.String()
|
||||
}
|
||||
|
||||
// RawString returns a BibTex data structure in its internal representation.
|
||||
func (bib *BibTex) RawString() string {
|
||||
var bibtex bytes.Buffer
|
||||
for k, strvar := range bib.StringVar {
|
||||
bibtex.WriteString(fmt.Sprintf("@string{%s = {%s}}\n", k, strvar.String()))
|
||||
}
|
||||
for _, preamble := range bib.Preambles {
|
||||
bibtex.WriteString(fmt.Sprintf("@preamble{%s}\n", preamble.RawString()))
|
||||
}
|
||||
for _, entry := range bib.Entries {
|
||||
bibtex.WriteString(entry.RawString())
|
||||
}
|
||||
return bibtex.String()
|
||||
}
|
||||
|
||||
// PrettyString pretty prints a BibTex
|
||||
func (bib *BibTex) PrettyString(options ...PrettyStringOpt) string {
|
||||
config := defaultPrettyStringConfig
|
||||
for _, option := range options {
|
||||
option(&config)
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
for i, entry := range bib.Entries {
|
||||
if i != 0 {
|
||||
fmt.Fprint(&buf, "\n")
|
||||
}
|
||||
entry.prettyStringAppend(&buf, config)
|
||||
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// stringformat determines the correct formatting verb for the given BibTeX field value.
|
||||
func stringformat(v string) string {
|
||||
// Numbers may be represented unquoted.
|
||||
if _, err := strconv.Atoi(v); err == nil {
|
||||
return "%s"
|
||||
}
|
||||
|
||||
// Strings with certain characters must be brace quoted.
|
||||
if strings.ContainsAny(v, "\"{}") {
|
||||
return "{%s}"
|
||||
}
|
||||
|
||||
// Default to quoted string.
|
||||
return "%q"
|
||||
}
|
89
src/vendor/github.com/nickng/bibtex/bibtex.y
generated
vendored
Normal file
89
src/vendor/github.com/nickng/bibtex/bibtex.y
generated
vendored
Normal file
|
@ -0,0 +1,89 @@
|
|||
%{
|
||||
package bibtex
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
type bibTag struct {
|
||||
key string
|
||||
val BibString
|
||||
}
|
||||
|
||||
var bib *BibTex // Only for holding current bib
|
||||
%}
|
||||
|
||||
%union {
|
||||
bibtex *BibTex
|
||||
strval string
|
||||
bibentry *BibEntry
|
||||
bibtag *bibTag
|
||||
bibtags []*bibTag
|
||||
strings BibString
|
||||
}
|
||||
|
||||
%token tCOMMENT tSTRING tPREAMBLE
|
||||
%token tATSIGN tCOLON tEQUAL tCOMMA tPOUND tLBRACE tRBRACE tDQUOTE tLPAREN tRPAREN
|
||||
%token <strval> tBAREIDENT tIDENT tCOMMENTBODY
|
||||
%type <bibtex> bibtex
|
||||
%type <bibentry> bibentry
|
||||
%type <bibtag> tag stringentry
|
||||
%type <bibtags> tags
|
||||
%type <strings> longstring preambleentry
|
||||
|
||||
%%
|
||||
|
||||
top : bibtex { }
|
||||
;
|
||||
|
||||
bibtex : /* empty */ { $$ = NewBibTex(); bib = $$ }
|
||||
| bibtex bibentry { $$ = $1; $$.AddEntry($2) }
|
||||
| bibtex commententry { $$ = $1 }
|
||||
| bibtex stringentry { $$ = $1; $$.AddStringVar($2.key, $2.val) }
|
||||
| bibtex preambleentry { $$ = $1; $$.AddPreamble($2) }
|
||||
;
|
||||
|
||||
bibentry : tATSIGN tBAREIDENT tLBRACE tBAREIDENT tCOMMA tags tRBRACE { $$ = NewBibEntry($2, $4); for _, t := range $6 { $$.AddField(t.key, t.val) } }
|
||||
| tATSIGN tBAREIDENT tLPAREN tBAREIDENT tCOMMA tags tRPAREN { $$ = NewBibEntry($2, $4); for _, t := range $6 { $$.AddField(t.key, t.val) } }
|
||||
;
|
||||
|
||||
commententry : tATSIGN tCOMMENT tCOMMENTBODY { }
|
||||
;
|
||||
|
||||
stringentry : tATSIGN tSTRING tLBRACE tBAREIDENT tEQUAL longstring tRBRACE { $$ = &bibTag{key: $4, val: $6 } }
|
||||
| tATSIGN tSTRING tLPAREN tBAREIDENT tEQUAL longstring tRBRACE { $$ = &bibTag{key: $4, val: $6 } }
|
||||
;
|
||||
|
||||
preambleentry : tATSIGN tPREAMBLE tLBRACE longstring tRBRACE { $$ = $4 }
|
||||
| tATSIGN tPREAMBLE tLPAREN longstring tRPAREN { $$ = $4 }
|
||||
;
|
||||
|
||||
longstring : tIDENT { $$ = NewBibConst($1) }
|
||||
| tBAREIDENT { $$ = bib.GetStringVar($1) }
|
||||
| longstring tPOUND tIDENT { $$ = NewBibComposite($1); $$.(*BibComposite).Append(NewBibConst($3))}
|
||||
| longstring tPOUND tBAREIDENT { $$ = NewBibComposite($1); $$.(*BibComposite).Append(bib.GetStringVar($3)) }
|
||||
;
|
||||
|
||||
tag : /* empty */ { }
|
||||
| tBAREIDENT tEQUAL longstring { $$ = &bibTag{key: $1, val: $3} }
|
||||
;
|
||||
|
||||
tags : tag { if $1 != nil { $$ = []*bibTag{$1}; } }
|
||||
| tags tCOMMA tag { if $3 == nil { $$ = $1 } else { $$ = append($1, $3) } }
|
||||
;
|
||||
|
||||
%%
|
||||
|
||||
// Parse is the entry point to the bibtex parser.
|
||||
func Parse(r io.Reader) (*BibTex, error) {
|
||||
l := newLexer(r)
|
||||
bibtexParse(l)
|
||||
switch {
|
||||
case len(l.Errors) > 0: // Non-yacc errors
|
||||
return nil, l.Errors[0]
|
||||
case len(l.ParseErrors) > 0:
|
||||
return nil, l.ParseErrors[0]
|
||||
default:
|
||||
return bib, nil
|
||||
}
|
||||
}
|
645
src/vendor/github.com/nickng/bibtex/bibtex.y.go
generated
vendored
Normal file
645
src/vendor/github.com/nickng/bibtex/bibtex.y.go
generated
vendored
Normal file
|
@ -0,0 +1,645 @@
|
|||
// Code generated by goyacc -p bibtex -o bibtex.y.go bibtex.y. DO NOT EDIT.
|
||||
|
||||
//line bibtex.y:2
|
||||
package bibtex
|
||||
|
||||
import __yyfmt__ "fmt"
|
||||
|
||||
//line bibtex.y:2
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
type bibTag struct {
|
||||
key string
|
||||
val BibString
|
||||
}
|
||||
|
||||
var bib *BibTex // Only for holding current bib
|
||||
|
||||
//line bibtex.y:16
|
||||
type bibtexSymType struct {
|
||||
yys int
|
||||
bibtex *BibTex
|
||||
strval string
|
||||
bibentry *BibEntry
|
||||
bibtag *bibTag
|
||||
bibtags []*bibTag
|
||||
strings BibString
|
||||
}
|
||||
|
||||
const tCOMMENT = 57346
|
||||
const tSTRING = 57347
|
||||
const tPREAMBLE = 57348
|
||||
const tATSIGN = 57349
|
||||
const tCOLON = 57350
|
||||
const tEQUAL = 57351
|
||||
const tCOMMA = 57352
|
||||
const tPOUND = 57353
|
||||
const tLBRACE = 57354
|
||||
const tRBRACE = 57355
|
||||
const tDQUOTE = 57356
|
||||
const tLPAREN = 57357
|
||||
const tRPAREN = 57358
|
||||
const tBAREIDENT = 57359
|
||||
const tIDENT = 57360
|
||||
const tCOMMENTBODY = 57361
|
||||
|
||||
var bibtexToknames = [...]string{
|
||||
"$end",
|
||||
"error",
|
||||
"$unk",
|
||||
"tCOMMENT",
|
||||
"tSTRING",
|
||||
"tPREAMBLE",
|
||||
"tATSIGN",
|
||||
"tCOLON",
|
||||
"tEQUAL",
|
||||
"tCOMMA",
|
||||
"tPOUND",
|
||||
"tLBRACE",
|
||||
"tRBRACE",
|
||||
"tDQUOTE",
|
||||
"tLPAREN",
|
||||
"tRPAREN",
|
||||
"tBAREIDENT",
|
||||
"tIDENT",
|
||||
"tCOMMENTBODY",
|
||||
}
|
||||
var bibtexStatenames = [...]string{}
|
||||
|
||||
const bibtexEofCode = 1
|
||||
const bibtexErrCode = 2
|
||||
const bibtexInitialStackSize = 16
|
||||
|
||||
//line bibtex.y:75
|
||||
|
||||
// Parse is the entry point to the bibtex parser.
|
||||
func Parse(r io.Reader) (*BibTex, error) {
|
||||
l := newLexer(r)
|
||||
bibtexParse(l)
|
||||
switch {
|
||||
case len(l.Errors) > 0: // Non-yacc errors
|
||||
return nil, l.Errors[0]
|
||||
case len(l.ParseErrors) > 0:
|
||||
return nil, l.ParseErrors[0]
|
||||
default:
|
||||
return bib, nil
|
||||
}
|
||||
}
|
||||
|
||||
//line yacctab:1
|
||||
var bibtexExca = [...]int{
|
||||
-1, 1,
|
||||
1, -1,
|
||||
-2, 0,
|
||||
}
|
||||
|
||||
const bibtexPrivate = 57344
|
||||
|
||||
const bibtexLast = 54
|
||||
|
||||
var bibtexAct = [...]int{
|
||||
|
||||
23, 14, 35, 34, 9, 10, 11, 25, 24, 41,
|
||||
40, 36, 43, 22, 21, 32, 20, 8, 45, 26,
|
||||
33, 17, 19, 15, 18, 12, 16, 32, 13, 47,
|
||||
38, 39, 37, 32, 43, 46, 32, 42, 31, 32,
|
||||
28, 27, 44, 30, 29, 49, 48, 7, 4, 1,
|
||||
6, 5, 3, 2,
|
||||
}
|
||||
var bibtexPact = [...]int{
|
||||
|
||||
-1000, -1000, 40, -1000, -1000, -1000, -1000, 0, 13, -18,
|
||||
11, 9, 5, -1, -1000, -3, -4, -10, -10, 31,
|
||||
30, 35, 34, 25, -1000, -1000, 4, -6, -6, -10,
|
||||
-10, -1000, -8, -1000, 24, -1000, 33, 2, 22, 16,
|
||||
-1000, -1000, -1000, -6, -10, -1000, -1000, -1000, -1000, 28,
|
||||
}
|
||||
var bibtexPgo = [...]int{
|
||||
|
||||
0, 53, 52, 2, 51, 3, 0, 50, 49, 48,
|
||||
}
|
||||
var bibtexR1 = [...]int{
|
||||
|
||||
0, 8, 1, 1, 1, 1, 1, 2, 2, 9,
|
||||
4, 4, 7, 7, 6, 6, 6, 6, 3, 3,
|
||||
5, 5,
|
||||
}
|
||||
var bibtexR2 = [...]int{
|
||||
|
||||
0, 1, 0, 2, 2, 2, 2, 7, 7, 3,
|
||||
7, 7, 5, 5, 1, 1, 3, 3, 0, 3,
|
||||
1, 3,
|
||||
}
|
||||
var bibtexChk = [...]int{
|
||||
|
||||
-1000, -8, -1, -2, -9, -4, -7, 7, 17, 4,
|
||||
5, 6, 12, 15, 19, 12, 15, 12, 15, 17,
|
||||
17, 17, 17, -6, 18, 17, -6, 10, 10, 9,
|
||||
9, 13, 11, 16, -5, -3, 17, -5, -6, -6,
|
||||
18, 17, 13, 10, 9, 16, 13, 13, -3, -6,
|
||||
}
|
||||
var bibtexDef = [...]int{
|
||||
|
||||
2, -2, 1, 3, 4, 5, 6, 0, 0, 0,
|
||||
0, 0, 0, 0, 9, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 14, 15, 0, 18, 18, 0,
|
||||
0, 12, 0, 13, 0, 20, 0, 0, 0, 0,
|
||||
16, 17, 7, 18, 0, 8, 10, 11, 21, 19,
|
||||
}
|
||||
var bibtexTok1 = [...]int{
|
||||
|
||||
1,
|
||||
}
|
||||
var bibtexTok2 = [...]int{
|
||||
|
||||
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
|
||||
12, 13, 14, 15, 16, 17, 18, 19,
|
||||
}
|
||||
var bibtexTok3 = [...]int{
|
||||
0,
|
||||
}
|
||||
|
||||
var bibtexErrorMessages = [...]struct {
|
||||
state int
|
||||
token int
|
||||
msg string
|
||||
}{}
|
||||
|
||||
//line yaccpar:1
|
||||
|
||||
/* parser for yacc output */
|
||||
|
||||
var (
|
||||
bibtexDebug = 0
|
||||
bibtexErrorVerbose = false
|
||||
)
|
||||
|
||||
type bibtexLexer interface {
|
||||
Lex(lval *bibtexSymType) int
|
||||
Error(s string)
|
||||
}
|
||||
|
||||
type bibtexParser interface {
|
||||
Parse(bibtexLexer) int
|
||||
Lookahead() int
|
||||
}
|
||||
|
||||
type bibtexParserImpl struct {
|
||||
lval bibtexSymType
|
||||
stack [bibtexInitialStackSize]bibtexSymType
|
||||
char int
|
||||
}
|
||||
|
||||
func (p *bibtexParserImpl) Lookahead() int {
|
||||
return p.char
|
||||
}
|
||||
|
||||
func bibtexNewParser() bibtexParser {
|
||||
return &bibtexParserImpl{}
|
||||
}
|
||||
|
||||
const bibtexFlag = -1000
|
||||
|
||||
func bibtexTokname(c int) string {
|
||||
if c >= 1 && c-1 < len(bibtexToknames) {
|
||||
if bibtexToknames[c-1] != "" {
|
||||
return bibtexToknames[c-1]
|
||||
}
|
||||
}
|
||||
return __yyfmt__.Sprintf("tok-%v", c)
|
||||
}
|
||||
|
||||
func bibtexStatname(s int) string {
|
||||
if s >= 0 && s < len(bibtexStatenames) {
|
||||
if bibtexStatenames[s] != "" {
|
||||
return bibtexStatenames[s]
|
||||
}
|
||||
}
|
||||
return __yyfmt__.Sprintf("state-%v", s)
|
||||
}
|
||||
|
||||
func bibtexErrorMessage(state, lookAhead int) string {
|
||||
const TOKSTART = 4
|
||||
|
||||
if !bibtexErrorVerbose {
|
||||
return "syntax error"
|
||||
}
|
||||
|
||||
for _, e := range bibtexErrorMessages {
|
||||
if e.state == state && e.token == lookAhead {
|
||||
return "syntax error: " + e.msg
|
||||
}
|
||||
}
|
||||
|
||||
res := "syntax error: unexpected " + bibtexTokname(lookAhead)
|
||||
|
||||
// To match Bison, suggest at most four expected tokens.
|
||||
expected := make([]int, 0, 4)
|
||||
|
||||
// Look for shiftable tokens.
|
||||
base := bibtexPact[state]
|
||||
for tok := TOKSTART; tok-1 < len(bibtexToknames); tok++ {
|
||||
if n := base + tok; n >= 0 && n < bibtexLast && bibtexChk[bibtexAct[n]] == tok {
|
||||
if len(expected) == cap(expected) {
|
||||
return res
|
||||
}
|
||||
expected = append(expected, tok)
|
||||
}
|
||||
}
|
||||
|
||||
if bibtexDef[state] == -2 {
|
||||
i := 0
|
||||
for bibtexExca[i] != -1 || bibtexExca[i+1] != state {
|
||||
i += 2
|
||||
}
|
||||
|
||||
// Look for tokens that we accept or reduce.
|
||||
for i += 2; bibtexExca[i] >= 0; i += 2 {
|
||||
tok := bibtexExca[i]
|
||||
if tok < TOKSTART || bibtexExca[i+1] == 0 {
|
||||
continue
|
||||
}
|
||||
if len(expected) == cap(expected) {
|
||||
return res
|
||||
}
|
||||
expected = append(expected, tok)
|
||||
}
|
||||
|
||||
// If the default action is to accept or reduce, give up.
|
||||
if bibtexExca[i+1] != 0 {
|
||||
return res
|
||||
}
|
||||
}
|
||||
|
||||
for i, tok := range expected {
|
||||
if i == 0 {
|
||||
res += ", expecting "
|
||||
} else {
|
||||
res += " or "
|
||||
}
|
||||
res += bibtexTokname(tok)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func bibtexlex1(lex bibtexLexer, lval *bibtexSymType) (char, token int) {
|
||||
token = 0
|
||||
char = lex.Lex(lval)
|
||||
if char <= 0 {
|
||||
token = bibtexTok1[0]
|
||||
goto out
|
||||
}
|
||||
if char < len(bibtexTok1) {
|
||||
token = bibtexTok1[char]
|
||||
goto out
|
||||
}
|
||||
if char >= bibtexPrivate {
|
||||
if char < bibtexPrivate+len(bibtexTok2) {
|
||||
token = bibtexTok2[char-bibtexPrivate]
|
||||
goto out
|
||||
}
|
||||
}
|
||||
for i := 0; i < len(bibtexTok3); i += 2 {
|
||||
token = bibtexTok3[i+0]
|
||||
if token == char {
|
||||
token = bibtexTok3[i+1]
|
||||
goto out
|
||||
}
|
||||
}
|
||||
|
||||
out:
|
||||
if token == 0 {
|
||||
token = bibtexTok2[1] /* unknown char */
|
||||
}
|
||||
if bibtexDebug >= 3 {
|
||||
__yyfmt__.Printf("lex %s(%d)\n", bibtexTokname(token), uint(char))
|
||||
}
|
||||
return char, token
|
||||
}
|
||||
|
||||
func bibtexParse(bibtexlex bibtexLexer) int {
|
||||
return bibtexNewParser().Parse(bibtexlex)
|
||||
}
|
||||
|
||||
func (bibtexrcvr *bibtexParserImpl) Parse(bibtexlex bibtexLexer) int {
|
||||
var bibtexn int
|
||||
var bibtexVAL bibtexSymType
|
||||
var bibtexDollar []bibtexSymType
|
||||
_ = bibtexDollar // silence set and not used
|
||||
bibtexS := bibtexrcvr.stack[:]
|
||||
|
||||
Nerrs := 0 /* number of errors */
|
||||
Errflag := 0 /* error recovery flag */
|
||||
bibtexstate := 0
|
||||
bibtexrcvr.char = -1
|
||||
bibtextoken := -1 // bibtexrcvr.char translated into internal numbering
|
||||
defer func() {
|
||||
// Make sure we report no lookahead when not parsing.
|
||||
bibtexstate = -1
|
||||
bibtexrcvr.char = -1
|
||||
bibtextoken = -1
|
||||
}()
|
||||
bibtexp := -1
|
||||
goto bibtexstack
|
||||
|
||||
ret0:
|
||||
return 0
|
||||
|
||||
ret1:
|
||||
return 1
|
||||
|
||||
bibtexstack:
|
||||
/* put a state and value onto the stack */
|
||||
if bibtexDebug >= 4 {
|
||||
__yyfmt__.Printf("char %v in %v\n", bibtexTokname(bibtextoken), bibtexStatname(bibtexstate))
|
||||
}
|
||||
|
||||
bibtexp++
|
||||
if bibtexp >= len(bibtexS) {
|
||||
nyys := make([]bibtexSymType, len(bibtexS)*2)
|
||||
copy(nyys, bibtexS)
|
||||
bibtexS = nyys
|
||||
}
|
||||
bibtexS[bibtexp] = bibtexVAL
|
||||
bibtexS[bibtexp].yys = bibtexstate
|
||||
|
||||
bibtexnewstate:
|
||||
bibtexn = bibtexPact[bibtexstate]
|
||||
if bibtexn <= bibtexFlag {
|
||||
goto bibtexdefault /* simple state */
|
||||
}
|
||||
if bibtexrcvr.char < 0 {
|
||||
bibtexrcvr.char, bibtextoken = bibtexlex1(bibtexlex, &bibtexrcvr.lval)
|
||||
}
|
||||
bibtexn += bibtextoken
|
||||
if bibtexn < 0 || bibtexn >= bibtexLast {
|
||||
goto bibtexdefault
|
||||
}
|
||||
bibtexn = bibtexAct[bibtexn]
|
||||
if bibtexChk[bibtexn] == bibtextoken { /* valid shift */
|
||||
bibtexrcvr.char = -1
|
||||
bibtextoken = -1
|
||||
bibtexVAL = bibtexrcvr.lval
|
||||
bibtexstate = bibtexn
|
||||
if Errflag > 0 {
|
||||
Errflag--
|
||||
}
|
||||
goto bibtexstack
|
||||
}
|
||||
|
||||
bibtexdefault:
|
||||
/* default state action */
|
||||
bibtexn = bibtexDef[bibtexstate]
|
||||
if bibtexn == -2 {
|
||||
if bibtexrcvr.char < 0 {
|
||||
bibtexrcvr.char, bibtextoken = bibtexlex1(bibtexlex, &bibtexrcvr.lval)
|
||||
}
|
||||
|
||||
/* look through exception table */
|
||||
xi := 0
|
||||
for {
|
||||
if bibtexExca[xi+0] == -1 && bibtexExca[xi+1] == bibtexstate {
|
||||
break
|
||||
}
|
||||
xi += 2
|
||||
}
|
||||
for xi += 2; ; xi += 2 {
|
||||
bibtexn = bibtexExca[xi+0]
|
||||
if bibtexn < 0 || bibtexn == bibtextoken {
|
||||
break
|
||||
}
|
||||
}
|
||||
bibtexn = bibtexExca[xi+1]
|
||||
if bibtexn < 0 {
|
||||
goto ret0
|
||||
}
|
||||
}
|
||||
if bibtexn == 0 {
|
||||
/* error ... attempt to resume parsing */
|
||||
switch Errflag {
|
||||
case 0: /* brand new error */
|
||||
bibtexlex.Error(bibtexErrorMessage(bibtexstate, bibtextoken))
|
||||
Nerrs++
|
||||
if bibtexDebug >= 1 {
|
||||
__yyfmt__.Printf("%s", bibtexStatname(bibtexstate))
|
||||
__yyfmt__.Printf(" saw %s\n", bibtexTokname(bibtextoken))
|
||||
}
|
||||
fallthrough
|
||||
|
||||
case 1, 2: /* incompletely recovered error ... try again */
|
||||
Errflag = 3
|
||||
|
||||
/* find a state where "error" is a legal shift action */
|
||||
for bibtexp >= 0 {
|
||||
bibtexn = bibtexPact[bibtexS[bibtexp].yys] + bibtexErrCode
|
||||
if bibtexn >= 0 && bibtexn < bibtexLast {
|
||||
bibtexstate = bibtexAct[bibtexn] /* simulate a shift of "error" */
|
||||
if bibtexChk[bibtexstate] == bibtexErrCode {
|
||||
goto bibtexstack
|
||||
}
|
||||
}
|
||||
|
||||
/* the current p has no shift on "error", pop stack */
|
||||
if bibtexDebug >= 2 {
|
||||
__yyfmt__.Printf("error recovery pops state %d\n", bibtexS[bibtexp].yys)
|
||||
}
|
||||
bibtexp--
|
||||
}
|
||||
/* there is no state on the stack with an error shift ... abort */
|
||||
goto ret1
|
||||
|
||||
case 3: /* no shift yet; clobber input char */
|
||||
if bibtexDebug >= 2 {
|
||||
__yyfmt__.Printf("error recovery discards %s\n", bibtexTokname(bibtextoken))
|
||||
}
|
||||
if bibtextoken == bibtexEofCode {
|
||||
goto ret1
|
||||
}
|
||||
bibtexrcvr.char = -1
|
||||
bibtextoken = -1
|
||||
goto bibtexnewstate /* try again in the same state */
|
||||
}
|
||||
}
|
||||
|
||||
/* reduction by production bibtexn */
|
||||
if bibtexDebug >= 2 {
|
||||
__yyfmt__.Printf("reduce %v in:\n\t%v\n", bibtexn, bibtexStatname(bibtexstate))
|
||||
}
|
||||
|
||||
bibtexnt := bibtexn
|
||||
bibtexpt := bibtexp
|
||||
_ = bibtexpt // guard against "declared and not used"
|
||||
|
||||
bibtexp -= bibtexR2[bibtexn]
|
||||
// bibtexp is now the index of $0. Perform the default action. Iff the
|
||||
// reduced production is ε, $1 is possibly out of range.
|
||||
if bibtexp+1 >= len(bibtexS) {
|
||||
nyys := make([]bibtexSymType, len(bibtexS)*2)
|
||||
copy(nyys, bibtexS)
|
||||
bibtexS = nyys
|
||||
}
|
||||
bibtexVAL = bibtexS[bibtexp+1]
|
||||
|
||||
/* consult goto table to find next state */
|
||||
bibtexn = bibtexR1[bibtexn]
|
||||
bibtexg := bibtexPgo[bibtexn]
|
||||
bibtexj := bibtexg + bibtexS[bibtexp].yys + 1
|
||||
|
||||
if bibtexj >= bibtexLast {
|
||||
bibtexstate = bibtexAct[bibtexg]
|
||||
} else {
|
||||
bibtexstate = bibtexAct[bibtexj]
|
||||
if bibtexChk[bibtexstate] != -bibtexn {
|
||||
bibtexstate = bibtexAct[bibtexg]
|
||||
}
|
||||
}
|
||||
// dummy call; replaced with literal code
|
||||
switch bibtexnt {
|
||||
|
||||
case 1:
|
||||
bibtexDollar = bibtexS[bibtexpt-1 : bibtexpt+1]
|
||||
//line bibtex.y:36
|
||||
{
|
||||
}
|
||||
case 2:
|
||||
bibtexDollar = bibtexS[bibtexpt-0 : bibtexpt+1]
|
||||
//line bibtex.y:39
|
||||
{
|
||||
bibtexVAL.bibtex = NewBibTex()
|
||||
bib = bibtexVAL.bibtex
|
||||
}
|
||||
case 3:
|
||||
bibtexDollar = bibtexS[bibtexpt-2 : bibtexpt+1]
|
||||
//line bibtex.y:40
|
||||
{
|
||||
bibtexVAL.bibtex = bibtexDollar[1].bibtex
|
||||
bibtexVAL.bibtex.AddEntry(bibtexDollar[2].bibentry)
|
||||
}
|
||||
case 4:
|
||||
bibtexDollar = bibtexS[bibtexpt-2 : bibtexpt+1]
|
||||
//line bibtex.y:41
|
||||
{
|
||||
bibtexVAL.bibtex = bibtexDollar[1].bibtex
|
||||
}
|
||||
case 5:
|
||||
bibtexDollar = bibtexS[bibtexpt-2 : bibtexpt+1]
|
||||
//line bibtex.y:42
|
||||
{
|
||||
bibtexVAL.bibtex = bibtexDollar[1].bibtex
|
||||
bibtexVAL.bibtex.AddStringVar(bibtexDollar[2].bibtag.key, bibtexDollar[2].bibtag.val)
|
||||
}
|
||||
case 6:
|
||||
bibtexDollar = bibtexS[bibtexpt-2 : bibtexpt+1]
|
||||
//line bibtex.y:43
|
||||
{
|
||||
bibtexVAL.bibtex = bibtexDollar[1].bibtex
|
||||
bibtexVAL.bibtex.AddPreamble(bibtexDollar[2].strings)
|
||||
}
|
||||
case 7:
|
||||
bibtexDollar = bibtexS[bibtexpt-7 : bibtexpt+1]
|
||||
//line bibtex.y:46
|
||||
{
|
||||
bibtexVAL.bibentry = NewBibEntry(bibtexDollar[2].strval, bibtexDollar[4].strval)
|
||||
for _, t := range bibtexDollar[6].bibtags {
|
||||
bibtexVAL.bibentry.AddField(t.key, t.val)
|
||||
}
|
||||
}
|
||||
case 8:
|
||||
bibtexDollar = bibtexS[bibtexpt-7 : bibtexpt+1]
|
||||
//line bibtex.y:47
|
||||
{
|
||||
bibtexVAL.bibentry = NewBibEntry(bibtexDollar[2].strval, bibtexDollar[4].strval)
|
||||
for _, t := range bibtexDollar[6].bibtags {
|
||||
bibtexVAL.bibentry.AddField(t.key, t.val)
|
||||
}
|
||||
}
|
||||
case 9:
|
||||
bibtexDollar = bibtexS[bibtexpt-3 : bibtexpt+1]
|
||||
//line bibtex.y:50
|
||||
{
|
||||
}
|
||||
case 10:
|
||||
bibtexDollar = bibtexS[bibtexpt-7 : bibtexpt+1]
|
||||
//line bibtex.y:53
|
||||
{
|
||||
bibtexVAL.bibtag = &bibTag{key: bibtexDollar[4].strval, val: bibtexDollar[6].strings}
|
||||
}
|
||||
case 11:
|
||||
bibtexDollar = bibtexS[bibtexpt-7 : bibtexpt+1]
|
||||
//line bibtex.y:54
|
||||
{
|
||||
bibtexVAL.bibtag = &bibTag{key: bibtexDollar[4].strval, val: bibtexDollar[6].strings}
|
||||
}
|
||||
case 12:
|
||||
bibtexDollar = bibtexS[bibtexpt-5 : bibtexpt+1]
|
||||
//line bibtex.y:57
|
||||
{
|
||||
bibtexVAL.strings = bibtexDollar[4].strings
|
||||
}
|
||||
case 13:
|
||||
bibtexDollar = bibtexS[bibtexpt-5 : bibtexpt+1]
|
||||
//line bibtex.y:58
|
||||
{
|
||||
bibtexVAL.strings = bibtexDollar[4].strings
|
||||
}
|
||||
case 14:
|
||||
bibtexDollar = bibtexS[bibtexpt-1 : bibtexpt+1]
|
||||
//line bibtex.y:61
|
||||
{
|
||||
bibtexVAL.strings = NewBibConst(bibtexDollar[1].strval)
|
||||
}
|
||||
case 15:
|
||||
bibtexDollar = bibtexS[bibtexpt-1 : bibtexpt+1]
|
||||
//line bibtex.y:62
|
||||
{
|
||||
bibtexVAL.strings = bib.GetStringVar(bibtexDollar[1].strval)
|
||||
}
|
||||
case 16:
|
||||
bibtexDollar = bibtexS[bibtexpt-3 : bibtexpt+1]
|
||||
//line bibtex.y:63
|
||||
{
|
||||
bibtexVAL.strings = NewBibComposite(bibtexDollar[1].strings)
|
||||
bibtexVAL.strings.(*BibComposite).Append(NewBibConst(bibtexDollar[3].strval))
|
||||
}
|
||||
case 17:
|
||||
bibtexDollar = bibtexS[bibtexpt-3 : bibtexpt+1]
|
||||
//line bibtex.y:64
|
||||
{
|
||||
bibtexVAL.strings = NewBibComposite(bibtexDollar[1].strings)
|
||||
bibtexVAL.strings.(*BibComposite).Append(bib.GetStringVar(bibtexDollar[3].strval))
|
||||
}
|
||||
case 18:
|
||||
bibtexDollar = bibtexS[bibtexpt-0 : bibtexpt+1]
|
||||
//line bibtex.y:67
|
||||
{
|
||||
}
|
||||
case 19:
|
||||
bibtexDollar = bibtexS[bibtexpt-3 : bibtexpt+1]
|
||||
//line bibtex.y:68
|
||||
{
|
||||
bibtexVAL.bibtag = &bibTag{key: bibtexDollar[1].strval, val: bibtexDollar[3].strings}
|
||||
}
|
||||
case 20:
|
||||
bibtexDollar = bibtexS[bibtexpt-1 : bibtexpt+1]
|
||||
//line bibtex.y:71
|
||||
{
|
||||
if bibtexDollar[1].bibtag != nil {
|
||||
bibtexVAL.bibtags = []*bibTag{bibtexDollar[1].bibtag}
|
||||
}
|
||||
}
|
||||
case 21:
|
||||
bibtexDollar = bibtexS[bibtexpt-3 : bibtexpt+1]
|
||||
//line bibtex.y:72
|
||||
{
|
||||
if bibtexDollar[3].bibtag == nil {
|
||||
bibtexVAL.bibtags = bibtexDollar[1].bibtags
|
||||
} else {
|
||||
bibtexVAL.bibtags = append(bibtexDollar[1].bibtags, bibtexDollar[3].bibtag)
|
||||
}
|
||||
}
|
||||
}
|
||||
goto bibtexstack /* stack new state and value */
|
||||
}
|
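The generated Parse method above is driven through bibtexParse: the start-rule action (case 2) stores the freshly built bibliography in the package-level bib variable, and the entry rules attach data via AddEntry, AddStringVar, and AddPreamble. A minimal in-package sketch of that flow follows; it assumes bib has type *BibTex, and parseSketch itself is hypothetical, not part of the vendored API.

package bibtex

import "io"

// parseSketch is a hypothetical helper showing how the generated parser is
// driven from inside the package; the real entry point is defined elsewhere.
func parseSketch(r io.Reader) (*BibTex, []error) {
	l := newLexer(r) // yacc-compatible lexer, defined in lexer.go below
	bibtexParse(l)   // run the generated LALR parser over the token stream
	// Case 2 above assigns the result to the package-level bib variable;
	// parse errors accumulate on the lexer's ParseErrors slice.
	return bib, l.ParseErrors
}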
23
src/vendor/github.com/nickng/bibtex/docs.go
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
|
|||
// Package bibtex is a bibtex parser written in Go.
|
||||
//
|
||||
// The package contains a simple parser and data structure to represent bibtex
|
||||
// records.
|
||||
//
|
||||
// # Supported syntax
|
||||
//
|
||||
// The basic syntax is:
|
||||
//
|
||||
// @BIBTYPE{IDENT,
|
||||
// key1 = word,
|
||||
// key2 = "quoted",
|
||||
// key3 = {quoted},
|
||||
// }
|
||||
//
|
||||
// where BIBTYPE is the type of document (e.g. inproceedings, article, etc.)
|
||||
// and IDENT is a string identifier.
|
||||
//
|
||||
// The bibtex format is not standardised; this parser follows the description
|
||||
// found at the link below. If there are any problems, please file an issue
|
||||
// with a minimal working example at the GitHub repository.
|
||||
// http://maverick.inria.fr/~Xavier.Decoret/resources/xdkbibtex/bibtex_summary.html
|
||||
package bibtex // import "github.com/nickng/bibtex"
|
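For the syntax documented above, end-user code would look roughly like the sketch below. The exported Parse function and the Entries field are assumptions about the upstream package rather than something shown in this diff, so treat this as an illustration, not the definitive API.

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/nickng/bibtex"
)

func main() {
	src := `@article{doe2024,
  title  = {A Minimal Example},
  author = {John Doe}
}`
	// Parse is assumed to take an io.Reader and return (*BibTex, error).
	bib, err := bibtex.Parse(strings.NewReader(src))
	if err != nil {
		log.Fatal(err)
	}
	// Entries is assumed to hold the parsed records.
	fmt.Printf("parsed %d entry\n", len(bib.Entries))
}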
23
src/vendor/github.com/nickng/bibtex/error.go
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
|
|||
package bibtex
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
var (
|
||||
// ErrUnexpectedAtsign is an error for unexpected @ in {}.
|
||||
ErrUnexpectedAtsign = errors.New("unexpected @ sign")
|
||||
// ErrUnknownStringVar is an error for looking up an undefined string variable.
|
||||
ErrUnknownStringVar = errors.New("unknown string variable")
|
||||
)
|
||||
|
||||
// ErrParse is a parse error.
|
||||
type ErrParse struct {
|
||||
Pos tokenPos
|
||||
Err string // Error string returned from parser.
|
||||
}
|
||||
|
||||
func (e *ErrParse) Error() string {
|
||||
return fmt.Sprintf("parse failed at %s: %s", e.Pos, e.Err)
|
||||
}
|
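Because *ErrParse carries the token position, a caller that gets an error back from the package can recover a location-annotated message with errors.As. This is a hedged sketch: it assumes the error chain actually contains a *bibtex.ErrParse, which is how lexer.Error records parse failures, and the helper itself is hypothetical.

package main

import (
	"errors"
	"fmt"

	"github.com/nickng/bibtex"
)

// reportParseError is a hypothetical helper: it prints the position-annotated
// message ("parse failed at line:char: ...") when err is a *bibtex.ErrParse.
func reportParseError(err error) {
	var perr *bibtex.ErrParse
	if errors.As(err, &perr) {
		fmt.Println(perr.Error())
		return
	}
	fmt.Println("other error:", err)
}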
38
src/vendor/github.com/nickng/bibtex/lexer.go
generated
vendored
Normal file
|
@ -0,0 +1,38 @@
|
|||
//go:generate goyacc -p bibtex -o bibtex.y.go bibtex.y
|
||||
|
||||
package bibtex
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
// lexer for bibtex.
|
||||
type lexer struct {
|
||||
scanner *scanner
|
||||
ParseErrors []error // Parse errors from yacc
|
||||
Errors []error // Other errors
|
||||
}
|
||||
|
||||
// newLexer returns a new yacc-compatible lexer.
|
||||
func newLexer(r io.Reader) *lexer {
|
||||
return &lexer{
|
||||
scanner: newScanner(r),
|
||||
}
|
||||
}
|
||||
|
||||
// Lex is provided for the yacc-compatible parser.
|
||||
func (l *lexer) Lex(yylval *bibtexSymType) int {
|
||||
token, strval, err := l.scanner.Scan()
|
||||
if err != nil {
|
||||
l.Errors = append(l.Errors, fmt.Errorf("%w at %s", err, l.scanner.pos))
|
||||
return int(0)
|
||||
}
|
||||
yylval.strval = strval
|
||||
return int(token)
|
||||
}
|
||||
|
||||
// Error implements the parser's error callback and records a parse error.
|
||||
func (l *lexer) Error(err string) {
|
||||
l.ParseErrors = append(l.ParseErrors, &ErrParse{Err: err, Pos: l.scanner.pos})
|
||||
}
|
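Lex returns one token per call, stores the token's literal in yylval.strval, and returns 0 at end of input (scanner errors are recorded in Errors rather than returned). A hypothetical in-package helper that drains the lexer by hand, for example while debugging the scanner:

package bibtex

import "strings"

// dumpLiterals is a hypothetical debugging helper: it calls Lex until the
// end-of-input token (0) and collects each token's literal value.
func dumpLiterals(src string) []string {
	l := newLexer(strings.NewReader(src))
	var lits []string
	var sym bibtexSymType
	for tok := l.Lex(&sym); tok != 0; tok = l.Lex(&sym) {
		lits = append(lits, sym.strval) // set by the scanner for every token
	}
	return lits
}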
237
src/vendor/github.com/nickng/bibtex/scanner.go
generated
vendored
Normal file
|
@ -0,0 +1,237 @@
|
|||
package bibtex
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var parseField bool
|
||||
|
||||
// scanner is a lexical scanner
|
||||
type scanner struct {
|
||||
commentMode bool
|
||||
r *bufio.Reader
|
||||
pos tokenPos
|
||||
}
|
||||
|
||||
// newScanner returns a new instance of scanner.
|
||||
func newScanner(r io.Reader) *scanner {
|
||||
return &scanner{r: bufio.NewReader(r), pos: tokenPos{Char: 0, Lines: []int{}}}
|
||||
}
|
||||
|
||||
// read reads the next rune from the buffered reader.
|
||||
// It returns eof (rune(0)) if an error occurs or io.EOF is reached.
|
||||
func (s *scanner) read() rune {
|
||||
ch, _, err := s.r.ReadRune()
|
||||
if err != nil {
|
||||
return eof
|
||||
}
|
||||
if ch == '\n' {
|
||||
s.pos.Lines = append(s.pos.Lines, s.pos.Char)
|
||||
s.pos.Char = 0
|
||||
} else {
|
||||
s.pos.Char++
|
||||
}
|
||||
return ch
|
||||
}
|
||||
|
||||
// unread places the previously read rune back on the reader.
|
||||
func (s *scanner) unread() {
|
||||
_ = s.r.UnreadRune()
|
||||
if s.pos.Char == 0 {
|
||||
s.pos.Char = s.pos.Lines[len(s.pos.Lines)-1]
|
||||
s.pos.Lines = s.pos.Lines[:len(s.pos.Lines)-1]
|
||||
} else {
|
||||
s.pos.Char--
|
||||
}
|
||||
}
|
||||
|
||||
// Scan returns the next token and literal value.
|
||||
func (s *scanner) Scan() (tok token, lit string, err error) {
|
||||
ch := s.read()
|
||||
if isWhitespace(ch) {
|
||||
s.ignoreWhitespace()
|
||||
ch = s.read()
|
||||
}
|
||||
if isAlphanum(ch) {
|
||||
s.unread()
|
||||
return s.scanIdent()
|
||||
}
|
||||
switch ch {
|
||||
case eof:
|
||||
return 0, "", nil
|
||||
case '@':
|
||||
return tATSIGN, string(ch), nil
|
||||
case ':':
|
||||
return tCOLON, string(ch), nil
|
||||
case ',':
|
||||
parseField = false // reset parseField if reached end of field.
|
||||
return tCOMMA, string(ch), nil
|
||||
case '=':
|
||||
parseField = true // set parseField if = sign outside quoted or ident.
|
||||
return tEQUAL, string(ch), nil
|
||||
case '"':
|
||||
tok, lit := s.scanQuoted()
|
||||
return tok, lit, nil
|
||||
case '{':
|
||||
if parseField {
|
||||
return s.scanBraced()
|
||||
}
|
||||
// If we're reading a comment, return everything after {
|
||||
// to the next @-sign (exclusive)
|
||||
if s.commentMode {
|
||||
s.unread()
|
||||
commentBodyTok, commentBody := s.scanCommentBody()
|
||||
return commentBodyTok, commentBody, nil
|
||||
}
|
||||
return tLBRACE, string(ch), nil
|
||||
case '}':
|
||||
if parseField { // reset parseField if reached end of entry.
|
||||
parseField = false
|
||||
}
|
||||
return tRBRACE, string(ch), nil
|
||||
case '#':
|
||||
return tPOUND, string(ch), nil
|
||||
case ' ':
|
||||
s.ignoreWhitespace()
|
||||
}
|
||||
return tILLEGAL, string(ch), nil
|
||||
}
|
||||
|
||||
// scanIdent scans the next token as a quoted, braced, or bare identifier.
|
||||
func (s *scanner) scanIdent() (tok token, lit string, err error) {
|
||||
switch ch := s.read(); ch {
|
||||
case '"':
|
||||
tok, lit := s.scanQuoted()
|
||||
return tok, lit, nil
|
||||
case '{':
|
||||
return s.scanBraced()
|
||||
default:
|
||||
s.unread() // Not open quote/brace.
|
||||
tok, lit := s.scanBare()
|
||||
return tok, lit, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s *scanner) scanBare() (token, string) {
|
||||
var buf bytes.Buffer
|
||||
for {
|
||||
if ch := s.read(); ch == eof {
|
||||
break
|
||||
} else if !isAlphanum(ch) && !isBareSymbol(ch) || isWhitespace(ch) {
|
||||
s.unread()
|
||||
break
|
||||
} else {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
}
|
||||
}
|
||||
str := buf.String()
|
||||
if strings.ToLower(str) == "comment" {
|
||||
s.commentMode = true
|
||||
return tCOMMENT, str
|
||||
} else if strings.ToLower(str) == "preamble" {
|
||||
return tPREAMBLE, str
|
||||
} else if strings.ToLower(str) == "string" {
|
||||
return tSTRING, str
|
||||
} else if _, err := strconv.Atoi(str); err == nil && parseField { // Special case for numeric
|
||||
return tIDENT, str
|
||||
}
|
||||
return tBAREIDENT, str
|
||||
}
|
||||
|
||||
// scanBraced parses a braced string, like {this}.
|
||||
func (s *scanner) scanBraced() (token, string, error) {
|
||||
var buf bytes.Buffer
|
||||
var macro bool
|
||||
brace := 1
|
||||
for {
|
||||
if ch := s.read(); ch == eof {
|
||||
break
|
||||
} else if ch == '\\' {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
macro = true
|
||||
} else if ch == '{' {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
brace++
|
||||
} else if ch == '}' {
|
||||
brace--
|
||||
macro = false
|
||||
if brace == 0 { // Balances open brace.
|
||||
return tIDENT, buf.String(), nil
|
||||
}
|
||||
_, _ = buf.WriteRune(ch)
|
||||
} else if ch == '@' {
|
||||
if macro {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
} else {
|
||||
return token(0), buf.String(), ErrUnexpectedAtsign
|
||||
}
|
||||
} else if isWhitespace(ch) {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
macro = false
|
||||
} else {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
}
|
||||
}
|
||||
return tILLEGAL, buf.String(), nil
|
||||
}
|
||||
|
||||
// scanQuoted parses a quoted string, like "this".
|
||||
func (s *scanner) scanQuoted() (token, string) {
|
||||
var buf bytes.Buffer
|
||||
brace := 0
|
||||
for {
|
||||
if ch := s.read(); ch == eof {
|
||||
break
|
||||
} else if ch == '{' {
|
||||
brace++
|
||||
} else if ch == '}' {
|
||||
brace--
|
||||
} else if ch == '"' {
|
||||
if brace == 0 { // Matches open quote, unescaped
|
||||
return tIDENT, buf.String()
|
||||
}
|
||||
_, _ = buf.WriteRune(ch)
|
||||
} else {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
}
|
||||
}
|
||||
return tILLEGAL, buf.String()
|
||||
}
|
||||
|
||||
// scanCommentBody is a scan method for reading a bibtex
|
||||
// comment item: it consumes all runes until the next @.
|
||||
//
|
||||
// e.g.
|
||||
// @comment{...anything can go here even if braces are unbalanced@
|
||||
// comment body string will be "...anything can go here even if braces are unbalanced"
|
||||
func (s *scanner) scanCommentBody() (token, string) {
|
||||
var buf bytes.Buffer
|
||||
for {
|
||||
if ch := s.read(); ch == eof {
|
||||
break
|
||||
} else if ch == '@' {
|
||||
s.unread()
|
||||
break
|
||||
} else {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
}
|
||||
}
|
||||
s.commentMode = false
|
||||
return tCOMMENTBODY, buf.String()
|
||||
}
|
||||
|
||||
// ignoreWhitespace consumes all contiguous whitespace from the current position.
|
||||
func (s *scanner) ignoreWhitespace() {
|
||||
for {
|
||||
if ch := s.read(); ch == eof {
|
||||
break
|
||||
} else if !isWhitespace(ch) {
|
||||
s.unread()
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
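scanBraced strips only the outermost pair of braces and keeps nested ones, which is what lets BibTeX values protect casing with inner braces; note that Scan only treats { as the start of a value after an = sign, via the parseField flag. A hypothetical in-package illustration:

package bibtex

import "strings"

// bracedValueSketch is a hypothetical helper: it scans "= {...}" and returns
// the braced value with the outer braces stripped and inner braces kept.
func bracedValueSketch() string {
	s := newScanner(strings.NewReader(`= {An {Analysis} of {China}}`))
	_, _, _ = s.Scan()    // '=': sets parseField, so the next '{' starts a value
	_, lit, _ := s.Scan() // tIDENT: "An {Analysis} of {China}"
	return lit
}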
55
src/vendor/github.com/nickng/bibtex/token.go
generated
vendored
Normal file
|
@ -0,0 +1,55 @@
|
|||
package bibtex
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Lexer token.
|
||||
type token int
|
||||
|
||||
const (
|
||||
// tILLEGAL stands for an invalid token.
|
||||
tILLEGAL token = iota
|
||||
)
|
||||
|
||||
var eof = rune(0)
|
||||
|
||||
// tokenPos is a pair of coordinates identifying the start of a token.
|
||||
type tokenPos struct {
|
||||
Char int
|
||||
Lines []int
|
||||
}
|
||||
|
||||
func (p tokenPos) String() string {
|
||||
return fmt.Sprintf("%d:%d", len(p.Lines)+1, p.Char)
|
||||
}
|
||||
|
||||
func isWhitespace(ch rune) bool {
|
||||
return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r'
|
||||
}
|
||||
|
||||
func isAlpha(ch rune) bool {
|
||||
return ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z')
|
||||
}
|
||||
|
||||
func isDigit(ch rune) bool {
|
||||
return ('0' <= ch && ch <= '9')
|
||||
}
|
||||
|
||||
func isAlphanum(ch rune) bool {
|
||||
return isAlpha(ch) || isDigit(ch)
|
||||
}
|
||||
|
||||
func isBareSymbol(ch rune) bool {
|
||||
return strings.ContainsRune("-_:./+", ch)
|
||||
}
|
||||
|
||||
// isSymbol returns true if ch is a valid symbol
|
||||
func isSymbol(ch rune) bool {
|
||||
return strings.ContainsRune("!?&*+-./:;<>[]^_`|~@", ch)
|
||||
}
|
||||
|
||||
func isOpenQuote(ch rune) bool {
|
||||
return ch == '{' || ch == '"'
|
||||
}
|
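tokenPos tracks the current column in Char and the lengths of completed lines in Lines, so the 1-based line number reported by String is len(Lines)+1. A small hypothetical sketch of how a position renders:

package bibtex

import "fmt"

// positionSketch is a hypothetical helper: with no completed lines and twelve
// runes consumed on the current line, the position prints as "1:12".
func positionSketch() {
	pos := tokenPos{Char: 12, Lines: []int{}}
	fmt.Println(pos)
}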
3
src/vendor/modules.txt
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
# github.com/nickng/bibtex v1.3.0
|
||||
## explicit; go 1.18
|
||||
github.com/nickng/bibtex
|
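The vendor manifest above mirrors the module's go.mod requirement. A hedged reconstruction is shown below; the module path is a placeholder, since it does not appear in this diff.

module example.org/censorbib // placeholder path, not taken from this diff

go 1.18

require github.com/nickng/bibtex v1.3.0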