Merge pull request #28 from NullHypothesis/auto-deploy
Automatically deploy CensorBib.
14
.github/workflows/build.yaml
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
name: Build
|
||||
on: push
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
go build -C src -o ../compiler
|
||||
./compiler -path references.bib > /dev/null
|
28
.github/workflows/deploy-website.yaml
vendored
Normal file
|
@ -0,0 +1,28 @@
|
|||
name: Deploy website
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
main
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
deploy-website:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install and Build
|
||||
run: |
|
||||
go build -C src -o ../compiler
|
||||
mkdir build
|
||||
mv assets build
|
||||
./compiler -path references.bib > build/index.html
|
||||
|
||||
- name: Deploy
|
||||
uses: JamesIves/github-pages-deploy-action@v4
|
||||
with:
|
||||
# Must be identical to where we wrote the HTML to.
|
||||
folder: build
|
1
.gitignore
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
compiler
|
19
Makefile
|
@ -1,19 +0,0 @@
|
|||
define LATEX_CODE
|
||||
\\documentclass{article}
|
||||
\\usepackage[top=2cm,bottom=2.5cm,left=2cm,right=2cm]{geometry}
|
||||
\\usepackage[backend=biber]{biblatex}
|
||||
\\addbibresource{references.bib}
|
||||
\\begin{document}
|
||||
\\nocite{*}
|
||||
\\printbibliography
|
||||
\\end{document}
|
||||
endef
|
||||
|
||||
export LATEX_CODE
|
||||
|
||||
test:
|
||||
TMP_FILE=$$(mktemp "censorbib-tmp-XXXXXXX.tex") ;\
|
||||
echo "$$LATEX_CODE" > "$$TMP_FILE" ;\
|
||||
pdflatex --interaction=batchmode "$${TMP_FILE%.tex}" ;\
|
||||
biber "$${TMP_FILE%.tex}" ;\
|
||||
rm "$${TMP_FILE%.tex}"* ;
|
36
README.md
|
@ -1,27 +1,19 @@
|
|||
Overview
|
||||
--------
|
||||
# CensorBib
|
||||
|
||||
This repository contains the BibTeX file and HTML templates that form the
|
||||
This repository contains the
|
||||
[BibTeX file](references.bib)
|
||||
and
|
||||
[corresponding tooling](src/)
|
||||
that powers the
|
||||
[Internet Censorship Bibliography](https://censorbib.nymity.ch).
|
||||
CensorBib is also available via
|
||||
[GitHub pages](https://NullHypothesis.github.io/censorbib/)
|
||||
in case the primary domain is inaccessible to you.
|
||||
|
||||
Build it
|
||||
--------
|
||||
## Contribute
|
||||
|
||||
You first need [`bibliogra.py`](https://github.com/NullHypothesis/bibliograpy)
|
||||
to turn the BibTeX file into an HTML bibliography.
|
||||
To contribute, please create a pull request that adds a new paper or
|
||||
improves an existing one.
|
||||
|
||||
Then, run the following commands to write the bibliography to `OUTPUT_DIR`.
|
||||
|
||||
$ ./fetch_pdfs.py references.bib OUTPUT_DIR
|
||||
$ bibliogra.py -H header.tpl -F footer.tpl -f references.bib OUTPUT_DIR
|
||||
|
||||
Acknowledgements
|
||||
----------------
|
||||
|
||||
CensorBib uses [Font Awesome](https://fontawesome.com/license/free) icons
|
||||
without modification.
|
||||
|
||||
Feedback
|
||||
--------
|
||||
|
||||
Contact: Philipp Winter <phw@nymity.ch>
|
||||
> [!TIP]
|
||||
> Try to mimic the style of existing BibTeX entries. The parser is strict!
|
Before Width: | Height: | Size: 611 B After Width: | Height: | Size: 611 B |
Before Width: | Height: | Size: 671 B After Width: | Height: | Size: 671 B |
Before Width: | Height: | Size: 917 B After Width: | Height: | Size: 917 B |
Before Width: | Height: | Size: 1.4 KiB After Width: | Height: | Size: 1.4 KiB |
Before Width: | Height: | Size: 1 KiB After Width: | Height: | Size: 1 KiB |
Before Width: | Height: | Size: 174 KiB After Width: | Height: | Size: 174 KiB |
Before Width: | Height: | Size: 940 B After Width: | Height: | Size: 940 B |
59
favicon.svg
|
@ -1,59 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
width="0.44702148in"
|
||||
height="0.36694336in"
|
||||
viewBox="0 0 11.354346 9.3203613"
|
||||
version="1.1"
|
||||
id="svg5"
|
||||
inkscape:version="1.1.1 (3bf5ae0d25, 2021-09-20)"
|
||||
sodipodi:docname="favicon.svg"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview7"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#666666"
|
||||
borderopacity="1.0"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:document-units="mm"
|
||||
showgrid="false"
|
||||
inkscape:zoom="6.0292929"
|
||||
inkscape:cx="17.746691"
|
||||
inkscape:cy="7.2977048"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1034"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="20"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="layer1"
|
||||
units="in"
|
||||
fit-margin-top="0"
|
||||
fit-margin-left="0"
|
||||
fit-margin-right="0"
|
||||
fit-margin-bottom="0" />
|
||||
<defs
|
||||
id="defs2" />
|
||||
<g
|
||||
inkscape:label="Layer 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
transform="translate(-45.907442,-85.229436)">
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-weight:normal;font-size:12.7px;line-height:1.25;font-family:sans-serif;fill:#800080;fill-opacity:1;stroke:#ffffff;stroke-width:0;stroke-miterlimit:4;stroke-dasharray:none"
|
||||
x="45.386543"
|
||||
y="94.425774"
|
||||
id="text6765"><tspan
|
||||
sodipodi:role="line"
|
||||
id="tspan6763"
|
||||
style="font-style:normal;font-variant:normal;font-weight:bold;font-stretch:normal;font-size:12.7px;font-family:'Open Sans Condensed';-inkscape-font-specification:'Open Sans Condensed, Bold';fill:#900092;fill-opacity:1;stroke:#ffffff;stroke-width:0;stroke-linecap:round;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none"
|
||||
x="45.386543"
|
||||
y="94.425774">CB</tspan></text>
|
||||
</g>
|
||||
</svg>
|
Before Width: | Height: | Size: 2.1 KiB |
100
fetch_pdfs.py
|
@ -1,100 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# Copyright 2015 Philipp Winter <phw@nymity.ch>
|
||||
#
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
Fetch pdf and ps files in BibTeX file.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import errno
|
||||
import urllib.request
|
||||
|
||||
import pybtex.database.input.bibtex as bibtex
|
||||
|
||||
|
||||
def download_pdf(url, file_name):
|
||||
"""
|
||||
Download file and write it to given file name.
|
||||
"""
|
||||
|
||||
print("Now fetching %s" % url)
|
||||
|
||||
try:
|
||||
req = urllib.request.Request(url, headers={'User-Agent': "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36"})
|
||||
fetched_file = urllib.request.urlopen(req)
|
||||
except Exception as err:
|
||||
print(url, err, file=sys.stderr)
|
||||
return
|
||||
|
||||
with open(file_name, "wb") as fd:
|
||||
fd.write(fetched_file.read())
|
||||
|
||||
|
||||
def main(file_name, output_dir):
|
||||
"""
|
||||
Extract BibTeX key and URL, and then trigger file download.
|
||||
"""
|
||||
|
||||
parser = bibtex.Parser()
|
||||
bibdata = parser.parse_file(file_name)
|
||||
|
||||
# Create download directories.
|
||||
|
||||
try:
|
||||
os.makedirs(os.path.join(output_dir, "pdf"))
|
||||
os.makedirs(os.path.join(output_dir, "ps"))
|
||||
except OSError as exc:
|
||||
if exc.errno == errno.EEXIST:
|
||||
pass
|
||||
else:
|
||||
raise
|
||||
|
||||
# Iterate over all BibTeX entries and trigger download if necessary.
|
||||
|
||||
for bibkey in bibdata.entries:
|
||||
|
||||
entry = bibdata.entries[bibkey]
|
||||
url = entry.fields.get("url")
|
||||
if url is None:
|
||||
continue
|
||||
|
||||
# Extract file name extension and see what we are dealing with.
|
||||
|
||||
_, ext = os.path.splitext(url)
|
||||
if ext:
|
||||
ext = ext[1:]
|
||||
|
||||
if ext not in ["pdf", "ps"]:
|
||||
continue
|
||||
|
||||
file_name = os.path.join(output_dir, ext, bibkey + ".%s" % ext)
|
||||
if os.path.exists(file_name):
|
||||
continue
|
||||
|
||||
download_pdf(url, file_name)
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
if len(sys.argv) != 3:
|
||||
print("\nUsage: %s FILE_NAME OUTPUT_DIR\n" % sys.argv[0],
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
sys.exit(main(sys.argv[1], sys.argv[2]))
|
|
@ -1,7 +0,0 @@
|
|||
<div id="footer">
|
||||
Icons taken without modification from
|
||||
<a href="https://fontawesome.com/license">Font Awesome</a>.
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>
|
|
@ -1 +0,0 @@
|
|||
<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="sort-alpha-down" class="svg-inline--fa fa-sort-alpha-down fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M176 352h-48V48a16 16 0 0 0-16-16H80a16 16 0 0 0-16 16v304H16c-14.19 0-21.36 17.24-11.29 27.31l80 96a16 16 0 0 0 22.62 0l80-96C197.35 369.26 190.22 352 176 352zm240-64H288a16 16 0 0 0-16 16v32a16 16 0 0 0 16 16h56l-61.26 70.45A32 32 0 0 0 272 446.37V464a16 16 0 0 0 16 16h128a16 16 0 0 0 16-16v-32a16 16 0 0 0-16-16h-56l61.26-70.45A32 32 0 0 0 432 321.63V304a16 16 0 0 0-16-16zm31.06-85.38l-59.27-160A16 16 0 0 0 372.72 32h-41.44a16 16 0 0 0-15.07 10.62l-59.27 160A16 16 0 0 0 272 224h24.83a16 16 0 0 0 15.23-11.08l4.42-12.92h71l4.41 12.92A16 16 0 0 0 407.16 224H432a16 16 0 0 0 15.06-21.38zM335.61 144L352 96l16.39 48z"></path></svg>
|
Before Width: | Height: | Size: 869 B |
|
@ -1 +0,0 @@
|
|||
<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="sort-alpha-down-alt" class="svg-inline--fa fa-sort-alpha-down-alt fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M176 352h-48V48a16 16 0 0 0-16-16H80a16 16 0 0 0-16 16v304H16c-14.19 0-21.36 17.24-11.29 27.31l80 96a16 16 0 0 0 22.62 0l80-96C197.35 369.26 190.22 352 176 352zm112-128h128a16 16 0 0 0 16-16v-32a16 16 0 0 0-16-16h-56l61.26-70.45A32 32 0 0 0 432 65.63V48a16 16 0 0 0-16-16H288a16 16 0 0 0-16 16v32a16 16 0 0 0 16 16h56l-61.26 70.45A32 32 0 0 0 272 190.37V208a16 16 0 0 0 16 16zm159.06 234.62l-59.27-160A16 16 0 0 0 372.72 288h-41.44a16 16 0 0 0-15.07 10.62l-59.27 160A16 16 0 0 0 272 480h24.83a16 16 0 0 0 15.23-11.08l4.42-12.92h71l4.41 12.92A16 16 0 0 0 407.16 480H432a16 16 0 0 0 15.06-21.38zM335.61 400L352 352l16.39 48z"></path></svg>
|
Before Width: | Height: | Size: 880 B |
|
@ -1 +0,0 @@
|
|||
<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="donate" class="svg-inline--fa fa-donate fa-w-16" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M256 416c114.9 0 208-93.1 208-208S370.9 0 256 0 48 93.1 48 208s93.1 208 208 208zM233.8 97.4V80.6c0-9.2 7.4-16.6 16.6-16.6h11.1c9.2 0 16.6 7.4 16.6 16.6v17c15.5.8 30.5 6.1 43 15.4 5.6 4.1 6.2 12.3 1.2 17.1L306 145.6c-3.8 3.7-9.5 3.8-14 1-5.4-3.4-11.4-5.1-17.8-5.1h-38.9c-9 0-16.3 8.2-16.3 18.3 0 8.2 5 15.5 12.1 17.6l62.3 18.7c25.7 7.7 43.7 32.4 43.7 60.1 0 34-26.4 61.5-59.1 62.4v16.8c0 9.2-7.4 16.6-16.6 16.6h-11.1c-9.2 0-16.6-7.4-16.6-16.6v-17c-15.5-.8-30.5-6.1-43-15.4-5.6-4.1-6.2-12.3-1.2-17.1l16.3-15.5c3.8-3.7 9.5-3.8 14-1 5.4 3.4 11.4 5.1 17.8 5.1h38.9c9 0 16.3-8.2 16.3-18.3 0-8.2-5-15.5-12.1-17.6l-62.3-18.7c-25.7-7.7-43.7-32.4-43.7-60.1.1-34 26.4-61.5 59.1-62.4zM480 352h-32.5c-19.6 26-44.6 47.7-73 64h63.8c5.3 0 9.6 3.6 9.6 8v16c0 4.4-4.3 8-9.6 8H73.6c-5.3 0-9.6-3.6-9.6-8v-16c0-4.4 4.3-8 9.6-8h63.8c-28.4-16.3-53.3-38-73-64H32c-17.7 0-32 14.3-32 32v96c0 17.7 14.3 32 32 32h448c17.7 0 32-14.3 32-32v-96c0-17.7-14.3-32-32-32z"></path></svg>
|
Before Width: | Height: | Size: 1.1 KiB |
|
@ -1 +0,0 @@
|
|||
<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="file-alt" class="svg-inline--fa fa-file-alt fa-w-12" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 384 512"><path fill="currentColor" d="M224 136V0H24C10.7 0 0 10.7 0 24v464c0 13.3 10.7 24 24 24h336c13.3 0 24-10.7 24-24V160H248c-13.2 0-24-10.8-24-24zm64 236c0 6.6-5.4 12-12 12H108c-6.6 0-12-5.4-12-12v-8c0-6.6 5.4-12 12-12h168c6.6 0 12 5.4 12 12v8zm0-64c0 6.6-5.4 12-12 12H108c-6.6 0-12-5.4-12-12v-8c0-6.6 5.4-12 12-12h168c6.6 0 12 5.4 12 12v8zm0-72v8c0 6.6-5.4 12-12 12H108c-6.6 0-12-5.4-12-12v-8c0-6.6 5.4-12 12-12h168c6.6 0 12 5.4 12 12zm96-114.1v6.1H256V0h6.1c6.4 0 12.5 2.5 17 7l97.9 98c4.5 4.5 7 10.6 7 16.9z"></path></svg>
|
Before Width: | Height: | Size: 713 B |
|
@ -1 +0,0 @@
|
|||
<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="lock" class="svg-inline--fa fa-lock fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M400 224h-24v-72C376 68.2 307.8 0 224 0S72 68.2 72 152v72H48c-26.5 0-48 21.5-48 48v192c0 26.5 21.5 48 48 48h352c26.5 0 48-21.5 48-48V272c0-26.5-21.5-48-48-48zm-104 0H152v-72c0-39.7 32.3-72 72-72s72 32.3 72 72v72z"></path></svg>
|
Before Width: | Height: | Size: 440 B |
|
@ -1 +0,0 @@
|
|||
<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="sort-numeric-down" class="svg-inline--fa fa-sort-numeric-down fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M304 96h16v64h-16a16 16 0 0 0-16 16v32a16 16 0 0 0 16 16h96a16 16 0 0 0 16-16v-32a16 16 0 0 0-16-16h-16V48a16 16 0 0 0-16-16h-48a16 16 0 0 0-14.29 8.83l-16 32A16 16 0 0 0 304 96zm26.15 162.91a79 79 0 0 0-55 54.17c-14.25 51.05 21.21 97.77 68.85 102.53a84.07 84.07 0 0 1-20.85 12.91c-7.57 3.4-10.8 12.47-8.18 20.34l9.9 20c2.87 8.63 12.53 13.49 20.9 9.91 58-24.76 86.25-61.61 86.25-132V336c-.02-51.21-48.4-91.34-101.85-77.09zM352 356a20 20 0 1 1 20-20 20 20 0 0 1-20 20zm-176-4h-48V48a16 16 0 0 0-16-16H80a16 16 0 0 0-16 16v304H16c-14.19 0-21.36 17.24-11.29 27.31l80 96a16 16 0 0 0 22.62 0l80-96C197.35 369.26 190.22 352 176 352z"></path></svg>
|
Before Width: | Height: | Size: 880 B |
|
@ -1 +0,0 @@
|
|||
<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="sort-numeric-down-alt" class="svg-inline--fa fa-sort-numeric-down-alt fa-w-14" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512"><path fill="currentColor" d="M176 352h-48V48a16 16 0 0 0-16-16H80a16 16 0 0 0-16 16v304H16c-14.19 0-21.36 17.24-11.29 27.31l80 96a16 16 0 0 0 22.62 0l80-96C197.35 369.26 190.22 352 176 352zm224 64h-16V304a16 16 0 0 0-16-16h-48a16 16 0 0 0-14.29 8.83l-16 32A16 16 0 0 0 304 352h16v64h-16a16 16 0 0 0-16 16v32a16 16 0 0 0 16 16h96a16 16 0 0 0 16-16v-32a16 16 0 0 0-16-16zM330.17 34.91a79 79 0 0 0-55 54.17c-14.27 51.05 21.19 97.77 68.83 102.53a84.07 84.07 0 0 1-20.85 12.91c-7.57 3.4-10.8 12.47-8.18 20.34l9.9 20c2.87 8.63 12.53 13.49 20.9 9.91 58-24.77 86.25-61.61 86.25-132V112c-.02-51.21-48.4-91.34-101.85-77.09zM352 132a20 20 0 1 1 20-20 20 20 0 0 1-20 20z"></path></svg>
|
Before Width: | Height: | Size: 891 B |
|
@ -1,99 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
||||
xmlns:cc="http://creativecommons.org/ns#"
|
||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
||||
xmlns:svg="http://www.w3.org/2000/svg"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
id="svg2403"
|
||||
sodipodi:version="0.32"
|
||||
inkscape:version="0.92.4 (5da689c313, 2019-01-14)"
|
||||
width="900"
|
||||
height="360"
|
||||
xml:space="preserve"
|
||||
sodipodi:docname="open-access.svg"
|
||||
inkscape:output_extension="org.inkscape.output.svg.inkscape"
|
||||
version="1.1"><metadata
|
||||
id="metadata2408"><rdf:RDF><cc:Work
|
||||
rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type
|
||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" /></cc:Work></rdf:RDF></metadata><defs
|
||||
id="defs2406"><inkscape:perspective
|
||||
sodipodi:type="inkscape:persp3d"
|
||||
inkscape:vp_x="0 : 526.18109 : 1"
|
||||
inkscape:vp_y="0 : 1000 : 0"
|
||||
inkscape:vp_z="744.09448 : 526.18109 : 1"
|
||||
inkscape:persp3d-origin="372.04724 : 350.78739 : 1"
|
||||
id="perspective2410" /><clipPath
|
||||
clipPathUnits="userSpaceOnUse"
|
||||
id="clipPath2418"><path
|
||||
d="M 0,288 L 720,288 L 720,0 L 0,0 L 0,288 z"
|
||||
id="path2420" /></clipPath></defs><sodipodi:namedview
|
||||
inkscape:window-height="1046"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:pageshadow="2"
|
||||
inkscape:pageopacity="0.0"
|
||||
guidetolerance="10.0"
|
||||
gridtolerance="10.0"
|
||||
objecttolerance="10.0"
|
||||
borderopacity="1.0"
|
||||
bordercolor="#666666"
|
||||
pagecolor="#ffffff"
|
||||
id="base"
|
||||
showgrid="false"
|
||||
inkscape:zoom="0.85481352"
|
||||
inkscape:cx="738.53938"
|
||||
inkscape:cy="134.98226"
|
||||
inkscape:window-x="0"
|
||||
inkscape:window-y="16"
|
||||
inkscape:current-layer="g2416"
|
||||
inkscape:window-maximized="0" /><g
|
||||
id="g2412"
|
||||
inkscape:groupmode="layer"
|
||||
inkscape:label="Open Access logo"
|
||||
transform="matrix(1.25,0,0,-1.25,0,360)"><g
|
||||
id="g2414"><g
|
||||
id="g2416"
|
||||
clip-path="url(#clipPath2418)"><path
|
||||
d="M 262.883,200.896 L 262.883,192.05 L 288.822,192.05 L 288.822,200.896 C 288.822,222.308 306.243,239.727 327.652,239.727 C 349.061,239.727 366.481,222.308 366.481,200.896 L 366.481,136.911 L 392.42,136.911 L 392.42,200.896 C 392.42,236.609 363.364,265.665 327.652,265.665 C 291.941,265.665 262.883,236.609 262.883,200.896"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2424" /><path
|
||||
d="M 349.153,99.568 C 349.153,87.752 339.573,78.172 327.754,78.172 C 315.936,78.172 306.355,87.752 306.355,99.568 C 306.355,111.391 315.936,120.972 327.754,120.972 C 339.573,120.972 349.153,111.391 349.153,99.568"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2426" /><path
|
||||
d="M 277.068,99.799 C 277.068,127.61 299.695,150.235 327.505,150.235 C 355.314,150.235 377.938,127.61 377.938,99.799 C 377.938,71.991 355.314,49.362 327.505,49.362 C 299.695,49.362 277.068,71.991 277.068,99.799 M 251.13,99.799 C 251.13,57.691 285.395,23.426 327.505,23.426 C 369.616,23.426 403.878,57.691 403.878,99.799 C 403.878,141.913 369.616,176.174 327.505,176.174 C 285.395,176.174 251.13,141.913 251.13,99.799"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2428" /><path
|
||||
d="M 51.742,106.854 C 59.69,106.854 64.241,114.339 64.241,124.521 C 64.241,133.86 59.845,141.889 51.742,141.889 C 43.639,141.889 39.085,134.019 39.085,124.293 C 39.085,114.49 43.793,106.854 51.666,106.854 L 51.742,106.854 z M 51.202,97.513 C 35.846,97.513 26.741,109.166 26.741,123.981 C 26.741,139.575 36.771,151.221 52.051,151.221 C 68.023,151.221 76.742,139.265 76.742,124.911 C 76.742,107.776 66.478,97.513 51.277,97.513 L 51.202,97.513 z"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2430" /><path
|
||||
d="M 98.851,126.071 C 99.852,125.834 101.088,125.761 102.782,125.761 C 109.036,125.761 112.892,128.925 112.892,134.17 C 112.892,138.955 109.651,141.809 103.711,141.809 C 101.319,141.809 99.698,141.656 98.851,141.427 L 98.851,126.071 z M 87.198,149.679 C 90.823,150.297 95.918,150.758 103.095,150.758 C 110.345,150.758 115.594,149.375 119.066,146.515 C 122.386,143.972 124.546,139.647 124.546,134.554 C 124.546,129.386 122.925,125.064 119.838,122.209 C 115.824,118.506 109.807,116.731 102.861,116.731 C 101.319,116.731 99.929,116.807 98.851,117.043 L 98.851,98.363 L 87.198,98.363 L 87.198,149.679 z"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2432" /><path
|
||||
d="M 165.711,120.357 L 146.572,120.357 L 146.572,108.014 L 167.948,108.014 L 167.948,98.363 L 134.845,98.363 L 134.845,150.376 L 166.868,150.376 L 166.868,140.729 L 146.572,140.729 L 146.572,129.926 L 165.711,129.926 L 165.711,120.357 z"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2434" /><path
|
||||
d="M 181.763,98.363 L 181.763,150.376 L 195.497,150.376 L 206.298,131.317 C 209.389,125.834 212.395,119.356 214.787,113.49 L 214.944,113.49 C 214.25,120.357 214.015,127.378 214.015,135.252 L 214.015,150.376 L 224.82,150.376 L 224.82,98.363 L 212.474,98.363 L 201.362,118.427 C 198.274,124.063 194.802,130.697 192.333,136.793 L 192.024,136.793 C 192.408,129.849 192.564,122.52 192.564,114.031 L 192.564,98.363 L 181.763,98.363 z"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2436" /><path
|
||||
d="M 450.392,120.509 L 447.153,131.547 C 446.304,134.632 445.375,138.566 444.604,141.579 L 444.378,141.579 C 443.68,138.494 442.83,134.56 442.058,131.547 L 438.898,120.509 L 450.392,120.509 z M 437.196,111.718 L 433.573,98.363 L 421.377,98.363 L 437.273,150.376 L 452.634,150.376 L 468.833,98.363 L 456.179,98.363 L 452.165,111.718 L 437.196,111.718 z"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2438" /><path
|
||||
d="M 513.19,99.755 C 511.107,98.752 506.17,97.513 499.918,97.513 C 481.939,97.513 472.678,108.784 472.678,123.672 C 472.678,141.427 485.336,151.226 501.075,151.226 C 507.175,151.226 511.802,149.988 513.886,148.907 L 511.419,139.647 C 509.1,140.648 505.788,141.579 501.539,141.579 C 492.278,141.579 485.023,135.944 485.023,124.293 C 485.023,113.879 491.198,107.315 501.618,107.315 C 505.239,107.315 509.1,108.014 511.493,109.015 L 513.19,99.755 z"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2440" /><path
|
||||
d="M 559.468,99.755 C 557.383,98.752 552.448,97.513 546.192,97.513 C 528.215,97.513 518.954,108.784 518.954,123.672 C 518.954,141.427 531.612,151.226 547.353,151.226 C 553.45,151.226 558.078,149.988 560.162,148.907 L 557.693,139.647 C 555.38,140.648 552.057,141.579 547.814,141.579 C 538.555,141.579 531.301,135.944 531.301,124.293 C 531.301,113.879 537.476,107.315 547.891,107.315 C 551.521,107.315 555.38,108.014 557.769,109.015 L 559.468,99.755 z"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2442" /><path
|
||||
d="M 603.989,120.357 L 584.852,120.357 L 584.852,108.014 L 606.229,108.014 L 606.229,98.363 L 573.124,98.363 L 573.124,150.376 L 605.151,150.376 L 605.151,140.729 L 584.852,140.729 L 584.852,129.926 L 603.989,129.926 L 603.989,120.357 z"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2444" /><path
|
||||
d="M 620.428,110.406 C 623.513,108.784 628.452,107.162 633.468,107.162 C 638.867,107.162 641.726,109.398 641.726,112.87 C 641.726,116.033 639.258,117.965 633.005,120.126 C 624.363,123.211 618.65,127.996 618.65,135.635 C 618.65,144.511 626.137,151.226 638.329,151.226 C 644.271,151.226 648.513,150.066 651.602,148.604 L 648.977,139.186 C 646.973,140.188 643.187,141.656 638.179,141.656 C 633.079,141.656 630.613,139.265 630.613,136.641 C 630.613,133.321 633.468,131.858 640.259,129.235 C 649.441,125.834 653.688,121.056 653.688,113.646 C 653.688,104.921 647.052,97.513 632.774,97.513 C 626.833,97.513 620.965,99.134 618.035,100.756 L 620.428,110.406 z"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2446" /><path
|
||||
d="M 664.835,110.406 C 667.922,108.784 672.858,107.162 677.873,107.162 C 683.277,107.162 686.133,109.398 686.133,112.87 C 686.133,116.033 683.662,117.965 677.409,120.126 C 668.771,123.211 663.061,127.996 663.061,135.635 C 663.061,144.511 670.54,151.226 682.734,151.226 C 688.678,151.226 692.922,150.066 696.009,148.604 L 693.386,139.186 C 691.381,140.188 687.6,141.656 682.582,141.656 C 677.488,141.656 675.021,139.265 675.021,136.641 C 675.021,133.321 677.873,131.858 684.665,129.235 C 693.846,125.834 698.091,121.056 698.091,113.646 C 698.091,104.921 691.454,97.513 677.178,97.513 C 671.24,97.513 665.373,99.134 662.444,100.756 L 664.835,110.406 z"
|
||||
style="fill:#f68212;fill-opacity:1;fill-rule:nonzero;stroke:none"
|
||||
id="path2448" /></g></g></g></svg>
|
Before Width: | Height: | Size: 9.1 KiB |
55
src/decode.go
Normal file
|
@ -0,0 +1,55 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type conversion struct {
|
||||
from string
|
||||
to string
|
||||
}
|
||||
|
||||
func decodeTitle(title string) string {
|
||||
for _, convert := range []conversion{
|
||||
{`\#`, "#"},
|
||||
{`--`, `–`},
|
||||
{"``", "“"},
|
||||
{"''", "”"},
|
||||
{"'", "’"}, // U+2019
|
||||
{`$\cdot$`, `·`}, // U+00B7.
|
||||
} {
|
||||
title = strings.Replace(title, convert.from, convert.to, -1)
|
||||
}
|
||||
|
||||
// Get rid of all curly brackets. We're displaying titles without changing
|
||||
// their casing.
|
||||
title = strings.ReplaceAll(title, "{", "")
|
||||
title = strings.ReplaceAll(title, "}", "")
|
||||
|
||||
return title
|
||||
}
|
||||
|
||||
func decodeAuthors(authors string) string {
|
||||
for _, convert := range []conversion{
|
||||
{"'", "’"},
|
||||
} {
|
||||
authors = strings.Replace(authors, convert.from, convert.to, -1)
|
||||
}
|
||||
// For simplicity, we expect authors to be formatted as "John Doe" instead
|
||||
// of "Doe, John".
|
||||
if strings.Contains(authors, ",") {
|
||||
log.Fatalf("author %q contains a comma", authors)
|
||||
}
|
||||
authorSlice := strings.Split(authors, " and ")
|
||||
return strings.Join(authorSlice, ", ")
|
||||
}
|
||||
|
||||
func decodeProceedings(proceedings string) string {
|
||||
for _, convert := range []conversion{
|
||||
{`\&`, "&"},
|
||||
} {
|
||||
proceedings = strings.Replace(proceedings, convert.from, convert.to, -1)
|
||||
}
|
||||
return proceedings
|
||||
}
|
81
src/decode_test.go
Normal file
|
@ -0,0 +1,81 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestToString(t *testing.T) {
|
||||
testCases := []conversion{
|
||||
{
|
||||
from: "Title",
|
||||
to: "Title",
|
||||
},
|
||||
{
|
||||
from: "This is a {Title}",
|
||||
to: "This is a Title",
|
||||
},
|
||||
{
|
||||
from: "This is a {Title}",
|
||||
to: "This is a Title",
|
||||
},
|
||||
{
|
||||
from: `{\#h00t}: Censorship Resistant Microblogging`,
|
||||
to: `#h00t: Censorship Resistant Microblogging`,
|
||||
},
|
||||
{
|
||||
from: "``Good'' Worms and Human Rights",
|
||||
to: "“Good” Worms and Human Rights",
|
||||
},
|
||||
{
|
||||
from: "An Analysis of {China}'s ``{Great Cannon}''",
|
||||
to: "An Analysis of China’s “Great Cannon”",
|
||||
},
|
||||
{
|
||||
from: `lib$\cdot$erate, (n):`,
|
||||
to: `lib·erate, (n):`,
|
||||
},
|
||||
{
|
||||
from: "Well -- Exploring the {Great} {Firewall}'s Poisoned {DNS}",
|
||||
to: "Well – Exploring the Great Firewall’s Poisoned DNS",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
to := decodeTitle(test.from)
|
||||
if to != test.to {
|
||||
t.Errorf("Expected\n%s\ngot\n%s", test.to, to)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeAuthors(t *testing.T) {
|
||||
testCases := []conversion{
|
||||
{ // Multiple authors should be separated by commas.
|
||||
from: "John Doe and Jane Doe",
|
||||
to: "John Doe, Jane Doe",
|
||||
},
|
||||
{ // Single authors should remain as-is.
|
||||
from: "John Doe",
|
||||
to: "John Doe",
|
||||
},
|
||||
{ // Single-name authors should remain as-is.
|
||||
from: "John and Jane",
|
||||
to: "John, Jane",
|
||||
},
|
||||
{ // Non-ASCII characters should be unaffected.
|
||||
from: "Jóhn Doe",
|
||||
to: "Jóhn Doe",
|
||||
},
|
||||
{ // Apostrophes should be replaced with the right single quote.
|
||||
from: "John O'Brian",
|
||||
to: "John O’Brian",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testCases {
|
||||
to := decodeAuthors(test.from)
|
||||
if to != test.to {
|
||||
t.Errorf("Expected\n%s\ngot\n%s", test.to, to)
|
||||
}
|
||||
}
|
||||
}
|
11
src/footer.go
Normal file
|
@ -0,0 +1,11 @@
|
|||
package main
|
||||
|
||||
func footer() string {
|
||||
return `<div id="footer">
|
||||
Icons taken without modification from
|
||||
<a href="https://fontawesome.com/license">Font Awesome</a>.
|
||||
</div>
|
||||
|
||||
</body>
|
||||
</html>`
|
||||
}
|
5
src/go.mod
Normal file
|
@ -0,0 +1,5 @@
|
|||
module censorbib-go
|
||||
|
||||
go 1.21.3
|
||||
|
||||
require github.com/nickng/bibtex v1.3.0
|
2
src/go.sum
Normal file
|
@ -0,0 +1,2 @@
|
|||
github.com/nickng/bibtex v1.3.0 h1:iv0408z8Xe+FEVquJUo8eraXnhrAF0e+2/WayPcism8=
|
||||
github.com/nickng/bibtex v1.3.0/go.mod h1:4BJ3ka/ZjGVXcHOlkzlRonex6U17L3kW6ICEsygP2bg=
|
|
@ -1,13 +1,21 @@
|
|||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
|
||||
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
|
||||
package main
|
||||
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
import (
|
||||
"bytes"
|
||||
"log"
|
||||
"text/template"
|
||||
"time"
|
||||
)
|
||||
|
||||
const headerTemplate = `
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
|
||||
<title>The Internet censorship bibliography</title>
|
||||
<link rel="icon" type="image/svg+xml" href="favicon.svg">
|
||||
<style type="text/css">
|
||||
<style>
|
||||
body {
|
||||
font-family: Roboto,Helvetica,sans-serif;
|
||||
background: #ddd;
|
||||
|
@ -30,12 +38,9 @@
|
|||
ul {
|
||||
border-radius: 10px;
|
||||
border:1px solid #c0c0c0;
|
||||
background: #efefef;
|
||||
background: #f5f5f5;
|
||||
box-shadow: 2px 2px 5px #bbb;
|
||||
}
|
||||
ul.a {
|
||||
list-style-image: url('donate-icon.svg');
|
||||
}
|
||||
a:link {
|
||||
color:#0b61a4;
|
||||
text-decoration:none;
|
||||
|
@ -92,7 +97,7 @@
|
|||
}
|
||||
#left-header {
|
||||
flex: 4;
|
||||
background: #efefef;
|
||||
background: #f5f5f5;
|
||||
margin-right: 0.5em;
|
||||
border-radius: 10px;
|
||||
border: 1px solid #c0c0c0;
|
||||
|
@ -101,9 +106,9 @@
|
|||
}
|
||||
#right-header {
|
||||
flex: 1;
|
||||
background: #efefef;
|
||||
background: #f5f5f5;
|
||||
margin-left: 0.5em;
|
||||
background: #333 url('img/research-power-tools-cover.jpg') no-repeat;
|
||||
background: #333 url('assets/research-power-tools-cover.jpg') no-repeat;
|
||||
background-size: 100%;
|
||||
}
|
||||
.round-shadow {
|
||||
|
@ -151,7 +156,7 @@
|
|||
|
||||
<div class="flex-row">
|
||||
|
||||
<div id="left-header" class="flex-column" class="round-shadow">
|
||||
<div id="left-header" class="flex-column round-shadow">
|
||||
|
||||
<div id="title-box">
|
||||
<h1>Selected Research Papers<br>in Internet Censorship</h1>
|
||||
|
@ -166,12 +171,9 @@
|
|||
systems, or by measuring how censorship works. The icons next to each
|
||||
paper make it easy to download, cite, and link to papers. If you think
|
||||
I missed a paper,
|
||||
<a href="https://nymity.ch/contact.txt">let me know</a>.
|
||||
You can sort papers by
|
||||
<a href="year.html">year</a>,
|
||||
<a href="year_reverse.html">reverse year</a> (default),
|
||||
<a href="author.html">author</a>, and
|
||||
<a href="author_reverse.html">reverse author</a>.
|
||||
<a href="https://github.com/NullHypothesis/censorbib">
|
||||
make a pull request
|
||||
</a>.
|
||||
Finally, the
|
||||
<a href="https://github.com/net4people/bbs/issues">net4people/bbs forum</a>
|
||||
has reading groups for many of the papers listed below.
|
||||
|
@ -179,20 +181,12 @@
|
|||
|
||||
<div id="censorbib-links">
|
||||
<div class="menu-item">
|
||||
<img class="top-icon" src="img/lock-icon.svg" alt="onion service icon"/>
|
||||
<a href="http://putnst3yv7k6vvb3avdqgdutrz3kaufitaiwbjhjox7o3daakr43fhad.onion">Onion service mirror</a>
|
||||
</div>
|
||||
<div class="menu-item">
|
||||
<img class="top-icon" src="img/code-icon.svg" alt="source code icon"/>
|
||||
<img class="top-icon" src="assets/code-icon.svg" alt="source code icon">
|
||||
<a href="https://github.com/NullHypothesis/censorbib">CensorBib code</a>
|
||||
</div>
|
||||
<div class="menu-item">
|
||||
<img class="top-icon" src="img/update-icon.svg" alt="update icon"/>
|
||||
<a href="https://github.com/NullHypothesis/censorbib/commits/master">Last update: 2024-02-25</a>
|
||||
</div>
|
||||
<div class="menu-item">
|
||||
<img class="top-icon" src="img/donate-icon.svg" alt="donate icon"/>
|
||||
<a href="https://nymity.ch/donate.html">Donate</a>
|
||||
<img class="top-icon" src="assets/update-icon.svg" alt="update icon">
|
||||
<a href="https://github.com/NullHypothesis/censorbib/commits/master">Last update: {{.Date}}</a>
|
||||
</div>
|
||||
</div> <!-- censorbib-links -->
|
||||
|
||||
|
@ -214,6 +208,21 @@
|
|||
|
||||
</div> <!-- right-header -->
|
||||
|
||||
</div>
|
||||
</div>`
|
||||
|
||||
<body>
|
||||
func header() string {
|
||||
tmpl, err := template.New("header").Parse(headerTemplate)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
i := struct {
|
||||
Date string
|
||||
}{
|
||||
Date: time.Now().UTC().Format(time.DateOnly),
|
||||
}
|
||||
buf := bytes.NewBufferString("")
|
||||
if err = tmpl.Execute(buf, i); err != nil {
|
||||
log.Fatalf("Error executing template: %v", err)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
130
src/html.go
Normal file
|
@ -0,0 +1,130 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/nickng/bibtex"
|
||||
)
|
||||
|
||||
// sortByYear returns the keys of yearToEntries sorted in descending
// lexicographic order, i.e., the newest year comes first.
func sortByYear(yearToEntries map[string][]string) []string {
	years := make([]string, 0, len(yearToEntries))
	for year := range yearToEntries {
		years = append(years, year)
	}
	sort.Slice(years, func(i, j int) bool {
		return years[i] > years[j]
	})
	return years
}
|
||||
|
||||
// appendIfNotEmpty returns slice with s appended, unless s is the
// empty string, in which case slice is returned unchanged.
func appendIfNotEmpty(slice []string, s string) []string {
	if s == "" {
		return slice
	}
	return append(slice, s)
}
|
||||
|
||||
func makeBib(to io.Writer, bibEntries []bibEntry) {
|
||||
yearToEntries := make(map[string][]string)
|
||||
|
||||
for _, entry := range bibEntries {
|
||||
y := entry.Fields["year"].String()
|
||||
yearToEntries[y] = append(yearToEntries[y], makeBibEntry(&entry))
|
||||
}
|
||||
|
||||
sortedYears := sortByYear(yearToEntries)
|
||||
for _, year := range sortedYears {
|
||||
fmt.Fprintln(to, "<ul>")
|
||||
for _, entry := range yearToEntries[year] {
|
||||
fmt.Fprint(to, entry)
|
||||
}
|
||||
fmt.Fprintln(to, "</ul>")
|
||||
}
|
||||
}
|
||||
|
||||
func makeBibEntry(entry *bibEntry) string {
|
||||
s := []string{
|
||||
fmt.Sprintf("<li id='%s'>", entry.CiteName),
|
||||
`<div>`,
|
||||
makeBibEntryTitle(entry),
|
||||
`</div>`,
|
||||
`<div>`,
|
||||
makeBibEntryAuthors(entry),
|
||||
`</div>`,
|
||||
`<span class="other">`,
|
||||
makeBibEntryMisc(entry),
|
||||
`</span>`,
|
||||
`</li>`,
|
||||
}
|
||||
return strings.Join(s, "\n")
|
||||
}
|
||||
|
||||
func makeBibEntryTitle(entry *bibEntry) string {
|
||||
// Paper title is on the left side.
|
||||
title := []string{
|
||||
`<span class="paper">`,
|
||||
decodeTitle(entry.Fields["title"].String()),
|
||||
`</span>`,
|
||||
}
|
||||
// Icons are on the right side.
|
||||
icons := []string{
|
||||
`<span class="icons">`,
|
||||
fmt.Sprintf("<a href='%s'>", entry.Fields["url"].String()),
|
||||
`<img class="icon" title="Download paper" src="assets/pdf-icon.svg" alt="Download icon">`,
|
||||
`</a>`,
|
||||
fmt.Sprintf("<a href='https://censorbib.nymity.ch/pdf/%s.pdf'>", entry.CiteName),
|
||||
`<img class="icon" title="Download cached paper" src="assets/cache-icon.svg" alt="Cached download icon">`,
|
||||
`</a>`,
|
||||
fmt.Sprintf("<a href='https://github.com/NullHypothesis/censorbib/blob/master/references.bib#L%d'>", entry.lineNum),
|
||||
`<img class="icon" title="Download BibTeX" src="assets/bibtex-icon.svg" alt="BibTeX download icon">`,
|
||||
`</a>`,
|
||||
fmt.Sprintf("<a href='#%s'>", entry.CiteName),
|
||||
`<img class="icon" title="Link to paper" src="assets/link-icon.svg" alt="Paper link icon">`,
|
||||
`</a>`,
|
||||
`</span>`,
|
||||
}
|
||||
return strings.Join(append(title, icons...), "\n")
|
||||
}
|
||||
|
||||
func makeBibEntryAuthors(entry *bibEntry) string {
|
||||
s := []string{
|
||||
`<span class="author">`,
|
||||
decodeAuthors(entry.Fields["author"].String()),
|
||||
`</span>`,
|
||||
}
|
||||
return strings.Join(s, "\n")
|
||||
}
|
||||
|
||||
func makeBibEntryMisc(entry *bibEntry) string {
|
||||
s := []string{}
|
||||
s = appendIfNotEmpty(s, makeBibEntryVenue(entry))
|
||||
s = appendIfNotEmpty(s, toStr(entry.Fields["year"]))
|
||||
s = appendIfNotEmpty(s, toStr(entry.Fields["publisher"]))
|
||||
return strings.Join(s, ", ")
|
||||
}
|
||||
|
||||
func makeBibEntryVenue(entry *bibEntry) string {
|
||||
var (
|
||||
prefix string
|
||||
bs bibtex.BibString
|
||||
ok bool
|
||||
)
|
||||
|
||||
if bs, ok = entry.Fields["booktitle"]; ok {
|
||||
prefix = "In Proc. of: "
|
||||
} else if bs, ok = entry.Fields["journal"]; ok {
|
||||
prefix = "In: "
|
||||
} else {
|
||||
return "" // Some entries are self-published.
|
||||
}
|
||||
|
||||
s := []string{
|
||||
prefix,
|
||||
`<span class="venue">`,
|
||||
decodeProceedings(toStr(bs)),
|
||||
`</span>`,
|
||||
}
|
||||
|
||||
return strings.Join(s, "")
|
||||
}
|
113
src/main.go
Normal file
|
@ -0,0 +1,113 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/nickng/bibtex"
|
||||
)
|
||||
|
||||
// Matches e.g.: @inproceedings{Doe2024a,
|
||||
var re = regexp.MustCompile(`@[a-z]*\{([A-Za-z\-]*[0-9]{4}[a-z]),`)
|
||||
|
||||
// Map a cite name (e.g., Doe2024a) to its line number in the .bib file. All
|
||||
// cite names are unique.
|
||||
type entryToLineFunc func(string) int
|
||||
|
||||
// Augment bibtex.BibEntry with the entry's line number in the .bib file.
|
||||
type bibEntry struct {
|
||||
bibtex.BibEntry
|
||||
lineNum int
|
||||
}
|
||||
|
||||
func toStr(b bibtex.BibString) string {
|
||||
if b == nil {
|
||||
return ""
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func parseBibFile(path string) []bibEntry {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
bib, err := bibtex.Parse(file)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Augment our BibTeX entries with their respective line numbers in the .bib
|
||||
// file. This is necessary to create the "Download BibTeX" links.
|
||||
lineOf := buildEntryToLineFunc(path)
|
||||
bibEntries := []bibEntry{}
|
||||
for _, entry := range bib.Entries {
|
||||
bibEntries = append(bibEntries, bibEntry{
|
||||
BibEntry: *entry,
|
||||
lineNum: lineOf(entry.CiteName),
|
||||
})
|
||||
}
|
||||
|
||||
return bibEntries
|
||||
}
|
||||
|
||||
func buildEntryToLineFunc(path string) entryToLineFunc {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
sc := bufio.NewScanner(file)
|
||||
entryToLine := make(map[string]int)
|
||||
line := 0
|
||||
for sc.Scan() {
|
||||
line++
|
||||
s := sc.Text()
|
||||
if !strings.HasPrefix(s, "@") {
|
||||
continue
|
||||
}
|
||||
entry := parseCiteName(s) // E.g., Doe2024a
|
||||
entryToLine[entry] = line
|
||||
}
|
||||
if err := sc.Err(); err != nil {
|
||||
log.Fatalf("scan file error: %v", err)
|
||||
}
|
||||
|
||||
return func(entry string) int {
|
||||
if line, ok := entryToLine[entry]; ok {
|
||||
return line
|
||||
}
|
||||
log.Fatalf("could not find line number for cite name: %s", entry)
|
||||
return -1
|
||||
}
|
||||
}
|
||||
|
||||
func parseCiteName(line string) string {
|
||||
matches := re.FindStringSubmatch(line)
|
||||
if len(matches) != 2 {
|
||||
log.Fatalf("failed to extract cite name of: %s", line)
|
||||
}
|
||||
return matches[1]
|
||||
}
|
||||
|
||||
func run(w io.Writer, bibEntries []bibEntry) {
|
||||
fmt.Fprint(w, header())
|
||||
makeBib(w, bibEntries)
|
||||
fmt.Fprint(w, footer())
|
||||
}
|
||||
|
||||
func main() {
|
||||
path := flag.String("path", "", "Path to .bib file.")
|
||||
flag.Parse()
|
||||
if *path == "" {
|
||||
log.Fatal("No path to .bib file provided.")
|
||||
}
|
||||
run(os.Stdout, parseBibFile(*path))
|
||||
log.Println("Successfully created bibliography.")
|
||||
}
|
43
src/main_test.go
Normal file
|
@ -0,0 +1,43 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/nickng/bibtex"
|
||||
)
|
||||
|
||||
func mustParse(t *testing.T, s string) bibEntry {
|
||||
t.Helper()
|
||||
bib, err := bibtex.Parse(strings.NewReader(s))
|
||||
if err != nil {
|
||||
t.Fatalf("failed to parse bibtex: %v", err)
|
||||
}
|
||||
return bibEntry{
|
||||
BibEntry: *bib.Entries[0],
|
||||
lineNum: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// TestRun renders a single-entry bibliography with makeBib and checks
// the output's framing: it must start with the opening "<ul>" list tag
// and end with "</ul>\n" (makeBib prints the closing tag via Fprintln).
// The entry's content itself is not asserted here.
func TestRun(t *testing.T) {
	buf := bytes.NewBufferString("")
	entry := mustParse(t, `@inproceedings{Almutairi2024a,
author = {Sultan Almutairi and Yogev Neumann and Khaled Harfoush},
title = {Fingerprinting {VPNs} with Custom Router Firmware: A New Censorship Threat Model},
booktitle = {Consumer Communications \& Networking Conference},
publisher = {IEEE},
year = {2024},
url = {https://censorbib.nymity.ch/pdf/Almutairi2024a.pdf},
}`)

	makeBib(buf, []bibEntry{entry})

	bufStr := buf.String()
	if !strings.HasPrefix(bufStr, "<ul>") {
		t.Errorf("expected <ul> but got %q...", bufStr[:10])
	}
	if !strings.HasSuffix(bufStr, "</ul>\n") {
		t.Errorf("expected </ul> but got %q", bufStr[len(bufStr)-10:])
	}
}
|
24
src/vendor/github.com/nickng/bibtex/.gitignore
generated
vendored
Normal file
|
@ -0,0 +1,24 @@
|
|||
# Compiled Object files, Static and Dynamic libs (Shared Objects)
|
||||
*.o
|
||||
*.a
|
||||
*.so
|
||||
|
||||
# Folders
|
||||
_obj
|
||||
_test
|
||||
|
||||
# Architecture specific extensions/prefixes
|
||||
*.[568vq]
|
||||
[568vq].out
|
||||
|
||||
*.cgo1.go
|
||||
*.cgo2.c
|
||||
_cgo_defun.c
|
||||
_cgo_gotypes.go
|
||||
_cgo_export.*
|
||||
|
||||
_testmain.go
|
||||
|
||||
*.exe
|
||||
*.test
|
||||
*.prof
|
201
src/vendor/github.com/nickng/bibtex/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
17
src/vendor/github.com/nickng/bibtex/README.md
generated
vendored
Normal file
|
@ -0,0 +1,17 @@
|
|||
# bibtex  [](https://pkg.go.dev/github.com/nickng/bibtex)
|
||||
|
||||
## `nickng/bibtex` is a bibtex parser and library for Go.
|
||||
|
||||
The bibtex format is not standardised, this parser follows the descriptions found
|
||||
[here](http://maverick.inria.fr/~Xavier.Decoret/resources/xdkbibtex/bibtex_summary.html).
|
||||
Please file any issues with a minimal working example.
|
||||
|
||||
To get:
|
||||
|
||||
go get -u github.com/nickng/bibtex/...
|
||||
|
||||
This will also install `prettybib`, a bibtex pretty printer.
|
||||
To parse and pretty print a bibtex file, for example:
|
||||
|
||||
cd $GOPATH/src/github.com/nickng/bibtex
|
||||
prettybib -in example/simple.bib
|
355
src/vendor/github.com/nickng/bibtex/bibtex.go
generated
vendored
Normal file
|
@ -0,0 +1,355 @@
|
|||
package bibtex
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
"time"
|
||||
)
|
||||
|
||||
// BibString is a segment of a bib string.
|
||||
type BibString interface {
|
||||
RawString() string // Internal representation.
|
||||
String() string // Displayed string.
|
||||
}
|
||||
|
||||
// BibVar is a string variable.
|
||||
type BibVar struct {
|
||||
Key string // Variable key.
|
||||
Value BibString // Variable actual value.
|
||||
}
|
||||
|
||||
// RawString is the internal representation of the variable.
|
||||
func (v *BibVar) RawString() string {
|
||||
return v.Key
|
||||
}
|
||||
|
||||
func (v *BibVar) String() string {
|
||||
return v.Value.String()
|
||||
}
|
||||
|
||||
// BibConst is a string constant.
|
||||
type BibConst string
|
||||
|
||||
// NewBibConst converts a constant string to BibConst.
|
||||
func NewBibConst(c string) BibConst {
|
||||
return BibConst(c)
|
||||
}
|
||||
|
||||
// RawString is the internal representation of the constant (i.e. the string).
|
||||
func (c BibConst) RawString() string {
|
||||
return fmt.Sprintf("{%s}", string(c))
|
||||
}
|
||||
|
||||
func (c BibConst) String() string {
|
||||
return string(c)
|
||||
}
|
||||
|
||||
// BibComposite is a composite string, may contain both variable and string.
|
||||
type BibComposite []BibString
|
||||
|
||||
// NewBibComposite creates a new composite with one element.
|
||||
func NewBibComposite(s BibString) *BibComposite {
|
||||
comp := &BibComposite{}
|
||||
return comp.Append(s)
|
||||
}
|
||||
|
||||
// Append adds a BibString to the composite
|
||||
func (c *BibComposite) Append(s BibString) *BibComposite {
|
||||
comp := append(*c, s)
|
||||
return &comp
|
||||
}
|
||||
|
||||
func (c *BibComposite) String() string {
|
||||
var buf bytes.Buffer
|
||||
for _, s := range *c {
|
||||
buf.WriteString(s.String())
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// RawString returns a raw (bibtex) representation of the composite string.
|
||||
func (c *BibComposite) RawString() string {
|
||||
var buf bytes.Buffer
|
||||
for i, comp := range *c {
|
||||
if i > 0 {
|
||||
buf.WriteString(" # ")
|
||||
}
|
||||
switch comp := comp.(type) {
|
||||
case *BibConst:
|
||||
buf.WriteString(comp.RawString())
|
||||
case *BibVar:
|
||||
buf.WriteString(comp.RawString())
|
||||
case *BibComposite:
|
||||
buf.WriteString(comp.RawString())
|
||||
}
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// BibEntry is a record of BibTeX record.
|
||||
type BibEntry struct {
|
||||
Type string
|
||||
CiteName string
|
||||
Fields map[string]BibString
|
||||
}
|
||||
|
||||
// NewBibEntry creates a new BibTeX entry.
|
||||
func NewBibEntry(entryType string, citeName string) *BibEntry {
|
||||
spaceStripper := strings.NewReplacer(" ", "")
|
||||
cleanedType := strings.ToLower(spaceStripper.Replace(entryType))
|
||||
cleanedName := spaceStripper.Replace(citeName)
|
||||
return &BibEntry{
|
||||
Type: cleanedType,
|
||||
CiteName: cleanedName,
|
||||
Fields: map[string]BibString{},
|
||||
}
|
||||
}
|
||||
|
||||
// AddField adds a field (key-value) to a BibTeX entry.
|
||||
func (entry *BibEntry) AddField(name string, value BibString) {
|
||||
entry.Fields[strings.TrimSpace(name)] = value
|
||||
}
|
||||
|
||||
// prettyStringConfig controls the formatting/printing behaviour of the BibTex's and BibEntry's PrettyPrint functions
|
||||
type prettyStringConfig struct {
|
||||
// priority controls the order in which fields are printed. Keys with lower values are printed earlier.
|
||||
//See keyOrderToPriorityMap
|
||||
priority map[string]int
|
||||
}
|
||||
|
||||
// keyOrderToPriorityMap is a helper function for WithKeyOrder, converting the user facing key order slice
|
||||
// into the map format that is internally used by the sort function
|
||||
func keyOrderToPriorityMap(keyOrder []string) map[string]int {
|
||||
priority := make(map[string]int)
|
||||
offset := len(keyOrder)
|
||||
for i, v := range keyOrder {
|
||||
priority[v] = i - offset
|
||||
}
|
||||
return priority
|
||||
}
|
||||
|
||||
var defaultPrettyStringConfig = prettyStringConfig{priority: keyOrderToPriorityMap([]string{"title", "author", "url"})}
|
||||
|
||||
// PrettyStringOpt allows to change the pretty print format for BibEntry and BibTex
|
||||
type PrettyStringOpt func(config *prettyStringConfig)
|
||||
|
||||
// WithKeyOrder changes the order in which BibEntry keys are printed to the order in which they appear in keyOrder
|
||||
func WithKeyOrder(keyOrder []string) PrettyStringOpt {
|
||||
return func(config *prettyStringConfig) {
|
||||
config.priority = make(map[string]int)
|
||||
offset := len(keyOrder)
|
||||
for i, v := range keyOrder {
|
||||
config.priority[v] = i - offset
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// prettyStringAppend appends the pretty print string for BibEntry using config to configure the formatting
|
||||
func (entry *BibEntry) prettyStringAppend(buf *bytes.Buffer, config prettyStringConfig) {
|
||||
fmt.Fprintf(buf, "@%s{%s,\n", entry.Type, entry.CiteName)
|
||||
|
||||
// Determine key order.
|
||||
keys := []string{}
|
||||
for key := range entry.Fields {
|
||||
keys = append(keys, key)
|
||||
}
|
||||
|
||||
sort.Slice(keys, func(i, j int) bool {
|
||||
pi, pj := config.priority[keys[i]], config.priority[keys[j]]
|
||||
return pi < pj || (pi == pj && keys[i] < keys[j])
|
||||
})
|
||||
|
||||
// Write fields.
|
||||
tw := tabwriter.NewWriter(buf, 1, 4, 1, ' ', 0)
|
||||
for _, key := range keys {
|
||||
value := entry.Fields[key].String()
|
||||
format := stringformat(value)
|
||||
fmt.Fprintf(tw, " %s\t=\t"+format+",\n", key, value)
|
||||
}
|
||||
tw.Flush()
|
||||
buf.WriteString("}\n")
|
||||
|
||||
}
|
||||
|
||||
// PrettyString pretty prints a BibEntry
|
||||
func (entry *BibEntry) PrettyString(options ...PrettyStringOpt) string {
|
||||
config := defaultPrettyStringConfig
|
||||
for _, option := range options {
|
||||
option(&config)
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
entry.prettyStringAppend(&buf, config)
|
||||
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// String returns a BibTex entry as a simplified BibTex string.
|
||||
func (entry *BibEntry) String() string {
|
||||
var bibtex bytes.Buffer
|
||||
bibtex.WriteString(fmt.Sprintf("@%s{%s,\n", entry.Type, entry.CiteName))
|
||||
for key, val := range entry.Fields {
|
||||
if i, err := strconv.Atoi(strings.TrimSpace(val.String())); err == nil {
|
||||
bibtex.WriteString(fmt.Sprintf(" %s = %d,\n", key, i))
|
||||
} else {
|
||||
bibtex.WriteString(fmt.Sprintf(" %s = {%s},\n", key, strings.TrimSpace(val.String())))
|
||||
}
|
||||
}
|
||||
bibtex.Truncate(bibtex.Len() - 2)
|
||||
bibtex.WriteString(fmt.Sprintf("\n}\n"))
|
||||
return bibtex.String()
|
||||
}
|
||||
|
||||
// RawString returns a BibTex entry data structure in its internal representation.
|
||||
func (entry *BibEntry) RawString() string {
|
||||
var bibtex bytes.Buffer
|
||||
bibtex.WriteString(fmt.Sprintf("@%s{%s,\n", entry.Type, entry.CiteName))
|
||||
for key, val := range entry.Fields {
|
||||
if i, err := strconv.Atoi(strings.TrimSpace(val.String())); err == nil {
|
||||
bibtex.WriteString(fmt.Sprintf(" %s = %d,\n", key, i))
|
||||
} else {
|
||||
bibtex.WriteString(fmt.Sprintf(" %s = %s,\n", key, val.RawString()))
|
||||
}
|
||||
}
|
||||
bibtex.Truncate(bibtex.Len() - 2)
|
||||
bibtex.WriteString(fmt.Sprintf("\n}\n"))
|
||||
return bibtex.String()
|
||||
}
|
||||
|
||||
// BibTex is a parsed BibTeX bibliography: its preambles, entries, and
// @string variable definitions.
type BibTex struct {
	Preambles []BibString        // @preamble blocks, in input order.
	Entries   []*BibEntry        // Items in a bibliography, in input order.
	StringVar map[string]*BibVar // Map from string variable name to its value.

	// defaultVars maps implicitly defined variable names (e.g. the
	// three-letter month abbreviations) to their expansions; these can
	// be referenced without a prior @string declaration.
	defaultVars map[string]string
}
|
||||
|
||||
// NewBibTex creates a new BibTex data structure.
|
||||
func NewBibTex() *BibTex {
|
||||
// Sets up some default vars
|
||||
months := map[string]time.Month{
|
||||
"jan": 1, "feb": 2, "mar": 3,
|
||||
"apr": 4, "may": 5, "jun": 6,
|
||||
"jul": 7, "aug": 8, "sep": 9,
|
||||
"oct": 10, "nov": 11, "dec": 12,
|
||||
}
|
||||
|
||||
defaultVars := make(map[string]string)
|
||||
for mth, month := range months {
|
||||
// TODO(nickng): i10n of month name in user's local language
|
||||
defaultVars[mth] = month.String()
|
||||
}
|
||||
|
||||
return &BibTex{
|
||||
Preambles: []BibString{},
|
||||
Entries: []*BibEntry{},
|
||||
StringVar: make(map[string]*BibVar),
|
||||
|
||||
defaultVars: defaultVars,
|
||||
}
|
||||
}
|
||||
|
||||
// AddPreamble adds a preamble to a bibtex.
|
||||
func (bib *BibTex) AddPreamble(p BibString) {
|
||||
bib.Preambles = append(bib.Preambles, p)
|
||||
}
|
||||
|
||||
// AddEntry adds an entry to the BibTeX data structure.
|
||||
func (bib *BibTex) AddEntry(entry *BibEntry) {
|
||||
bib.Entries = append(bib.Entries, entry)
|
||||
}
|
||||
|
||||
// AddStringVar adds a new string var (if does not exist).
|
||||
func (bib *BibTex) AddStringVar(key string, val BibString) {
|
||||
bib.StringVar[key] = &BibVar{Key: key, Value: val}
|
||||
}
|
||||
|
||||
// GetStringVar looks up a string by its key.
|
||||
func (bib *BibTex) GetStringVar(key string) *BibVar {
|
||||
if bv, ok := bib.StringVar[key]; ok {
|
||||
return bv
|
||||
}
|
||||
if v, ok := bib.getDefaultVar(key); ok {
|
||||
return v
|
||||
}
|
||||
// This is undefined.
|
||||
log.Fatalf("%s: %s", ErrUnknownStringVar, key)
|
||||
return nil
|
||||
}
|
||||
|
||||
// getDefaultVar is a fallback for looking up keys (e.g. 3-character month)
|
||||
// and use them even though it hasn't been defined in the bib.
|
||||
func (bib *BibTex) getDefaultVar(key string) (*BibVar, bool) {
|
||||
if v, ok := bib.defaultVars[key]; ok {
|
||||
// if found, add this to the BibTex
|
||||
bib.StringVar[key] = &BibVar{Key: key, Value: NewBibConst(v)}
|
||||
return bib.StringVar[key], true
|
||||
}
|
||||
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// String returns a BibTex data structure as a simplified BibTex string.
|
||||
func (bib *BibTex) String() string {
|
||||
var bibtex bytes.Buffer
|
||||
for _, entry := range bib.Entries {
|
||||
bibtex.WriteString(entry.String())
|
||||
}
|
||||
return bibtex.String()
|
||||
}
|
||||
|
||||
// RawString returns a BibTex data structure in its internal representation.
|
||||
func (bib *BibTex) RawString() string {
|
||||
var bibtex bytes.Buffer
|
||||
for k, strvar := range bib.StringVar {
|
||||
bibtex.WriteString(fmt.Sprintf("@string{%s = {%s}}\n", k, strvar.String()))
|
||||
}
|
||||
for _, preamble := range bib.Preambles {
|
||||
bibtex.WriteString(fmt.Sprintf("@preamble{%s}\n", preamble.RawString()))
|
||||
}
|
||||
for _, entry := range bib.Entries {
|
||||
bibtex.WriteString(entry.RawString())
|
||||
}
|
||||
return bibtex.String()
|
||||
}
|
||||
|
||||
// PrettyString pretty prints a BibTex
|
||||
func (bib *BibTex) PrettyString(options ...PrettyStringOpt) string {
|
||||
config := defaultPrettyStringConfig
|
||||
for _, option := range options {
|
||||
option(&config)
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
for i, entry := range bib.Entries {
|
||||
if i != 0 {
|
||||
fmt.Fprint(&buf, "\n")
|
||||
}
|
||||
entry.prettyStringAppend(&buf, config)
|
||||
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// stringformat determines the correct formatting verb for the given
// BibTeX field value: bare ("%s") for integers, brace-quoted ("{%s}")
// when the value contains quote or brace characters, and a double-quoted
// string ("%q") otherwise.
func stringformat(v string) string {
	if _, err := strconv.Atoi(v); err == nil {
		// Integers may be written without quoting.
		return "%s"
	}
	if strings.ContainsAny(v, `"{}`) {
		// Quote/brace characters force brace quoting.
		return "{%s}"
	}
	// Default: plain quoted string.
	return "%q"
}
|
89
src/vendor/github.com/nickng/bibtex/bibtex.y
generated
vendored
Normal file
|
@ -0,0 +1,89 @@
|
|||
%{
|
||||
package bibtex
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
type bibTag struct {
|
||||
key string
|
||||
val BibString
|
||||
}
|
||||
|
||||
var bib *BibTex // Only for holding current bib
|
||||
%}
|
||||
|
||||
%union {
|
||||
bibtex *BibTex
|
||||
strval string
|
||||
bibentry *BibEntry
|
||||
bibtag *bibTag
|
||||
bibtags []*bibTag
|
||||
strings BibString
|
||||
}
|
||||
|
||||
%token tCOMMENT tSTRING tPREAMBLE
|
||||
%token tATSIGN tCOLON tEQUAL tCOMMA tPOUND tLBRACE tRBRACE tDQUOTE tLPAREN tRPAREN
|
||||
%token <strval> tBAREIDENT tIDENT tCOMMENTBODY
|
||||
%type <bibtex> bibtex
|
||||
%type <bibentry> bibentry
|
||||
%type <bibtag> tag stringentry
|
||||
%type <bibtags> tags
|
||||
%type <strings> longstring preambleentry
|
||||
|
||||
%%
|
||||
|
||||
top : bibtex { }
|
||||
;
|
||||
|
||||
bibtex : /* empty */ { $$ = NewBibTex(); bib = $$ }
|
||||
| bibtex bibentry { $$ = $1; $$.AddEntry($2) }
|
||||
| bibtex commententry { $$ = $1 }
|
||||
| bibtex stringentry { $$ = $1; $$.AddStringVar($2.key, $2.val) }
|
||||
| bibtex preambleentry { $$ = $1; $$.AddPreamble($2) }
|
||||
;
|
||||
|
||||
bibentry : tATSIGN tBAREIDENT tLBRACE tBAREIDENT tCOMMA tags tRBRACE { $$ = NewBibEntry($2, $4); for _, t := range $6 { $$.AddField(t.key, t.val) } }
|
||||
| tATSIGN tBAREIDENT tLPAREN tBAREIDENT tCOMMA tags tRPAREN { $$ = NewBibEntry($2, $4); for _, t := range $6 { $$.AddField(t.key, t.val) } }
|
||||
;
|
||||
|
||||
commententry : tATSIGN tCOMMENT tCOMMENTBODY { }
|
||||
;
|
||||
|
||||
stringentry : tATSIGN tSTRING tLBRACE tBAREIDENT tEQUAL longstring tRBRACE { $$ = &bibTag{key: $4, val: $6 } }
|
||||
| tATSIGN tSTRING tLPAREN tBAREIDENT tEQUAL longstring tRBRACE { $$ = &bibTag{key: $4, val: $6 } }
|
||||
;
|
||||
|
||||
preambleentry : tATSIGN tPREAMBLE tLBRACE longstring tRBRACE { $$ = $4 }
|
||||
| tATSIGN tPREAMBLE tLPAREN longstring tRPAREN { $$ = $4 }
|
||||
;
|
||||
|
||||
longstring : tIDENT { $$ = NewBibConst($1) }
|
||||
| tBAREIDENT { $$ = bib.GetStringVar($1) }
|
||||
| longstring tPOUND tIDENT { $$ = NewBibComposite($1); $$.(*BibComposite).Append(NewBibConst($3))}
|
||||
| longstring tPOUND tBAREIDENT { $$ = NewBibComposite($1); $$.(*BibComposite).Append(bib.GetStringVar($3)) }
|
||||
;
|
||||
|
||||
tag : /* empty */ { }
|
||||
| tBAREIDENT tEQUAL longstring { $$ = &bibTag{key: $1, val: $3} }
|
||||
;
|
||||
|
||||
tags : tag { if $1 != nil { $$ = []*bibTag{$1}; } }
|
||||
| tags tCOMMA tag { if $3 == nil { $$ = $1 } else { $$ = append($1, $3) } }
|
||||
;
|
||||
|
||||
%%
|
||||
|
||||
// Parse is the entry point to the bibtex parser.
|
||||
func Parse(r io.Reader) (*BibTex, error) {
|
||||
l := newLexer(r)
|
||||
bibtexParse(l)
|
||||
switch {
|
||||
case len(l.Errors) > 0: // Non-yacc errors
|
||||
return nil, l.Errors[0]
|
||||
case len(l.ParseErrors) > 0:
|
||||
return nil, l.ParseErrors[0]
|
||||
default:
|
||||
return bib, nil
|
||||
}
|
||||
}
|
645
src/vendor/github.com/nickng/bibtex/bibtex.y.go
generated
vendored
Normal file
|
@ -0,0 +1,645 @@
|
|||
// Code generated by goyacc -p bibtex -o bibtex.y.go bibtex.y. DO NOT EDIT.
|
||||
|
||||
//line bibtex.y:2
|
||||
package bibtex
|
||||
|
||||
import __yyfmt__ "fmt"
|
||||
|
||||
//line bibtex.y:2
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
type bibTag struct {
|
||||
key string
|
||||
val BibString
|
||||
}
|
||||
|
||||
var bib *BibTex // Only for holding current bib
|
||||
|
||||
//line bibtex.y:16
|
||||
type bibtexSymType struct {
|
||||
yys int
|
||||
bibtex *BibTex
|
||||
strval string
|
||||
bibentry *BibEntry
|
||||
bibtag *bibTag
|
||||
bibtags []*bibTag
|
||||
strings BibString
|
||||
}
|
||||
|
||||
const tCOMMENT = 57346
|
||||
const tSTRING = 57347
|
||||
const tPREAMBLE = 57348
|
||||
const tATSIGN = 57349
|
||||
const tCOLON = 57350
|
||||
const tEQUAL = 57351
|
||||
const tCOMMA = 57352
|
||||
const tPOUND = 57353
|
||||
const tLBRACE = 57354
|
||||
const tRBRACE = 57355
|
||||
const tDQUOTE = 57356
|
||||
const tLPAREN = 57357
|
||||
const tRPAREN = 57358
|
||||
const tBAREIDENT = 57359
|
||||
const tIDENT = 57360
|
||||
const tCOMMENTBODY = 57361
|
||||
|
||||
var bibtexToknames = [...]string{
|
||||
"$end",
|
||||
"error",
|
||||
"$unk",
|
||||
"tCOMMENT",
|
||||
"tSTRING",
|
||||
"tPREAMBLE",
|
||||
"tATSIGN",
|
||||
"tCOLON",
|
||||
"tEQUAL",
|
||||
"tCOMMA",
|
||||
"tPOUND",
|
||||
"tLBRACE",
|
||||
"tRBRACE",
|
||||
"tDQUOTE",
|
||||
"tLPAREN",
|
||||
"tRPAREN",
|
||||
"tBAREIDENT",
|
||||
"tIDENT",
|
||||
"tCOMMENTBODY",
|
||||
}
|
||||
var bibtexStatenames = [...]string{}
|
||||
|
||||
const bibtexEofCode = 1
|
||||
const bibtexErrCode = 2
|
||||
const bibtexInitialStackSize = 16
|
||||
|
||||
//line bibtex.y:75
|
||||
|
||||
// Parse is the entry point to the bibtex parser.
|
||||
func Parse(r io.Reader) (*BibTex, error) {
|
||||
l := newLexer(r)
|
||||
bibtexParse(l)
|
||||
switch {
|
||||
case len(l.Errors) > 0: // Non-yacc errors
|
||||
return nil, l.Errors[0]
|
||||
case len(l.ParseErrors) > 0:
|
||||
return nil, l.ParseErrors[0]
|
||||
default:
|
||||
return bib, nil
|
||||
}
|
||||
}
|
||||
|
||||
//line yacctab:1
|
||||
var bibtexExca = [...]int{
|
||||
-1, 1,
|
||||
1, -1,
|
||||
-2, 0,
|
||||
}
|
||||
|
||||
const bibtexPrivate = 57344
|
||||
|
||||
const bibtexLast = 54
|
||||
|
||||
var bibtexAct = [...]int{
|
||||
|
||||
23, 14, 35, 34, 9, 10, 11, 25, 24, 41,
|
||||
40, 36, 43, 22, 21, 32, 20, 8, 45, 26,
|
||||
33, 17, 19, 15, 18, 12, 16, 32, 13, 47,
|
||||
38, 39, 37, 32, 43, 46, 32, 42, 31, 32,
|
||||
28, 27, 44, 30, 29, 49, 48, 7, 4, 1,
|
||||
6, 5, 3, 2,
|
||||
}
|
||||
var bibtexPact = [...]int{
|
||||
|
||||
-1000, -1000, 40, -1000, -1000, -1000, -1000, 0, 13, -18,
|
||||
11, 9, 5, -1, -1000, -3, -4, -10, -10, 31,
|
||||
30, 35, 34, 25, -1000, -1000, 4, -6, -6, -10,
|
||||
-10, -1000, -8, -1000, 24, -1000, 33, 2, 22, 16,
|
||||
-1000, -1000, -1000, -6, -10, -1000, -1000, -1000, -1000, 28,
|
||||
}
|
||||
var bibtexPgo = [...]int{
|
||||
|
||||
0, 53, 52, 2, 51, 3, 0, 50, 49, 48,
|
||||
}
|
||||
var bibtexR1 = [...]int{
|
||||
|
||||
0, 8, 1, 1, 1, 1, 1, 2, 2, 9,
|
||||
4, 4, 7, 7, 6, 6, 6, 6, 3, 3,
|
||||
5, 5,
|
||||
}
|
||||
var bibtexR2 = [...]int{
|
||||
|
||||
0, 1, 0, 2, 2, 2, 2, 7, 7, 3,
|
||||
7, 7, 5, 5, 1, 1, 3, 3, 0, 3,
|
||||
1, 3,
|
||||
}
|
||||
var bibtexChk = [...]int{
|
||||
|
||||
-1000, -8, -1, -2, -9, -4, -7, 7, 17, 4,
|
||||
5, 6, 12, 15, 19, 12, 15, 12, 15, 17,
|
||||
17, 17, 17, -6, 18, 17, -6, 10, 10, 9,
|
||||
9, 13, 11, 16, -5, -3, 17, -5, -6, -6,
|
||||
18, 17, 13, 10, 9, 16, 13, 13, -3, -6,
|
||||
}
|
||||
var bibtexDef = [...]int{
|
||||
|
||||
2, -2, 1, 3, 4, 5, 6, 0, 0, 0,
|
||||
0, 0, 0, 0, 9, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 14, 15, 0, 18, 18, 0,
|
||||
0, 12, 0, 13, 0, 20, 0, 0, 0, 0,
|
||||
16, 17, 7, 18, 0, 8, 10, 11, 21, 19,
|
||||
}
|
||||
var bibtexTok1 = [...]int{
|
||||
|
||||
1,
|
||||
}
|
||||
var bibtexTok2 = [...]int{
|
||||
|
||||
2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
|
||||
12, 13, 14, 15, 16, 17, 18, 19,
|
||||
}
|
||||
var bibtexTok3 = [...]int{
|
||||
0,
|
||||
}
|
||||
|
||||
var bibtexErrorMessages = [...]struct {
|
||||
state int
|
||||
token int
|
||||
msg string
|
||||
}{}
|
||||
|
||||
//line yaccpar:1
|
||||
|
||||
/* parser for yacc output */
|
||||
|
||||
var (
|
||||
bibtexDebug = 0
|
||||
bibtexErrorVerbose = false
|
||||
)
|
||||
|
||||
type bibtexLexer interface {
|
||||
Lex(lval *bibtexSymType) int
|
||||
Error(s string)
|
||||
}
|
||||
|
||||
type bibtexParser interface {
|
||||
Parse(bibtexLexer) int
|
||||
Lookahead() int
|
||||
}
|
||||
|
||||
type bibtexParserImpl struct {
|
||||
lval bibtexSymType
|
||||
stack [bibtexInitialStackSize]bibtexSymType
|
||||
char int
|
||||
}
|
||||
|
||||
func (p *bibtexParserImpl) Lookahead() int {
|
||||
return p.char
|
||||
}
|
||||
|
||||
func bibtexNewParser() bibtexParser {
|
||||
return &bibtexParserImpl{}
|
||||
}
|
||||
|
||||
const bibtexFlag = -1000
|
||||
|
||||
func bibtexTokname(c int) string {
|
||||
if c >= 1 && c-1 < len(bibtexToknames) {
|
||||
if bibtexToknames[c-1] != "" {
|
||||
return bibtexToknames[c-1]
|
||||
}
|
||||
}
|
||||
return __yyfmt__.Sprintf("tok-%v", c)
|
||||
}
|
||||
|
||||
func bibtexStatname(s int) string {
|
||||
if s >= 0 && s < len(bibtexStatenames) {
|
||||
if bibtexStatenames[s] != "" {
|
||||
return bibtexStatenames[s]
|
||||
}
|
||||
}
|
||||
return __yyfmt__.Sprintf("state-%v", s)
|
||||
}
|
||||
|
||||
func bibtexErrorMessage(state, lookAhead int) string {
|
||||
const TOKSTART = 4
|
||||
|
||||
if !bibtexErrorVerbose {
|
||||
return "syntax error"
|
||||
}
|
||||
|
||||
for _, e := range bibtexErrorMessages {
|
||||
if e.state == state && e.token == lookAhead {
|
||||
return "syntax error: " + e.msg
|
||||
}
|
||||
}
|
||||
|
||||
res := "syntax error: unexpected " + bibtexTokname(lookAhead)
|
||||
|
||||
// To match Bison, suggest at most four expected tokens.
|
||||
expected := make([]int, 0, 4)
|
||||
|
||||
// Look for shiftable tokens.
|
||||
base := bibtexPact[state]
|
||||
for tok := TOKSTART; tok-1 < len(bibtexToknames); tok++ {
|
||||
if n := base + tok; n >= 0 && n < bibtexLast && bibtexChk[bibtexAct[n]] == tok {
|
||||
if len(expected) == cap(expected) {
|
||||
return res
|
||||
}
|
||||
expected = append(expected, tok)
|
||||
}
|
||||
}
|
||||
|
||||
if bibtexDef[state] == -2 {
|
||||
i := 0
|
||||
for bibtexExca[i] != -1 || bibtexExca[i+1] != state {
|
||||
i += 2
|
||||
}
|
||||
|
||||
// Look for tokens that we accept or reduce.
|
||||
for i += 2; bibtexExca[i] >= 0; i += 2 {
|
||||
tok := bibtexExca[i]
|
||||
if tok < TOKSTART || bibtexExca[i+1] == 0 {
|
||||
continue
|
||||
}
|
||||
if len(expected) == cap(expected) {
|
||||
return res
|
||||
}
|
||||
expected = append(expected, tok)
|
||||
}
|
||||
|
||||
// If the default action is to accept or reduce, give up.
|
||||
if bibtexExca[i+1] != 0 {
|
||||
return res
|
||||
}
|
||||
}
|
||||
|
||||
for i, tok := range expected {
|
||||
if i == 0 {
|
||||
res += ", expecting "
|
||||
} else {
|
||||
res += " or "
|
||||
}
|
||||
res += bibtexTokname(tok)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func bibtexlex1(lex bibtexLexer, lval *bibtexSymType) (char, token int) {
|
||||
token = 0
|
||||
char = lex.Lex(lval)
|
||||
if char <= 0 {
|
||||
token = bibtexTok1[0]
|
||||
goto out
|
||||
}
|
||||
if char < len(bibtexTok1) {
|
||||
token = bibtexTok1[char]
|
||||
goto out
|
||||
}
|
||||
if char >= bibtexPrivate {
|
||||
if char < bibtexPrivate+len(bibtexTok2) {
|
||||
token = bibtexTok2[char-bibtexPrivate]
|
||||
goto out
|
||||
}
|
||||
}
|
||||
for i := 0; i < len(bibtexTok3); i += 2 {
|
||||
token = bibtexTok3[i+0]
|
||||
if token == char {
|
||||
token = bibtexTok3[i+1]
|
||||
goto out
|
||||
}
|
||||
}
|
||||
|
||||
out:
|
||||
if token == 0 {
|
||||
token = bibtexTok2[1] /* unknown char */
|
||||
}
|
||||
if bibtexDebug >= 3 {
|
||||
__yyfmt__.Printf("lex %s(%d)\n", bibtexTokname(token), uint(char))
|
||||
}
|
||||
return char, token
|
||||
}
|
||||
|
||||
func bibtexParse(bibtexlex bibtexLexer) int {
|
||||
return bibtexNewParser().Parse(bibtexlex)
|
||||
}
|
||||
|
||||
func (bibtexrcvr *bibtexParserImpl) Parse(bibtexlex bibtexLexer) int {
|
||||
var bibtexn int
|
||||
var bibtexVAL bibtexSymType
|
||||
var bibtexDollar []bibtexSymType
|
||||
_ = bibtexDollar // silence set and not used
|
||||
bibtexS := bibtexrcvr.stack[:]
|
||||
|
||||
Nerrs := 0 /* number of errors */
|
||||
Errflag := 0 /* error recovery flag */
|
||||
bibtexstate := 0
|
||||
bibtexrcvr.char = -1
|
||||
bibtextoken := -1 // bibtexrcvr.char translated into internal numbering
|
||||
defer func() {
|
||||
// Make sure we report no lookahead when not parsing.
|
||||
bibtexstate = -1
|
||||
bibtexrcvr.char = -1
|
||||
bibtextoken = -1
|
||||
}()
|
||||
bibtexp := -1
|
||||
goto bibtexstack
|
||||
|
||||
ret0:
|
||||
return 0
|
||||
|
||||
ret1:
|
||||
return 1
|
||||
|
||||
bibtexstack:
|
||||
/* put a state and value onto the stack */
|
||||
if bibtexDebug >= 4 {
|
||||
__yyfmt__.Printf("char %v in %v\n", bibtexTokname(bibtextoken), bibtexStatname(bibtexstate))
|
||||
}
|
||||
|
||||
bibtexp++
|
||||
if bibtexp >= len(bibtexS) {
|
||||
nyys := make([]bibtexSymType, len(bibtexS)*2)
|
||||
copy(nyys, bibtexS)
|
||||
bibtexS = nyys
|
||||
}
|
||||
bibtexS[bibtexp] = bibtexVAL
|
||||
bibtexS[bibtexp].yys = bibtexstate
|
||||
|
||||
bibtexnewstate:
|
||||
bibtexn = bibtexPact[bibtexstate]
|
||||
if bibtexn <= bibtexFlag {
|
||||
goto bibtexdefault /* simple state */
|
||||
}
|
||||
if bibtexrcvr.char < 0 {
|
||||
bibtexrcvr.char, bibtextoken = bibtexlex1(bibtexlex, &bibtexrcvr.lval)
|
||||
}
|
||||
bibtexn += bibtextoken
|
||||
if bibtexn < 0 || bibtexn >= bibtexLast {
|
||||
goto bibtexdefault
|
||||
}
|
||||
bibtexn = bibtexAct[bibtexn]
|
||||
if bibtexChk[bibtexn] == bibtextoken { /* valid shift */
|
||||
bibtexrcvr.char = -1
|
||||
bibtextoken = -1
|
||||
bibtexVAL = bibtexrcvr.lval
|
||||
bibtexstate = bibtexn
|
||||
if Errflag > 0 {
|
||||
Errflag--
|
||||
}
|
||||
goto bibtexstack
|
||||
}
|
||||
|
||||
bibtexdefault:
|
||||
/* default state action */
|
||||
bibtexn = bibtexDef[bibtexstate]
|
||||
if bibtexn == -2 {
|
||||
if bibtexrcvr.char < 0 {
|
||||
bibtexrcvr.char, bibtextoken = bibtexlex1(bibtexlex, &bibtexrcvr.lval)
|
||||
}
|
||||
|
||||
/* look through exception table */
|
||||
xi := 0
|
||||
for {
|
||||
if bibtexExca[xi+0] == -1 && bibtexExca[xi+1] == bibtexstate {
|
||||
break
|
||||
}
|
||||
xi += 2
|
||||
}
|
||||
for xi += 2; ; xi += 2 {
|
||||
bibtexn = bibtexExca[xi+0]
|
||||
if bibtexn < 0 || bibtexn == bibtextoken {
|
||||
break
|
||||
}
|
||||
}
|
||||
bibtexn = bibtexExca[xi+1]
|
||||
if bibtexn < 0 {
|
||||
goto ret0
|
||||
}
|
||||
}
|
||||
if bibtexn == 0 {
|
||||
/* error ... attempt to resume parsing */
|
||||
switch Errflag {
|
||||
case 0: /* brand new error */
|
||||
bibtexlex.Error(bibtexErrorMessage(bibtexstate, bibtextoken))
|
||||
Nerrs++
|
||||
if bibtexDebug >= 1 {
|
||||
__yyfmt__.Printf("%s", bibtexStatname(bibtexstate))
|
||||
__yyfmt__.Printf(" saw %s\n", bibtexTokname(bibtextoken))
|
||||
}
|
||||
fallthrough
|
||||
|
||||
case 1, 2: /* incompletely recovered error ... try again */
|
||||
Errflag = 3
|
||||
|
||||
/* find a state where "error" is a legal shift action */
|
||||
for bibtexp >= 0 {
|
||||
bibtexn = bibtexPact[bibtexS[bibtexp].yys] + bibtexErrCode
|
||||
if bibtexn >= 0 && bibtexn < bibtexLast {
|
||||
bibtexstate = bibtexAct[bibtexn] /* simulate a shift of "error" */
|
||||
if bibtexChk[bibtexstate] == bibtexErrCode {
|
||||
goto bibtexstack
|
||||
}
|
||||
}
|
||||
|
||||
/* the current p has no shift on "error", pop stack */
|
||||
if bibtexDebug >= 2 {
|
||||
__yyfmt__.Printf("error recovery pops state %d\n", bibtexS[bibtexp].yys)
|
||||
}
|
||||
bibtexp--
|
||||
}
|
||||
/* there is no state on the stack with an error shift ... abort */
|
||||
goto ret1
|
||||
|
||||
case 3: /* no shift yet; clobber input char */
|
||||
if bibtexDebug >= 2 {
|
||||
__yyfmt__.Printf("error recovery discards %s\n", bibtexTokname(bibtextoken))
|
||||
}
|
||||
if bibtextoken == bibtexEofCode {
|
||||
goto ret1
|
||||
}
|
||||
bibtexrcvr.char = -1
|
||||
bibtextoken = -1
|
||||
goto bibtexnewstate /* try again in the same state */
|
||||
}
|
||||
}
|
||||
|
||||
/* reduction by production bibtexn */
|
||||
if bibtexDebug >= 2 {
|
||||
__yyfmt__.Printf("reduce %v in:\n\t%v\n", bibtexn, bibtexStatname(bibtexstate))
|
||||
}
|
||||
|
||||
bibtexnt := bibtexn
|
||||
bibtexpt := bibtexp
|
||||
_ = bibtexpt // guard against "declared and not used"
|
||||
|
||||
bibtexp -= bibtexR2[bibtexn]
|
||||
// bibtexp is now the index of $0. Perform the default action. Iff the
|
||||
// reduced production is ε, $1 is possibly out of range.
|
||||
if bibtexp+1 >= len(bibtexS) {
|
||||
nyys := make([]bibtexSymType, len(bibtexS)*2)
|
||||
copy(nyys, bibtexS)
|
||||
bibtexS = nyys
|
||||
}
|
||||
bibtexVAL = bibtexS[bibtexp+1]
|
||||
|
||||
/* consult goto table to find next state */
|
||||
bibtexn = bibtexR1[bibtexn]
|
||||
bibtexg := bibtexPgo[bibtexn]
|
||||
bibtexj := bibtexg + bibtexS[bibtexp].yys + 1
|
||||
|
||||
if bibtexj >= bibtexLast {
|
||||
bibtexstate = bibtexAct[bibtexg]
|
||||
} else {
|
||||
bibtexstate = bibtexAct[bibtexj]
|
||||
if bibtexChk[bibtexstate] != -bibtexn {
|
||||
bibtexstate = bibtexAct[bibtexg]
|
||||
}
|
||||
}
|
||||
// dummy call; replaced with literal code
|
||||
switch bibtexnt {
|
||||
|
||||
case 1:
|
||||
bibtexDollar = bibtexS[bibtexpt-1 : bibtexpt+1]
|
||||
//line bibtex.y:36
|
||||
{
|
||||
}
|
||||
case 2:
|
||||
bibtexDollar = bibtexS[bibtexpt-0 : bibtexpt+1]
|
||||
//line bibtex.y:39
|
||||
{
|
||||
bibtexVAL.bibtex = NewBibTex()
|
||||
bib = bibtexVAL.bibtex
|
||||
}
|
||||
case 3:
|
||||
bibtexDollar = bibtexS[bibtexpt-2 : bibtexpt+1]
|
||||
//line bibtex.y:40
|
||||
{
|
||||
bibtexVAL.bibtex = bibtexDollar[1].bibtex
|
||||
bibtexVAL.bibtex.AddEntry(bibtexDollar[2].bibentry)
|
||||
}
|
||||
case 4:
|
||||
bibtexDollar = bibtexS[bibtexpt-2 : bibtexpt+1]
|
||||
//line bibtex.y:41
|
||||
{
|
||||
bibtexVAL.bibtex = bibtexDollar[1].bibtex
|
||||
}
|
||||
case 5:
|
||||
bibtexDollar = bibtexS[bibtexpt-2 : bibtexpt+1]
|
||||
//line bibtex.y:42
|
||||
{
|
||||
bibtexVAL.bibtex = bibtexDollar[1].bibtex
|
||||
bibtexVAL.bibtex.AddStringVar(bibtexDollar[2].bibtag.key, bibtexDollar[2].bibtag.val)
|
||||
}
|
||||
case 6:
|
||||
bibtexDollar = bibtexS[bibtexpt-2 : bibtexpt+1]
|
||||
//line bibtex.y:43
|
||||
{
|
||||
bibtexVAL.bibtex = bibtexDollar[1].bibtex
|
||||
bibtexVAL.bibtex.AddPreamble(bibtexDollar[2].strings)
|
||||
}
|
||||
case 7:
|
||||
bibtexDollar = bibtexS[bibtexpt-7 : bibtexpt+1]
|
||||
//line bibtex.y:46
|
||||
{
|
||||
bibtexVAL.bibentry = NewBibEntry(bibtexDollar[2].strval, bibtexDollar[4].strval)
|
||||
for _, t := range bibtexDollar[6].bibtags {
|
||||
bibtexVAL.bibentry.AddField(t.key, t.val)
|
||||
}
|
||||
}
|
||||
case 8:
|
||||
bibtexDollar = bibtexS[bibtexpt-7 : bibtexpt+1]
|
||||
//line bibtex.y:47
|
||||
{
|
||||
bibtexVAL.bibentry = NewBibEntry(bibtexDollar[2].strval, bibtexDollar[4].strval)
|
||||
for _, t := range bibtexDollar[6].bibtags {
|
||||
bibtexVAL.bibentry.AddField(t.key, t.val)
|
||||
}
|
||||
}
|
||||
case 9:
|
||||
bibtexDollar = bibtexS[bibtexpt-3 : bibtexpt+1]
|
||||
//line bibtex.y:50
|
||||
{
|
||||
}
|
||||
case 10:
|
||||
bibtexDollar = bibtexS[bibtexpt-7 : bibtexpt+1]
|
||||
//line bibtex.y:53
|
||||
{
|
||||
bibtexVAL.bibtag = &bibTag{key: bibtexDollar[4].strval, val: bibtexDollar[6].strings}
|
||||
}
|
||||
case 11:
|
||||
bibtexDollar = bibtexS[bibtexpt-7 : bibtexpt+1]
|
||||
//line bibtex.y:54
|
||||
{
|
||||
bibtexVAL.bibtag = &bibTag{key: bibtexDollar[4].strval, val: bibtexDollar[6].strings}
|
||||
}
|
||||
case 12:
|
||||
bibtexDollar = bibtexS[bibtexpt-5 : bibtexpt+1]
|
||||
//line bibtex.y:57
|
||||
{
|
||||
bibtexVAL.strings = bibtexDollar[4].strings
|
||||
}
|
||||
case 13:
|
||||
bibtexDollar = bibtexS[bibtexpt-5 : bibtexpt+1]
|
||||
//line bibtex.y:58
|
||||
{
|
||||
bibtexVAL.strings = bibtexDollar[4].strings
|
||||
}
|
||||
case 14:
|
||||
bibtexDollar = bibtexS[bibtexpt-1 : bibtexpt+1]
|
||||
//line bibtex.y:61
|
||||
{
|
||||
bibtexVAL.strings = NewBibConst(bibtexDollar[1].strval)
|
||||
}
|
||||
case 15:
|
||||
bibtexDollar = bibtexS[bibtexpt-1 : bibtexpt+1]
|
||||
//line bibtex.y:62
|
||||
{
|
||||
bibtexVAL.strings = bib.GetStringVar(bibtexDollar[1].strval)
|
||||
}
|
||||
case 16:
|
||||
bibtexDollar = bibtexS[bibtexpt-3 : bibtexpt+1]
|
||||
//line bibtex.y:63
|
||||
{
|
||||
bibtexVAL.strings = NewBibComposite(bibtexDollar[1].strings)
|
||||
bibtexVAL.strings.(*BibComposite).Append(NewBibConst(bibtexDollar[3].strval))
|
||||
}
|
||||
case 17:
|
||||
bibtexDollar = bibtexS[bibtexpt-3 : bibtexpt+1]
|
||||
//line bibtex.y:64
|
||||
{
|
||||
bibtexVAL.strings = NewBibComposite(bibtexDollar[1].strings)
|
||||
bibtexVAL.strings.(*BibComposite).Append(bib.GetStringVar(bibtexDollar[3].strval))
|
||||
}
|
||||
case 18:
|
||||
bibtexDollar = bibtexS[bibtexpt-0 : bibtexpt+1]
|
||||
//line bibtex.y:67
|
||||
{
|
||||
}
|
||||
case 19:
|
||||
bibtexDollar = bibtexS[bibtexpt-3 : bibtexpt+1]
|
||||
//line bibtex.y:68
|
||||
{
|
||||
bibtexVAL.bibtag = &bibTag{key: bibtexDollar[1].strval, val: bibtexDollar[3].strings}
|
||||
}
|
||||
case 20:
|
||||
bibtexDollar = bibtexS[bibtexpt-1 : bibtexpt+1]
|
||||
//line bibtex.y:71
|
||||
{
|
||||
if bibtexDollar[1].bibtag != nil {
|
||||
bibtexVAL.bibtags = []*bibTag{bibtexDollar[1].bibtag}
|
||||
}
|
||||
}
|
||||
case 21:
|
||||
bibtexDollar = bibtexS[bibtexpt-3 : bibtexpt+1]
|
||||
//line bibtex.y:72
|
||||
{
|
||||
if bibtexDollar[3].bibtag == nil {
|
||||
bibtexVAL.bibtags = bibtexDollar[1].bibtags
|
||||
} else {
|
||||
bibtexVAL.bibtags = append(bibtexDollar[1].bibtags, bibtexDollar[3].bibtag)
|
||||
}
|
||||
}
|
||||
}
|
||||
goto bibtexstack /* stack new state and value */
|
||||
}
|
23
src/vendor/github.com/nickng/bibtex/docs.go
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
|
|||
// Package bibtex is a bibtex parser written in Go.
|
||||
//
|
||||
// The package contains a simple parser and data structure to represent bibtex
|
||||
// records.
|
||||
//
|
||||
// # Supported syntax
|
||||
//
|
||||
// The basic syntax is:
|
||||
//
|
||||
// @BIBTYPE{IDENT,
|
||||
// key1 = word,
|
||||
// key2 = "quoted",
|
||||
// key3 = {quoted},
|
||||
// }
|
||||
//
|
||||
// where BIBTYPE is the type of document (e.g. inproceedings, article, etc.)
|
||||
// and IDENT is a string identifier.
|
||||
//
|
||||
// The bibtex format is not standardised; this parser follows the descriptions
|
||||
// found in the link below. If there are any problems, please file any issues
|
||||
// with a minimal working example at the GitHub repository.
|
||||
// http://maverick.inria.fr/~Xavier.Decoret/resources/xdkbibtex/bibtex_summary.html
|
||||
package bibtex // import "github.com/nickng/bibtex"
|
23
src/vendor/github.com/nickng/bibtex/error.go
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
|
|||
package bibtex
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
var (
	// ErrUnexpectedAtsign is reported when an unescaped @ appears
	// inside a braced value.
	ErrUnexpectedAtsign = errors.New("unexpected @ sign")
	// ErrUnknownStringVar is reported when an undefined string
	// variable is looked up.
	ErrUnknownStringVar = errors.New("unknown string variable")
)
|
||||
|
||||
// ErrParse is a parse error, carrying the input position at which
// parsing failed.
type ErrParse struct {
	Pos tokenPos // Position where the parse failed.
	Err string   // Error string returned from parser.
}

// Error implements the error interface.
func (e *ErrParse) Error() string {
	return fmt.Sprintf("parse failed at %s: %s", e.Pos, e.Err)
}
|
38
src/vendor/github.com/nickng/bibtex/lexer.go
generated
vendored
Normal file
|
@ -0,0 +1,38 @@
|
|||
//go:generate goyacc -p bibtex -o bibtex.y.go bibtex.y
|
||||
|
||||
package bibtex
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
// lexer is a yacc-compatible lexical analyser for bibtex, wrapping the
// low-level scanner and collecting the errors encountered while lexing
// and parsing.
type lexer struct {
	scanner     *scanner
	ParseErrors []error // Parse errors from yacc
	Errors      []error // Other errors
}
|
||||
|
||||
// newLexer returns a new yacc-compatible lexer.
|
||||
func newLexer(r io.Reader) *lexer {
|
||||
return &lexer{
|
||||
scanner: newScanner(r),
|
||||
}
|
||||
}
|
||||
|
||||
// Lex is provided for yacc-compatible parser.
|
||||
func (l *lexer) Lex(yylval *bibtexSymType) int {
|
||||
token, strval, err := l.scanner.Scan()
|
||||
if err != nil {
|
||||
l.Errors = append(l.Errors, fmt.Errorf("%w at %s", err, l.scanner.pos))
|
||||
return int(0)
|
||||
}
|
||||
yylval.strval = strval
|
||||
return int(token)
|
||||
}
|
||||
|
||||
// Error handles error.
|
||||
func (l *lexer) Error(err string) {
|
||||
l.ParseErrors = append(l.ParseErrors, &ErrParse{Err: err, Pos: l.scanner.pos})
|
||||
}
|
237
src/vendor/github.com/nickng/bibtex/scanner.go
generated
vendored
Normal file
|
@ -0,0 +1,237 @@
|
|||
package bibtex
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// parseField tracks whether the scanner is currently inside a field
// value (set on '=', cleared on ',' or '}').
// NOTE(review): package-level mutable state makes concurrent use of
// multiple scanners unsafe; consider moving this onto the scanner struct.
var parseField bool
|
||||
|
||||
// scanner is a lexical scanner for BibTeX input.
type scanner struct {
	commentMode bool          // True while scanning an @comment body.
	r           *bufio.Reader // Buffered source being scanned.
	pos         tokenPos      // Current position, for error reporting.
}
|
||||
|
||||
// newScanner returns a new instance of scanner.
|
||||
func newScanner(r io.Reader) *scanner {
|
||||
return &scanner{r: bufio.NewReader(r), pos: tokenPos{Char: 0, Lines: []int{}}}
|
||||
}
|
||||
|
||||
// read reads the next rune from the buffered reader.
|
||||
// Returns the rune(0) if an error occurs (or io.eof is returned).
|
||||
func (s *scanner) read() rune {
|
||||
ch, _, err := s.r.ReadRune()
|
||||
if err != nil {
|
||||
return eof
|
||||
}
|
||||
if ch == '\n' {
|
||||
s.pos.Lines = append(s.pos.Lines, s.pos.Char)
|
||||
s.pos.Char = 0
|
||||
} else {
|
||||
s.pos.Char++
|
||||
}
|
||||
return ch
|
||||
}
|
||||
|
||||
// unread places the previously read rune back on the reader.
|
||||
func (s *scanner) unread() {
|
||||
_ = s.r.UnreadRune()
|
||||
if s.pos.Char == 0 {
|
||||
s.pos.Char = s.pos.Lines[len(s.pos.Lines)-1]
|
||||
s.pos.Lines = s.pos.Lines[:len(s.pos.Lines)-1]
|
||||
} else {
|
||||
s.pos.Char--
|
||||
}
|
||||
}
|
||||
|
||||
// Scan returns the next token and its literal value. Alphanumeric runs
// are delegated to scanIdent; single punctuation characters map directly
// to tokens, with side effects on the package-level parseField flag and
// the scanner's commentMode.
func (s *scanner) Scan() (tok token, lit string, err error) {
	ch := s.read()
	if isWhitespace(ch) {
		s.ignoreWhitespace()
		ch = s.read()
	}
	if isAlphanum(ch) {
		s.unread()
		return s.scanIdent()
	}
	switch ch {
	case eof:
		// End of input: token 0 signals EOF to the parser.
		return 0, "", nil
	case '@':
		return tATSIGN, string(ch), nil
	case ':':
		return tCOLON, string(ch), nil
	case ',':
		parseField = false // reset parseField if reached end of field.
		return tCOMMA, string(ch), nil
	case '=':
		parseField = true // set parseField if = sign outside quoted or ident.
		return tEQUAL, string(ch), nil
	case '"':
		tok, lit := s.scanQuoted()
		return tok, lit, nil
	case '{':
		// Inside a field, '{' opens a braced value.
		if parseField {
			return s.scanBraced()
		}
		// If we're reading a comment, return everything after {
		// to the next @-sign (exclusive)
		if s.commentMode {
			s.unread()
			commentBodyTok, commentBody := s.scanCommentBody()
			return commentBodyTok, commentBody, nil
		}
		return tLBRACE, string(ch), nil
	case '}':
		if parseField { // reset parseField if reached end of entry.
			parseField = false
		}
		return tRBRACE, string(ch), nil
	case '#':
		return tPOUND, string(ch), nil
	case ' ':
		// NOTE(review): likely unreachable — whitespace is consumed
		// at the top of Scan before this switch; confirm before removing.
		s.ignoreWhitespace()
	}
	// Anything unrecognised is reported as an illegal token.
	return tILLEGAL, string(ch), nil
}
|
||||
|
||||
// scanIdent categorises a string to one of three categories.
|
||||
func (s *scanner) scanIdent() (tok token, lit string, err error) {
|
||||
switch ch := s.read(); ch {
|
||||
case '"':
|
||||
tok, lit := s.scanQuoted()
|
||||
return tok, lit, nil
|
||||
case '{':
|
||||
return s.scanBraced()
|
||||
default:
|
||||
s.unread() // Not open quote/brace.
|
||||
tok, lit := s.scanBare()
|
||||
return tok, lit, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s *scanner) scanBare() (token, string) {
|
||||
var buf bytes.Buffer
|
||||
for {
|
||||
if ch := s.read(); ch == eof {
|
||||
break
|
||||
} else if !isAlphanum(ch) && !isBareSymbol(ch) || isWhitespace(ch) {
|
||||
s.unread()
|
||||
break
|
||||
} else {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
}
|
||||
}
|
||||
str := buf.String()
|
||||
if strings.ToLower(str) == "comment" {
|
||||
s.commentMode = true
|
||||
return tCOMMENT, str
|
||||
} else if strings.ToLower(str) == "preamble" {
|
||||
return tPREAMBLE, str
|
||||
} else if strings.ToLower(str) == "string" {
|
||||
return tSTRING, str
|
||||
} else if _, err := strconv.Atoi(str); err == nil && parseField { // Special case for numeric
|
||||
return tIDENT, str
|
||||
}
|
||||
return tBAREIDENT, str
|
||||
}
|
||||
|
||||
// scanBraced parses a braced string, like {this}.
|
||||
func (s *scanner) scanBraced() (token, string, error) {
|
||||
var buf bytes.Buffer
|
||||
var macro bool
|
||||
brace := 1
|
||||
for {
|
||||
if ch := s.read(); ch == eof {
|
||||
break
|
||||
} else if ch == '\\' {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
macro = true
|
||||
} else if ch == '{' {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
brace++
|
||||
} else if ch == '}' {
|
||||
brace--
|
||||
macro = false
|
||||
if brace == 0 { // Balances open brace.
|
||||
return tIDENT, buf.String(), nil
|
||||
}
|
||||
_, _ = buf.WriteRune(ch)
|
||||
} else if ch == '@' {
|
||||
if macro {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
} else {
|
||||
return token(0), buf.String(), ErrUnexpectedAtsign
|
||||
}
|
||||
} else if isWhitespace(ch) {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
macro = false
|
||||
} else {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
}
|
||||
}
|
||||
return tILLEGAL, buf.String(), nil
|
||||
}
|
||||
|
||||
// scanQuoted parses a quoted string, like "this".
|
||||
func (s *scanner) scanQuoted() (token, string) {
|
||||
var buf bytes.Buffer
|
||||
brace := 0
|
||||
for {
|
||||
if ch := s.read(); ch == eof {
|
||||
break
|
||||
} else if ch == '{' {
|
||||
brace++
|
||||
} else if ch == '}' {
|
||||
brace--
|
||||
} else if ch == '"' {
|
||||
if brace == 0 { // Matches open quote, unescaped
|
||||
return tIDENT, buf.String()
|
||||
}
|
||||
_, _ = buf.WriteRune(ch)
|
||||
} else {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
}
|
||||
}
|
||||
return tILLEGAL, buf.String()
|
||||
}
|
||||
|
||||
// skipCommentBody is a scan method used for reading bibtex
|
||||
// comment item by reading all runes until the next @.
|
||||
//
|
||||
// e.g.
|
||||
// @comment{...anything can go here even if braces are unbalanced@
|
||||
// comment body string will be "...anything can go here even if braces are unbalanced"
|
||||
func (s *scanner) scanCommentBody() (token, string) {
|
||||
var buf bytes.Buffer
|
||||
for {
|
||||
if ch := s.read(); ch == eof {
|
||||
break
|
||||
} else if ch == '@' {
|
||||
s.unread()
|
||||
break
|
||||
} else {
|
||||
_, _ = buf.WriteRune(ch)
|
||||
}
|
||||
}
|
||||
s.commentMode = false
|
||||
return tCOMMENTBODY, buf.String()
|
||||
}
|
||||
|
||||
// ignoreWhitespace consumes the current rune and all contiguous whitespace.
|
||||
func (s *scanner) ignoreWhitespace() {
|
||||
for {
|
||||
if ch := s.read(); ch == eof {
|
||||
break
|
||||
} else if !isWhitespace(ch) {
|
||||
s.unread()
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
55
src/vendor/github.com/nickng/bibtex/token.go
generated
vendored
Normal file
|
@ -0,0 +1,55 @@
|
|||
package bibtex
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// token identifies the kind of a lexical token produced by the scanner.
type token int

const (
	// tILLEGAL stands for an invalid token.
	tILLEGAL token = iota
)
|
||||
|
||||
// eof is the sentinel rune the scanner returns when input is exhausted.
var eof = rune(0)
|
||||
|
||||
// tokenPos is a pair of coordinates identifying the start of a token.
type tokenPos struct {
	Char  int   // rune offset within the current line
	Lines []int // one entry per fully-read line; line number is len(Lines)+1
}

// String renders the position as "line:char" with a 1-indexed line number.
func (p tokenPos) String() string {
	line := len(p.Lines) + 1
	return fmt.Sprintf("%d:%d", line, p.Char)
}
|
||||
|
||||
// isWhitespace reports whether ch is a space, tab, or line-break rune.
func isWhitespace(ch rune) bool {
	switch ch {
	case ' ', '\t', '\n', '\r':
		return true
	default:
		return false
	}
}
|
||||
|
||||
// isAlpha reports whether ch is an ASCII letter (a-z or A-Z).
func isAlpha(ch rune) bool {
	isLower := 'a' <= ch && ch <= 'z'
	isUpper := 'A' <= ch && ch <= 'Z'
	return isLower || isUpper
}
|
||||
|
||||
// isDigit reports whether ch is an ASCII digit (0-9).
func isDigit(ch rune) bool {
	return ch >= '0' && ch <= '9'
}
|
||||
|
||||
// isAlphanum reports whether ch is an ASCII letter or digit.
func isAlphanum(ch rune) bool {
	switch {
	case 'a' <= ch && ch <= 'z',
		'A' <= ch && ch <= 'Z',
		'0' <= ch && ch <= '9':
		return true
	default:
		return false
	}
}
|
||||
|
||||
// isBareSymbol reports whether ch is a symbol permitted inside a bare
// (unquoted) identifier.
func isBareSymbol(ch rune) bool {
	return strings.IndexRune("-_:./+", ch) >= 0
}
|
||||
|
||||
// isSymbol returns true if ch is a valid symbol.
func isSymbol(ch rune) bool {
	return strings.IndexRune("!?&*+-./:;<>[]^_`|~@", ch) >= 0
}
|
||||
|
||||
// isOpenQuote reports whether ch opens a delimited value: an opening
// brace or a double quote.
func isOpenQuote(ch rune) bool {
	switch ch {
	case '{', '"':
		return true
	default:
		return false
	}
}
|
3
src/vendor/modules.txt
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
# github.com/nickng/bibtex v1.3.0
|
||||
## explicit; go 1.18
|
||||
github.com/nickng/bibtex
|