forked from mirror/oddmu
Compare commits
112 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3078d63890 | ||
|
|
143ecb8a0a | ||
|
|
d66aa03a2d | ||
|
|
64954ddf5d | ||
|
|
a1d6ebfdff | ||
|
|
db3a3f5009 | ||
|
|
ece9649e3d | ||
|
|
23074cdd58 | ||
|
|
06c07209a2 | ||
|
|
7b2a835729 | ||
|
|
d0fe534f8e | ||
|
|
ac7de17a87 | ||
|
|
84e6a757b2 | ||
|
|
2dfb2afbf5 | ||
|
|
2092b5777c | ||
|
|
f635cb738a | ||
|
|
da398a3315 | ||
|
|
7315abd5bb | ||
|
|
b39901b244 | ||
|
|
bb4843c2f4 | ||
|
|
816c981200 | ||
|
|
89d550a1a4 | ||
|
|
4eb013a4da | ||
|
|
e8f6ae0450 | ||
|
|
9bf3beb440 | ||
|
|
cd6809d791 | ||
|
|
7c5a3860e7 | ||
|
|
a7c343decb | ||
|
|
18bb5da8c0 | ||
|
|
2a0ea791ec | ||
|
|
726586b39d | ||
|
|
8f30704be9 | ||
|
|
616ae0a1ba | ||
|
|
af86b865bf | ||
|
|
7110e0af68 | ||
|
|
8841372814 | ||
|
|
fefa283775 | ||
|
|
5a09d65dab | ||
|
|
2cf0855994 | ||
|
|
f98312e12f | ||
|
|
d213ee2815 | ||
|
|
0cd09666c6 | ||
|
|
bd9364dc09 | ||
|
|
93fd49bc4c | ||
|
|
300e411960 | ||
|
|
10cea2bf2c | ||
|
|
830af140eb | ||
|
|
c758dd7df7 | ||
|
|
969df2aef9 | ||
|
|
39f414694c | ||
|
|
fa67508692 | ||
|
|
d5696135c1 | ||
|
|
284fc3094d | ||
|
|
57161bbc98 | ||
|
|
d855d9d91a | ||
|
|
ca85250514 | ||
|
|
649fde81fe | ||
|
|
8a47e9c5fe | ||
|
|
fd9a515e0f | ||
|
|
da04c6dc27 | ||
|
|
bd2da1414c | ||
|
|
6d1a5462b4 | ||
|
|
3dcaf8aca1 | ||
|
|
80ce16f873 | ||
|
|
41347ad5dc | ||
|
|
6a911b2860 | ||
|
|
1d6db77660 | ||
|
|
8a8afcb56f | ||
|
|
6803b8e90d | ||
|
|
ff357a4048 | ||
|
|
77a38ddf66 | ||
|
|
d3ffe82a90 | ||
|
|
4a12721462 | ||
|
|
07b1277764 | ||
|
|
f99a54e2ef | ||
|
|
56a4461bd6 | ||
|
|
2e38daf667 | ||
|
|
a5421372d8 | ||
|
|
7017363f6a | ||
|
|
9a7a1ee2a9 | ||
|
|
76d7598854 | ||
|
|
d7b48b975b | ||
|
|
4314a35d1d | ||
|
|
63e1c987f2 | ||
|
|
7d748f82da | ||
|
|
8385bc424a | ||
|
|
820763bf23 | ||
|
|
7d40fa4adb | ||
|
|
6e24603c27 | ||
|
|
5096627b87 | ||
|
|
0c691123ff | ||
|
|
5770966cdd | ||
|
|
a001a77692 | ||
|
|
0e29ed77ea | ||
|
|
173bb62a79 | ||
|
|
58249aac85 | ||
|
|
1fdd502e95 | ||
|
|
196ff605c3 | ||
|
|
6c1e595f13 | ||
|
|
34fdb5d9a9 | ||
|
|
b28614fa52 | ||
|
|
e7511ed059 | ||
|
|
a5d03dd136 | ||
|
|
87d5efcb7a | ||
|
|
c9bb062a04 | ||
|
|
e2eec5e052 | ||
|
|
26033de177 | ||
|
|
e1ba007f97 | ||
|
|
e90ff9e7dd | ||
|
|
70356e850a | ||
|
|
81a59fd6ac | ||
|
|
52d6f26eed |
6
.gitignore
vendored
6
.gitignore
vendored
@@ -1,3 +1,7 @@
|
||||
/oddmu
|
||||
test.md
|
||||
/testdata/
|
||||
/oddmu-darwin-*
|
||||
/oddmu-linux-*
|
||||
/oddmu-windows-*
|
||||
/oddmu.exe
|
||||
/oddmu
|
||||
|
||||
6
Makefile
6
Makefile
@@ -16,6 +16,8 @@ help:
|
||||
@echo " just build it"
|
||||
@echo make install
|
||||
@echo " install the files to ~/.local"
|
||||
@echo sudo make install PREFIX=/usr/local
|
||||
@echo " install the files to /usr/local"
|
||||
@echo make upload
|
||||
@echo " this is how I upgrade my server"
|
||||
@echo make dist
|
||||
@@ -71,8 +73,8 @@ oddmu-windows-amd64.tar.gz: oddmu.exe
|
||||
$< *.md man/*.[157].{html,md} themes/
|
||||
|
||||
%.tar.gz: %
|
||||
tar --create --file $@ --transform='s/^$</oddmu/' --transform='s/^/oddmu\//' --exclude='*~' \
|
||||
$< Makefile *.socket *.service *.md man/Makefile man/*.1 man/*.5 man/*.7 themes/
|
||||
tar --create --gzip --file $@ --transform='s/^$</oddmu/' --transform='s/^/oddmu\//' --exclude='*~' \
|
||||
$< *.html Makefile *.socket *.service *.md man/Makefile man/*.[157] themes/
|
||||
|
||||
priv:
|
||||
sudo setcap 'cap_net_bind_service=+ep' oddmu
|
||||
|
||||
92
README.md
92
README.md
@@ -1,28 +1,35 @@
|
||||
# Oddµ: A minimal wiki
|
||||
# Oddμ: A minimal wiki
|
||||
|
||||
This program helps you run a minimal wiki, blog, digital garden, memex
|
||||
or Zettelkasten. There is no version history.
|
||||
Oddμ (or Oddmu) helps you run a minimal wiki, blog, digital garden,
|
||||
memex or Zettelkasten.
|
||||
|
||||
It's well suited as a self-hosted, single-user web application, when
|
||||
there is no need for collaboration on the site itself. Links and email
|
||||
connect you to the rest of the net. The wiki can be public or private.
|
||||
Perhaps it just runs on your local machine, unreachable from the
|
||||
Internet.
|
||||
|
||||
It's well suited as a secondary medium for a close-knit group:
|
||||
collaboration and conversation happens elsewhere, in chat, on social
|
||||
media. The wiki serves as the text repository that results from these
|
||||
discussions. As there are no logins and no version histories, it is
|
||||
not possible to undo vandalism and spam. Only allow people you trust
|
||||
write-access to the site.
|
||||
|
||||
It's well suited as a simple static site generator. There are no
|
||||
Oddμ can be run as a static site generator, processing a directory
|
||||
with Markdown files, turning them into HTML files. HTML templates
|
||||
allow the customisation of headers, footers and styling. There are no
|
||||
plugins.
|
||||
|
||||
When Oddµ runs as a web server, it serves all the Markdown files
|
||||
Oddμ is well suited as a self-hosted, single-user web application,
|
||||
when there is no need for collaboration on the site itself. Links and
|
||||
email connect you to the rest of the net. The wiki can be public or
|
||||
private.
|
||||
|
||||
If the site is public, use a regular web server as a proxy to make
|
||||
people log in before making changes. As there is no version history,
|
||||
it is not possible to undo vandalism and spam. Only grant write-access
|
||||
to people you trust.
|
||||
|
||||
If the site is private, running on a local machine and unreachable
|
||||
from the Internet, no such precautions are necessary.
|
||||
|
||||
Oddμ is well suited as a secondary medium for a close-knit group:
|
||||
collaboration and conversation happens elsewhere, in chat, on social
|
||||
media. The wiki serves as the text repository that results from these
|
||||
discussions.
|
||||
|
||||
When Oddμ runs as a web server, it serves all the Markdown files
|
||||
(ending in `.md`) as web pages. These pages can be edited via the web.
|
||||
|
||||
Oddmu adds the following extensions to Markdown: local links `[[like
|
||||
Oddμ adds the following extensions to Markdown: local links `[[like
|
||||
this]]`, hashtags `#Like_This` and fediverse account links like
|
||||
`@alex@alexschroeder.ch`.
|
||||
|
||||
@@ -32,7 +39,7 @@ necessary.
|
||||
|
||||
Other files can be uploaded and images (ending in `.jpg`, `.jpeg`,
|
||||
`.png`, `.heic` or `.webp`) can be resized when they are uploaded
|
||||
(resulting in `.jpg` or `.png` files).
|
||||
(resulting in `.jpg`, `.png` or `.webp` files).
|
||||
|
||||
## Documentation
|
||||
|
||||
@@ -42,15 +49,15 @@ available:
|
||||
|
||||
[oddmu(1)](https://alexschroeder.ch/view/oddmu/oddmu.1): This man page
|
||||
has a short introduction to Oddmu, its configuration via templates and
|
||||
environment variables, plus points to the other man pages.
|
||||
environment variables, plus pointers to the other man pages.
|
||||
|
||||
[oddmu(5)](https://alexschroeder.ch/view/oddmu/oddmu.5): This man page
|
||||
talks about the Markdown and includes some examples for the
|
||||
non-standard features such as table markup. It also talks about the
|
||||
Oddmu extensions to Markdown: wiki links, hashtags and fediverse
|
||||
account links. Local links must use percent encoding for page names so
|
||||
there is a section about percent encoding. The man page also explains
|
||||
how feeds are generated.
|
||||
talks about Markdown and includes some examples for the non-standard
|
||||
features such as table markup. It also talks about the Oddmu
|
||||
extensions to Markdown: wiki links, hashtags and fediverse account
|
||||
links. Local links must use percent encoding for page names so there
|
||||
is a section about percent encoding. The man page also explains how
|
||||
feeds are generated.
|
||||
|
||||
[oddmu-releases(7)](https://alexschroeder.ch/view/oddmu/oddmu-releases.7):
|
||||
This man page lists all the Oddmu versions and their user-visible
|
||||
@@ -101,6 +108,10 @@ Static site generator:
|
||||
This man page documents the "html" subcommand to generate HTML from
|
||||
Markdown pages from the command line.
|
||||
|
||||
[oddmu-feed(1)](https://alexschroeder.ch/view/oddmu/oddmu-feed.1):
|
||||
This man page documents the "feed" subcommand to generate a feed from
|
||||
Markdown pages from the command line.
|
||||
|
||||
[oddmu-static(1)](https://alexschroeder.ch/view/oddmu/oddmu-static.1):
|
||||
This man page documents the "static" subcommand to generate an entire
|
||||
static website from the command line, avoiding the need to run Oddmu
|
||||
@@ -167,8 +178,6 @@ make docs
|
||||
The `Makefile` in the `man` directory has targets to create Markdown
|
||||
and HTML files.
|
||||
|
||||
The HEIC library uses C code and prevents cross-compilation.
|
||||
|
||||
As the repository changed URLs a few times (from GitHub, to
|
||||
self-hosted using `cgit` to self-hosted using `legit`), there is no
|
||||
way to install it using `go install`. You need to `git clone` the
|
||||
@@ -280,18 +289,10 @@ extensions can be added.
|
||||
|
||||
### Filenames and URL path
|
||||
|
||||
One of the sad parts of the code is the distinction between path and
|
||||
filepath. On a Linux system, this doesn't matter. I suspect that it
|
||||
also doesn't matter on MacOS and Windows because the file systems
|
||||
handle forward slashes just fine. The code still tries to do the right
|
||||
thing. A path that is derived from a URL is a path with slashes.
|
||||
Before accessing a file, it has to be turned into a filepath using
|
||||
`filepath.FromSlashes` and in the rare case where the inverse happens,
|
||||
use `filepath.ToSlashes`. Any path received via the URL path uses
|
||||
slashes and needs to be converted to a filepath before passing it to
|
||||
any `os` function. Any path received within a `path/filepath.WalkFunc`
|
||||
is a filepath and needs to be converted to use slashes when used in
|
||||
HTML output.
|
||||
There are some simplifications made. The code doesn't consider the
|
||||
various encodings (UTF-8 NFC on the web vs UTF-8 NFD for HFS+, for
|
||||
example; it also doesn't check for characters in page names that are
|
||||
illegal filenames on the filesystem used).
|
||||
|
||||
If you need to access the page name in code that is used from a
|
||||
template, you have to decode the path. See the code in `diff.go` for
|
||||
@@ -347,9 +348,12 @@ in turn can be used by browsers to get hyphenation right. Apache-2.0.
|
||||
is used to sniff the MIME type of files with unknown filename
|
||||
extensions. MIT.
|
||||
|
||||
[github.com/gen2brain/heic](https://github.com/gen2brain/heic) is
|
||||
used to decode HEIC files (the new default file format for photos on
|
||||
iPhones). LGPL-3.0-only.
|
||||
[github.com/gen2brain/heic](https://github.com/gen2brain/heic) is used
|
||||
to decode HEIC files (the new default file format for photos on
|
||||
iPhones). MIT.
|
||||
|
||||
[github.com/gen2brain/webp](https://github.com/gen2brain/webp) is used
|
||||
to encode and decode WebP files. MIT.
|
||||
|
||||
[github.com/disintegration/imaging](https://github.com/disintegration/imaging)
|
||||
is used to resize images. MIT.
|
||||
|
||||
8
RELEASE
8
RELEASE
@@ -13,4 +13,10 @@ When preparing a new release
|
||||
|
||||
5. Tag the release and push the tag to all remotes
|
||||
|
||||
6. cd man && make upload
|
||||
6. cd man && make upload
|
||||
|
||||
7. make dist
|
||||
|
||||
8. create a new release at https://github.com/kensanata/oddmu/releases
|
||||
|
||||
9. upload the four .tar.gz binaries to the GitHub release
|
||||
|
||||
13
add.html
13
add.html
@@ -3,20 +3,23 @@
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
<meta name="viewport" content="width=device-width">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
|
||||
<title>Add to {{.Title}}</title>
|
||||
<style>
|
||||
html { max-width: 70ch; padding: 2ch; margin: auto; color: #111; background-color: #ffe; }
|
||||
form, textarea { width: 100%; }
|
||||
html { max-width: 70ch; padding: 1ch; height: calc(100% - 2ch); margin: auto }
|
||||
body { hyphens: auto; color: #111; background-color: #ffe; margin: 0; padding: 0; height: 100%; display: flex; flex-flow: column }
|
||||
form, textarea { box-sizing: border-box; width: 100%; font-size: inherit }
|
||||
#editor { flex: 1 1 auto; display: flex; flex-flow: column }
|
||||
textarea { flex: 1 1 auto }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Adding to {{.Title}}</h1>
|
||||
<form action="/append/{{.Name}}" method="POST">
|
||||
<form id="editor" action="/append/{{.Path}}" method="POST">
|
||||
<textarea name="body" rows="20" cols="80" placeholder="Text" lang="{{.Language}}" autofocus required></textarea>
|
||||
<p><label><input type="checkbox" name="notify" checked> Add link to <a href="/view/changes">the list of changes</a>.</label></p>
|
||||
<p><input type="submit" value="Add">
|
||||
<a href="/view/{{.Name}}"><button type="button">Cancel</button></a></p>
|
||||
<a href="/view/{{.Path}}"><button type="button">Cancel</button></a></p>
|
||||
</form>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -48,7 +48,7 @@ func appendHandler(w http.ResponseWriter, r *http.Request, name string) {
|
||||
return
|
||||
}
|
||||
}
|
||||
http.Redirect(w, r, "/view/"+name, http.StatusFound)
|
||||
http.Redirect(w, r, "/view/"+nameEscape(name), http.StatusFound)
|
||||
}
|
||||
|
||||
func (p *Page) append(body []byte) {
|
||||
|
||||
@@ -2,6 +2,7 @@ package main
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"regexp"
|
||||
@@ -34,15 +35,15 @@ It's not `)}
|
||||
data.Set("body", "barbecue")
|
||||
|
||||
assert.Regexp(t, regexp.MustCompile("a distant fire"),
|
||||
assert.HTTPBody(makeHandler(viewHandler, false),
|
||||
assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet),
|
||||
"GET", "/view/testdata/add/fire", nil))
|
||||
assert.NotRegexp(t, regexp.MustCompile("a distant fire"),
|
||||
assert.HTTPBody(makeHandler(addHandler, true),
|
||||
assert.HTTPBody(makeHandler(addHandler, true, http.MethodGet),
|
||||
"GET", "/add/testdata/add/fire", nil))
|
||||
HTTPRedirectTo(t, makeHandler(appendHandler, true),
|
||||
HTTPRedirectTo(t, makeHandler(appendHandler, true, http.MethodPost),
|
||||
"POST", "/append/testdata/add/fire", data, "/view/testdata/add/fire")
|
||||
assert.Regexp(t, regexp.MustCompile(`not</p>\s*<p>barbecue`),
|
||||
assert.HTTPBody(makeHandler(viewHandler, false),
|
||||
assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet),
|
||||
"GET", "/view/testdata/add/fire", nil))
|
||||
}
|
||||
|
||||
@@ -57,7 +58,7 @@ Blue and green and pebbles gray
|
||||
data := url.Values{}
|
||||
data.Set("body", "Stand in cold water")
|
||||
data.Add("notify", "on")
|
||||
HTTPRedirectTo(t, makeHandler(appendHandler, true),
|
||||
HTTPRedirectTo(t, makeHandler(appendHandler, true, http.MethodPost),
|
||||
"POST", "/append/testdata/append/"+today+"-water",
|
||||
data, "/view/testdata/append/"+today+"-water")
|
||||
// The changes.md file was created
|
||||
|
||||
20
archive.go
20
archive.go
@@ -16,7 +16,7 @@ import (
|
||||
// are skipped. If the environment variable ODDMU_FILTER is a regular expression that matches the starting directory,
|
||||
// this is a "separate site"; if the regular expression does not match, this is the "main site" and page names must also
|
||||
// not match the regular expression.
|
||||
func archiveHandler(w http.ResponseWriter, r *http.Request, path string) {
|
||||
func archiveHandler(w http.ResponseWriter, r *http.Request, name string) {
|
||||
filter := os.Getenv("ODDMU_FILTER")
|
||||
re, err := regexp.Compile(filter)
|
||||
if err != nil {
|
||||
@@ -24,30 +24,30 @@ func archiveHandler(w http.ResponseWriter, r *http.Request, path string) {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
matches := re.MatchString(path)
|
||||
dir := filepath.Dir(filepath.FromSlash(path))
|
||||
matches := re.MatchString(name)
|
||||
dir := filepath.Dir(filepath.FromSlash(name))
|
||||
z := zip.NewWriter(w)
|
||||
err = filepath.Walk(dir, func(path string, info fs.FileInfo, err error) error {
|
||||
err = filepath.Walk(dir, func(fp string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if info.IsDir() {
|
||||
if path != "." && strings.HasPrefix(filepath.Base(path), ".") {
|
||||
if fp != "." && strings.HasPrefix(filepath.Base(fp), ".") {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
} else if !strings.HasPrefix(filepath.Base(path), ".") &&
|
||||
(matches || !re.MatchString(path)) {
|
||||
zf, err := z.Create(path)
|
||||
} else if !strings.HasPrefix(filepath.Base(fp), ".") &&
|
||||
(matches || !re.MatchString(filepath.ToSlash(fp))) {
|
||||
zf, err := z.Create(fp)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return err
|
||||
}
|
||||
file, err := os.Open(path)
|
||||
f, err := os.Open(fp)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return err
|
||||
}
|
||||
_, err = io.Copy(zf, file)
|
||||
_, err = io.Copy(zf, f)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
return err
|
||||
|
||||
@@ -3,6 +3,7 @@ package main
|
||||
import (
|
||||
"archive/zip"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
@@ -15,7 +16,7 @@ func TestArchive(t *testing.T) {
|
||||
assert.NoError(t, os.WriteFile("testdata/archive/public/index.md", []byte("# Public\nChurch tower bells ringing\nA cold wind biting my ears\nWalk across the square"), 0644))
|
||||
assert.NoError(t, os.WriteFile("testdata/archive/secret/index.md", []byte("# Secret\nMany years ago I danced\nSpending nights in clubs and bars\nIt is my secret"), 0644))
|
||||
os.Setenv("ODDMU_FILTER", "^testdata/archive/secret/")
|
||||
body := assert.HTTPBody(makeHandler(archiveHandler, true), "GET", "/archive/testdata/data.zip", nil)
|
||||
body := assert.HTTPBody(makeHandler(archiveHandler, true, http.MethodGet), "GET", "/archive/testdata/data.zip", nil)
|
||||
r, err := zip.NewReader(strings.NewReader(body), int64(len(body)))
|
||||
assert.NoError(t, err, "Unzip")
|
||||
names := []string{}
|
||||
|
||||
89
changes.go
89
changes.go
@@ -20,10 +20,10 @@ func (p *Page) notify() error {
|
||||
if p.Title == "" {
|
||||
p.Title = p.Name
|
||||
}
|
||||
esc := nameEscape(path.Base(p.Name))
|
||||
esc := nameEscape(p.Base())
|
||||
link := "* [" + p.Title + "](" + esc + ")\n"
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(` + esc + `\)\n`)
|
||||
dir := path.Dir(p.Name)
|
||||
dir := p.Dir()
|
||||
err := addLinkWithDate(path.Join(dir, "changes"), link, re)
|
||||
if err != nil {
|
||||
log.Printf("Updating changes in %s failed: %s", dir, err)
|
||||
@@ -31,7 +31,7 @@ func (p *Page) notify() error {
|
||||
}
|
||||
if p.IsBlog() {
|
||||
// Add to the index only if the blog post is for the current year
|
||||
if strings.HasPrefix(path.Base(p.Name), time.Now().Format("2006")) {
|
||||
if strings.HasPrefix(p.Base(), time.Now().Format("2006")) {
|
||||
err := addLink(path.Join(dir, "index"), true, link, re)
|
||||
if err != nil {
|
||||
log.Printf("Updating index in %s failed: %s", dir, err)
|
||||
@@ -154,21 +154,56 @@ func addLink(name string, mandatory bool, link string, re *regexp.Regexp) error
|
||||
}
|
||||
}
|
||||
org := string(p.Body)
|
||||
addLinkToPage(p, link, re)
|
||||
// only save if something changed
|
||||
if string(p.Body) != org {
|
||||
return p.save()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func addLinkToPage(p *Page, link string, re *regexp.Regexp) {
|
||||
// if a link exists, that's the place to insert the new link (in which case loc[0] and loc[1] differ)
|
||||
loc := re.FindIndex(p.Body)
|
||||
// if no link exists, find a good place to insert it
|
||||
if loc == nil {
|
||||
// locate the beginning of the list to insert the line
|
||||
re = regexp.MustCompile(`(?m)^\* \[[^\]]+\]\([^\)]+\)\n`)
|
||||
loc = re.FindIndex(p.Body)
|
||||
if loc == nil {
|
||||
// if no list was found, use the end of the page
|
||||
m := len(p.Body)
|
||||
loc = []int{m, m}
|
||||
} else {
|
||||
// if a list item was found, use just the beginning as insertion point
|
||||
loc[1] = loc[0]
|
||||
// locate the list items
|
||||
re = regexp.MustCompile(`(?m)^\* \[[^\]]+\]\([^\)]+\)\n?`)
|
||||
items := re.FindAllIndex(p.Body, -1)
|
||||
first := false
|
||||
pos := -1
|
||||
// skip newer items
|
||||
for i, it := range items {
|
||||
// break if the current line is older (earlier in sort order)
|
||||
stop := string(p.Body[it[0]:it[1]]) < link
|
||||
// before the first match is always a good insert point
|
||||
if i == 0 {
|
||||
pos = it[0]
|
||||
first = true
|
||||
}
|
||||
// if we're not stopping, then after the current item is a good insert point
|
||||
if !stop {
|
||||
pos = it[1]
|
||||
first = false
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
// otherwise it's at the end of the list, after the last item
|
||||
if pos == -1 && len(items) > 0 {
|
||||
pos = items[len(items)-1][1]
|
||||
first = false
|
||||
}
|
||||
// if no list was found, use the end of the page
|
||||
if pos == -1 {
|
||||
pos = len(p.Body)
|
||||
first = true
|
||||
}
|
||||
if first {
|
||||
p.Body, pos = ensureTwoNewlines(p.Body, pos)
|
||||
}
|
||||
// mimic a zero-width match at the insert point
|
||||
loc = []int{pos, pos}
|
||||
}
|
||||
// start with new page content
|
||||
r := []byte("")
|
||||
@@ -179,9 +214,27 @@ func addLink(name string, mandatory bool, link string, re *regexp.Regexp) error
|
||||
// append the rest
|
||||
r = append(r, p.Body[loc[1]:]...)
|
||||
p.Body = r
|
||||
// only save if something changed
|
||||
if string(p.Body) != org {
|
||||
return p.save()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ensureTwoNewlines makes sure that the two bytes before pos in buf are newlines. If the are not, newlines are inserted
|
||||
// and pos is increased. The new buf and pos is returned.
|
||||
func ensureTwoNewlines(buf []byte, pos int) ([]byte, int) {
|
||||
var insert []byte
|
||||
if pos >= 1 && buf[pos-1] != '\n' {
|
||||
// add two newlines if buf doesn't end with a newline
|
||||
insert = []byte("\n\n")
|
||||
} else if pos >= 2 && buf[pos-2] != '\n' {
|
||||
// add one newline if Body ends with just one newline
|
||||
insert = []byte("\n")
|
||||
}
|
||||
if insert != nil {
|
||||
r := []byte("")
|
||||
r = append(r, buf[:pos]...)
|
||||
r = append(r, insert...)
|
||||
r = append(r, buf[pos:]...)
|
||||
buf = r
|
||||
pos += len(insert)
|
||||
|
||||
}
|
||||
return buf, pos
|
||||
}
|
||||
|
||||
@@ -3,12 +3,67 @@ package main
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"os"
|
||||
"regexp"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Note TestEditSaveChanges and TestAddAppendChanges.
|
||||
|
||||
func TestAddLinkToPageWithNoList(t *testing.T) {
|
||||
// no newlines
|
||||
title := "# Test"
|
||||
p := &Page{Body: []byte(title)}
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(2025-08-08\)\n`)
|
||||
link := "* [2025-08-08](2025-08-08)\n"
|
||||
addLinkToPage(p, link, re)
|
||||
assert.Equal(t, title+"\n\n"+link, string(p.Body))
|
||||
}
|
||||
|
||||
func TestAddLinkToPageWithOlderLink(t *testing.T) {
|
||||
// one newline
|
||||
title := "# Test\n"
|
||||
old := "* [2025-08-08](2025-08-08)\n"
|
||||
p := &Page{Body: []byte(title + old)}
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(2025-08-10\)\n`)
|
||||
link := "* [2025-08-10](2025-08-10)\n"
|
||||
addLinkToPage(p, link, re)
|
||||
assert.Equal(t, title+"\n"+link+old, string(p.Body))
|
||||
}
|
||||
|
||||
func TestAddLinkToPageBetweenToExistingLinks(t *testing.T) {
|
||||
title := "# Test\n\n"
|
||||
new := "* [2025-08-10](2025-08-10)\n"
|
||||
old := "* [2025-08-08](2025-08-08)\n"
|
||||
p := &Page{Body: []byte(title + new + old)}
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(2025-08-09\)\n`)
|
||||
link := "* [2025-08-09](2025-08-09)\n"
|
||||
addLinkToPage(p, link, re)
|
||||
assert.Equal(t, title+new+link+old, string(p.Body))
|
||||
}
|
||||
|
||||
func TestAddLinkToPageBetweenToExistingLinks2(t *testing.T) {
|
||||
title := "# Test\n\n"
|
||||
new := "* [2025-08-10](2025-08-10)\n* [2025-08-09](2025-08-09)\n"
|
||||
old := "* [2025-08-07](2025-08-07)\n"
|
||||
p := &Page{Body: []byte(title + new + old)}
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(2025-08-08\)\n`)
|
||||
link := "* [2025-08-08](2025-08-08)\n"
|
||||
addLinkToPage(p, link, re)
|
||||
assert.Equal(t, title+new+link+old, string(p.Body))
|
||||
}
|
||||
|
||||
func TestAddLinkToPageAtTheEnd(t *testing.T) {
|
||||
title := "# Test\n\n"
|
||||
new := "* [2025-08-10](2025-08-10)\n"
|
||||
old := "* [2025-08-08](2025-08-08)\n"
|
||||
p := &Page{Body: []byte(title + new + old)}
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(2025-08-07\)\n`)
|
||||
link := "* [2025-08-07](2025-08-07)\n"
|
||||
addLinkToPage(p, link, re)
|
||||
assert.Equal(t, title+new+old+link, string(p.Body))
|
||||
}
|
||||
|
||||
func TestChanges(t *testing.T) {
|
||||
cleanup(t, "testdata/washing")
|
||||
today := time.Now().Format(time.DateOnly)
|
||||
@@ -48,7 +103,8 @@ Home away from home
|
||||
assert.Contains(t, string(s), line)
|
||||
s, err = os.ReadFile("testdata/changes/Haiku.md")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, intro+line, string(s))
|
||||
// ensure an empty line when adding at the end of the page
|
||||
assert.Equal(t, intro+"\n"+line, string(s))
|
||||
assert.NoFileExists(t, "testdata/changes/Poetry.md")
|
||||
}
|
||||
|
||||
|
||||
7
diff.go
7
diff.go
@@ -6,7 +6,6 @@ import (
|
||||
"html"
|
||||
"html/template"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
@@ -25,11 +24,7 @@ func diffHandler(w http.ResponseWriter, r *http.Request, name string) {
|
||||
|
||||
// Diff computes the diff for a page. At this point, renderHtml has already been called so the Name is escaped.
|
||||
func (p *Page) Diff() template.HTML {
|
||||
path, err := url.PathUnescape(p.Name)
|
||||
if err != nil {
|
||||
return template.HTML("Cannot unescape " + p.Name)
|
||||
}
|
||||
fp := filepath.FromSlash(path)
|
||||
fp := filepath.FromSlash(p.Name)
|
||||
a := fp + ".md~"
|
||||
t1, err := os.ReadFile(a)
|
||||
if err != nil {
|
||||
|
||||
10
diff.html
10
diff.html
@@ -3,11 +3,11 @@
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
<meta name="viewport" content="width=device-width">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
|
||||
<title>{{.Title}}</title>
|
||||
<style>
|
||||
html { max-width: 70ch; padding: 1ch; margin: auto; color: #111; background-color: #ffe; }
|
||||
body { hyphens: auto; }
|
||||
html { max-width: 70ch; padding: 1ch; margin: auto; color: #111; background-color: #ffe }
|
||||
body { hyphens: auto }
|
||||
del { background-color: #fab }
|
||||
ins { background-color: #af8 }
|
||||
pre { white-space: normal; background-color: white; border: 1px solid #eee; padding: 1ch }
|
||||
@@ -15,11 +15,11 @@ pre { white-space: normal; background-color: white; border: 1px solid #eee; padd
|
||||
</head>
|
||||
<body>
|
||||
<header>
|
||||
<a href="/view/{{.Name}}">Back</a>
|
||||
<a href="/view/{{.Path}}">Back</a>
|
||||
</header>
|
||||
<main id="main">
|
||||
<h1>{{.Title}}</h1>
|
||||
<p>This is the diff between <a href="/view/{{.Name}}.md~">the backup</a> and <a href="/view/{{.Name}}.md">the current copy</a>.</p>
|
||||
<p>This is the diff between <a href="/view/{{.Path}}.md~">the backup</a> and <a href="/view/{{.Path}}.md">the current copy</a>.</p>
|
||||
<pre>
|
||||
{{.Diff}}
|
||||
</pre>
|
||||
|
||||
20
diff_test.go
20
diff_test.go
@@ -2,6 +2,7 @@ package main
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"net/http"
|
||||
"os"
|
||||
"testing"
|
||||
"time"
|
||||
@@ -24,7 +25,7 @@ Oh so fresh, so warm.`
|
||||
p.save()
|
||||
p.Body = []byte(r)
|
||||
p.save()
|
||||
body := assert.HTTPBody(makeHandler(diffHandler, true),
|
||||
body := assert.HTTPBody(makeHandler(diffHandler, true, http.MethodGet),
|
||||
"GET", "/diff/testdata/diff/bread", nil)
|
||||
assert.Contains(t, body, `<del>breathe</del>`)
|
||||
assert.Contains(t, body, `<ins>whisper</ins>`)
|
||||
@@ -47,7 +48,7 @@ Mispronouncing words`
|
||||
p.save()
|
||||
p.Body = []byte(r)
|
||||
p.save()
|
||||
body := assert.HTTPBody(makeHandler(diffHandler, true),
|
||||
body := assert.HTTPBody(makeHandler(diffHandler, true, http.MethodGet),
|
||||
"GET", "/diff/testdata/diff/coup%20de%20grace", nil)
|
||||
assert.Contains(t, body, `<del>s</del>`)
|
||||
assert.Contains(t, body, `<ins>ce</ins>`)
|
||||
@@ -70,6 +71,7 @@ I hate the machine!`
|
||||
I shiver at home
|
||||
the monitor glares and moans
|
||||
my grey heart grows cold`
|
||||
// create s and overwrite it with r
|
||||
p := &Page{Name: "testdata/backup/cold", Body: []byte(s)}
|
||||
p.save()
|
||||
p = &Page{Name: "testdata/backup/cold", Body: []byte(r)}
|
||||
@@ -78,19 +80,29 @@ my grey heart grows cold`
|
||||
// diff from s to r:
|
||||
assert.Contains(t, body, `<del>fear or cold, who knows?</del>`)
|
||||
assert.Contains(t, body, `<ins>I hate the machine!</ins>`)
|
||||
// save u
|
||||
p = &Page{Name: "testdata/backup/cold", Body: []byte(u)}
|
||||
p.save()
|
||||
body = string(p.Diff())
|
||||
// diff from s to u since r was not 60 min or older
|
||||
// diff from s to u since r was not 60 min or older and so the backup is kept
|
||||
assert.Contains(t, body, `<del>fear or cold, who knows?</del>`)
|
||||
assert.Contains(t, body, `<ins>my grey heart grows cold</ins>`)
|
||||
// set timestamp 2h in the past
|
||||
ts := time.Now().Add(-2 * time.Hour)
|
||||
assert.NoError(t, os.Chtimes("testdata/backup/cold.md~", ts, ts))
|
||||
assert.NoError(t, os.Chtimes("testdata/backup/cold.md", ts, ts))
|
||||
// save r
|
||||
p = &Page{Name: "testdata/backup/cold", Body: []byte(r)}
|
||||
p.save()
|
||||
body = string(p.Diff())
|
||||
// diff from u to r:
|
||||
// diff from u to r since enough time has passed and the old backup is discarded
|
||||
assert.Contains(t, body, `<del>my grey heart grows cold</del>`)
|
||||
assert.Contains(t, body, `<ins>I hate the machine!</ins>`)
|
||||
// save s
|
||||
p = &Page{Name: "testdata/backup/cold", Body: []byte(s)}
|
||||
p.save()
|
||||
body = string(p.Diff())
|
||||
// diff from u to s since this is still "the same" editing window
|
||||
assert.Contains(t, body, `<del>my grey heart grows cold</del>`)
|
||||
assert.Contains(t, body, `<ins>fear or cold, who knows?</ins>`)
|
||||
}
|
||||
|
||||
15
edit.html
15
edit.html
@@ -3,24 +3,27 @@
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
<meta name="viewport" content="width=device-width">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
|
||||
<base href="/view/{{.Dir}}">
|
||||
<title>Editing {{.Title}}</title>
|
||||
<style>
|
||||
html { max-width: 70ch; padding: 2ch; margin: auto; color: #111; background-color: #ffe; }
|
||||
form, textarea { width: 100%; }
|
||||
html { max-width: 70ch; padding: 1ch; height: calc(100% - 2ch); margin: auto }
|
||||
body { hyphens: auto; color: #111; background-color: #ffe; margin: 0; padding: 0; height: 100%; display: flex; flex-flow: column }
|
||||
form, textarea { box-sizing: border-box; width: 100%; font-size: inherit }
|
||||
#editor { flex: 1 1 auto; display: flex; flex-flow: column }
|
||||
textarea { flex: 1 1 auto }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Editing {{.Title}}</h1>
|
||||
<form action="/save/{{.Name}}" method="POST">
|
||||
<form id="editor" action="/save/{{.Path}}" method="POST">
|
||||
<textarea name="body" rows="20" cols="80" placeholder="# Title
|
||||
|
||||
Text" lang="{{.Language}}" autofocus>{{printf "%s" .Body}}</textarea>
|
||||
<p><label><input type="checkbox" name="notify" checked> Add link to <a href="changes">the list of changes</a>.</label></p>
|
||||
<p><input type="submit" value="Save">
|
||||
<button formaction="/preview/{{.Name}}" type="submit">Preview</button>
|
||||
<a href="/view/{{.Name}}"><button type="button">Cancel</button></a></p>
|
||||
<button formaction="/preview/{{.Path}}" type="submit">Preview</button>
|
||||
<a href="/view/{{.Path}}"><button type="button">Cancel</button></a></p>
|
||||
</form>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -41,5 +41,5 @@ func saveHandler(w http.ResponseWriter, r *http.Request, name string) {
|
||||
return
|
||||
}
|
||||
}
|
||||
http.Redirect(w, r, "/view/"+name, http.StatusFound)
|
||||
http.Redirect(w, r, "/view/"+nameEscape(name), http.StatusFound)
|
||||
}
|
||||
|
||||
@@ -16,24 +16,24 @@ func TestEditSave(t *testing.T) {
|
||||
data.Set("body", "Hallo!")
|
||||
|
||||
// View of the non-existing page redirects to the edit page
|
||||
HTTPRedirectTo(t, makeHandler(viewHandler, false),
|
||||
HTTPRedirectTo(t, makeHandler(viewHandler, false, http.MethodGet),
|
||||
"GET", "/view/testdata/save/alex", nil, "/edit/testdata/save/alex")
|
||||
// Edit page can be fetched
|
||||
assert.HTTPStatusCode(t, makeHandler(editHandler, true),
|
||||
assert.HTTPStatusCode(t, makeHandler(editHandler, true, http.MethodGet),
|
||||
"GET", "/edit/testdata/save/alex", nil, 200)
|
||||
// Posting to the save URL saves a page
|
||||
HTTPRedirectTo(t, makeHandler(saveHandler, true),
|
||||
HTTPRedirectTo(t, makeHandler(saveHandler, true, http.MethodPost),
|
||||
"POST", "/save/testdata/save/alex", data, "/view/testdata/save/alex")
|
||||
// Page now contains the text
|
||||
assert.Contains(t, assert.HTTPBody(makeHandler(viewHandler, false),
|
||||
assert.Contains(t, assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet),
|
||||
"GET", "/view/testdata/save/alex", nil),
|
||||
"Hallo!")
|
||||
// Delete the page and you're sent to the empty page
|
||||
data.Set("body", "")
|
||||
HTTPRedirectTo(t, makeHandler(saveHandler, true),
|
||||
HTTPRedirectTo(t, makeHandler(saveHandler, true, http.MethodPost),
|
||||
"POST", "/save/testdata/save/alex", data, "/view/testdata/save/alex")
|
||||
// Viewing the non-existing page redirects to the edit page (like in the beginning)
|
||||
HTTPRedirectTo(t, makeHandler(viewHandler, false),
|
||||
HTTPRedirectTo(t, makeHandler(viewHandler, false, http.MethodGet),
|
||||
"GET", "/view/testdata/save/alex", nil, "/edit/testdata/save/alex")
|
||||
}
|
||||
|
||||
@@ -44,7 +44,7 @@ func TestEditSaveChanges(t *testing.T) {
|
||||
data.Add("notify", "on")
|
||||
today := time.Now().Format("2006-01-02")
|
||||
// Posting to the save URL saves a page
|
||||
HTTPRedirectTo(t, makeHandler(saveHandler, true),
|
||||
HTTPRedirectTo(t, makeHandler(saveHandler, true, http.MethodPost),
|
||||
"POST", "/save/testdata/notification/"+today,
|
||||
data, "/view/testdata/notification/"+today)
|
||||
// The changes.md file was created
|
||||
@@ -73,15 +73,15 @@ func TestEditId(t *testing.T) {
|
||||
cleanup(t, "testdata/id")
|
||||
data := url.Values{}
|
||||
data.Set("id", "testdata/id/alex")
|
||||
assert.HTTPStatusCode(t, makeHandler(editHandler, true),
|
||||
assert.HTTPStatusCode(t, makeHandler(editHandler, true, http.MethodGet),
|
||||
"GET", "/edit/", data, http.StatusBadRequest,
|
||||
"No slashes in id")
|
||||
data.Set("id", ".alex")
|
||||
assert.HTTPStatusCode(t, makeHandler(editHandler, true),
|
||||
assert.HTTPStatusCode(t, makeHandler(editHandler, true, http.MethodGet),
|
||||
"GET", "/edit/", data, http.StatusForbidden,
|
||||
"No hidden files")
|
||||
data.Set("id", "alex")
|
||||
assert.Contains(t, assert.HTTPBody(makeHandler(editHandler, true),
|
||||
assert.Contains(t, assert.HTTPBody(makeHandler(editHandler, true, http.MethodGet),
|
||||
"GET", "/edit/testdata/id/", data),
|
||||
"Editing testdata/id/alex")
|
||||
}
|
||||
|
||||
@@ -34,10 +34,6 @@ func (*exportCmd) Usage() string {
|
||||
it:
|
||||
|
||||
oddmu export > /tmp/export.rss
|
||||
|
||||
Options:
|
||||
|
||||
-template "filename" specifies the template to use (default: feed.html)
|
||||
`
|
||||
}
|
||||
|
||||
|
||||
@@ -13,8 +13,8 @@ func TestExportCmd(t *testing.T) {
|
||||
b := new(bytes.Buffer)
|
||||
s := exportCli(b, "feed.html", minimalIndex(t))
|
||||
assert.Equal(t, subcommands.ExitSuccess, s)
|
||||
assert.Contains(t, b.String(), "<title>Oddµ: A minimal wiki</title>")
|
||||
assert.Contains(t, b.String(), "<title>Welcome to Oddµ</title>")
|
||||
assert.Contains(t, b.String(), "<title>Oddμ: A minimal wiki</title>")
|
||||
assert.Contains(t, b.String(), "<title>Welcome to Oddμ</title>")
|
||||
}
|
||||
|
||||
func TestExportCmdLanguage(t *testing.T) {
|
||||
@@ -50,6 +50,6 @@ func TestExportCmdJsonFeed(t *testing.T) {
|
||||
b := new(bytes.Buffer)
|
||||
s := exportCli(b, "testdata/json/template.json", minimalIndex(t))
|
||||
assert.Equal(t, subcommands.ExitSuccess, s)
|
||||
assert.Contains(t, b.String(), `"title": "Oddµ: A minimal wiki"`)
|
||||
assert.Regexp(t, regexp.MustCompile("<h1.*>Welcome to Oddµ</h1>"), b.String()) // skip id
|
||||
assert.Contains(t, b.String(), `"title": "Oddμ: A minimal wiki"`)
|
||||
assert.Regexp(t, regexp.MustCompile("<h1.*>Welcome to Oddμ</h1>"), b.String()) // skip id
|
||||
}
|
||||
|
||||
39
feed.go
39
feed.go
@@ -34,19 +34,36 @@ type Feed struct {
|
||||
// Items are based on the pages linked in list items starting with an asterisk ("*"). Links in
|
||||
// list items starting with a minus ("-") are ignored!
|
||||
Items []Item
|
||||
|
||||
// From is where the item number where the feed starts. It defaults to 0. Prev and From are the item numbers of
|
||||
// the previous and the next page of the feed. N is the number of items per page.
|
||||
Prev, Next, From, N int
|
||||
|
||||
// Complete is set when there is no pagination.
|
||||
Complete bool
|
||||
}
|
||||
|
||||
// feed returns a RSS 2.0 feed for any page. The feed items it contains are the pages linked from in list items starting
|
||||
// with an asterisk ("*").
|
||||
func feed(p *Page, ti time.Time) *Feed {
|
||||
// with an asterisk ("*"). The feed starts from a certain item and contains n items. If n is 0, the feed is complete
|
||||
// (unpaginated).
|
||||
func feed(p *Page, ti time.Time, from, n int) *Feed {
|
||||
feed := new(Feed)
|
||||
feed.Name = p.Name
|
||||
feed.Title = p.Title
|
||||
feed.Date = ti.Format(time.RFC1123Z)
|
||||
feed.From = from
|
||||
feed.N = n
|
||||
if n == 0 {
|
||||
feed.Complete = true
|
||||
} else if from > n {
|
||||
feed.Prev = from - n
|
||||
}
|
||||
to := from + n
|
||||
parser, _ := wikiParser()
|
||||
doc := markdown.Parse(p.Body, parser)
|
||||
items := make([]Item, 0)
|
||||
inListItem := false
|
||||
i := 0
|
||||
ast.WalkFunc(doc, func(node ast.Node, entering bool) ast.WalkStatus {
|
||||
// set the flag if we're in a list item
|
||||
listItem, ok := node.(*ast.ListItem)
|
||||
@@ -58,12 +75,23 @@ func feed(p *Page, ti time.Time) *Feed {
|
||||
if !inListItem || !entering {
|
||||
return ast.GoToNext
|
||||
}
|
||||
// if we're in a link and it's local
|
||||
// if we're in a link and it's not local
|
||||
link, ok := node.(*ast.Link)
|
||||
if !ok || bytes.Contains(link.Destination, []byte("//")) {
|
||||
return ast.GoToNext
|
||||
}
|
||||
name := path.Join(path.Dir(p.Name), string(link.Destination))
|
||||
// if we're too early or too late
|
||||
i++
|
||||
if i <= from {
|
||||
return ast.GoToNext
|
||||
}
|
||||
if n > 0 && i > to {
|
||||
// set if it's likely that more items exist
|
||||
feed.Next = to
|
||||
return ast.Terminate
|
||||
}
|
||||
// i counts links, not actual existing pages
|
||||
name := path.Join(p.Dir(), string(link.Destination))
|
||||
fi, err := os.Stat(filepath.FromSlash(name) + ".md")
|
||||
if err != nil {
|
||||
return ast.GoToNext
|
||||
@@ -80,9 +108,6 @@ func feed(p *Page, ti time.Time) *Feed {
|
||||
it.Html = template.HTML(template.HTMLEscaper(p2.Html))
|
||||
it.Hashtags = p2.Hashtags
|
||||
items = append(items, it)
|
||||
if len(items) >= 10 {
|
||||
return ast.Terminate
|
||||
}
|
||||
return ast.GoToNext
|
||||
})
|
||||
feed.Items = items
|
||||
|
||||
12
feed.html
12
feed.html
@@ -1,11 +1,15 @@
|
||||
<rss xmlns:atom="http://www.w3.org/2005/Atom" version="2.0">
|
||||
<rss xmlns:atom="http://www.w3.org/2005/Atom" version="2.0"
|
||||
xmlns:fh="http://purl.org/syndication/history/1.0">
|
||||
<channel>
|
||||
<docs>http://blogs.law.harvard.edu/tech/rss</docs>
|
||||
<title>{{.Title}}</title>
|
||||
<link>https://example.org/</link>
|
||||
<managingEditor>you@example.org (Your Name)</managingEditor>
|
||||
<webMaster>you@example.org (Your Name)</webMaster>
|
||||
<atom:link href="https://example.org/view/{{.Name}}.rss" rel="self" type="application/rss+xml"/>
|
||||
<atom:link href="https://example.org/view/{{.Path}}.rss" rel="self" type="application/rss+xml"/>{{if .From}}
|
||||
<atom:link href="https://example.org/view/{{.Path}}.rss?from={{.Prev}}&n={{.N}}" rel="previous" type="application/rss+xml"/>{{end}}{{if .Next}}
|
||||
<atom:link href="https://example.org/view/{{.Path}}.rss?from={{.Next}}&n={{.N}}" rel="next" type="application/rss+xml"/>{{end}}{{if .Complete}}
|
||||
<fh:complete/>{{end}}
|
||||
<description>This is the digital garden of Your Name.</description>
|
||||
<image>
|
||||
<url>https://example.org/view/logo.jpg</url>
|
||||
@@ -15,8 +19,8 @@
|
||||
{{range .Items}}
|
||||
<item>
|
||||
<title>{{.Title}}</title>
|
||||
<link>https://example.org/view/{{.Name}}</link>
|
||||
<guid>https://example.org/view/{{.Name}}</guid>
|
||||
<link>https://example.org/view/{{.Path}}</link>
|
||||
<guid>https://example.org/view/{{.Path}}</guid>
|
||||
<description>{{.Html}}</description>
|
||||
<pubDate>{{.Date}}</pubDate>
|
||||
{{range .Hashtags}}
|
||||
|
||||
89
feed_cmd.go
Normal file
89
feed_cmd.go
Normal file
@@ -0,0 +1,89 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"flag"
|
||||
"fmt"
|
||||
"github.com/google/subcommands"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type feedCmd struct {
|
||||
}
|
||||
|
||||
func (*feedCmd) Name() string { return "feed" }
|
||||
func (*feedCmd) Synopsis() string { return "render a page as feed" }
|
||||
func (*feedCmd) Usage() string {
|
||||
return `feed <page name> ...:
|
||||
Render one or more pages as a single feed.
|
||||
Use a single - to read Markdown from stdin.
|
||||
`
|
||||
}
|
||||
|
||||
func (cmd *feedCmd) SetFlags(f *flag.FlagSet) {
|
||||
}
|
||||
|
||||
func (cmd *feedCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface{}) subcommands.ExitStatus {
|
||||
if len(f.Args()) == 0 {
|
||||
fmt.Fprint(os.Stderr, cmd.Usage())
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
return feedCli(os.Stdout, f.Args())
|
||||
}
|
||||
|
||||
func feedCli(w io.Writer, args []string) subcommands.ExitStatus {
|
||||
if len(args) == 1 && args[0] == "-" {
|
||||
body, err := io.ReadAll(os.Stdin)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot read from stdin: %s\n", err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
p := &Page{Name: "stdin", Body: body}
|
||||
return p.printFeed(w, time.Now())
|
||||
}
|
||||
for _, name := range args {
|
||||
if !strings.HasSuffix(name, ".md") {
|
||||
fmt.Fprintf(os.Stderr, "%s does not end in '.md'\n", name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
name = name[0 : len(name)-3]
|
||||
p, err := loadPage(name)
|
||||
p.handleTitle(false)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot load %s: %s\n", name, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
ti, _ := p.ModTime()
|
||||
status := p.printFeed(w, ti)
|
||||
if status != subcommands.ExitSuccess {
|
||||
return status
|
||||
}
|
||||
}
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
|
||||
// printFeed prints the complete feed for a page (unpaginated).
|
||||
func (p *Page) printFeed(w io.Writer, ti time.Time) subcommands.ExitStatus {
|
||||
f := feed(p, ti, 0, 0)
|
||||
if len(f.Items) == 0 {
|
||||
fmt.Fprintf(os.Stderr, "Empty feed for %s\n", p.Name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
_, err := w.Write([]byte(`<?xml version="1.0" encoding="UTF-8"?>`))
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot write prefix: %s\n", err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
loadTemplates()
|
||||
templates.RLock()
|
||||
defer templates.RUnlock()
|
||||
err = templates.template["feed.html"].Execute(w, f)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot execute template: %s\n", err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
22
feed_cmd_test.go
Normal file
22
feed_cmd_test.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"github.com/google/subcommands"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestFeedCmd(t *testing.T) {
|
||||
cleanup(t, "testdata/complete")
|
||||
p := &Page{Name: "testdata/complete/one", Body: []byte("# One\n")}; p.save()
|
||||
p = &Page{Name: "testdata/complete/index", Body: []byte(`# Index
|
||||
* [one](one)
|
||||
`)}
|
||||
p.save()
|
||||
|
||||
b := new(bytes.Buffer)
|
||||
s := feedCli(b, []string{"testdata/complete/index.md"})
|
||||
assert.Equal(t, subcommands.ExitSuccess, s)
|
||||
assert.Contains(t, b.String(), "<fh:complete/>")
|
||||
}
|
||||
101
feed_test.go
101
feed_test.go
@@ -4,22 +4,22 @@ import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"net/http"
|
||||
"testing"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
func TestFeed(t *testing.T) {
|
||||
assert.Contains(t,
|
||||
assert.HTTPBody(makeHandler(viewHandler, false), "GET", "/view/index.rss", nil),
|
||||
"Welcome to Oddµ")
|
||||
assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/index.rss", nil),
|
||||
"Welcome to Oddμ")
|
||||
}
|
||||
|
||||
func TestNoFeed(t *testing.T) {
|
||||
assert.HTTPStatusCode(t,
|
||||
makeHandler(viewHandler, false), "GET", "/view/no-feed.rss", nil, http.StatusNotFound)
|
||||
makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/no-feed.rss", nil, http.StatusNotFound)
|
||||
}
|
||||
|
||||
func TestFeedItems(t *testing.T) {
|
||||
cleanup(t, "testdata/feed")
|
||||
index.load()
|
||||
|
||||
p1 := &Page{Name: "testdata/feed/cactus", Body: []byte(`# Cactus
|
||||
Green head and white hair
|
||||
@@ -44,7 +44,7 @@ Writing poems about plants.
|
||||
* [My Dragon Tree](dragon)`)}
|
||||
p3.save()
|
||||
|
||||
body := assert.HTTPBody(makeHandler(viewHandler, false), "GET", "/view/testdata/feed/plants.rss", nil)
|
||||
body := assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/feed/plants.rss", nil)
|
||||
assert.Contains(t, body, "<title>Plants</title>")
|
||||
assert.Contains(t, body, "<title>Cactus</title>")
|
||||
assert.Contains(t, body, "<title>Dragon</title>")
|
||||
@@ -53,3 +53,94 @@ Writing poems about plants.
|
||||
assert.Contains(t, body, "<category>Succulent</category>")
|
||||
assert.Contains(t, body, "<category>Palmtree</category>")
|
||||
}
|
||||
|
||||
|
||||
func TestFeedPagination(t *testing.T) {
|
||||
cleanup(t, "testdata/pagination")
|
||||
|
||||
p := &Page{Name: "testdata/pagination/one", Body: []byte("# One\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/two", Body: []byte("# Two\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/three", Body: []byte("# Three\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/four", Body: []byte("# Four\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/five", Body: []byte("# Five\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/six", Body: []byte("# Six\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/seven", Body: []byte("# Seven\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/eight", Body: []byte("# Eight\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/nine", Body: []byte("# Nine\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/ten", Body: []byte("# Ten\n")}; p.save()
|
||||
|
||||
p = &Page{Name: "testdata/pagination/index", Body: []byte(`# Index
|
||||
* [one](one)
|
||||
* [two](two)
|
||||
* [three](three)
|
||||
* [four](four)
|
||||
* [five](five)
|
||||
* [six](six)
|
||||
* [seven](seven)
|
||||
* [eight](eight)
|
||||
* [nine](nine)
|
||||
* [ten](ten)
|
||||
`)}
|
||||
p.save()
|
||||
|
||||
body := assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", nil)
|
||||
assert.Contains(t, body, "<title>One</title>")
|
||||
assert.Contains(t, body, "<title>Ten</title>")
|
||||
assert.NotContains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=10&n=10" rel="next" type="application/rss+xml"/>`)
|
||||
|
||||
p = &Page{Name: "testdata/pagination/eleven", Body: []byte("# Eleven\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/index", Body: []byte(`# Index
|
||||
* [one](one)
|
||||
* [two](two)
|
||||
* [three](three)
|
||||
* [four](four)
|
||||
* [five](five)
|
||||
* [six](six)
|
||||
* [seven](seven)
|
||||
* [eight](eight)
|
||||
* [nine](nine)
|
||||
* [ten](ten)
|
||||
* [eleven](eleven)
|
||||
`)}
|
||||
p.save()
|
||||
|
||||
body = assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", nil)
|
||||
assert.NotContains(t, body, "<title>Eleven</title>")
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=10&n=10" rel="next" type="application/rss+xml"/>`)
|
||||
|
||||
params := url.Values{}
|
||||
params.Set("n", "0")
|
||||
body = assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", params)
|
||||
assert.Contains(t, body, "<title>Eleven</title>")
|
||||
assert.Contains(t, body, `<fh:complete/>`)
|
||||
|
||||
params = url.Values{}
|
||||
params.Set("n", "3")
|
||||
body = assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", params)
|
||||
assert.Contains(t, body, "<title>One</title>")
|
||||
assert.Contains(t, body, "<title>Three</title>")
|
||||
assert.NotContains(t, body, "<title>Four</title>")
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=3&n=3" rel="next" type="application/rss+xml"/>`)
|
||||
|
||||
params = url.Values{}
|
||||
params.Set("from", "3")
|
||||
params.Set("n", "3")
|
||||
body = assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", params)
|
||||
assert.NotContains(t, body, "<title>Three</title>")
|
||||
assert.Contains(t, body, "<title>Four</title>")
|
||||
assert.Contains(t, body, "<title>Six</title>")
|
||||
assert.NotContains(t, body, "<title>Seven</title>")
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=0&n=3" rel="previous" type="application/rss+xml"/>`)
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=6&n=3" rel="next" type="application/rss+xml"/>`)
|
||||
|
||||
params = url.Values{}
|
||||
params.Set("from", "2")
|
||||
params.Set("n", "3")
|
||||
body = assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", params)
|
||||
assert.NotContains(t, body, "<title>Two</title>")
|
||||
assert.Contains(t, body, "<title>Three</title>")
|
||||
assert.Contains(t, body, "<title>Five</title>")
|
||||
assert.NotContains(t, body, "<title>Six</title>")
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=0&n=3" rel="previous" type="application/rss+xml"/>`)
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=5&n=3" rel="next" type="application/rss+xml"/>`)
|
||||
}
|
||||
|
||||
11
go.mod
11
go.mod
@@ -1,4 +1,4 @@
|
||||
module alexschroeder.ch/cgit/oddmu
|
||||
module src.alexschroeder.ch/oddmu
|
||||
|
||||
go 1.22
|
||||
|
||||
@@ -9,7 +9,9 @@ require (
|
||||
github.com/edwvee/exiffix v0.0.0-20210922235313-0f6cbda5e58f
|
||||
github.com/fsnotify/fsnotify v1.7.0
|
||||
github.com/gabriel-vasile/mimetype v1.4.3
|
||||
github.com/gomarkdown/markdown v0.0.0-20240930133403-7e0a027d98c5
|
||||
github.com/gen2brain/heic v0.3.1
|
||||
github.com/gen2brain/webp v0.5.2
|
||||
github.com/gomarkdown/markdown v0.0.0-20250207164621-7a1f277a159e
|
||||
github.com/google/subcommands v1.2.0
|
||||
github.com/hexops/gotextdiff v1.0.3
|
||||
github.com/microcosm-cc/bluemonday v1.0.26
|
||||
@@ -23,15 +25,14 @@ require (
|
||||
require (
|
||||
github.com/aymerick/douceur v0.2.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/ebitengine/purego v0.7.1 // indirect
|
||||
github.com/gen2brain/heic v0.3.1 // indirect
|
||||
github.com/ebitengine/purego v0.8.1 // indirect
|
||||
github.com/gorilla/css v1.0.1 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.15 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/rivo/uniseg v0.4.6 // indirect
|
||||
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd // indirect
|
||||
github.com/shopspring/decimal v1.3.1 // indirect
|
||||
github.com/tetratelabs/wazero v1.7.3 // indirect
|
||||
github.com/tetratelabs/wazero v1.8.1 // indirect
|
||||
golang.org/x/image v0.15.0 // indirect
|
||||
golang.org/x/net v0.20.0 // indirect
|
||||
golang.org/x/sys v0.21.0 // indirect
|
||||
|
||||
20
go.sum
20
go.sum
@@ -5,22 +5,20 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
|
||||
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
|
||||
github.com/ebitengine/purego v0.7.1 h1:6/55d26lG3o9VCZX8lping+bZcmShseiqlh2bnUDiPA=
|
||||
github.com/ebitengine/purego v0.7.1/go.mod h1:ah1In8AOtksoNK6yk5z1HTJeUkC1Ez4Wk2idgGslMwQ=
|
||||
github.com/ebitengine/purego v0.8.1 h1:sdRKd6plj7KYW33EH5As6YKfe8m9zbN9JMrOjNVF/BE=
|
||||
github.com/ebitengine/purego v0.8.1/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
|
||||
github.com/edwvee/exiffix v0.0.0-20210922235313-0f6cbda5e58f h1:RMnUwTnNR070mFAEIoqMYjNirHj8i0h79VXTYyBCyVA=
|
||||
github.com/edwvee/exiffix v0.0.0-20210922235313-0f6cbda5e58f/go.mod h1:KoE3Ti1qbQXCb3s/XGj0yApHnbnNnn1bXTtB5Auq/Vc=
|
||||
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
|
||||
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
|
||||
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
|
||||
github.com/gen2brain/heic v0.0.0-20230113233934-ca402e77a786 h1:zvgtcRb2B5gynWjm+Fc9oJZPHXwmcgyH0xCcNm6Rmo4=
|
||||
github.com/gen2brain/heic v0.0.0-20230113233934-ca402e77a786/go.mod h1:aKVJoQ0cc9K5Xb058XSnnAxXLliR97qbSqWBlm5ca1E=
|
||||
github.com/gen2brain/heic v0.3.1 h1:ClY5YTdXdIanw7pe9ZVUM9XcsqH6CCCa5CZBlm58qOs=
|
||||
github.com/gen2brain/heic v0.3.1/go.mod h1:m2sVIf02O7wfO8mJm+PvE91lnq4QYJy2hseUon7So10=
|
||||
github.com/gomarkdown/markdown v0.0.0-20240730141124-034f12af3bf6 h1:ZPy+2XJ8u0bB3sNFi+I72gMEMS7MTg7aZCCXPOjV8iw=
|
||||
github.com/gomarkdown/markdown v0.0.0-20240730141124-034f12af3bf6/go.mod h1:JDGcbDT52eL4fju3sZ4TeHGsQwhG9nbDV21aMyhwPoA=
|
||||
github.com/gomarkdown/markdown v0.0.0-20240930133403-7e0a027d98c5 h1:qIhG9h8tUzKsVHn0iHtWUohq7Ve7btgA8rGp7TvrIHw=
|
||||
github.com/gomarkdown/markdown v0.0.0-20240930133403-7e0a027d98c5/go.mod h1:JDGcbDT52eL4fju3sZ4TeHGsQwhG9nbDV21aMyhwPoA=
|
||||
github.com/gen2brain/webp v0.5.2 h1:aYdjbU/2L98m+bqUdkYMOIY93YC+EN3HuZLMaqgMD9U=
|
||||
github.com/gen2brain/webp v0.5.2/go.mod h1:Nb3xO5sy6MeUAHhru9H3GT7nlOQO5dKRNNlE92CZrJw=
|
||||
github.com/gomarkdown/markdown v0.0.0-20250207164621-7a1f277a159e h1:ESHlT0RVZphh4JGBz49I5R6nTdC8Qyc08vU25GQHzzQ=
|
||||
github.com/gomarkdown/markdown v0.0.0-20250207164621-7a1f277a159e/go.mod h1:JDGcbDT52eL4fju3sZ4TeHGsQwhG9nbDV21aMyhwPoA=
|
||||
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
|
||||
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/subcommands v1.2.0 h1:vWQspBTo2nEqTUFita5/KeEWlUL8kQObDFbub/EN9oE=
|
||||
@@ -59,8 +57,8 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/tetratelabs/wazero v1.7.3 h1:PBH5KVahrt3S2AHgEjKu4u+LlDbbk+nsGE3KLucy6Rw=
|
||||
github.com/tetratelabs/wazero v1.7.3/go.mod h1:ytl6Zuh20R/eROuyDaGPkp82O9C/DJfXAwJfQ3X6/7Y=
|
||||
github.com/tetratelabs/wazero v1.8.1 h1:NrcgVbWfkWvVc4UtT4LRLDf91PsOzDzefMdwhLfA550=
|
||||
github.com/tetratelabs/wazero v1.8.1/go.mod h1:yAI0XTsMBhREkM/YDAK/zNou3GoiAce1P6+rp/wQhjs=
|
||||
golang.org/x/exp v0.0.0-20240119083558-1b970713d09a h1:Q8/wZp0KX97QFTc2ywcOE0YRjZPVIx+MXInMzdvQqcA=
|
||||
golang.org/x/exp v0.0.0-20240119083558-1b970713d09a/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
|
||||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
@@ -68,8 +66,6 @@ golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
|
||||
golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
|
||||
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
|
||||
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
|
||||
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
|
||||
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
|
||||
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
||||
149
hashtags_cmd.go
149
hashtags_cmd.go
@@ -4,16 +4,27 @@ import (
|
||||
"context"
|
||||
"flag"
|
||||
"fmt"
|
||||
"github.com/gomarkdown/markdown"
|
||||
"github.com/gomarkdown/markdown/ast"
|
||||
"github.com/google/subcommands"
|
||||
"github.com/hexops/gotextdiff"
|
||||
"github.com/hexops/gotextdiff/myers"
|
||||
"github.com/hexops/gotextdiff/span"
|
||||
"io"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type hashtagsCmd struct {
|
||||
update bool
|
||||
dryRun bool
|
||||
}
|
||||
|
||||
func (cmd *hashtagsCmd) SetFlags(f *flag.FlagSet) {
|
||||
f.BoolVar(&cmd.update, "update", false, "create and update hashtag pages")
|
||||
f.BoolVar(&cmd.dryRun, "dry-run", false, "only report the changes it would make")
|
||||
}
|
||||
|
||||
func (*hashtagsCmd) Name() string { return "hashtags" }
|
||||
@@ -25,6 +36,9 @@ func (*hashtagsCmd) Usage() string {
|
||||
}
|
||||
|
||||
func (cmd *hashtagsCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface{}) subcommands.ExitStatus {
|
||||
if cmd.update {
|
||||
return hashtagsUpdateCli(os.Stdout, cmd.dryRun)
|
||||
}
|
||||
return hashtagsCli(os.Stdout)
|
||||
}
|
||||
|
||||
@@ -57,3 +71,138 @@ func hashtagsCli(w io.Writer) subcommands.ExitStatus {
|
||||
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
|
||||
// hashtagsUpdateCli runs the hashtags command on the command line and creates and updates the hashtag pages in the
|
||||
// current directory. That is, pages in subdirectories are skipped! It is used here with an io.Writer for easy testing.
|
||||
func hashtagsUpdateCli(w io.Writer, dryRun bool) subcommands.ExitStatus {
|
||||
index.load()
|
||||
// no locking necessary since this is for the command-line
|
||||
namesMap := make(map[string]string)
|
||||
for hashtag, docids := range index.token {
|
||||
if len(docids) <= 5 {
|
||||
if dryRun {
|
||||
fmt.Fprintf(w, "Skipping #%s because there are not enough entries (%d)\n", hashtag, len(docids))
|
||||
}
|
||||
continue
|
||||
}
|
||||
title, ok := namesMap[hashtag]
|
||||
if !ok {
|
||||
title = hashtagName(namesMap, hashtag, docids)
|
||||
namesMap[hashtag] = title
|
||||
}
|
||||
pageName := strings.ReplaceAll(title, " ", "_")
|
||||
h, err := loadPage(pageName)
|
||||
original := ""
|
||||
new := false
|
||||
if err != nil {
|
||||
new = true
|
||||
h = &Page{Name: pageName, Body: []byte("# " + title + "\n\n#" + pageName + "\n\nBlog posts:\n\n")}
|
||||
} else {
|
||||
original = string(h.Body)
|
||||
}
|
||||
for _, docid := range docids {
|
||||
name := index.documents[docid]
|
||||
if strings.Contains(name, "/") {
|
||||
continue
|
||||
}
|
||||
p, err := loadPage(name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(w, "Loading %s: %s\n", name, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
if !p.IsBlog() {
|
||||
continue
|
||||
}
|
||||
p.handleTitle(false)
|
||||
if p.Title == "" {
|
||||
p.Title = p.Name
|
||||
}
|
||||
esc := nameEscape(p.Base())
|
||||
link := "* [" + p.Title + "](" + esc + ")\n"
|
||||
// I guess & used to get escaped and now no longer does
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(` + strings.ReplaceAll(esc, "&", "(&|%26)") + `\)\n`)
|
||||
addLinkToPage(h, link, re)
|
||||
}
|
||||
// only save if something changed
|
||||
if string(h.Body) != original {
|
||||
if dryRun {
|
||||
if new {
|
||||
fmt.Fprintf(w, "Creating %s.md\n", title)
|
||||
} else {
|
||||
fmt.Fprintf(w, "Updating %s.md\n", title)
|
||||
}
|
||||
fn := h.Name + ".md"
|
||||
edits := myers.ComputeEdits(span.URIFromPath(fn), original, string(h.Body))
|
||||
diff := fmt.Sprint(gotextdiff.ToUnified(fn+"~", fn, original, edits))
|
||||
fmt.Fprint(w, diff)
|
||||
} else {
|
||||
err = h.save()
|
||||
if err != nil {
|
||||
fmt.Fprintf(w, "Saving hashtag %s failed: %s", hashtag, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
|
||||
// Go through all the documents in the same directory and look for hashtag matches in the rendered HTML in order to
|
||||
// determine the most likely capitalization.
|
||||
func hashtagName(namesMap map[string]string, hashtag string, docids []docid) string {
|
||||
candidate := make(map[string]int)
|
||||
var mostPopular string
|
||||
for _, docid := range docids {
|
||||
name := index.documents[docid]
|
||||
if strings.Contains(name, "/") {
|
||||
continue
|
||||
}
|
||||
p, err := loadPage(name)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
// parsing finds all the hashtags
|
||||
parser, _ := wikiParser()
|
||||
doc := markdown.Parse(p.Body, parser)
|
||||
ast.WalkFunc(doc, func(node ast.Node, entering bool) ast.WalkStatus {
|
||||
if entering {
|
||||
switch v := node.(type) {
|
||||
case *ast.Link:
|
||||
for _, attr := range v.AdditionalAttributes {
|
||||
if attr == `class="tag"` {
|
||||
tagName := []byte("")
|
||||
ast.WalkFunc(v, func(node ast.Node, entering bool) ast.WalkStatus {
|
||||
if entering && node.AsLeaf() != nil {
|
||||
tagName = append(tagName, node.AsLeaf().Literal...)
|
||||
}
|
||||
return ast.GoToNext
|
||||
})
|
||||
tag := string(tagName[1:])
|
||||
if strings.EqualFold(hashtag, strings.ReplaceAll(tag, " ", "_")) {
|
||||
_, ok := candidate[tag]
|
||||
if ok {
|
||||
candidate[tag] += 1
|
||||
} else {
|
||||
candidate[tag] = 1
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return ast.GoToNext
|
||||
})
|
||||
count := 0
|
||||
for key, val := range candidate {
|
||||
if val > count {
|
||||
mostPopular = key
|
||||
count = val
|
||||
}
|
||||
}
|
||||
// shortcut
|
||||
if count >= 5 {
|
||||
return mostPopular
|
||||
}
|
||||
}
|
||||
return mostPopular
|
||||
}
|
||||
|
||||
@@ -8,9 +8,18 @@ import (
|
||||
)
|
||||
|
||||
func TestHashtagsCmd(t *testing.T) {
|
||||
cleanup(t, "testdata/hashtag")
|
||||
p := &Page{Name: "testdata/hashtag/hash", Body: []byte(`# Hash
|
||||
|
||||
I hope for a time
|
||||
not like today, relentless,
|
||||
just crocus blooming
|
||||
|
||||
#Crocus`)}
|
||||
p.save()
|
||||
b := new(bytes.Buffer)
|
||||
s := hashtagsCli(b)
|
||||
assert.Equal(t, subcommands.ExitSuccess, s)
|
||||
x := b.String()
|
||||
assert.Contains(t, x, "#like_this\t")
|
||||
assert.Contains(t, x, "crocus\t")
|
||||
}
|
||||
|
||||
63
html_cmd.go
63
html_cmd.go
@@ -5,32 +5,35 @@ import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"github.com/google/subcommands"
|
||||
"html/template"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type htmlCmd struct {
|
||||
useTemplate bool
|
||||
template string
|
||||
}
|
||||
|
||||
func (*htmlCmd) Name() string { return "html" }
|
||||
func (*htmlCmd) Synopsis() string { return "render a page as HTML" }
|
||||
func (*htmlCmd) Usage() string {
|
||||
return `html [-view] <page name> ...:
|
||||
return `html [-template <template name>] <page name> ...:
|
||||
Render one or more pages as HTML.
|
||||
Use a single - to read Markdown from stdin.
|
||||
`
|
||||
}
|
||||
|
||||
func (cmd *htmlCmd) SetFlags(f *flag.FlagSet) {
|
||||
f.BoolVar(&cmd.useTemplate, "view", false, "use the 'view.html' template.")
|
||||
f.StringVar(&cmd.template, "template", "",
|
||||
"use the given HTML file as a template (probably view.html or static.html).")
|
||||
}
|
||||
|
||||
func (cmd *htmlCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface{}) subcommands.ExitStatus {
|
||||
return htmlCli(os.Stdout, cmd.useTemplate, f.Args())
|
||||
return htmlCli(os.Stdout, cmd.template, f.Args())
|
||||
}
|
||||
|
||||
func htmlCli(w io.Writer, useTemplate bool, args []string) subcommands.ExitStatus {
|
||||
func htmlCli(w io.Writer, template string, args []string) subcommands.ExitStatus {
|
||||
if len(args) == 1 && args[0] == "-" {
|
||||
body, err := io.ReadAll(os.Stdin)
|
||||
if err != nil {
|
||||
@@ -38,15 +41,20 @@ func htmlCli(w io.Writer, useTemplate bool, args []string) subcommands.ExitStatu
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
p := &Page{Name: "stdin", Body: body}
|
||||
return p.printHtml(w, useTemplate)
|
||||
return p.printHtml(w, template)
|
||||
}
|
||||
for _, arg := range args {
|
||||
p, err := loadPage(arg)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot load %s: %s\n", arg, err)
|
||||
for _, name := range args {
|
||||
if !strings.HasSuffix(name, ".md") {
|
||||
fmt.Fprintf(os.Stderr, "%s does not end in '.md'\n", name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
status := p.printHtml(w, useTemplate)
|
||||
name = name[0 : len(name)-3]
|
||||
p, err := loadPage(name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot load %s: %s\n", name, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
status := p.printHtml(w, template)
|
||||
if status != subcommands.ExitSuccess {
|
||||
return status
|
||||
}
|
||||
@@ -54,21 +62,28 @@ func htmlCli(w io.Writer, useTemplate bool, args []string) subcommands.ExitStatu
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
|
||||
func (p *Page) printHtml(w io.Writer, useTemplate bool) subcommands.ExitStatus {
|
||||
if useTemplate {
|
||||
t := "view.html"
|
||||
loadTemplates()
|
||||
p.handleTitle(true)
|
||||
p.renderHtml()
|
||||
err := templates.template[t].Execute(w, p)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot execute %s template for %s: %s\n", t, p.Name, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
} else {
|
||||
func (p *Page) printHtml(w io.Writer, fn string) subcommands.ExitStatus {
|
||||
if fn == "" {
|
||||
// do not handle title
|
||||
p.renderHtml()
|
||||
fmt.Fprintln(w, p.Html)
|
||||
_, err := fmt.Fprintln(w, p.Html)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot write to stdout: %s\n", err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
p.handleTitle(true)
|
||||
p.renderHtml()
|
||||
t, err := template.ParseFiles(fn)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot parse template %s for %s: %s\n", fn, p.Name, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
err = t.Execute(w, p)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot execute template %s for %s: %s\n", fn, p.Name, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
|
||||
@@ -9,9 +9,9 @@ import (
|
||||
|
||||
func TestHtmlCmd(t *testing.T) {
|
||||
b := new(bytes.Buffer)
|
||||
s := htmlCli(b, false, []string{"index"})
|
||||
s := htmlCli(b, "", []string{"index.md"})
|
||||
assert.Equal(t, subcommands.ExitSuccess, s)
|
||||
r := `<h1 id="welcome-to-oddµ">Welcome to Oddµ</h1>
|
||||
r := `<h1 id="welcome-to-oddμ">Welcome to Oddμ</h1>
|
||||
|
||||
<p>Hello! 🙃</p>
|
||||
|
||||
|
||||
15
index.go
15
index.go
@@ -62,10 +62,12 @@ func (idx *indexStore) reset() {
|
||||
}
|
||||
|
||||
// addDocument adds the text as a new document. This assumes that the index is locked!
|
||||
// The hashtags (only!) are used as tokens. They are stored in lower case.
|
||||
func (idx *indexStore) addDocument(text []byte) docid {
|
||||
id := idx.next_id
|
||||
idx.next_id++
|
||||
for _, token := range hashtags(text) {
|
||||
token = strings.ToLower(token)
|
||||
ids := idx.token[token]
|
||||
// Don't add same ID more than once. Checking the last
|
||||
// position of the []docid works because the id is
|
||||
@@ -137,12 +139,12 @@ func (idx *indexStore) load() (int, error) {
|
||||
}
|
||||
|
||||
// walk reads a file and adds it to the index. This assumes that the index is locked.
|
||||
func (idx *indexStore) walk(path string, info fs.FileInfo, err error) error {
|
||||
func (idx *indexStore) walk(fp string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// skip hidden directories and files
|
||||
if path != "." && strings.HasPrefix(filepath.Base(path), ".") {
|
||||
if fp != "." && strings.HasPrefix(filepath.Base(fp), ".") {
|
||||
if info.IsDir() {
|
||||
return filepath.SkipDir
|
||||
} else {
|
||||
@@ -150,10 +152,10 @@ func (idx *indexStore) walk(path string, info fs.FileInfo, err error) error {
|
||||
}
|
||||
}
|
||||
// skip all but page files
|
||||
if !strings.HasSuffix(path, ".md") {
|
||||
if !strings.HasSuffix(fp, ".md") {
|
||||
return nil
|
||||
}
|
||||
p, err := loadPage(strings.TrimSuffix(path, ".md"))
|
||||
p, err := loadPage(strings.TrimSuffix(filepath.ToSlash(fp), ".md"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -193,8 +195,8 @@ func (idx *indexStore) update(p *Page) {
|
||||
idx.add(p)
|
||||
}
|
||||
|
||||
// search searches the index for a query string and returns page
|
||||
// names.
|
||||
// search searches the index. The query string is parsed for tokens. Each token is turned to lower cased and looked up
|
||||
// in the index. Each page in the result must contain all the tokens. Returns page names.
|
||||
func (idx *indexStore) search(q string) []string {
|
||||
idx.RLock()
|
||||
defer idx.RUnlock()
|
||||
@@ -203,6 +205,7 @@ func (idx *indexStore) search(q string) []string {
|
||||
if len(hashtags) > 0 {
|
||||
var r []docid
|
||||
for _, token := range hashtags {
|
||||
token = strings.ToLower(token)
|
||||
if ids, ok := idx.token[token]; ok {
|
||||
if r == nil {
|
||||
r = ids
|
||||
|
||||
@@ -11,8 +11,8 @@ func TestIndexAdd(t *testing.T) {
|
||||
idx.reset()
|
||||
idx.Lock()
|
||||
defer idx.Unlock()
|
||||
tag := "#hello"
|
||||
id := idx.addDocument([]byte("oh hi " + tag))
|
||||
tag := "hello"
|
||||
id := idx.addDocument([]byte("oh hi #" + tag))
|
||||
assert.Contains(t, idx.token, tag)
|
||||
idx.deleteDocument(id)
|
||||
assert.NotContains(t, idx.token, tag)
|
||||
@@ -21,7 +21,7 @@ func TestIndexAdd(t *testing.T) {
|
||||
// TestIndex relies on README.md being indexed
|
||||
func TestIndex(t *testing.T) {
|
||||
index.load()
|
||||
q := "Oddµ"
|
||||
q := "Oddμ"
|
||||
pages, _ := search(q, "", "", 1, false)
|
||||
assert.NotZero(t, len(pages))
|
||||
for _, p := range pages {
|
||||
@@ -31,10 +31,19 @@ func TestIndex(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// Lower case hashtag!
|
||||
func TestSearchHashtag(t *testing.T) {
|
||||
cleanup(t, "testdata/search-hashtag")
|
||||
p := &Page{Name: "testdata/search-hashtag/search", Body: []byte(`# Search
|
||||
|
||||
I'm back in this room
|
||||
Shelf, table, chair, and shelf again
|
||||
Where are my glasses?
|
||||
|
||||
#Searching`)}
|
||||
p.save()
|
||||
index.load()
|
||||
q := "#like_this"
|
||||
pages, _ := search(q, "", "", 1, false)
|
||||
pages, _ := search("#searching", "", "", 1, false)
|
||||
assert.NotZero(t, len(pages))
|
||||
}
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"github.com/google/subcommands"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type linksCmd struct {
|
||||
@@ -43,6 +44,11 @@ func linksCli(w io.Writer, args []string) subcommands.ExitStatus {
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
for _, name := range args {
|
||||
if !strings.HasSuffix(name, ".md") {
|
||||
fmt.Fprintf(os.Stderr, "%s does not end in '.md'\n", name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
name = name[0 : len(name)-3]
|
||||
p, err := loadPage(name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(w, "Loading %s: %s\n", name, err)
|
||||
|
||||
@@ -9,7 +9,7 @@ import (
|
||||
|
||||
func TestLinksCmd(t *testing.T) {
|
||||
b := new(bytes.Buffer)
|
||||
s := linksCli(b, []string{"README"})
|
||||
s := linksCli(b, []string{"README.md"})
|
||||
assert.Equal(t, subcommands.ExitSuccess, s)
|
||||
x := b.String()
|
||||
assert.Contains(t, x, "https://alexschroeder.ch/view/oddmu/oddmu.1\n")
|
||||
|
||||
102
list.go
102
list.go
@@ -1,102 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// File is used to display one entry in the list of files.
type File struct {
	// Name is the base name of the file; Title is the page title for
	// Markdown pages (looked up in the index), otherwise empty.
	Name, Title string
	// IsDir marks directories; IsUp marks the special ".." parent entry.
	IsDir, IsUp bool
	// Date is the last modification date of the file storing the page. As the pages used by Oddmu are plain
	// Markdown files, they don't contain any metadata. Instead, the last modification date of the file is used.
	// This makes it work well with changes made to the files outside of Oddmu.
	Date string
}

// List is the data handed to the "list.html" template: the directory being
// listed and the files it contains.
type List struct {
	Dir string
	Files []File
}
|
||||
|
||||
// listHandler uses the "list.html" template to enable file management in a particular directory.
// The listing is non-recursive: subdirectories appear as entries but are not
// descended into. A directory URL without a trailing slash is redirected to
// the slash-terminated form.
func listHandler(w http.ResponseWriter, r *http.Request, dir string) {
	files := []File{}
	d := filepath.FromSlash(dir)
	if d == "" {
		// The empty string means the wiki root directory.
		d = "."
	} else if !strings.HasSuffix(d, "/") {
		http.Redirect(w, r, "/list/"+d+"/", http.StatusFound)
		return
	} else {
		// Subdirectories get a ".." entry to navigate back up.
		it := File{Name: "..", IsUp: true, IsDir: true }
		files = append(files, it)
	}
	err := filepath.Walk(d, func (path string, fi fs.FileInfo, err error) error {
		if err != nil {
			return err
		}
		isDir := false
		if fi.IsDir() {
			if d == path {
				// Skip the directory being listed itself.
				return nil
			}
			isDir = true
		}
		name := filepath.ToSlash(path)
		base := filepath.Base(name)
		title := ""
		if !isDir && strings.HasSuffix(name, ".md") {
			// The index keys page names without the ".md" suffix.
			index.RLock()
			defer index.RUnlock()
			title = index.titles[name[:len(name)-3]]
		}
		if isDir {
			base += "/"
		}
		it := File{Name: base, Title: title, Date: fi.ModTime().Format(time.DateTime), IsDir: isDir }
		files = append(files, it)
		if isDir {
			// Do not recurse into subdirectories.
			return filepath.SkipDir
		}
		return nil
	})
	if err != nil {
		log.Println(err)
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	renderTemplate(w, dir, "list", &List{Dir: dir, Files: files})
}
|
||||
|
||||
|
||||
// deleteHandler deletes the named file and then redirects back to the list
func deleteHandler(w http.ResponseWriter, r *http.Request, path string) {
	name := filepath.Clean(filepath.FromSlash(path))
	// RemoveAll deletes directories together with all their children!
	if err := os.RemoveAll(name); err != nil {
		log.Println(err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	http.Redirect(w, r, "/list/"+filepath.Dir(name)+"/", http.StatusFound)
}
|
||||
|
||||
// renameHandler renames the named file and then redirects back to the list
func renameHandler(w http.ResponseWriter, r *http.Request, path string) {
	source := filepath.Clean(filepath.FromSlash(path))
	// The new name comes from the "name" form value, within the same directory.
	destination := filepath.Join(filepath.Dir(source), r.FormValue("name"))
	if err := os.Rename(source, destination); err != nil {
		log.Println(err)
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	http.Redirect(w, r, "/list/"+filepath.Dir(destination)+"/", http.StatusFound)
}
|
||||
59
list.html
59
list.html
@@ -1,59 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
<meta name="viewport" content="width=device-width">
|
||||
<title>Manage Files</title>
|
||||
<style>
|
||||
html { max-width: 70ch; padding: 2ch; margin: auto; color: #111; background-color: #ffe }
|
||||
body { hyphens: auto }
|
||||
form { width: 100% }
|
||||
table { border-collapse: collapse }
|
||||
th:nth-child(3) { max-width: 3ex; overflow: visible }
|
||||
td form { display: inline }
|
||||
td { padding-right: 1ch }
|
||||
td:last-child { padding-right: 0 }
|
||||
td:first-child { max-width: 30ch; overflow: hidden }
|
||||
tr:nth-child(odd) { background-color: #eed }
|
||||
td:first-child, td:last-child { white-space: nowrap }
|
||||
</style>
|
||||
</head>
|
||||
<body lang="en">
|
||||
<header>
|
||||
<a href="#main">Skip navigation</a>
|
||||
<a href="/view/index">Home</a>
|
||||
<a href="/archive/{{.Dir}}data.zip" accesskey="z">Zip</a>
|
||||
<a href="/upload/{{.Dir}}?filename=image-1.jpg" accesskey="u">Upload</a>
|
||||
<form role="search" action="/search/{{.Dir}}" method="GET">
|
||||
<label for="search">Search:</label>
|
||||
<input id="search" type="text" spellcheck="false" name="q" accesskey="f" placeholder="term #tag title:term blog:true" required>
|
||||
<button>Go</button>
|
||||
</form>
|
||||
</header>
|
||||
<main>
|
||||
<h1>Manage Files</h1>
|
||||
<form id="manage">
|
||||
<p><mark>Deletions and renamings take effect immediately and there is no undo!</mark></p>
|
||||
</form>
|
||||
<table>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Title</th>
|
||||
<th>Delete</th>
|
||||
<th>Rename</th>
|
||||
</tr>{{range .Files}}
|
||||
<tr>
|
||||
<td>{{if .IsDir}}<a href="/list/{{$.Dir}}{{.Name}}">{{.Name}}</a>{{else}}<a href="/view/{{$.Dir}}{{.Name}}">{{.Name}}</a>{{end}}</td>
|
||||
<td>{{.Title}}</td>
|
||||
<td>{{if .IsUp}}{{else}}<button form="manage" formaction="/delete/{{$.Dir}}{{.Name}}" title="Delete {{.Name}}">🗑</button>{{end}}</td>
|
||||
<td>{{if .IsUp}}{{else}}
|
||||
<form action="/rename/{{$.Dir}}{{.Name}}">
|
||||
<input name="name" placeholder="New name"/>
|
||||
<button title="Rename {{.Name}}">♺</button>
|
||||
</form>{{end}}</td>
|
||||
</tr>{{end}}
|
||||
</table>
|
||||
</main>
|
||||
</body>
|
||||
</html>
|
||||
@@ -12,8 +12,8 @@ func TestListCmd(t *testing.T) {
|
||||
s := listCli(b, "", nil)
|
||||
assert.Equal(t, subcommands.ExitSuccess, s)
|
||||
x := b.String()
|
||||
assert.Contains(t, x, "README\tOddµ: A minimal wiki\n")
|
||||
assert.Contains(t, x, "index\tWelcome to Oddµ\n")
|
||||
assert.Contains(t, x, "README\tOddμ: A minimal wiki\n")
|
||||
assert.Contains(t, x, "index\tWelcome to Oddμ\n")
|
||||
}
|
||||
|
||||
func TestListSubdirCmd(t *testing.T) {
|
||||
|
||||
30
list_test.go
30
list_test.go
@@ -1,30 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestListHandler relies on index.md in the current directory!
func TestListHandler(t *testing.T) {
	// The listing of the wiki root must mention the index page file.
	assert.Contains(t,
		assert.HTTPBody(makeHandler(listHandler, false), "GET", "/list/", nil),
		"index.md")
}
|
||||
|
||||
// TestDeleteHandler saves a page and checks that the file listing shows it
// with a title and a delete button.
// NOTE(review): despite the name, this only checks the listing UI; the
// delete handler itself is not invoked here — confirm whether that is
// intentional.
func TestDeleteHandler(t *testing.T) {
	cleanup(t, "testdata/delete")
	assert.NoError(t, os.Mkdir("testdata/delete", 0755))
	p := &Page{Name: "testdata/delete/haiku", Body: []byte(`# Sunset

Walk the fields outside
See the forest loom above
And an orange sky
`)}
	p.save()
	// The listing must link the file, show its page title, and offer deletion.
	list := assert.HTTPBody(makeHandler(listHandler, false), "GET", "/list/testdata/delete/", nil)
	assert.Contains(t, list, `<a href="/view/testdata/delete/haiku.md">haiku.md</a>`)
	assert.Contains(t, list, `<td>Sunset</td>`)
	assert.Contains(t, list, `<button form="manage" formaction="/delete/testdata/delete/haiku.md" title="Delete haiku.md">`)
}
|
||||
@@ -27,7 +27,7 @@ html: ${HTML}
|
||||
%.html: %.md
|
||||
@echo Making $@
|
||||
@echo '<!DOCTYPE html>' > $@
|
||||
@oddmu html $(basename $<) | sed --regexp-extended \
|
||||
@oddmu html $< | sed --regexp-extended \
|
||||
-e 's/<a href="(oddmu[a-z.-]*.[1-9])">([^<>]*)<\/a>/<a href="\1.html">\2<\/a>/g' >> $@
|
||||
|
||||
md: ${MD}
|
||||
@@ -43,7 +43,7 @@ README.md: ../README.md
|
||||
< $< > $@
|
||||
|
||||
upload: ${MD} README.md
|
||||
rsync --itemize-changes --archive *.md sibirocobombus:alexschroeder.ch/wiki/oddmu/
|
||||
rsync --itemize-changes --archive *.md ../README.md sibirocobombus:alexschroeder.ch/wiki/oddmu/
|
||||
make clean
|
||||
|
||||
clean:
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-APACHE" "5" "2024-09-25"
|
||||
.TH "ODDMU-APACHE" "5" "2025-07-16"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -48,7 +48,7 @@ ServerAdmin alex@alexschroeder\&.ch
|
||||
<VirtualHost *:443>
|
||||
ServerName transjovian\&.org
|
||||
SSLEngine on
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/(\&.*))?$"
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|search|archive)/(\&.*))?$"
|
||||
"http://localhost:8080/$1"
|
||||
</VirtualHost>
|
||||
.fi
|
||||
@@ -126,13 +126,13 @@ ServerAdmin alex@alexschroeder\&.ch
|
||||
ServerName transjovian\&.org
|
||||
ProxyPassMatch "^/((view|diff|search|archive)/(\&.*))?$"
|
||||
"http://localhost:8080/$1"
|
||||
RedirectMatch "^/((edit|save|add|append|upload|drop|list|delete|rename)/(\&.*))?$"
|
||||
RedirectMatch "^/((edit|save|add|append|upload|drop)/(\&.*))?$"
|
||||
"https://transjovian\&.org/$1"
|
||||
</VirtualHost>
|
||||
<VirtualHost *:443>
|
||||
ServerName transjovian\&.org
|
||||
SSLEngine on
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/(\&.*))?$"
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|search|archive)/(\&.*))?$"
|
||||
"http://localhost:8080/$1"
|
||||
</VirtualHost>
|
||||
.fi
|
||||
@@ -170,7 +170,7 @@ In that case, you need to use the ProxyPassMatch directive.\&
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/(\&.*))?$"
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|search|archive)/(\&.*))?$"
|
||||
"unix:/run/oddmu/oddmu\&.sock|http://localhost/$1"
|
||||
.fi
|
||||
.RE
|
||||
@@ -189,7 +189,7 @@ A workaround is to add the redirect manually and drop the question-mark:
|
||||
.nf
|
||||
.RS 4
|
||||
RedirectMatch "^/$" "/view/index"
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/(\&.*))$"
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|search|archive)/(\&.*))$"
|
||||
"unix:/run/oddmu/oddmu\&.sock|http://localhost/$1"
|
||||
.fi
|
||||
.RE
|
||||
@@ -234,12 +234,12 @@ htpasswd -D \&.htpasswd berta
|
||||
.RE
|
||||
.PP
|
||||
Modify your site configuration and protect the "/edit/", "/save/", "/add/",
|
||||
"/append/", "/upload/", "/drop/", "/list/", "/delete/" and "/rename/" URLs with
|
||||
a password by adding the following to your "<VirtualHost *:443>" section:
|
||||
"/append/", "/upload/" and "/drop/" URLs with a password by adding the following
|
||||
to your "<VirtualHost *:443>" section:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop|list|delete|rename)/">
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop)/">
|
||||
AuthType Basic
|
||||
AuthName "Password Required"
|
||||
AuthUserFile /home/oddmu/\&.htpasswd
|
||||
@@ -274,7 +274,7 @@ directory:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop|list|delete|rename|(view|preview|search|archive)/secret)/">
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop|(view|preview|search|archive)/secret)/">
|
||||
AuthType Basic
|
||||
AuthName "Password Required"
|
||||
AuthUserFile /home/oddmu/\&.htpasswd
|
||||
@@ -300,9 +300,8 @@ DocumentRoot /home/oddmu
|
||||
.PP
|
||||
Make sure that none of the subdirectories look like the wiki paths "/view/",
|
||||
"/diff/", "/edit/", "/save/", "/add/", "/append/", "/upload/", "/drop/",
|
||||
"/list", "/delete/", "/rename/" "/search/" or "/archive/".\& For example, create a
|
||||
file called "robots.\&txt" containing the following, telling all robots that
|
||||
they'\&re not welcome.\&
|
||||
"/search/" or "/archive/".\& For example, create a file called "robots.\&txt"
|
||||
containing the following, telling all robots that they'\&re not welcome.\&
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
@@ -350,7 +349,7 @@ This requires a valid login by the user "alex" or "berta":
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop|list|delete|rename)/intetebi/">
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop)/intetebi/">
|
||||
Require user alex berta
|
||||
</LocationMatch>
|
||||
.fi
|
||||
|
||||
@@ -40,7 +40,7 @@ ServerAdmin alex@alexschroeder.ch
|
||||
<VirtualHost *:443>
|
||||
ServerName transjovian.org
|
||||
SSLEngine on
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/(.*))?$" \
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|search|archive)/(.*))?$" \
|
||||
"http://localhost:8080/$1"
|
||||
</VirtualHost>
|
||||
```
|
||||
@@ -106,13 +106,13 @@ ServerAdmin alex@alexschroeder.ch
|
||||
ServerName transjovian.org
|
||||
ProxyPassMatch "^/((view|diff|search|archive)/(.*))?$" \
|
||||
"http://localhost:8080/$1"
|
||||
RedirectMatch "^/((edit|save|add|append|upload|drop|list|delete|rename)/(.*))?$" \
|
||||
RedirectMatch "^/((edit|save|add|append|upload|drop)/(.*))?$" \
|
||||
"https://transjovian.org/$1"
|
||||
</VirtualHost>
|
||||
<VirtualHost *:443>
|
||||
ServerName transjovian.org
|
||||
SSLEngine on
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/(.*))?$" \
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|search|archive)/(.*))?$" \
|
||||
"http://localhost:8080/$1"
|
||||
</VirtualHost>
|
||||
```
|
||||
@@ -144,7 +144,7 @@ You probably want to serve some static files as well (see *Serve static files*).
|
||||
In that case, you need to use the ProxyPassMatch directive.
|
||||
|
||||
```
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/(.*))?$" \
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|search|archive)/(.*))?$" \
|
||||
"unix:/run/oddmu/oddmu.sock|http://localhost/$1"
|
||||
```
|
||||
|
||||
@@ -159,7 +159,7 @@ A workaround is to add the redirect manually and drop the question-mark:
|
||||
|
||||
```
|
||||
RedirectMatch "^/$" "/view/index"
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/(.*))$" \
|
||||
ProxyPassMatch "^/((view|preview|diff|edit|save|add|append|upload|drop|search|archive)/(.*))$" \
|
||||
"unix:/run/oddmu/oddmu.sock|http://localhost/$1"
|
||||
```
|
||||
|
||||
@@ -197,11 +197,11 @@ htpasswd -D .htpasswd berta
|
||||
```
|
||||
|
||||
Modify your site configuration and protect the "/edit/", "/save/", "/add/",
|
||||
"/append/", "/upload/", "/drop/", "/list/", "/delete/" and "/rename/" URLs with
|
||||
a password by adding the following to your "<VirtualHost \*:443>" section:
|
||||
"/append/", "/upload/" and "/drop/" URLs with a password by adding the following
|
||||
to your "<VirtualHost \*:443>" section:
|
||||
|
||||
```
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop|list|delete|rename)/">
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop)/">
|
||||
AuthType Basic
|
||||
AuthName "Password Required"
|
||||
AuthUserFile /home/oddmu/.htpasswd
|
||||
@@ -234,7 +234,7 @@ You need to configure the web server to prevent access to the "secret/"
|
||||
directory:
|
||||
|
||||
```
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop|list|delete|rename|(view|preview|search|archive)/secret)/">
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop|(view|preview|search|archive)/secret)/">
|
||||
AuthType Basic
|
||||
AuthName "Password Required"
|
||||
AuthUserFile /home/oddmu/.htpasswd
|
||||
@@ -257,9 +257,8 @@ DocumentRoot /home/oddmu
|
||||
|
||||
Make sure that none of the subdirectories look like the wiki paths "/view/",
|
||||
"/diff/", "/edit/", "/save/", "/add/", "/append/", "/upload/", "/drop/",
|
||||
"/list", "/delete/", "/rename/" "/search/" or "/archive/". For example, create a
|
||||
file called "robots.txt" containing the following, telling all robots that
|
||||
they're not welcome.
|
||||
"/search/" or "/archive/". For example, create a file called "robots.txt"
|
||||
containing the following, telling all robots that they're not welcome.
|
||||
|
||||
```
|
||||
User-agent: *
|
||||
@@ -302,7 +301,7 @@ password file mentioned above.
|
||||
This requires a valid login by the user "alex" or "berta":
|
||||
|
||||
```
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop|list|delete|rename)/intetebi/">
|
||||
<LocationMatch "^/(edit|save|add|append|upload|drop)/intetebi/">
|
||||
Require user alex berta
|
||||
</LocationMatch>
|
||||
```
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-EXPORT" "1" "2024-08-29"
|
||||
.TH "ODDMU-EXPORT" "1" "2025-08-31"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -22,8 +22,8 @@ You probably want to redirect this into a file so that you can upload and import
|
||||
it somewhere.\&
|
||||
.PP
|
||||
Note that this only handles pages (Markdown files).\& All other files (images,
|
||||
PDFs, whatever else you uploaded) are not part of the feed and has to be
|
||||
uploaded to the new platform in some other way.\&
|
||||
PDFs, whatever else you uploaded) are not part of the feed and have to be
|
||||
uploaded to the new platform using some other way.\&
|
||||
.PP
|
||||
The \fB-template\fR option specifies the template to use.\& If the template filename
|
||||
ends in \fI.\&xml\fR, \fI.\&html\fR or \fI.\&rss\fR, it is assumed to contain XML and the optional
|
||||
|
||||
@@ -15,8 +15,8 @@ You probably want to redirect this into a file so that you can upload and import
|
||||
it somewhere.
|
||||
|
||||
Note that this only handles pages (Markdown files). All other files (images,
|
||||
PDFs, whatever else you uploaded) are not part of the feed and has to be
|
||||
uploaded to the new platform in some other way.
|
||||
PDFs, whatever else you uploaded) are not part of the feed and have to be
|
||||
uploaded to the new platform using some other way.
|
||||
|
||||
The *-template* option specifies the template to use. If the template filename
|
||||
ends in _.xml_, _.html_ or _.rss_, it is assumed to contain XML and the optional
|
||||
|
||||
53
man/oddmu-feed.1
Normal file
53
man/oddmu-feed.1
Normal file
@@ -0,0 +1,53 @@
|
||||
.\" Generated by scdoc 1.11.3
|
||||
.\" Complete documentation for this program is not available as a GNU info page
|
||||
.ie \n(.g .ds Aq \(aq
|
||||
.el .ds Aq '
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-FEED" "1" "2025-08-31"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
oddmu-feed - render Oddmu page feed
|
||||
.PP
|
||||
.SH SYNOPSIS
|
||||
.PP
|
||||
\fBoddmu feed\fR \fIpage-name\fR .\&.\&.\&
|
||||
.PP
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
The "feed" subcommand opens the given Markdown files and writes the resulting
|
||||
RSS files without item limit (ordinarily, this default is 10 items per feed).\&
|
||||
This uses the "feed.\&html" template.\& Use "-" as the page name if you want to read
|
||||
Markdown from \fBstdin\fR.\&
|
||||
.PP
|
||||
Unlike the feeds generated by the \fBstatic\fR subcommand, the \fBfeed\fR command does
|
||||
not limit the feed to the ten most recent items.\& Instead, all items on the list
|
||||
are turned into feed items.\&
|
||||
.PP
|
||||
.SH EXAMPLES
|
||||
.PP
|
||||
Generate "emacs.\&rss" from "emacs.\&md":
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
oddmu feed emacs\&.md
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
Alternatively:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
oddmu feed - < emacs\&.md > emacs\&.rss
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
.SH SEE ALSO
|
||||
.PP
|
||||
\fIoddmu\fR(1), \fIoddmu-export\fR(1), \fIoddmu-static\fR(1)
|
||||
.PP
|
||||
.SH AUTHORS
|
||||
.PP
|
||||
Maintained by Alex Schroeder <alex@gnu.\&org>.\&
|
||||
42
man/oddmu-feed.1.txt
Normal file
42
man/oddmu-feed.1.txt
Normal file
@@ -0,0 +1,42 @@
|
||||
ODDMU-FEED(1)
|
||||
|
||||
# NAME
|
||||
|
||||
oddmu-feed - render Oddmu page feed
|
||||
|
||||
# SYNOPSIS
|
||||
|
||||
*oddmu feed* _page-name_ ...
|
||||
|
||||
# DESCRIPTION
|
||||
|
||||
The "feed" subcommand opens the given Markdown files and writes the resulting
|
||||
RSS files without item limit (ordinarily, this default is 10 items per feed).
|
||||
This uses the "feed.html" template. Use "-" as the page name if you want to read
|
||||
Markdown from *stdin*.
|
||||
|
||||
Unlike the feeds generated by the *static* subcommand, the *feed* command does
|
||||
not limit the feed to the ten most recent items. Instead, all items on the list
|
||||
are turned into feed items.
|
||||
|
||||
# EXAMPLES
|
||||
|
||||
Generate "emacs.rss" from "emacs.md":
|
||||
|
||||
```
|
||||
oddmu feed emacs.md
|
||||
```
|
||||
|
||||
Alternatively:
|
||||
|
||||
```
|
||||
oddmu feed - < emacs.md > emacs.rss
|
||||
```
|
||||
|
||||
# SEE ALSO
|
||||
|
||||
_oddmu_(1), _oddmu-export_(1), _oddmu-static_(1)
|
||||
|
||||
# AUTHORS
|
||||
|
||||
Maintained by Alex Schroeder <alex@gnu.org>.
|
||||
@@ -5,20 +5,31 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-HASHTAGS" "1" "2024-08-29"
|
||||
.TH "ODDMU-HASHTAGS" "1" "2025-08-09"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
oddmu-hashtags - count the hashtags used
|
||||
oddmu-hashtags - work with hashtags
|
||||
.PP
|
||||
.SH SYNOPSIS
|
||||
.PP
|
||||
\fBoddmu hashtags\fR
|
||||
.PP
|
||||
\fBoddmu hashtags -update\fR [\fB-dry-run\fR]
|
||||
.PP
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
The "hashtags" subcommand counts all the hashtags used and lists them, separated
|
||||
by a TAB character.\&
|
||||
By default, the "hashtags" subcommand counts all the hashtags used and lists
|
||||
them, separated by a TAB character.\&
|
||||
.PP
|
||||
With the \fB-update\fR flag, the hashtag pages are updated with links to all the blog
|
||||
pages having the corresponding tag.\& This is only necessary when migrating a
|
||||
collection of Markdown files.\& Ordinarily, Oddmu maintains the hashtag pages
|
||||
automatically.\& When writing pages offline, use \fIoddmu-notify\fR(1) to update the
|
||||
hashtag pages.\&
|
||||
.PP
|
||||
Use the \fB-dry-run\fR flag to see what would change with the \fB-update\fR flag without
|
||||
actually changing any files.\&
|
||||
.PP
|
||||
.SH EXAMPLES
|
||||
.PP
|
||||
@@ -30,6 +41,22 @@ oddmu hashtags | head -n 11
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
See what kind of changes Oddmu would suggest:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
oddmu hashtags -update -dry-run
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
And then do it:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
oddmu hashtags -update
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
.SH SEE ALSO
|
||||
.PP
|
||||
\fIoddmu\fR(1)
|
||||
|
||||
@@ -2,16 +2,27 @@ ODDMU-HASHTAGS(1)
|
||||
|
||||
# NAME
|
||||
|
||||
oddmu-hashtags - count the hashtags used
|
||||
oddmu-hashtags - work with hashtags
|
||||
|
||||
# SYNOPSIS
|
||||
|
||||
*oddmu hashtags*
|
||||
|
||||
*oddmu hashtags -update* [*-dry-run*]
|
||||
|
||||
# DESCRIPTION
|
||||
|
||||
The "hashtags" subcommand counts all the hashtags used and lists them, separated
|
||||
by a TAB character.
|
||||
By default, the "hashtags" subcommand counts all the hashtags used and lists
|
||||
them, separated by a TAB character.
|
||||
|
||||
With the *-update* flag, the hashtag pages are updated with links to all the blog
|
||||
pages having the corresponding tag. This is only necessary when migrating a
|
||||
collection of Markdown files. Ordinarily, Oddmu maintains the hashtag pages
|
||||
automatically. When writing pages offline, use _oddmu-notify_(1) to update the
|
||||
hashtag pages.
|
||||
|
||||
Use the *-dry-run* flag to see what would change with the *-update* flag without
|
||||
actually changing any files.
|
||||
|
||||
# EXAMPLES
|
||||
|
||||
@@ -21,6 +32,18 @@ List the top 10 hashtags. This requires 11 lines because of the header line.
|
||||
oddmu hashtags | head -n 11
|
||||
```
|
||||
|
||||
See what kind of changes Oddmu would suggest:
|
||||
|
||||
```
|
||||
oddmu hashtags -update -dry-run
|
||||
```
|
||||
|
||||
And then do it:
|
||||
|
||||
```
|
||||
oddmu hashtags -update
|
||||
```
|
||||
|
||||
# SEE ALSO
|
||||
|
||||
_oddmu_(1)
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-HTML" "1" "2024-08-29"
|
||||
.TH "ODDMU-HTML" "1" "2025-04-05"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -13,21 +13,21 @@ oddmu-html - render Oddmu page HTML
|
||||
.PP
|
||||
.SH SYNOPSIS
|
||||
.PP
|
||||
\fBoddmu html\fR [-view] \fIpage-name\fR
|
||||
\fBoddmu html\fR [\fB\fR-template\fB\fR \fItemplate-name\fR] \fIpage-name\fR
|
||||
.PP
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
The "html" subcommand opens the Markdown file for the given page name (appending
|
||||
the ".\&md" extension) and prints the HTML to STDOUT without invoking the
|
||||
"view.\&html" template.\& Use "-" as the page name if you want to read Markdown from
|
||||
\fBstdin\fR.\&
|
||||
The "html" subcommand opens the given Markdown file and prints the resulting
|
||||
HTML to STDOUT without invoking the "view.\&html" template.\& Use "-" as the page
|
||||
name if you want to read Markdown from \fBstdin\fR.\&
|
||||
.PP
|
||||
.SH OPTIONS
|
||||
.PP
|
||||
\fB-view\fR
|
||||
\fB\fR-template\fB\fR \fItemplate-name\fR
|
||||
.RS 4
|
||||
Use the "view.\&html" template to render the page.\& Without this, the HTML
|
||||
lacks html and body tags.\&
|
||||
Use the given template to render the page.\& Without this, the HTML lacks
|
||||
html and body tags.\& The only two options that make sense are "view.\&html"
|
||||
and "static.\&html".\&
|
||||
.PP
|
||||
.RE
|
||||
.SH EXAMPLES
|
||||
@@ -36,7 +36,7 @@ Generate "README.\&html" from "README.\&md":
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
oddmu html README > README\&.html
|
||||
oddmu html README\&.md > README\&.html
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
|
||||
@@ -6,27 +6,27 @@ oddmu-html - render Oddmu page HTML
|
||||
|
||||
# SYNOPSIS
|
||||
|
||||
*oddmu html* [-view] _page-name_
|
||||
*oddmu html* [**-template** _template-name_] _page-name_
|
||||
|
||||
# DESCRIPTION
|
||||
|
||||
The "html" subcommand opens the Markdown file for the given page name (appending
|
||||
the ".md" extension) and prints the HTML to STDOUT without invoking the
|
||||
"view.html" template. Use "-" as the page name if you want to read Markdown from
|
||||
*stdin*.
|
||||
The "html" subcommand opens the given Markdown file and prints the resulting
|
||||
HTML to STDOUT without invoking the "view.html" template. Use "-" as the page
|
||||
name if you want to read Markdown from *stdin*.
|
||||
|
||||
# OPTIONS
|
||||
|
||||
*-view*
|
||||
Use the "view.html" template to render the page. Without this, the HTML
|
||||
lacks html and body tags.
|
||||
**-template** _template-name_
|
||||
Use the given template to render the page. Without this, the HTML lacks
|
||||
html and body tags. The only two options that make sense are "view.html"
|
||||
and "static.html".
|
||||
|
||||
# EXAMPLES
|
||||
|
||||
Generate "README.html" from "README.md":
|
||||
|
||||
```
|
||||
oddmu html README > README.html
|
||||
oddmu html README.md > README.html
|
||||
```
|
||||
|
||||
Alternatively:
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-LINKS" "1" "2024-08-15"
|
||||
.TH "ODDMU-LINKS" "1" "2025-04-05"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -17,8 +17,8 @@ oddmu-links - list outgoing links for pages
|
||||
.PP
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
The "links" subcommand lists outgoing links for one or more page names.\& Use "-"
|
||||
as the page name if you want to read Markdown from \fBstdin\fR.\&
|
||||
The "links" subcommand lists outgoing links for one or more Markdown files.\& Use
|
||||
"-" as the page name if you want to read Markdown from \fBstdin\fR.\&
|
||||
.PP
|
||||
.SH SEE ALSO
|
||||
.PP
|
||||
|
||||
@@ -10,8 +10,8 @@ oddmu-links - list outgoing links for pages
|
||||
|
||||
# DESCRIPTION
|
||||
|
||||
The "links" subcommand lists outgoing links for one or more page names. Use "-"
|
||||
as the page name if you want to read Markdown from *stdin*.
|
||||
The "links" subcommand lists outgoing links for one or more Markdown files. Use
|
||||
"-" as the page name if you want to read Markdown from *stdin*.
|
||||
|
||||
# SEE ALSO
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-LIST" "1" "2024-08-29"
|
||||
.TH "ODDMU-LIST" "1" "2025-08-31"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
|
||||
@@ -5,19 +5,19 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-NGINX" "5" "2024-08-29"
|
||||
.TH "ODDMU-NGINX" "5" "2025-07-16"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
oddmu-nginx - how to setup Nginx as a reverse proxy for Oddmu
|
||||
oddmu-nginx - how to setup nginx as a reverse proxy for Oddmu
|
||||
.PP
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
The oddmu program serves the current working directory as a wiki on port 8080.\&
|
||||
This is an unprivileged port so an ordinary user account can do this.\&
|
||||
.PP
|
||||
This page explains how to setup NGINX on Debian to act as a reverse proxy for
|
||||
Oddmu.\& Once this is done, you can use NGINX to provide HTTPS, request users to
|
||||
This page explains how to setup nginx on Debian to act as a reverse proxy for
|
||||
Oddmu.\& Once this is done, you can use nginx to provide HTTPS, request users to
|
||||
authenticate themselves, and so on.\&
|
||||
.PP
|
||||
.SH CONFIGURATION
|
||||
@@ -27,7 +27,7 @@ section.\& Add a new \fIlocation\fR section after the existing \fIlocation\fR se
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
location ~ ^/(view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/ {
|
||||
location ~ ^/(view|preview|diff|edit|save|add|append|upload|drop|search|archive)/ {
|
||||
proxy_pass http://localhost:8080;
|
||||
}
|
||||
.fi
|
||||
@@ -53,7 +53,7 @@ location ~ ^/(view|diff|search)/ {
|
||||
proxy_pass http://localhost:8080;
|
||||
}
|
||||
# password required
|
||||
location ~ ^/(edit|save|add|append|upload|drop|list|delete|rename|archive)/ {
|
||||
location ~ ^/(edit|save|add|append|upload|drop|archive)/ {
|
||||
auth_basic "Oddmu author";
|
||||
auth_basic_user_file /etc/nginx/conf\&.d/htpasswd;
|
||||
proxy_pass http://localhost:8080;
|
||||
@@ -97,7 +97,7 @@ server configuration.\& On a Debian system, that'\&d be in
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
location ~ ^/(view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/ {
|
||||
location ~ ^/(view|preview|diff|edit|save|add|append|upload|drop|search|archive)/ {
|
||||
proxy_pass http://unix:/run/oddmu/oddmu\&.sock:;
|
||||
}
|
||||
.fi
|
||||
|
||||
@@ -2,15 +2,15 @@ ODDMU-NGINX(5)
|
||||
|
||||
# NAME
|
||||
|
||||
oddmu-nginx - how to setup Nginx as a reverse proxy for Oddmu
|
||||
oddmu-nginx - how to setup nginx as a reverse proxy for Oddmu
|
||||
|
||||
# DESCRIPTION
|
||||
|
||||
The oddmu program serves the current working directory as a wiki on port 8080.
|
||||
This is an unprivileged port so an ordinary user account can do this.
|
||||
|
||||
This page explains how to setup NGINX on Debian to act as a reverse proxy for
|
||||
Oddmu. Once this is done, you can use NGINX to provide HTTPS, request users to
|
||||
This page explains how to setup nginx on Debian to act as a reverse proxy for
|
||||
Oddmu. Once this is done, you can use nginx to provide HTTPS, request users to
|
||||
authenticate themselves, and so on.
|
||||
|
||||
# CONFIGURATION
|
||||
@@ -19,7 +19,7 @@ The site is defined in "/etc/nginx/sites-available/default", in the _server_
|
||||
section. Add a new _location_ section after the existing _location_ section:
|
||||
|
||||
```
|
||||
location ~ ^/(view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/ {
|
||||
location ~ ^/(view|preview|diff|edit|save|add|append|upload|drop|search|archive)/ {
|
||||
proxy_pass http://localhost:8080;
|
||||
}
|
||||
```
|
||||
@@ -43,7 +43,7 @@ location ~ ^/(view|diff|search)/ {
|
||||
proxy_pass http://localhost:8080;
|
||||
}
|
||||
# password required
|
||||
location ~ ^/(edit|save|add|append|upload|drop|list|delete|rename|archive)/ {
|
||||
location ~ ^/(edit|save|add|append|upload|drop|archive)/ {
|
||||
auth_basic "Oddmu author";
|
||||
auth_basic_user_file /etc/nginx/conf.d/htpasswd;
|
||||
proxy_pass http://localhost:8080;
|
||||
@@ -81,7 +81,7 @@ server configuration. On a Debian system, that'd be in
|
||||
"/etc/nginx/sites-available/default".
|
||||
|
||||
```
|
||||
location ~ ^/(view|preview|diff|edit|save|add|append|upload|drop|list|delete|rename|search|archive)/ {
|
||||
location ~ ^/(view|preview|diff|edit|save|add|append|upload|drop|search|archive)/ {
|
||||
proxy_pass http://unix:/run/oddmu/oddmu.sock:;
|
||||
}
|
||||
```
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-NOTIFY" "1" "2024-08-29"
|
||||
.TH "ODDMU-NOTIFY" "1" "2025-04-05"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -17,8 +17,8 @@ oddmu-notify - add links to changes.\&md, index.\&md, and hashtag pages
|
||||
.PP
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
The "notify" subcommand takes all the page names provided (without the ".\&md"
|
||||
extension) and adds links to it from other pages.\&
|
||||
The "notify" subcommand takes all the Markdown files provided and adds links to
|
||||
these pages from other pages.\&
|
||||
.PP
|
||||
A new link is added to the \fBchanges\fR page in the current directory if it doesn'\&t
|
||||
exist.\& The current date of the machine Oddmu is running on is used as the
|
||||
@@ -57,7 +57,7 @@ it exists):
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
oddmu notify 2023-11-05-climate
|
||||
oddmu notify 2023-11-05-climate\&.md
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
|
||||
@@ -10,8 +10,8 @@ oddmu-notify - add links to changes.md, index.md, and hashtag pages
|
||||
|
||||
# DESCRIPTION
|
||||
|
||||
The "notify" subcommand takes all the page names provided (without the ".md"
|
||||
extension) and adds links to it from other pages.
|
||||
The "notify" subcommand takes all the Markdown files provided and adds links to
|
||||
these pages from other pages.
|
||||
|
||||
A new link is added to the *changes* page in the current directory if it doesn't
|
||||
exist. The current date of the machine Oddmu is running on is used as the
|
||||
@@ -49,7 +49,7 @@ After writing the file "2023-11-05-climate.md" containing the hashtag
|
||||
it exists):
|
||||
|
||||
```
|
||||
oddmu notify 2023-11-05-climate
|
||||
oddmu notify 2023-11-05-climate.md
|
||||
```
|
||||
|
||||
The changes file might look as follows:
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-RELEASES" "7" "2024-11-15"
|
||||
.TH "ODDMU-RELEASES" "7" "2025-09-28"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -15,6 +15,112 @@ oddmu-releases - what'\&s new?\&
|
||||
.PP
|
||||
This page lists user-visible features and template changes to consider.\&
|
||||
.PP
|
||||
.SS 1.19 (2025)
|
||||
.PP
|
||||
Add \fIfeed\fR subcommand.\& This produces a "complete" feed.\&
|
||||
.PP
|
||||
Add feed pagination for the \fIfeed\fR action.\& This produces a "paginated" feed.\&
|
||||
.PP
|
||||
See RFC 5005 for more information.\&
|
||||
.PP
|
||||
If you like the idea of feed pagination (not a given since that also helps bots
|
||||
scrape your site!\&) you need to add the necessary links to the feed template
|
||||
("feed.\&html").\& See \fIoddmu-templates\fR(5) for more.\&
|
||||
.PP
|
||||
Example, adding the feed history namespace:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
<rss xmlns:atom="http://www\&.w3\&.org/2005/Atom" version="2\&.0"
|
||||
xmlns:fh="http://purl\&.org/syndication/history/1\&.0">
|
||||
…
|
||||
{{if \&.From}}
|
||||
<atom:link rel="previous" type="application/rss+xml"
|
||||
href="https://example\&.org/view/{{\&.Path}}\&.rss?from={{\&.Prev}}&n={{\&.N}}"/>
|
||||
{{end}}
|
||||
{{if \&.Next}}
|
||||
<atom:link rel="next" type="application/rss+xml"
|
||||
href="https://example\&.org/view/{{\&.Path}}\&.rss?from={{\&.Next}}&n={{\&.N}}"/>
|
||||
{{end}}
|
||||
{{if \&.Complete}}<fh:complete/>{{end}}
|
||||
…
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
.SS 1.18 (2025)
|
||||
.PP
|
||||
The \fIhashtags\fR subcommand gained the option of checking and fixing the hashtag pages by
|
||||
adding missing links to tagged blog pages.\& See \fIoddmu-hashtags\fR(1) for more.\&
|
||||
.PP
|
||||
In an effort to remove features that can be handled by the web server, the
|
||||
\fIlist\fR, \fIdelete\fR and \fIrename\fR actions were removed again.\& See \fIoddmu-webdav\fR(5)
|
||||
for a better solution.\&
|
||||
.PP
|
||||
You probably need to remove a sentence linking to the list action from the
|
||||
upload template ("upload.\&html").\&
|
||||
.PP
|
||||
.SS 1.17 (2025)
|
||||
.PP
|
||||
You need to update the upload template ("upload.\&html").\& Many things have
|
||||
changed!\& See \fIoddmu-templates\fR(5) for more.\&
|
||||
.PP
|
||||
You probably want to ensure that the upload link on the view template
|
||||
("view.\&html") and others, if you added it, has \fIfilename\fR and \fIpagename\fR
|
||||
parameters.\&
|
||||
.PP
|
||||
Example:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
<a href="/upload/{{\&.Dir}}?filename={{\&.Base}}-1\&.jpg&pagename={{\&.Base}}">Upload</a>
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
You need to change {{.\&Name}} to {{.\&Path}} when it is used in URLs, in the list
|
||||
template ("list.\&html").\& If you don'\&t do this, file deletion and renaming may not
|
||||
work on files containing a comma, a semicolon, a questionmark or a hash
|
||||
character.\& This fix was necessary because URLs for files containing a
|
||||
questionmark or a hash character would end the path at this character and treat
|
||||
the rest as a query parameter or fragment, respectively.\&
|
||||
.PP
|
||||
Updated the example themes.\& Some of my sites got a text area that tries to take
|
||||
all the vertical space available.\& This is great for monitors in portrait mode.\&
|
||||
.PP
|
||||
\fIlist\fR action now skips dot files.\&
|
||||
.PP
|
||||
.SS 1.16 (2025)
|
||||
.PP
|
||||
Add support for WebP images for uploading and resizing.\&
|
||||
.PP
|
||||
You need to change {{.\&Name}} to {{.\&Path}} in HTML templates where pages are
|
||||
concerned.\& If you don'\&t do this, your page names (i.\&e.\& filenames for pages) may
|
||||
not include a comma, a semicolon, a questionmark or a hash sign.\& This fix was
|
||||
necessary because file uploads of filenames with non-ASCII characters ended up
|
||||
double-encoded.\&
|
||||
.PP
|
||||
Note that on the "list.\&html" template, {{.\&Name}} refers to a file instead of a
|
||||
page and File.\&Path() isn'\&t implemented, yet.\& This is fixed in the next release.\&
|
||||
.PP
|
||||
Improved the example themes.\& The chat theme got better list styling and better
|
||||
upload functionality with automatic "add" button; the plain theme got rocket
|
||||
links via JavaScript; the alexschroeder.\&ch theme got a preview button and better
|
||||
image support for upload and search; the transjovian.\&org theme got better image
|
||||
support for upload.\&
|
||||
.PP
|
||||
Switch the \fIhtml\fR, \fIlink\fR, \fInotify\fR and \fItoc\fR subcommands to take filenames
|
||||
(including the `.\&md` suffix) instead of page names (without the `.\&md` suffix).\&
|
||||
.PP
|
||||
.SS 1.15 (2025)
|
||||
.PP
|
||||
Fix the hashtag detection.\& This was necessary to cut down on the many false
|
||||
positives.\& They were most obvious with the \fIhashtags\fR subcommand.\& Now the
|
||||
Markdown parser is used at startup to index the pages, making startup slower
|
||||
(about twice as long with my blog).\& The Markdown parser is also used to parse
|
||||
search terms (where it makes little difference).\&
|
||||
.PP
|
||||
Fix the timestamp for backup files.\& This was necessary because the diff didn'\&t
|
||||
work as intended.\&
|
||||
.PP
|
||||
.SS 1.14 (2024)
|
||||
.PP
|
||||
Add \fIlist\fR, \fIdelete\fR and \fIrename\fR actions.\&
|
||||
|
||||
@@ -8,6 +8,108 @@ oddmu-releases - what's new?
|
||||
|
||||
This page lists user-visible features and template changes to consider.
|
||||
|
||||
## 1.19 (2025)
|
||||
|
||||
Add _feed_ subcommand. This produces a "complete" feed.
|
||||
|
||||
Add feed pagination for the _feed_ action. This produces a "paginated" feed.
|
||||
|
||||
See RFC 5005 for more information.
|
||||
|
||||
If you like the idea of feed pagination (not a given since that also helps bots
|
||||
scrape your site!) you need to add the necessary links to the feed template
|
||||
("feed.html"). See _oddmu-templates_(5) for more.
|
||||
|
||||
Example, adding the feed history namespace:
|
||||
|
||||
```
|
||||
<rss xmlns:atom="http://www.w3.org/2005/Atom" version="2.0"
|
||||
xmlns:fh="http://purl.org/syndication/history/1.0">
|
||||
…
|
||||
{{if .From}}
|
||||
<atom:link rel="previous" type="application/rss+xml"
|
||||
href="https://example.org/view/{{.Path}}.rss?from={{.Prev}}&n={{.N}}"/>
|
||||
{{end}}
|
||||
{{if .Next}}
|
||||
<atom:link rel="next" type="application/rss+xml"
|
||||
href="https://example.org/view/{{.Path}}.rss?from={{.Next}}&n={{.N}}"/>
|
||||
{{end}}
|
||||
{{if .Complete}}<fh:complete/>{{end}}
|
||||
…
|
||||
```
|
||||
|
||||
## 1.18 (2025)
|
||||
|
||||
The _hashtags_ subcommand gained the option of checking and fixing the hashtag pages by
|
||||
adding missing links to tagged blog pages. See _oddmu-hashtags_(1) for more.
|
||||
|
||||
In an effort to remove features that can be handled by the web server, the
|
||||
_list_, _delete_ and _rename_ actions were removed again. See _oddmu-webdav_(5)
|
||||
for a better solution.
|
||||
|
||||
You probably need to remove a sentence linking to the list action from the
|
||||
upload template ("upload.html").
|
||||
|
||||
## 1.17 (2025)
|
||||
|
||||
You need to update the upload template ("upload.html"). Many things have
|
||||
changed! See _oddmu-templates_(5) for more.
|
||||
|
||||
You probably want to ensure that the upload link on the view template
|
||||
("view.html") and others, if you added it, has _filename_ and _pagename_
|
||||
parameters.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
<a href="/upload/{{.Dir}}?filename={{.Base}}-1.jpg&pagename={{.Base}}">Upload</a>
|
||||
```
|
||||
|
||||
You need to change {{.Name}} to {{.Path}} when it is used in URLs, in the list
|
||||
template ("list.html"). If you don't do this, file deletion and renaming may not
|
||||
work on files containing a comma, a semicolon, a questionmark or a hash
|
||||
character. This fix was necessary because URLs for files containing a
|
||||
questionmark or a hash character would end the path at this character and treat
|
||||
the rest as a query parameter or fragment, respectively.
|
||||
|
||||
Updated the example themes. Some of my sites got a text area that tries to take
|
||||
all the vertical space available. This is great for monitors in portrait mode.
|
||||
|
||||
_list_ action now skips dot files.
|
||||
|
||||
## 1.16 (2025)
|
||||
|
||||
Add support for WebP images for uploading and resizing.
|
||||
|
||||
You need to change {{.Name}} to {{.Path}} in HTML templates where pages are
|
||||
concerned. If you don't do this, your page names (i.e. filenames for pages) may
|
||||
not include a comma, a semicolon, a questionmark or a hash sign. This fix was
|
||||
necessary because file uploads of filenames with non-ASCII characters ended up
|
||||
double-encoded.
|
||||
|
||||
Note that on the "list.html" template, {{.Name}} refers to a file instead of a
|
||||
page and File.Path() isn't implemented, yet. This is fixed in the next release.
|
||||
|
||||
Improved the example themes. The chat theme got better list styling and better
|
||||
upload functionality with automatic "add" button; the plain theme got rocket
|
||||
links via JavaScript; the alexschroeder.ch theme got a preview button and better
|
||||
image support for upload and search; the transjovian.org theme got better image
|
||||
support for upload.
|
||||
|
||||
Switch the _html_, _link_, _notify_ and _toc_ subcommands to take filenames
|
||||
(including the `.md` suffix) instead of page names (without the `.md` suffix).
|
||||
|
||||
## 1.15 (2025)
|
||||
|
||||
Fix the hashtag detection. This was necessary to cut down on the many false
|
||||
positives. They were most obvious with the _hashtags_ subcommand. Now the
|
||||
Markdown parser is used at startup to index the pages, making startup slower
|
||||
(about twice as long with my blog). The Markdown parser is also used to parse
|
||||
search terms (where it makes little difference).
|
||||
|
||||
Fix the timestamp for backup files. This was necessary because the diff didn't
|
||||
work as intended.
|
||||
|
||||
## 1.14 (2024)
|
||||
|
||||
Add _list_, _delete_ and _rename_ actions.
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-REPLACE" "1" "2024-08-29"
|
||||
.TH "ODDMU-REPLACE" "1" "2025-03-05"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -42,7 +42,7 @@ Replace "Oddmu" in the Markdown files of the current directory:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
oddmu replace Oddmu Oddµ
|
||||
oddmu replace Oddmu Oddμ
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
|
||||
@@ -30,7 +30,7 @@ the current directory and its subdirectories.
|
||||
Replace "Oddmu" in the Markdown files of the current directory:
|
||||
|
||||
```
|
||||
oddmu replace Oddmu Oddµ
|
||||
oddmu replace Oddmu Oddμ
|
||||
```
|
||||
|
||||
Result:
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-SEARCH" "1" "2024-08-29"
|
||||
.TH "ODDMU-SEARCH" "1" "2025-03-05"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -65,7 +65,7 @@ The ordering of terms does not matter.\&
|
||||
~/src/oddmu $ oddmu search Alex Schroeder
|
||||
Search for Alex Schroeder, page 1: 3 results
|
||||
* [Alex Schroeder theme](themes/alexschroeder\&.ch/README)
|
||||
* [Oddµ: A minimal wiki](README)
|
||||
* [Oddμ: A minimal wiki](README)
|
||||
* [Themes](themes/index)
|
||||
.fi
|
||||
.RE
|
||||
|
||||
@@ -49,7 +49,7 @@ The ordering of terms does not matter.
|
||||
~/src/oddmu $ oddmu search Alex Schroeder
|
||||
Search for Alex Schroeder, page 1: 3 results
|
||||
* [Alex Schroeder theme](themes/alexschroeder.ch/README)
|
||||
* [Oddµ: A minimal wiki](README)
|
||||
* [Oddμ: A minimal wiki](README)
|
||||
* [Themes](themes/index)
|
||||
```
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-SEARCH" "7" "2024-02-19"
|
||||
.TH "ODDMU-SEARCH" "7" "2025-03-05"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -57,9 +57,9 @@ exactly (without the leading '\&#'\&) is listed first, even if it doesn'\&t cont
|
||||
the hashtag.\& It is assumed that this page offers some kind of introduction to
|
||||
people searching for the hashtag.\&
|
||||
.PP
|
||||
Example: When people click on the hashtag "#Oddµ" and a page named "Oddµ" exists
|
||||
(in other words, the file "Oddµ.\&md" exists), it is prepended to the results even
|
||||
if it doesn'\&t have the hashtag "#Oddµ" and even if it has a title of "Oddµ, a
|
||||
Example: When people click on the hashtag "#Oddμ" and a page named "Oddμ" exists
|
||||
(in other words, the file "Oddμ.\&md" exists), it is prepended to the results even
|
||||
if it doesn'\&t have the hashtag "#Oddμ" and even if it has a title of "Oddμ, a
|
||||
minimal wiki" (which wouldn'\&t be an exact match).\&
|
||||
.PP
|
||||
The score and highlighting of snippets is used to help visitors decide which
|
||||
|
||||
@@ -44,9 +44,9 @@ exactly (without the leading '#') is listed first, even if it doesn't contain
|
||||
the hashtag. It is assumed that this page offers some kind of introduction to
|
||||
people searching for the hashtag.
|
||||
|
||||
Example: When people click on the hashtag "#Oddµ" and a page named "Oddµ" exists
|
||||
(in other words, the file "Oddµ.md" exists), it is prepended to the results even
|
||||
if it doesn't have the hashtag "#Oddµ" and even if it has a title of "Oddµ, a
|
||||
Example: When people click on the hashtag "#Oddμ" and a page named "Oddμ" exists
|
||||
(in other words, the file "Oddμ.md" exists), it is prepended to the results even
|
||||
if it doesn't have the hashtag "#Oddμ" and even if it has a title of "Oddμ, a
|
||||
minimal wiki" (which wouldn't be an exact match).
|
||||
|
||||
The score and highlighting of snippets is used to help visitors decide which
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-STATIC" "1" "2024-08-29"
|
||||
.TH "ODDMU-STATIC" "1" "2025-08-31"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -28,7 +28,8 @@ pages get ".\&html" appended.\&
|
||||
If a page has a name case-insensitively matching a hashtag, a feed file is
|
||||
generated (ending with ".\&rss") if any suitable links are found.\& A suitable link
|
||||
for a feed item must appear in a bullet list item using an asterisk ("*").\& If
|
||||
no feed items are found, no feed is written.\&
|
||||
no feed items are found, no feed is written.\& The feed is limited to the ten most
|
||||
recent items.\&
|
||||
.PP
|
||||
Hidden files and directories (starting with a ".\&") and backup files (ending with
|
||||
a "~") are skipped.\&
|
||||
@@ -89,7 +90,11 @@ speed language determination up.\&
|
||||
.PP
|
||||
.SH SEE ALSO
|
||||
.PP
|
||||
\fIoddmu\fR(1), \fIoddmu-templates\fR(5)
|
||||
See \fIoddmu\fR(1) and \fIoddmu-templates\fR(5) for general information.\&
|
||||
.PP
|
||||
See \fIoddmu-html\fR(1) for a subcommand that converts individual page files to HTML
|
||||
and see \fIoddmu-feed\fR(1) for a subcommand that generates feeds for individual
|
||||
files.\&
|
||||
.PP
|
||||
.SH AUTHORS
|
||||
.PP
|
||||
|
||||
@@ -21,7 +21,8 @@ pages get ".html" appended.
|
||||
If a page has a name case-insensitively matching a hashtag, a feed file is
|
||||
generated (ending with ".rss") if any suitable links are found. A suitable link
|
||||
for a feed item must appear in a bullet list item using an asterisk ("\*"). If
|
||||
no feed items are found, no feed is written.
|
||||
no feed items are found, no feed is written. The feed is limited to the ten most
|
||||
recent items.
|
||||
|
||||
Hidden files and directories (starting with a ".") and backup files (ending with
|
||||
a "~") are skipped.
|
||||
@@ -80,7 +81,11 @@ speed language determination up.
|
||||
|
||||
# SEE ALSO
|
||||
|
||||
_oddmu_(1), _oddmu-templates_(5)
|
||||
See _oddmu_(1) and _oddmu-templates_(5) for general information.
|
||||
|
||||
See _oddmu-html_(1) for a subcommand that converts individual page files to HTML
|
||||
and see _oddmu-feed_(1) for a subcommand that generates feeds for individual
|
||||
files.
|
||||
|
||||
# AUTHORS
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-TEMPLATES" "5" "2024-08-30" "File Formats Manual"
|
||||
.TH "ODDMU-TEMPLATES" "5" "2025-09-24" "File Formats Manual"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -44,6 +44,29 @@ placeholders.\&
|
||||
\fIview.\&html\fR uses a \fIpage\fR
|
||||
.PD
|
||||
.PP
|
||||
The following property lists always indicate whether the property is
|
||||
percent-encoded or not.\& In theory, the html/template package would handle this.\&
|
||||
The problem is that the package gives special treatment to the semicolon, comma,
|
||||
question-mark and hash-sign as these are potential separators in a URL.\&
|
||||
.PP
|
||||
Consider the following:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
<a href="{{\&.Name}}">{{\&.Name}}</a>
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
If \fI.\&Name\fR is "#foo", the html/template package treats it as a URL fragment
|
||||
inside the attribute instead of a file path that needs to be escaped to
|
||||
"%23foo".\& The same problem arises if \fI.\&Name\fR is "foo?\&" as the question mark is
|
||||
not escaped and therefore treated as the separator between URL path and query
|
||||
parameters instead of being part of the name.\&
|
||||
.PP
|
||||
The consequence for template authors is that the properties that are
|
||||
percent-encoded must be used in links whereas the regular properties must be
|
||||
used outside of links.\&
|
||||
.PP
|
||||
.SS Page
|
||||
.PP
|
||||
A page has the following properties:
|
||||
@@ -51,14 +74,14 @@ A page has the following properties:
|
||||
\fI{{.\&Title}}\fR is the page title.\& If the page doesn'\&t provide its own title, the
|
||||
page name is used.\&
|
||||
.PP
|
||||
\fI{{.\&Name}}\fR is the page name, escaped for use in URLs.\& More specifically, it is
|
||||
percent-escaped except for the slashes.\& The page name doesn'\&t include the \fI.\&md\fR
|
||||
extension.\&
|
||||
\fI{{.\&Name}}\fR is the page name.\& The page name doesn'\&t include the \fI.\&md\fR extension.\&
|
||||
.PP
|
||||
\fI{{.\&Dir}}\fR is the page directory, percent-escaped except for the slashes.\&
|
||||
\fI{{.\&Path}}\fR is the page name, percent-encoded.\&
|
||||
.PP
|
||||
\fI{{.\&Dir}}\fR is the page directory, percent-encoded.\&
|
||||
.PP
|
||||
\fI{{.\&Base}}\fR is the basename of the current file (without the directory and
|
||||
without the \fI.\&md\fR extension), escaped for use in URLs.\&
|
||||
without the \fI.\&md\fR extension), percent-encoded.\&
|
||||
.PP
|
||||
\fI{{.\&Language}}\fR is the suspected language of the page.\& This is used to set the
|
||||
language on the \fIview.\&html\fR template.\& See "Non-English hyphenation" below.\&
|
||||
@@ -109,11 +132,31 @@ An item is a page plus a date.\& All the properties of a page can be used (see
|
||||
.PP
|
||||
\fI{{.\&Date}}\fR is the date of the last update to the page, in RFC 822 format.\&
|
||||
.PP
|
||||
In order to paginate feeds, the following attributes are also available in the
|
||||
feed:
|
||||
.PP
|
||||
\fI{{.\&From}}\fR is the item number where the feed starts.\& The first page starts at
|
||||
0.\& This can be passed to Oddmu via the query parameter \fIfrom\fR.\&
|
||||
.PP
|
||||
\fI{{.\&N}}\fR is the number of items per page.\& The default is 10.\& This can be passed to
|
||||
Oddmu via the query parameter \fIn\fR.\& If this is set to 0, the feed is not
|
||||
paginated.\&
|
||||
.PP
|
||||
\fI{{.\&Complete}}\fR is a boolean that is true if the feed is not paginated.\& Such a
|
||||
feed cannot have a previous or next page.\&
|
||||
.PP
|
||||
\fI{{.\&Prev}}\fR is the item number where the previous page of the feed starts.\& On
|
||||
the first page, its value is 0 instead of -10.\& You need to test if \fI{{.\&From}}\fR
|
||||
is non-zero (in which case this is not the first page) before using \fI{{.\&Prev}}\fR.\&
|
||||
.PP
|
||||
\fI{{.\&Next}}\fR is the item number where the next feed starts, if there are any
|
||||
items left.\& If there are none, its value is 0.\&
|
||||
.PP
|
||||
.SS List
|
||||
.PP
|
||||
The list contains a directory name and an array of files.\&
|
||||
.PP
|
||||
\fI{{.\&Dir}}\fR is the directory name that is being listed.\&
|
||||
\fI{{.\&Dir}}\fR is the directory name that is being listed, percent-encoded.\&
|
||||
.PP
|
||||
\fI{{.\&Files}}\fR is the array of files.\& To refer to them, you need to use a \fI{{range
|
||||
Files}}\fR … \fI{{end}}\fR construct.\&
|
||||
@@ -123,6 +166,8 @@ Each file has the following attributes:
|
||||
\fI{{.\&Name}}\fR is the filename.\& The ".\&md" suffix for Markdown files is part of the
|
||||
name (unlike page names).\&
|
||||
.PP
|
||||
\fI{{.\&Path}}\fR is the page name, percent-encoded.\&
|
||||
.PP
|
||||
\fI{{.\&Title}}\fR is the page title, if the file in question is a Markdown file.\&
|
||||
.PP
|
||||
\fI{{.\&IsDir}}\fR is a boolean used to indicate that this file is a directory.\&
|
||||
@@ -137,8 +182,7 @@ directory).\& The filename of this file is ".\&.\&".\&
|
||||
.PP
|
||||
\fI{{.\&Query}}\fR is the query string.\&
|
||||
.PP
|
||||
\fI{{.\&Dir}}\fR is the directory in which the search starts, percent-escaped except
|
||||
for the slashes.\&
|
||||
\fI{{.\&Dir}}\fR is the directory in which the search starts, percent-encoded.\&
|
||||
.PP
|
||||
\fI{{.\&Previous}}\fR, \fI{{.\&Page}}\fR and \fI{{.\&Next}}\fR are the previous, current and next
|
||||
page number in the results since doing arithmetics in templates is hard.\& The
|
||||
@@ -175,23 +219,18 @@ search term that matched.\&
|
||||
.SS Upload
|
||||
.PP
|
||||
\fI{{.\&Dir}}\fR is the directory where the uploaded file ends up, based on the URL
|
||||
path, percent-escaped except for the slashes.\&
|
||||
path, percent-encoded.\&
|
||||
.PP
|
||||
\fI{{.\&Name}}\fR is the \fIfilename\fR query parameter.\&
|
||||
\fI{{.\&FileName}}\fR is the \fIfilename\fR query parameter used to suggest a filename.\&
|
||||
.PP
|
||||
\fI{{.\&Last}}\fR is the filename of the last file uploaded.\&
|
||||
\fI{{.\&FilePath}}\fR is the filename, percent-encoded.\&
|
||||
.PP
|
||||
\fI{{.\&Actual}}\fR is an array of filenames of all the files uploaded.\& Use {{range
|
||||
Actual}} … {{.\&}} … {{end}} to loop over all the filenames.\&
|
||||
\fI{{.\&Name}}\fR is the \fIpagename\fR query parameter used to indicate where to append
|
||||
links to the files.\&
|
||||
.PP
|
||||
\fI{{.\&Base}}\fR is the basename of the first file uploaded (without the directory,
|
||||
extension and numeric part at the end), escaped for use in URLs.\&
|
||||
\fI{{.\&Path}}\fR is the page name, percent-encoded.\&
|
||||
.PP
|
||||
\fI{{.\&Title}}\fR is the title of the basename, if it exists.\&
|
||||
.PP
|
||||
\fI{{.\&Image}}\fR is a boolean to indicate whether the last file uploaded has a file
|
||||
name indicating an image or not (such as ending in \fI.\&jpg\fR).\& If so, a thumbnail
|
||||
can be shown by the template, for example.\&
|
||||
\fI{{.\&Title}}\fR is the title of the page, if it exists.\&
|
||||
.PP
|
||||
\fI{{.\&MaxWidth}}\fR is the \fImaxwidth\fR query parameter, i.\&e.\& the value used for the
|
||||
previous image uploaded.\&
|
||||
@@ -201,6 +240,22 @@ previous image uploaded.\&
|
||||
.PP
|
||||
\fI{{.\&Today}}\fR is the current date, in ISO format.\&
|
||||
.PP
|
||||
\fI{{.\&Uploads}}\fR is an array of files already uploaded, based on the \fIuploads\fR query
|
||||
parameter.\& To refer to them, you need to use a \fI{{range .\&Uploads}}\fR … \fI{{end}}\fR
|
||||
construct.\& This is required because the \fIdrop\fR action redirects back to the
|
||||
\fIupload\fR action, so after saving one or more files, you can upload even more
|
||||
files.\&
|
||||
.PP
|
||||
Each upload has the following attributes:
|
||||
.PP
|
||||
\fI{{.\&Name}}\fR is the filename.\&
|
||||
.PP
|
||||
\fI{{.\&Path}}\fR is the file name, percent-encoded.\&
|
||||
.PP
|
||||
\fI{{.\&Image}}\fR is a boolean to indicate whether the upload is an image or not
|
||||
(such as ending in \fI.\&jpg\fR).\& If so, a thumbnail can be shown by the template, for
|
||||
example.\&
|
||||
.PP
|
||||
.SS Non-English hyphenation
|
||||
.PP
|
||||
Automatic hyphenation by the browser requires two things: The style sheet must
|
||||
@@ -214,16 +269,16 @@ use a small number of languages – or just a single language!\& – you can set
|
||||
environment variable ODDMU_LANGUAGES to a comma-separated list of ISO 639-1
|
||||
codes, e.\&g.\& "en" or "en,de,fr,pt".\&
|
||||
.PP
|
||||
"view.\&html" is used the template to render a single page and so the language
|
||||
detected is added to the "html" element.\&
|
||||
"view.\&html" is used to render a single page and so the language detected is
|
||||
added to the "html" element.\&
|
||||
.PP
|
||||
"search.\&html" is the template used to render search results and so "en" is used
|
||||
for the "html" element and the language detected for every page in the search
|
||||
result is added to the "article" element for each snippet.\&
|
||||
.PP
|
||||
"edit.\&html" and "add.\&html" are the templates used to edit a page and at that
|
||||
point, the language isn'\&t known, so "en" is used for the "html" element and no
|
||||
language is used for the "textarea" element.\&
|
||||
"edit.\&html" and "add.\&html" are the templates used to edit a page.\& If the page
|
||||
already exists, its language is used for the "textarea" element.\& If the page is
|
||||
new, no language is used for the "textarea" element.\&
|
||||
.PP
|
||||
.SH EXAMPLES
|
||||
.PP
|
||||
|
||||
@@ -25,6 +25,27 @@ placeholders.
|
||||
- _upload.html_ uses an _upload_
|
||||
- _view.html_ uses a _page_
|
||||
|
||||
The following property lists always indicate whether the property is
|
||||
percent-encoded or not. In theory, the html/template package would handle this.
|
||||
The problem is that the package gives special treatment to the semicolon, comma,
|
||||
question-mark and hash-sign as these are potential separators in a URL.
|
||||
|
||||
Consider the following:
|
||||
|
||||
```
|
||||
<a href="{{.Name}}">{{.Name}}</a>
|
||||
```
|
||||
|
||||
If _.Name_ is "#foo", the html/template package treats it as a URL fragment
|
||||
inside the attribute instead of a file path that needs to be escaped to
|
||||
"%23foo". The same problem arises if _.Name_ is "foo?" as the question mark is
|
||||
not escaped and therefore treated as the separator between URL path and query
|
||||
parameters instead of being part of the name.
|
||||
|
||||
The consequence for template authors is that the properties that are
|
||||
percent-encoded must be used in links whereas the regular properties must be
|
||||
used outside of links.
|
||||
|
||||
## Page
|
||||
|
||||
A page has the following properties:
|
||||
@@ -32,14 +53,14 @@ A page has the following properties:
|
||||
_{{.Title}}_ is the page title. If the page doesn't provide its own title, the
|
||||
page name is used.
|
||||
|
||||
_{{.Name}}_ is the page name, escaped for use in URLs. More specifically, it is
|
||||
percent-escaped except for the slashes. The page name doesn't include the _.md_
|
||||
extension.
|
||||
_{{.Name}}_ is the page name. The page name doesn't include the _.md_ extension.
|
||||
|
||||
_{{.Dir}}_ is the page directory, percent-escaped except for the slashes.
|
||||
_{{.Path}}_ is the page name, percent-encoded.
|
||||
|
||||
_{{.Dir}}_ is the page directory, percent-encoded.
|
||||
|
||||
_{{.Base}}_ is the basename of the current file (without the directory and
|
||||
without the _.md_ extension), escaped for use in URLs.
|
||||
without the _.md_ extension), percent-encoded.
|
||||
|
||||
_{{.Language}}_ is the suspected language of the page. This is used to set the
|
||||
language on the _view.html_ template. See "Non-English hyphenation" below.
|
||||
@@ -85,11 +106,31 @@ An item is a page plus a date. All the properties of a page can be used (see
|
||||
|
||||
_{{.Date}}_ is the date of the last update to the page, in RFC 822 format.
|
||||
|
||||
In order to paginate feeds, the following attributes are also available in the
|
||||
feed:
|
||||
|
||||
_{{.From}}_ is the item number where the feed starts. The first page starts at
|
||||
0. This can be passed to Oddmu via the query parameter _from_.
|
||||
|
||||
_{{.N}}_ is the number of items per page. The default is 10. This can be passed to
|
||||
Oddmu via the query parameter _n_. If this is set to 0, the feed is not
|
||||
paginated.
|
||||
|
||||
_{{.Complete}}_ is a boolean that is true if the feed is not paginated. Such a
|
||||
feed cannot have a previous or next page.
|
||||
|
||||
_{{.Prev}}_ is the item number where the previous page of the feed starts. On
|
||||
the first page, its value is 0 instead of -10. You need to test if _{{.From}}_
|
||||
is non-zero (in which case this is not the first page) before using _{{.Prev}}_.
|
||||
|
||||
_{{.Next}}_ is the item number where the next feed starts, if there are any
|
||||
items left. If there are none, its value is 0.
|
||||
|
||||
## List
|
||||
|
||||
The list contains a directory name and an array of files.
|
||||
|
||||
_{{.Dir}}_ is the directory name that is being listed.
|
||||
_{{.Dir}}_ is the directory name that is being listed, percent-encoded.
|
||||
|
||||
_{{.Files}}_ is the array of files. To refer to them, you need to use a _{{range
|
||||
.Files}}_ … _{{end}}_ construct.
|
||||
@@ -99,6 +140,8 @@ Each file has the following attributes:
|
||||
_{{.Name}}_ is the filename. The ".md" suffix for Markdown files is part of the
|
||||
name (unlike page names).
|
||||
|
||||
_{{.Path}}_ is the page name, percent-encoded.
|
||||
|
||||
_{{.Title}}_ is the page title, if the file in question is a Markdown file.
|
||||
|
||||
_{{.IsDir}}_ is a boolean used to indicate that this file is a directory.
|
||||
@@ -113,8 +156,7 @@ _{{.Date}}_ is the last modification date of the file.
|
||||
|
||||
_{{.Query}}_ is the query string.
|
||||
|
||||
_{{.Dir}}_ is the directory in which the search starts, percent-escaped except
|
||||
for the slashes.
|
||||
_{{.Dir}}_ is the directory in which the search starts, percent-encoded.
|
||||
|
||||
_{{.Previous}}_, _{{.Page}}_ and _{{.Next}}_ are the previous, current and next
|
||||
page number in the results since doing arithmetics in templates is hard. The
|
||||
@@ -151,23 +193,18 @@ search term that matched.
|
||||
## Upload
|
||||
|
||||
_{{.Dir}}_ is the directory where the uploaded file ends up, based on the URL
|
||||
path, percent-escaped except for the slashes.
|
||||
path, percent-encoded.
|
||||
|
||||
_{{.Name}}_ is the _filename_ query parameter.
|
||||
_{{.FileName}}_ is the _filename_ query parameter used to suggest a filename.
|
||||
|
||||
_{{.Last}}_ is the filename of the last file uploaded.
|
||||
_{{.FilePath}}_ is the filename, percent-encoded.
|
||||
|
||||
_{{.Actual}}_ is an array of filenames of all the files uploaded. Use {{range
|
||||
.Actual}} … {{.}} … {{end}} to loop over all the filenames.
|
||||
_{{.Name}}_ is the _pagename_ query parameter used to indicate where to append
|
||||
links to the files.
|
||||
|
||||
_{{.Base}}_ is the basename of the first file uploaded (without the directory,
|
||||
extension and numeric part at the end), escaped for use in URLs.
|
||||
_{{.Path}}_ is the page name, percent-encoded.
|
||||
|
||||
_{{.Title}}_ is the title of the basename, if it exists.
|
||||
|
||||
_{{.Image}}_ is a boolean to indicate whether the last file uploaded has a file
|
||||
name indicating an image or not (such as ending in _.jpg_). If so, a thumbnail
|
||||
can be shown by the template, for example.
|
||||
_{{.Title}}_ is the title of the page, if it exists.
|
||||
|
||||
_{{.MaxWidth}}_ is the _maxwidth_ query parameter, i.e. the value used for the
|
||||
previous image uploaded.
|
||||
@@ -177,6 +214,22 @@ previous image uploaded.
|
||||
|
||||
_{{.Today}}_ is the current date, in ISO format.
|
||||
|
||||
_{{.Uploads}}_ is an array of files already uploaded, based on the _uploads_ query
|
||||
parameter. To refer to them, you need to use a _{{range .Uploads}}_ … _{{end}}_
|
||||
construct. This is required because the _drop_ action redirects back to the
|
||||
_upload_ action, so after saving one or more files, you can upload even more
|
||||
files.
|
||||
|
||||
Each upload has the following attributes:
|
||||
|
||||
_{{.Name}}_ is the filename.
|
||||
|
||||
_{{.Path}}_ is the file name, percent-encoded.
|
||||
|
||||
_{{.Image}}_ is a boolean to indicate whether the upload is an image or not
|
||||
(such as ending in _.jpg_). If so, a thumbnail can be shown by the template, for
|
||||
example.
|
||||
|
||||
## Non-English hyphenation
|
||||
|
||||
Automatic hyphenation by the browser requires two things: The style sheet must
|
||||
@@ -190,16 +243,16 @@ use a small number of languages – or just a single language! – you can set t
|
||||
environment variable ODDMU_LANGUAGES to a comma-separated list of ISO 639-1
|
||||
codes, e.g. "en" or "en,de,fr,pt".
|
||||
|
||||
"view.html" is used the template to render a single page and so the language
|
||||
detected is added to the "html" element.
|
||||
"view.html" is used to render a single page and so the language detected is
|
||||
added to the "html" element.
|
||||
|
||||
"search.html" is the template used to render search results and so "en" is used
|
||||
for the "html" element and the language detected for every page in the search
|
||||
result is added to the "article" element for each snippet.
|
||||
|
||||
"edit.html" and "add.html" are the templates used to edit a page and at that
|
||||
point, the language isn't known, so "en" is used for the "html" element and no
|
||||
language is used for the "textarea" element.
|
||||
"edit.html" and "add.html" are the templates used to edit a page. If the page
|
||||
already exists, its language is used for the "textarea" element. If the page is
|
||||
new, no language is used for the "textarea" element.
|
||||
|
||||
# EXAMPLES
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-TOC" "1" "2024-08-15"
|
||||
.TH "ODDMU-TOC" "1" "2025-04-05"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -13,13 +13,12 @@ oddmu-toc - print the table of contents (toc) for pages
|
||||
.PP
|
||||
.SH SYNOPSIS
|
||||
.PP
|
||||
\fBoddmu toc\fR \fIpage names.\&.\&.\&\fR
|
||||
\fBoddmu toc\fR \fIpage names.\&.\&.\&\fR
|
||||
.PP
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
The "toc" subcommand prints the table of contents for one or more page
|
||||
names.\& Use "-" as the page name if you want to read Markdown from
|
||||
\fBstdin\fR.\&
|
||||
The "toc" subcommand prints the table of contents for one or more Markdown
|
||||
files.\& Use "-" as the page name if you want to read Markdown from \fBstdin\fR.\&
|
||||
.PP
|
||||
This can be useful for very long pages that need a table of contents
|
||||
at the beginning.\&
|
||||
|
||||
@@ -6,13 +6,12 @@ oddmu-toc - print the table of contents (toc) for pages
|
||||
|
||||
# SYNOPSIS
|
||||
|
||||
*oddmu toc* _page names..._
|
||||
*oddmu toc* _page names..._
|
||||
|
||||
# DESCRIPTION
|
||||
|
||||
The "toc" subcommand prints the table of contents for one or more page
|
||||
names. Use "-" as the page name if you want to read Markdown from
|
||||
*stdin*.
|
||||
The "toc" subcommand prints the table of contents for one or more Markdown
|
||||
files. Use "-" as the page name if you want to read Markdown from *stdin*.
|
||||
|
||||
This can be useful for very long pages that need a table of contents
|
||||
at the beginning.
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-WEBDAV" "5" "2024-09-25"
|
||||
.TH "ODDMU-WEBDAV" "5" "2025-07-16"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -14,27 +14,34 @@ oddmu-webdav - how to setup Web-DAV using Apache for Oddmu
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
With the Apache Web-DAV module enabled, users can mount the wiki as a remote
|
||||
file system and edit files using their favourite text editor.\& If you want to
|
||||
offer users direct file access to the wiki, this can be accomplished via ssh,
|
||||
sftp or Web-DAV.\&
|
||||
.PP
|
||||
The benefit of using the Apache Web-DAV module is that access has to be
|
||||
configured only once.\&
|
||||
file system and manage the files using some other tool.\& Using the Apache Web-DAV
|
||||
module means that the same user accounts can be used as for the regular wiki.\&
|
||||
.PP
|
||||
.SH CONFIGURATION
|
||||
.PP
|
||||
In the following example, "data" is not an action provided by Oddmu but an
|
||||
actual directory for Oddmu files.\& In the example below,
|
||||
"/home/alex/campaignwiki.\&org/data" is both the document root for static files
|
||||
and the data directory for Oddmu.\& This is the directory where Oddmu needs to
|
||||
run.\& When users request the "/data" path, authentication is required but the
|
||||
request is not proxied to Oddmu since the "ProxyPassMatch" directive doesn'\&t
|
||||
handle "/data".\& Instead, Apache gets to handle it.\& Since "data" is part of all
|
||||
the "LocationMatch" directives, credentials are required to save (PUT) files.\&
|
||||
Consider the "campaignwiki.\&org" site in the example below.\& This site offers
|
||||
users their own wikis.\& Thus:
|
||||
.PP
|
||||
"Dav On" enables Web-DAV for the "knochentanz" wiki.\& It is enabled for all the
|
||||
actions, but since only "/data" is handled by Apache, this has no effect for all
|
||||
the other actions, allowing us to specify the required users only once.\&
|
||||
"https://campaignwiki.\&org/" is a regular website with static files.\&
|
||||
.PP
|
||||
"https://campaignwiki.\&org/view/index" is one of the requests that gets passed to
|
||||
a Unix domain socket.\& See "Socket Activation" in \fIoddmu\fR(1).\&
|
||||
.PP
|
||||
Some of these actions are protected by basic authentication.\& A valid user is
|
||||
required to make changes to the site.\& Valid users are "admin" and "alex".\&
|
||||
.PP
|
||||
"data" is the Oddmu working directory.\& WebDAV is turned on for this directory.\& A
|
||||
shortcut has been taken, here: The "data" subdirectory requires authentication
|
||||
and offers WebDAV access.\& The other paths also require authentication and map to
|
||||
Oddmu actions.\& The fact that WebDAV access is "enabled" for the Oddmu actions
|
||||
has no effect.\& The only drawback is that "https://campaignwiki.\&org/data/" now
|
||||
requires authentication even if only used for reading.\&
|
||||
.PP
|
||||
"https://campaignwiki.\&org/view/knochentanz/index" is a separate site called
|
||||
"knochentanz".\& The only valid user is "knochentanz".\&
|
||||
.PP
|
||||
Notice how the \fIarchive\fR action is not available at the top level, only for
|
||||
subdirectories.\&
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
@@ -48,28 +55,32 @@ MDomain campaignwiki\&.org
|
||||
<VirtualHost *:443>
|
||||
ServerAdmin alex@campaignwiki\&.org
|
||||
ServerName campaignwiki\&.org
|
||||
# Static HTML, CSS, JavaScript files and so on are saved here\&.
|
||||
DocumentRoot /home/alex/campaignwiki\&.org
|
||||
<Directory /home/alex/campaignwiki\&.org>
|
||||
Options Includes Indexes MultiViews SymLinksIfOwnerMatch
|
||||
Options Indexes MultiViews SymLinksIfOwnerMatch
|
||||
AllowOverride All
|
||||
Require all granted
|
||||
Require all granted
|
||||
</Directory>
|
||||
SSLEngine on
|
||||
# Any request to the following paths is passed on to the Unix domain socket\&.
|
||||
ProxyPassMatch
|
||||
"^/((view|preview|diff|edit|save|add|append|upload|drop|list|delete|search|archive/\&.+)/(\&.*))$"
|
||||
"^/((view|preview|diff|edit|save|add|append|upload|drop|search|archive/\&.+)/(\&.*))$"
|
||||
"unix:/home/oddmu/campaignwiki\&.sock|http://localhost/$1"
|
||||
# /archive only for subdirectories
|
||||
Redirect "/archive/data\&.zip" "/view/archive"
|
||||
<LocationMatch "^/(data|edit|preview|save|add|append|upload|drop|list|delete)/">
|
||||
# Making changes to the wiki requires authentication\&.
|
||||
<LocationMatch "^/(data|edit|preview|save|add|append|upload|drop)/">
|
||||
AuthType Basic
|
||||
AuthName "Password Required"
|
||||
AuthUserFile /home/oddmu/\&.htpasswd
|
||||
Require user admin alex
|
||||
</LocationMatch>
|
||||
<LocationMatch "^/(data|edit|preview|save|add|append|upload|drop|list|delete|archive)/knochentanz">
|
||||
Require user admin alex knochentanz
|
||||
Dav On
|
||||
</LocationMatch>
|
||||
# Making changes to a subdirectory requires different accounts\&.
|
||||
<LocationMatch "^/(data|edit|preview|save|add|append|upload|drop|archive)/knochentanz">
|
||||
Require user knochentanz
|
||||
</LocationMatch>
|
||||
</VirtualHost>
|
||||
.fi
|
||||
.RE
|
||||
@@ -101,9 +112,13 @@ sudo chmod g+w /home/alex/campaignwiki\&.org/data/knochentanz
|
||||
Web-DAV clients are often implemented such that they only work with servers that
|
||||
exactly match their assumptions.\& If you'\&re trying to use \fIgvfs\fR(7), the Windows
|
||||
File Explorer or the macOS Finder to edit Oddmu pages using Web-DAV, you'\&re on
|
||||
your own.\&
|
||||
your own.\& Sometimes it works.\& I'\&ve used Nemo 5.\&6.\&4 to connect to the server and
|
||||
edited files using gedit 44.\&2.\& But I'\&ve used other file managers and other
|
||||
editors with WebDAV support and they didn'\&t work very well.\&
|
||||
.PP
|
||||
This section has example sessions using tools that work.\&
|
||||
On Windows, try third party tools like WinSCP.\&
|
||||
.PP
|
||||
This section has example sessions using command-line tools that work.\&
|
||||
.PP
|
||||
.SS cadaver
|
||||
.PP
|
||||
@@ -183,6 +198,9 @@ alex@melanobombus ~> echo test >> knochentanz/index\&.md
|
||||
"Apache Module mod_dav".\&
|
||||
https://httpd.\&apache.\&org/docs/current/mod/mod_dav.\&html
|
||||
.PP
|
||||
"WinSCP"
|
||||
https://winscp.\&net/
|
||||
.PP
|
||||
.SH AUTHORS
|
||||
.PP
|
||||
Maintained by Alex Schroeder <alex@gnu.\&org>.\&
|
||||
|
||||
@@ -7,27 +7,34 @@ oddmu-webdav - how to setup Web-DAV using Apache for Oddmu
|
||||
# DESCRIPTION
|
||||
|
||||
With the Apache Web-DAV module enabled, users can mount the wiki as a remote
|
||||
file system and edit files using their favourite text editor. If you want to
|
||||
offer users direct file access to the wiki, this can be accomplished via ssh,
|
||||
sftp or Web-DAV.
|
||||
|
||||
The benefit of using the Apache Web-DAV module is that access has to be
|
||||
configured only once.
|
||||
file system and manage the files using some other tool. Using the Apache Web-DAV
|
||||
module means that the same user accounts can be used as for the regular wiki.
|
||||
|
||||
# CONFIGURATION
|
||||
|
||||
In the following example, "data" is not an action provided by Oddmu but an
|
||||
actual directory for Oddmu files. In the example below,
|
||||
"/home/alex/campaignwiki.org/data" is both the document root for static files
|
||||
and the data directory for Oddmu. This is the directory where Oddmu needs to
|
||||
run. When users request the "/data" path, authentication is required but the
|
||||
request is not proxied to Oddmu since the "ProxyPassMatch" directive doesn't
|
||||
handle "/data". Instead, Apache gets to handle it. Since "data" is part of all
|
||||
the "LocationMatch" directives, credentials are required to save (PUT) files.
|
||||
Consider the "campaignwiki.org" site in the example below. This site offers
|
||||
users their own wikis. Thus:
|
||||
|
||||
"Dav On" enables Web-DAV for the "knochentanz" wiki. It is enabled for all the
|
||||
actions, but since only "/data" is handled by Apache, this has no effect for all
|
||||
the other actions, allowing us to specify the required users only once.
|
||||
"https://campaignwiki.org/" is a regular website with static files.
|
||||
|
||||
"https://campaignwiki.org/view/index" is one of the requests that gets passed to
|
||||
a Unix domain socket. See "Socket Activation" in _oddmu_(1).
|
||||
|
||||
Some of these actions are protected by basic authentication. A valid user is
|
||||
required to make changes to the site. Valid users are "admin" and "alex".
|
||||
|
||||
"data" is the Oddmu working directory. WebDAV is turned on for this directory. A
|
||||
shortcut has been taken, here: The "data" subdirectory requires authentication
|
||||
and offers WebDAV access. The other paths also require authentication and map to
|
||||
Oddmu actions. The fact that WebDAV access is "enabled" for the Oddmu actions
|
||||
has no effect. The only drawback is that "https://campaignwiki.org/data/" now
|
||||
requires authentication even if only used for reading.
|
||||
|
||||
"https://campaignwiki.org/view/knochentanz/index" is a separate site called
|
||||
"knochentanz". The only valid user is "knochentanz".
|
||||
|
||||
Notice how the _archive_ action is not available at the top level, only for
|
||||
subdirectories.
|
||||
|
||||
```
|
||||
MDomain campaignwiki.org
|
||||
@@ -40,28 +47,32 @@ MDomain campaignwiki.org
|
||||
<VirtualHost *:443>
|
||||
ServerAdmin alex@campaignwiki.org
|
||||
ServerName campaignwiki.org
|
||||
# Static HTML, CSS, JavaScript files and so on are saved here.
|
||||
DocumentRoot /home/alex/campaignwiki.org
|
||||
<Directory /home/alex/campaignwiki.org>
|
||||
Options Includes Indexes MultiViews SymLinksIfOwnerMatch
|
||||
Options Indexes MultiViews SymLinksIfOwnerMatch
|
||||
AllowOverride All
|
||||
Require all granted
|
||||
Require all granted
|
||||
</Directory>
|
||||
SSLEngine on
|
||||
# Any request to the following paths is passed on to the Unix domain socket.
|
||||
ProxyPassMatch \
|
||||
"^/((view|preview|diff|edit|save|add|append|upload|drop|list|delete|search|archive/.+)/(.*))$" \
|
||||
"^/((view|preview|diff|edit|save|add|append|upload|drop|search|archive/.+)/(.*))$" \
|
||||
"unix:/home/oddmu/campaignwiki.sock|http://localhost/$1"
|
||||
# /archive only for subdirectories
|
||||
Redirect "/archive/data.zip" "/view/archive"
|
||||
<LocationMatch "^/(data|edit|preview|save|add|append|upload|drop|list|delete)/">
|
||||
# Making changes to the wiki requires authentication.
|
||||
<LocationMatch "^/(data|edit|preview|save|add|append|upload|drop)/">
|
||||
AuthType Basic
|
||||
AuthName "Password Required"
|
||||
AuthUserFile /home/oddmu/.htpasswd
|
||||
Require user admin alex
|
||||
</LocationMatch>
|
||||
<LocationMatch "^/(data|edit|preview|save|add|append|upload|drop|list|delete|archive)/knochentanz">
|
||||
Require user admin alex knochentanz
|
||||
Dav On
|
||||
</LocationMatch>
|
||||
# Making changes to a subdirectory requires different accounts.
|
||||
<LocationMatch "^/(data|edit|preview|save|add|append|upload|drop|archive)/knochentanz">
|
||||
Require user knochentanz
|
||||
</LocationMatch>
|
||||
</VirtualHost>
|
||||
```
|
||||
|
||||
@@ -88,9 +99,13 @@ sudo chmod g+w /home/alex/campaignwiki.org/data/knochentanz
|
||||
Web-DAV clients are often implemented such that they only work with servers that
|
||||
exactly match their assumptions. If you're trying to use _gvfs_(7), the Windows
|
||||
File Explorer or the macOS Finder to edit Oddmu pages using Web-DAV, you're on
|
||||
your own.
|
||||
your own. Sometimes it works. I've used Nemo 5.6.4 to connect to the server and
|
||||
edited files using gedit 44.2. But I've used other file managers and other
|
||||
editors with WebDAV support and they didn't work very well.
|
||||
|
||||
This section has example sessions using tools that work.
|
||||
On Windows, try third party tools like WinSCP.
|
||||
|
||||
This section has example sessions using command-line tools that work.
|
||||
|
||||
## cadaver
|
||||
|
||||
@@ -164,6 +179,9 @@ _oddmu_(1), _oddmu-apache_(5)
|
||||
"Apache Module mod_dav".
|
||||
https://httpd.apache.org/docs/current/mod/mod_dav.html
|
||||
|
||||
"WinSCP"
|
||||
https://winscp.net/
|
||||
|
||||
# AUTHORS
|
||||
|
||||
Maintained by Alex Schroeder <alex@gnu.org>.
|
||||
|
||||
424
man/oddmu.1
424
man/oddmu.1
@@ -1,424 +0,0 @@
|
||||
.\" Generated by scdoc 1.11.3
|
||||
.\" Complete documentation for this program is not available as a GNU info page
|
||||
.ie \n(.g .ds Aq \(aq
|
||||
.el .ds Aq '
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU" "1" "2024-09-25"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
oddmu - a wiki server
|
||||
.PP
|
||||
Oddmu is sometimes written Oddµ because µ is the letter mu.\&
|
||||
.PP
|
||||
.SH SYNOPSIS
|
||||
.PP
|
||||
\fBoddmu\fR
|
||||
.PP
|
||||
\fBoddmu\fR \fIsubcommand\fR [\fIarguments\fR.\&.\&.\&]
|
||||
.PP
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
Oddmu can be used as a static site generator, turning Markdown files into HTML
|
||||
files, or it can be used as a public or a private wiki server.\& If it runs as a
|
||||
public wiki server, a regular webserver should be used as reverse proxy.\&
|
||||
.PP
|
||||
Run Oddmu without any arguments to serve the current working directory as a wiki
|
||||
on port 8080.\& Point your browser to http://localhost:8080/ to use it.\& This
|
||||
redirects you to http://localhost:8080/view/index – the first page you'\&ll
|
||||
create, most likely.\&
|
||||
.PP
|
||||
See \fIoddmu\fR(5) for details about the page formatting.\&
|
||||
.PP
|
||||
If you request a page that doesn'\&t exist, Oddmu tries to find a matching
|
||||
Markdown file by appending the extension ".\&md" to the page name.\& In the example
|
||||
above, the page name requested is "index" and the file name Oddmu tries to read
|
||||
is "index.\&md".\& If no such file exists, Oddmu offers you to create the page.\&
|
||||
.PP
|
||||
If your files don'\&t provide their own title ("# title"), the file name (without
|
||||
".\&md") is used for the page title.\&
|
||||
.PP
|
||||
Every file can be viewed as feed by using the extension ".\&rss".\& The
|
||||
feed items are based on links in bullet lists using the asterisk
|
||||
("*").\&
|
||||
.PP
|
||||
Subdirectories are created as necessary.\&
|
||||
.PP
|
||||
The wiki knows the following actions for a given page name and (optional)
|
||||
directory:
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
\fI/\fR redirects to /view/index
|
||||
.IP \(bu 4
|
||||
\fI/view/dir/\fR redirects to /view/dir/index
|
||||
.IP \(bu 4
|
||||
\fI/view/dir/name\fR shows a page
|
||||
.IP \(bu 4
|
||||
\fI/view/dir/name.\&md\fR shows the source text of a page
|
||||
.IP \(bu 4
|
||||
\fI/view/dir/name.\&rss\fR shows the RSS feed for the pages linked
|
||||
.IP \(bu 4
|
||||
\fI/diff/dir/name\fR shows the last change to a page
|
||||
.IP \(bu 4
|
||||
\fI/edit/dir/name\fR shows a form to edit a page
|
||||
.IP \(bu 4
|
||||
\fI/preview/dir/name\fR shows a preview of a page edit and the form to edit it
|
||||
.IP \(bu 4
|
||||
\fI/save/dir/name\fR saves an edit
|
||||
.IP \(bu 4
|
||||
\fI/add/dir/name\fR shows a form to add to a page
|
||||
.IP \(bu 4
|
||||
\fI/append/dir/name\fR appends an addition to a page
|
||||
.IP \(bu 4
|
||||
\fI/upload/dir/name\fR shows a form to upload a file
|
||||
.IP \(bu 4
|
||||
\fI/drop/dir/name\fR saves an upload
|
||||
.IP \(bu 4
|
||||
\fI/list/dir/\fR lists the files in a directory
|
||||
.IP \(bu 4
|
||||
\fI/delete/dir/name\fR deletes a file or directory
|
||||
.IP \(bu 4
|
||||
\fI/rename/dir/name?\&name=new\fR renames a file or directory
|
||||
.IP \(bu 4
|
||||
\fI/search/dir/?\&q=term\fR to search for a term
|
||||
.IP \(bu 4
|
||||
\fI/archive/dir/name.\&zip\fR to download a zip file of a directory
|
||||
.PD
|
||||
.PP
|
||||
When calling the \fIsave\fR and \fIappend\fR action, the page name is taken from the URL
|
||||
path and the page content is taken from the \fIbody\fR form parameter.\& To
|
||||
illustrate, here'\&s how to edit the "welcome" page using \fIcurl\fR:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
curl --form body="Did you bring a towel?"
|
||||
http://localhost:8080/save/welcome
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
When calling the \fIdrop\fR action, the query parameters used are \fIname\fR for the
|
||||
target filename and \fIfile\fR for the file to upload.\& If the query parameter
|
||||
\fImaxwidth\fR is set, an attempt is made to decode and resize the image.\& JPG, PNG,
|
||||
WEBP and HEIC files can be decoded.\& Only JPG and PNG files can be encoded,
|
||||
however.\& If the target name ends in \fI.\&jpg\fR, the \fIquality\fR query parameter is
|
||||
also taken into account.\& To upload some thumbnails:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
for f in *\&.jpg; do
|
||||
curl --form name="$f" --form file=@"$f" --form maxwidth=100
|
||||
http://localhost:8080/drop/
|
||||
done
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
When calling the \fIsearch\fR action, the search terms are taken from the query
|
||||
parameter \fIq\fR.\&
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
curl \&'http://localhost:8080/search/?q=towel\&'
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
The page name to act upon is optionally taken from the query parameter \fIid\fR.\& In
|
||||
this case, the directory must also be part of the query parameter and not of the
|
||||
URL path.\&
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
curl \&'http://localhost:8080/view/?id=man/oddmu\&.1\&.txt\&'
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
The base name for the \fIarchive\fR action is used by the browser to save the
|
||||
downloaded file.\& For Oddmu, only the directory is important.\& The following zips
|
||||
the \fIman\fR directory and saves it as \fIman.\&zip\fR.\&
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
curl --remote-name \&'http://localhost:8080/archive/man/man\&.zip\&'
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
.SH CONFIGURATION
|
||||
.PP
|
||||
The template files are the HTML files in the working directory.\& Please change
|
||||
these templates!\&
|
||||
.PP
|
||||
The first change you should make is to replace the name and email address in the
|
||||
footer of \fIview.\&html\fR.\& Look for "Your Name" and "example.\&org".\&
|
||||
.PP
|
||||
The second change you should make is to replace the name, email address and
|
||||
domain name in "feed.\&html".\& Look for "Your Name" and "example.\&org".\&
|
||||
.PP
|
||||
See \fIoddmu-templates\fR(5) for more.\&
|
||||
.PP
|
||||
.SH ENVIRONMENT
|
||||
.PP
|
||||
You can change the port served by setting the ODDMU_PORT environment variable.\&
|
||||
.PP
|
||||
You can change the address served by setting the ODDMU_ADDRESS environment
|
||||
variable to either an IPv4 address or an IPv6 address.\& If ODDMU_ADDRESS is
|
||||
unset, then the program listens on all available unicast addresses, both IPv4
|
||||
and IPv6.\& Here are a few example addresses:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
ODDMU_ADDRESS=127\&.0\&.0\&.1 # The loopback IPv4 address\&.
|
||||
ODDMU_ADDRESS=2001:db8::3:1 # An IPv6 address\&.
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
See the Socket Activation section for an alternative method of listening which
|
||||
supports Unix-domain sockets.\&
|
||||
.PP
|
||||
In order to limit language-detection to the languages you actually use, set the
|
||||
environment variable ODDMU_LANGUAGES to a comma-separated list of ISO 639-1
|
||||
codes, e.\&g.\& "en" or "en,de,fr,pt".\&
|
||||
.PP
|
||||
You can enable webfinger to link fediverse accounts to their correct profile
|
||||
pages by setting ODDMU_WEBFINGER to "1".\& See \fIoddmu\fR(5).\&
|
||||
.PP
|
||||
If you use secret subdirectories, you cannot rely on the web server to hide
|
||||
those pages because some actions such as searching and archiving include
|
||||
subdirectories.\& They act upon a whole tree of pages, not just a single page.\& The
|
||||
ODDMU_FILTER can be used to exclude subdirectories from such tree actions.\& See
|
||||
\fIoddmu-filter\fR(7) and \fIoddmu-apache\fR(5).\&
|
||||
.PP
|
||||
.SH Socket Activation
|
||||
.PP
|
||||
Instead of specifying ODDMU_ADDRESS or ODDMU_PORT, you can start the service
|
||||
through socket activation.\& The advantage of this method is that you can use a
|
||||
Unix-domain socket instead of a TCP socket, and the permissions and ownership of
|
||||
the socket are set before the program starts.\& See \fIoddmu.\&service\fR(5),
|
||||
\fIoddmu-apache\fR(5) and \fIoddmu-nginx\fR(5) for an example of how to use socket
|
||||
activation with a Unix-domain socket under systemd and Apache.\&
|
||||
.PP
|
||||
.SH SECURITY
|
||||
.PP
|
||||
If the machine you are running Oddmu on is accessible from the Internet, you
|
||||
must secure your installation.\& The best way to do this is use a regular web
|
||||
server as a reverse proxy.\& See \fIoddmu-apache\fR(5) and \fIoddmu-nginx\fR(5) for
|
||||
example configurations.\&
|
||||
.PP
|
||||
Oddmu assumes that all the users that can edit pages or upload files are trusted
|
||||
users and therefore their content is trusted.\& Oddmu does not perform HTML
|
||||
sanitization!\&
|
||||
.PP
|
||||
For an extra dose of security, consider using a Unix-domain socket.\&
|
||||
.PP
|
||||
.SH OPTIONS
|
||||
.PP
|
||||
Oddmu can be run on the command-line using various subcommands.\&
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
to generate the HTML for a single page, see \fIoddmu-html\fR(1)
|
||||
.IP \(bu 4
|
||||
to generate the HTML for the entire site, using Oddmu as a static site
|
||||
generator, see \fIoddmu-static\fR(1)
|
||||
.IP \(bu 4
|
||||
to export the HTML for the entire site in one big feed, see \fIoddmu-export\fR(1)
|
||||
.IP \(bu 4
|
||||
to emulate a search of the files, see \fIoddmu-search\fR(1); to understand how the
|
||||
search engine indexes pages and how it sorts and scores results, see
|
||||
\fIoddmu-search\fR(7)
|
||||
.IP \(bu 4
|
||||
to search a regular expression and replace it across all files, see
|
||||
\fIoddmu-replace\fR(1)
|
||||
.IP \(bu 4
|
||||
to learn what the most popular hashtags are, see \fIoddmu-hashtags\fR(1)
|
||||
.IP \(bu 4
|
||||
to print a table of contents (TOC) for a page, see \fIoddmu-toc\fR(1)
|
||||
.IP \(bu 4
|
||||
to list the outgoing links for a page, see \fIoddmu-links\fR(1)
|
||||
.IP \(bu 4
|
||||
to find missing pages (local links that go nowhere), see \fIoddmu-missing\fR(1)
|
||||
.IP \(bu 4
|
||||
to list all the pages with name and title, see \fIoddmu-list\fR(1)
|
||||
.IP \(bu 4
|
||||
to add links to changes, index and hashtag pages to pages you created locally,
|
||||
see \fIoddmu-notify\fR(1)
|
||||
.IP \(bu 4
|
||||
to display build information, see \fIoddmu-version\fR(1)
|
||||
.PD
|
||||
.PP
|
||||
.SH EXAMPLES
|
||||
.PP
|
||||
When saving a page, the page name is taken from the URL and the page content is
|
||||
taken from the "body" form parameter.\& To illustrate, here'\&s how to edit a page
|
||||
using \fIcurl\fR(1):
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
curl --form body="Did you bring a towel?"
|
||||
http://localhost:8080/save/welcome
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
To compute the space used by your setup, use regular tools:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
du --exclude=\&'*/.*\&' --exclude \&'*~\&' --block-size=M
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
.SH DESIGN
|
||||
.PP
|
||||
This is a minimal wiki.\& There is no version history.\& It'\&s well suited as a
|
||||
\fIsecondary\fR medium: collaboration and conversation happens elsewhere, in chat,
|
||||
on social media.\& The wiki serves as the text repository that results from these
|
||||
discussions.\&
|
||||
.PP
|
||||
The idea is that the webserver handles as many tasks as possible.\& It logs
|
||||
requests, does rate limiting, handles encryption, gets the certificates, and so
|
||||
on.\& The web server acts as a reverse proxy and the wiki ends up being a content
|
||||
management system with almost no structure – or endless malleability, depending
|
||||
on your point of view.\& See \fIoddmu-apache\fR(5).\&
|
||||
.PP
|
||||
.SH NOTES
|
||||
.PP
|
||||
Page names are filenames with ".\&md" appended.\& If your filesystem cannot handle
|
||||
it, it can'\&t be a page name.\& Filenames can contain slashes and Oddmu creates
|
||||
subdirectories as necessary.\&
|
||||
.PP
|
||||
Files may not end with a tilde ('\&~'\&) – these are backup files.\& When saving pages
|
||||
and file uploads, the old file renamed to the backup file unless the backup file
|
||||
is less than an hour old, thus collapsing all edits made in an hour into a
|
||||
single diff when comparing backup and current version.\&
|
||||
.PP
|
||||
The \fBindex\fR page is the default page.\& People visiting the "root" of the site are
|
||||
redirected to "/view/index".\&
|
||||
.PP
|
||||
The \fBchanges\fR page is where links to new and changed files are added.\& As an
|
||||
author, you can prevent this from happening by deselecting the checkbox "Add
|
||||
link to the list of changes.\&" The changes page can be edited like every other
|
||||
page, so it'\&s easy to undo mistakes.\&
|
||||
.PP
|
||||
Links on the changes page are grouped by date.\& When new links are added, the
|
||||
current date of the machine Oddmu is running on is used.\& If a link already
|
||||
exists on the changes page, it is moved up to the current date.\& If that leaves
|
||||
an old date without any links, that date heading is removed.\&
|
||||
.PP
|
||||
If you want to link to the changes page, you need to do this yourself.\& Add a
|
||||
link from the index, for example.\& The "view.\&html" template currently doesn'\&t do
|
||||
it.\& See \fIoddmu-templates\fR(5) if you want to add the link to the template.\&
|
||||
.PP
|
||||
A page whose name starts with an ISO date (YYYY-MM-DD, e.\&g.\& "2023-10-28") is
|
||||
called a \fBblog\fR page.\& When creating or editing blog pages, links to it are added
|
||||
from other pages.\&
|
||||
.PP
|
||||
If the blog page name starts with the current year, a link is created from the
|
||||
index page back to the blog page being created or edited.\& Again, you can prevent
|
||||
this from happening by deselecting the checkbox "Add link to the list of
|
||||
changes.\&" The index page can be edited like every other page, so it'\&s easy to
|
||||
undo mistakes.\&
|
||||
.PP
|
||||
For every \fBhashtag\fR used, another link might be created.\& If a page named like
|
||||
the hashtag exists, a backlink is added to it, linking to the new or edited blog
|
||||
page.\&
|
||||
.PP
|
||||
If a link to the new or edited blog page already exists but its title is no
|
||||
longer correct, it is updated.\&
|
||||
.PP
|
||||
New links added for blog pages are added at the top of the first unnumbered list
|
||||
using the asterisk ('\&*'\&).\& If no such list exists, a new one is started at the
|
||||
bottom of the page.\& This allows you to have a different unnumbered list further
|
||||
up on the page, as long as it uses the minus for items ('\&-'\&).\&
|
||||
.PP
|
||||
Changes made locally do not create any links on the changes page, the index page
|
||||
or on any hashtag pages.\& See \fIoddmu-notify\fR(1) for a way to add the necessary
|
||||
links to the changes page and possibly to the index and hashtag pages.\&
|
||||
.PP
|
||||
A hashtag consists of a number sign ('\&#'\&) followed by Unicode letters, numbers
|
||||
or the underscore ('\&_'\&).\& Thus, a hashtag ends with punctuation or whitespace.\&
|
||||
.PP
|
||||
The page names, titles and hashtags are loaded into memory when the server
|
||||
starts.\& If you have a lot of pages, this takes a lot of memory.\&
|
||||
.PP
|
||||
Oddmu watches the working directory and any subdirectories for changes made
|
||||
directly.\& Thus, in theory, it'\&s not necessary to restart it after making such
|
||||
changes.\&
|
||||
.PP
|
||||
You cannot edit uploaded files.\& If you upload a file called "hello.\&txt" and
|
||||
attempt to edit it by using "/edit/hello.\&txt" you create a page with the name
|
||||
"hello.\&txt.\&md" instead.\&
|
||||
.PP
|
||||
In order to delete uploaded files via the web, create an empty file and upload
|
||||
it.\& In order to delete a wiki page, save an empty page.\&
|
||||
.PP
|
||||
Note that some HTML file names are special: they act as templates.\& See
|
||||
\fIoddmu-templates\fR(5) for their names and their use.\&
|
||||
.PP
|
||||
.SH SEE ALSO
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
\fIoddmu\fR(5), about the markup syntax and how feeds are generated based on link
|
||||
lists
|
||||
.IP \(bu 4
|
||||
\fIoddmu-releases\fR(7), on what features are part of the latest release
|
||||
.IP \(bu 4
|
||||
\fIoddmu-filter\fR(7), on how to treat subdirectories as separate sites
|
||||
.IP \(bu 4
|
||||
\fIoddmu-search\fR(7), on how search works
|
||||
.IP \(bu 4
|
||||
\fIoddmu-templates\fR(5), on how to write the HTML templates
|
||||
.PD
|
||||
.PP
|
||||
If you run Oddmu as a web server:
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
\fIoddmu-apache\fR(5), on how to set up Apache as a reverse proxy
|
||||
.IP \(bu 4
|
||||
\fIoddmu-nginx\fR(5), on how to set up freenginx as a reverse proxy
|
||||
.IP \(bu 4
|
||||
\fIoddmu-webdav\fR(5), on how to set up Apache as a Web-DAV server
|
||||
.IP \(bu 4
|
||||
\fIoddmu.\&service\fR(5), on how to run the service under systemd
|
||||
.PD
|
||||
.PP
|
||||
If you run Oddmu as a static site generator or edit pages offline and sync them with
|
||||
Oddmu running as a webserver:
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
\fIoddmu-hashtags\fR(1), on how to count the hashtags used
|
||||
.IP \(bu 4
|
||||
\fIoddmu-html\fR(1), on how to render a page
|
||||
.IP \(bu 4
|
||||
\fIoddmu-list\fR(1), on how to list pages and titles
|
||||
.IP \(bu 4
|
||||
\fIoddmu-links\fR(1), on how to list the outgoing links for a page
|
||||
.IP \(bu 4
|
||||
\fIoddmu-missing\fR(1), on how to find broken local links
|
||||
.IP \(bu 4
|
||||
\fIoddmu-notify\fR(1), on updating index, changes and hashtag pages
|
||||
.IP \(bu 4
|
||||
\fIoddmu-replace\fR(1), on how to search and replace text
|
||||
.IP \(bu 4
|
||||
\fIoddmu-search\fR(1), on how to run a search
|
||||
.IP \(bu 4
|
||||
\fIoddmu-static\fR(1), on generating a static site
|
||||
.IP \(bu 4
|
||||
\fIoddmu-toc\fR(1), on how to list the table of contents (toc) of a page
|
||||
.IP \(bu 4
|
||||
\fIoddmu-version\fR(1), on how to get all the build information from the binary
|
||||
.PD
|
||||
.PP
|
||||
If you want to stop using Oddmu:
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
\fIoddmu-export\fR(1), on how to export all the files as one big RSS file
|
||||
.PD
|
||||
.PP
|
||||
.SH AUTHORS
|
||||
.PP
|
||||
Maintained by Alex Schroeder <alex@gnu.\&org>.\&
|
||||
|
||||
@@ -4,7 +4,7 @@ ODDMU(1)
|
||||
|
||||
oddmu - a wiki server
|
||||
|
||||
Oddmu is sometimes written Oddµ because µ is the letter mu.
|
||||
Oddmu is sometimes written Oddμ because μ is the letter mu.
|
||||
|
||||
# SYNOPSIS
|
||||
|
||||
@@ -55,9 +55,6 @@ directory:
|
||||
- _/append/dir/name_ appends an addition to a page
|
||||
- _/upload/dir/name_ shows a form to upload a file
|
||||
- _/drop/dir/name_ saves an upload
|
||||
- _/list/dir/_ lists the files in a directory
|
||||
- _/delete/dir/name_ deletes a file or directory
|
||||
- _/rename/dir/name?name=new_ renames a file or directory
|
||||
- _/search/dir/?q=term_ to search for a term
|
||||
- _/archive/dir/name.zip_ to download a zip file of a directory
|
||||
|
||||
@@ -231,9 +228,10 @@ it, it can't be a page name. Filenames can contain slashes and Oddmu creates
|
||||
subdirectories as necessary.
|
||||
|
||||
Files may not end with a tilde ('~') – these are backup files. When saving pages
|
||||
and file uploads, the old file renamed to the backup file unless the backup file
|
||||
is less than an hour old, thus collapsing all edits made in an hour into a
|
||||
single diff when comparing backup and current version.
|
||||
and file uploads, the old file is renamed to the backup file unless the backup
|
||||
file is less than an hour old, thus collapsing all edits made in an hour into a
|
||||
single diff when comparing backup and current version. The backup also gets an
|
||||
updated timestamp so that subsequent edits don't immediately overwrite it.
|
||||
|
||||
The *index* page is the default page. People visiting the "root" of the site are
|
||||
redirected to "/view/index".
|
||||
@@ -317,8 +315,9 @@ If you run Oddmu as a web server:
|
||||
If you run Oddmu as a static site generator or edit pages offline and sync them with
|
||||
Oddmu running as a webserver:
|
||||
|
||||
- _oddmu-hashtags_(1), on how to count the hashtags used
|
||||
- _oddmu-hashtags_(1), on working with hashtags
|
||||
- _oddmu-html_(1), on how to render a page
|
||||
- _oddmu-feed_(1), on how to render a feed
|
||||
- _oddmu-list_(1), on how to list pages and titles
|
||||
- _oddmu-links_(1), on how to list the outgoing links for a page
|
||||
- _oddmu-missing_(1), on how to find broken local links
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU" "5" "2024-09-30" "File Formats Manual"
|
||||
.TH "ODDMU" "5" "2025-03-05" "File Formats Manual"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -28,7 +28,7 @@ The page name has to be percent-encoded.\& See the section "Percent Encoding".\&
|
||||
If you link to the actual Markdown file (with the ".\&md" extension), then Oddmu
|
||||
serves the Markdown file!\&
|
||||
.PP
|
||||
There are three Oddµ-specific extensions: local links, hashtags and fediverse
|
||||
There are three Oddμ-specific extensions: local links, hashtags and fediverse
|
||||
account links.\& The Markdown library used features some additional extensions,
|
||||
most importantly tables and definition lists.\&
|
||||
.PP
|
||||
@@ -94,7 +94,7 @@ linked to a profile page.\& In this case, "@alex" would be linked to
|
||||
"https://alexschroeder.\&ch/users/alex".\&
|
||||
.PP
|
||||
In many cases, this works as is.\& In reality, however, the link to the profile
|
||||
page needs to be retrieved via webfinger.\& Oddµ does that in the background, and
|
||||
page needs to be retrieved via webfinger.\& Oddμ does that in the background, and
|
||||
as soon as the information is available, the actual profile link is used when
|
||||
pages are rendered.\& In the example above, the result would be
|
||||
"https://social.\&alexschroeder.\&ch/@alex".\&
|
||||
|
||||
@@ -19,7 +19,7 @@ The page name has to be percent-encoded. See the section "Percent Encoding".
|
||||
If you link to the actual Markdown file (with the ".md" extension), then Oddmu
|
||||
serves the Markdown file!
|
||||
|
||||
There are three Oddµ-specific extensions: local links, hashtags and fediverse
|
||||
There are three Oddμ-specific extensions: local links, hashtags and fediverse
|
||||
account links. The Markdown library used features some additional extensions,
|
||||
most importantly tables and definition lists.
|
||||
|
||||
@@ -79,7 +79,7 @@ linked to a profile page. In this case, "@alex" would be linked to
|
||||
"https://alexschroeder.ch/users/alex".
|
||||
|
||||
In many cases, this works as is. In reality, however, the link to the profile
|
||||
page needs to be retrieved via webfinger. Oddµ does that in the background, and
|
||||
page needs to be retrieved via webfinger. Oddμ does that in the background, and
|
||||
as soon as the information is available, the actual profile link is used when
|
||||
pages are rendered. In the example above, the result would be
|
||||
"https://social.alexschroeder.ch/@alex".
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU.SERVICE" "5" "2024-08-23"
|
||||
.TH "ODDMU.SERVICE" "5" "2025-03-14"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -94,8 +94,8 @@ sudo mkdir /run/oddmu
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
The unit file for the service defines where the "oddmu" is and where the data
|
||||
directory is.\& These are the lines you most likely have to take care of:
|
||||
The unit file for the service defines where the Oddmu binary is and where the
|
||||
data directory is.\& These are the lines you most likely have to take care of:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
@@ -152,6 +152,9 @@ Environment="ODDMU_LANGUAGES=de,en"
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
Make sure to change the "ExecStart" entry so that it points to your copy of the
|
||||
Oddmu binary.\&
|
||||
.PP
|
||||
Since this is a user service, the same user can edit the files using their
|
||||
favourite text editor.\&
|
||||
.PP
|
||||
|
||||
@@ -75,8 +75,8 @@ the directory or change the file name.
|
||||
sudo mkdir /run/oddmu
|
||||
```
|
||||
|
||||
The unit file for the service defines where the "oddmu" is and where the data
|
||||
directory is. These are the lines you most likely have to take care of:
|
||||
The unit file for the service defines where the Oddmu binary is and where the
|
||||
data directory is. These are the lines you most likely have to take care of:
|
||||
|
||||
```
|
||||
ExecStart=/home/oddmu/oddmu
|
||||
@@ -125,6 +125,9 @@ WorkingDirectory=/home/alex/wiki
|
||||
Environment="ODDMU_LANGUAGES=de,en"
|
||||
```
|
||||
|
||||
Make sure to change the "ExecStart" entry so that it points to your copy of the
|
||||
Oddmu binary.
|
||||
|
||||
Since this is a user service, the same user can edit the files using their
|
||||
favourite text editor.
|
||||
|
||||
|
||||
@@ -16,9 +16,9 @@ while (<>) {
|
||||
# italic
|
||||
s/\b_([^_]+)_\b/*$1*/g;
|
||||
# move all H1 headers to H2
|
||||
s/^# /## /;
|
||||
s/^# (.*)/"## ".ucfirst(lc($1))/e;
|
||||
# the new H1 title
|
||||
s/^([A-Z.-]*\([1-9]\))( ".*")?$/# $1/;
|
||||
s/^([A-Z.-]*\([1-9]\))( ".*")?$/"# ".lc($1)/e;
|
||||
# quoted URLs
|
||||
s/"(http.*?)"/`$1`/g;
|
||||
# quoted wiki links
|
||||
|
||||
68
man_test.go
68
man_test.go
@@ -20,14 +20,14 @@ func TestManPages(t *testing.T) {
|
||||
main := string(b)
|
||||
assert.NoError(t, err)
|
||||
count := 0
|
||||
filepath.Walk("man", func(path string, info fs.FileInfo, err error) error {
|
||||
filepath.Walk("man", func(fp string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if strings.HasSuffix(path, ".txt") &&
|
||||
path != "man/oddmu.1.txt" {
|
||||
if strings.HasSuffix(fp, ".txt") &&
|
||||
fp != "man/oddmu.1.txt" {
|
||||
count++
|
||||
s := strings.TrimPrefix(path, "man/")
|
||||
s := strings.TrimPrefix(fp, "man/")
|
||||
s = strings.TrimSuffix(s, ".txt")
|
||||
i := strings.LastIndex(s, ".")
|
||||
ref := "_" + s[:i] + "_(" + s[i+1:] + ")"
|
||||
@@ -44,15 +44,15 @@ func TestManTemplates(t *testing.T) {
|
||||
man := string(b)
|
||||
assert.NoError(t, err)
|
||||
count := 0
|
||||
filepath.Walk(".", func(path string, info fs.FileInfo, err error) error {
|
||||
filepath.Walk(".", func(fp string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if strings.HasSuffix(path, ".html") {
|
||||
if strings.HasSuffix(fp, ".html") {
|
||||
count++
|
||||
assert.Contains(t, man, path, path)
|
||||
assert.Contains(t, man, fp, fp)
|
||||
}
|
||||
if path != "." && info.IsDir() {
|
||||
if fp != "." && info.IsDir() {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
return nil
|
||||
@@ -60,6 +60,40 @@ func TestManTemplates(t *testing.T) {
|
||||
assert.Greater(t, count, 0, "no templates were found")
|
||||
}
|
||||
|
||||
// Does oddmu-templates(5) mention all the templates?
|
||||
func TestManTemplateAttributess(t *testing.T) {
|
||||
mfp := "man/oddmu-templates.5.txt"
|
||||
b, err := os.ReadFile(mfp)
|
||||
man := string(b)
|
||||
assert.NoError(t, err)
|
||||
re := regexp.MustCompile(`{{(?:(?:if|range) )?(\.[A-Z][a-z]*)}}`)
|
||||
filepath.Walk(".", func(fp string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if fp != "." && info.IsDir() {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
if !strings.HasSuffix(fp, ".html") {
|
||||
return nil
|
||||
}
|
||||
h, err := os.ReadFile(fp)
|
||||
matches := re.FindAllSubmatch(h, -1)
|
||||
assert.Greater(t, len(matches), 0, "%s contains no attributes", fp)
|
||||
seen := make(map[string]bool)
|
||||
for _, m := range matches {
|
||||
attr := string(m[1])
|
||||
if seen[attr] {
|
||||
continue
|
||||
}
|
||||
seen[attr] = true
|
||||
assert.Contains(t, man, "_{{"+attr+"}}_", "%s does not mention _{{%s}}_", mfp, attr)
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// Does oddmu(1) mention all the actions? We're not going to parse the go file and make sure to catch them all. I tried
|
||||
// it, and it's convoluted.
|
||||
func TestManActions(t *testing.T) {
|
||||
@@ -71,7 +105,7 @@ func TestManActions(t *testing.T) {
|
||||
wiki := string(b)
|
||||
count := 0
|
||||
// this doesn't match the root handler
|
||||
re := regexp.MustCompile(`http.HandleFunc\("(/[a-z]+/)", makeHandler\([a-z]+Handler, (true|false)\)\)`)
|
||||
re := regexp.MustCompile(`\.HandleFunc\("(/[a-z]+/)", makeHandler\([a-z]+Handler, (true|false)(, http\.Method(Get|Post))+\)\)`)
|
||||
for _, match := range re.FindAllStringSubmatch(wiki, -1) {
|
||||
count++
|
||||
var path string
|
||||
@@ -94,13 +128,13 @@ func TestReadme(t *testing.T) {
|
||||
readme := string(b)
|
||||
assert.NoError(t, err)
|
||||
count := 0
|
||||
filepath.Walk("man", func(path string, info fs.FileInfo, err error) error {
|
||||
filepath.Walk("man", func(fp string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if strings.HasSuffix(path, ".txt") {
|
||||
if strings.HasSuffix(fp, ".txt") {
|
||||
count++
|
||||
s := strings.TrimPrefix(path, "man/")
|
||||
s := strings.TrimPrefix(fp, "man/")
|
||||
s = strings.TrimSuffix(s, ".txt")
|
||||
i := strings.LastIndex(s, ".")
|
||||
ref := "[" + s[:i] + "(" + s[i+1:] + ")]"
|
||||
@@ -110,15 +144,15 @@ func TestReadme(t *testing.T) {
|
||||
})
|
||||
assert.Greater(t, count, 0, "no man pages were found")
|
||||
count = 0
|
||||
filepath.Walk(".", func(path string, info fs.FileInfo, err error) error {
|
||||
filepath.Walk(".", func(fp string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if strings.HasSuffix(path, ".go") &&
|
||||
!strings.HasSuffix(path, "_test.go") &&
|
||||
!strings.HasSuffix(path, "_cmd.go") {
|
||||
if strings.HasSuffix(fp, ".go") &&
|
||||
!strings.HasSuffix(fp, "_test.go") &&
|
||||
!strings.HasSuffix(fp, "_cmd.go") {
|
||||
count++
|
||||
s := strings.TrimPrefix(path, "./")
|
||||
s := strings.TrimPrefix(fp, "./")
|
||||
ref := "`" + s + "`"
|
||||
assert.Contains(t, readme, ref, ref)
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"github.com/google/subcommands"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type notifyCmd struct {
|
||||
@@ -32,6 +33,11 @@ func (cmd *notifyCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface
|
||||
func notifyCli(w io.Writer, args []string) subcommands.ExitStatus {
|
||||
index.load()
|
||||
for _, name := range args {
|
||||
if !strings.HasSuffix(name, ".md") {
|
||||
fmt.Fprintf(os.Stderr, "%s does not end in '.md'\n", name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
name = name[0 : len(name)-3]
|
||||
p, err := loadPage(name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(w, "Loading %s: %s\n", name, err)
|
||||
|
||||
81
page.go
81
page.go
@@ -64,7 +64,7 @@ func nameEscape(s string) string {
|
||||
// carriage return characters ("\r"). Page.Title and Page.Html are not saved. There is no caching. Before removing or
|
||||
// writing a file, the old copy is renamed to a backup, appending "~". Errors are not logged but returned.
|
||||
func (p *Page) save() error {
|
||||
fp := filepath.FromSlash(p.Name + ".md")
|
||||
fp := filepath.FromSlash(p.Name) + ".md"
|
||||
watches.ignore(fp)
|
||||
s := bytes.ReplaceAll(p.Body, []byte{'\r'}, []byte{})
|
||||
if len(s) == 0 {
|
||||
@@ -88,9 +88,19 @@ func (p *Page) save() error {
|
||||
return os.WriteFile(fp, s, 0644)
|
||||
}
|
||||
|
||||
// backup a file by renaming (!) it unless the existing backup is less than an hour old. A backup gets a tilde appended
|
||||
// to it ("~"). This is true even if the file refers to a binary file like "image.png" and most applications don't know
|
||||
// what to do with a file called "image.png~". This expects a file path. Use filepath.FromSlash(path) if necessary.
|
||||
func (p *Page) ModTime() (time.Time, error) {
|
||||
fp := filepath.FromSlash(p.Name) + ".md"
|
||||
fi, err := os.Stat(fp)
|
||||
if err != nil {
|
||||
return time.Now(), err
|
||||
}
|
||||
return fi.ModTime(), nil
|
||||
}
|
||||
|
||||
// backup a file by renaming it unless the existing backup is less than an hour old. A backup gets a tilde appended to
|
||||
// it ("~"). This is true even if the file refers to a binary file like "image.png" and most applications don't know
|
||||
// what to do with a file called "image.png~". This expects a filepath. The backup file gets its modification time set
|
||||
// to now so that subsequent edits don't immediately overwrite it again.
|
||||
func backup(fp string) error {
|
||||
_, err := os.Stat(fp)
|
||||
if err != nil {
|
||||
@@ -99,7 +109,12 @@ func backup(fp string) error {
|
||||
bp := fp + "~"
|
||||
fi, err := os.Stat(bp)
|
||||
if err != nil || time.Since(fi.ModTime()).Minutes() >= 60 {
|
||||
return os.Rename(fp, bp)
|
||||
err = os.Rename(fp, bp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ts := time.Now()
|
||||
return os.Chtimes(bp, ts, ts)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -107,13 +122,13 @@ func backup(fp string) error {
|
||||
// loadPage loads a Page given a name. The path loaded is that Page.Name with the ".md" extension. The Page.Title is set
|
||||
// to the Page.Name (and possibly changed, later). The Page.Body is set to the file content. The Page.Html remains
|
||||
// undefined (there is no caching).
|
||||
func loadPage(path string) (*Page, error) {
|
||||
path = strings.TrimPrefix(path, "./") // result of a filepath.TreeWalk starting with "."
|
||||
body, err := os.ReadFile(filepath.FromSlash(path + ".md"))
|
||||
func loadPage(name string) (*Page, error) {
|
||||
name = strings.TrimPrefix(name, "./") // result of a path.TreeWalk starting with "."
|
||||
body, err := os.ReadFile(filepath.FromSlash(name) + ".md")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &Page{Title: path, Name: path, Body: body}, nil
|
||||
return &Page{Title: name, Name: name, Body: body}, nil
|
||||
}
|
||||
|
||||
// handleTitle extracts the title from a Page and sets Page.Title, if any. If replace is true, the page title is also
|
||||
@@ -133,7 +148,6 @@ func (p *Page) handleTitle(replace bool) {
|
||||
// summarize sets Page.Html to an extract.
|
||||
func (p *Page) summarize(q string) {
|
||||
t := p.plainText()
|
||||
p.Name = nameEscape(p.Name)
|
||||
p.Html = sanitizeStrict(snippets(q, t))
|
||||
}
|
||||
|
||||
@@ -143,20 +157,53 @@ func (p *Page) IsBlog() bool {
|
||||
return blogRe.MatchString(name)
|
||||
}
|
||||
|
||||
// Dir returns the directory the page is in. It's either the empty string if the page is in the Oddmu working directory,
|
||||
// or it ends in a slash. This is used to create the upload link in "view.html", for example.
|
||||
const upperhex = "0123456789ABCDEF"
|
||||
|
||||
// Path returns the Page.Name with some characters escaped because html/template doesn't escape those. This is suitable
|
||||
// for use in HTML templates.
|
||||
func (p *Page) Path() string {
|
||||
return pathEncode(p.Name)
|
||||
}
|
||||
|
||||
// pathEncode returns the page name with some characters escaped because html/template doesn't escape those. This is
|
||||
// suitable for use in HTML templates.
|
||||
func pathEncode(s string) string {
|
||||
n := strings.Count(s, ";") + strings.Count(s, ",") + strings.Count(s, "?") + strings.Count(s, "#")
|
||||
if n == 0 {
|
||||
return s
|
||||
}
|
||||
t := make([]byte, len(s)+2*n)
|
||||
j := 0
|
||||
for i := 0; i < len(s); i++ {
|
||||
switch s[i] {
|
||||
case ';', ',', '?', '#':
|
||||
t[j] = '%'
|
||||
t[j+1] = upperhex[s[i]>>4]
|
||||
t[j+2] = upperhex[s[i]&15]
|
||||
j += 3
|
||||
default:
|
||||
t[j] = s[i]
|
||||
j++
|
||||
}
|
||||
}
|
||||
return string(t)
|
||||
}
|
||||
|
||||
// Dir returns the directory part of the page name, percent-escaped except for the slashes. It's either the empty string
|
||||
// if the page is in the Oddmu working directory, or it ends in a slash. This is used to create the upload link in
|
||||
// "view.html", for example.
|
||||
func (p *Page) Dir() string {
|
||||
d := filepath.Dir(p.Name)
|
||||
d := path.Dir(p.Name)
|
||||
if d == "." {
|
||||
return ""
|
||||
}
|
||||
return d + "/"
|
||||
return pathEncode(d) + "/"
|
||||
}
|
||||
|
||||
// Base returns the basename of the page name: no directory. This is used to create the upload link in "view.html", for
|
||||
// example.
|
||||
// Base returns the basename of the page name: no directory, percent-escaped except for the slashes. This is used to
|
||||
// create the upload link in "view.html", for example.
|
||||
func (p *Page) Base() string {
|
||||
n := filepath.Base(p.Name)
|
||||
n := path.Base(p.Name)
|
||||
if n == "." {
|
||||
return ""
|
||||
}
|
||||
|
||||
@@ -57,7 +57,7 @@ And untouchable`)}
|
||||
p = &Page{Name: "testdata/parents/children/something/other"}
|
||||
// "testdata/parents/children/something/index" is a sibling and doesn't count!
|
||||
parents := p.Parents()
|
||||
assert.Equal(t, "Welcome to Oddµ", parents[0].Title)
|
||||
assert.Equal(t, "Welcome to Oddμ", parents[0].Title)
|
||||
assert.Equal(t, "../../../../index", parents[0].Url)
|
||||
assert.Equal(t, "…", parents[1].Title)
|
||||
assert.Equal(t, "../../../index", parents[1].Url)
|
||||
|
||||
15
parser.go
15
parser.go
@@ -8,7 +8,6 @@ import (
|
||||
"github.com/gomarkdown/markdown/parser"
|
||||
"net/url"
|
||||
"path"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// wikiLink returns an inline parser function. This indirection is
|
||||
@@ -37,9 +36,13 @@ func wikiLink(fn func(p *parser.Parser, data []byte, offset int) (int, ast.Node)
|
||||
|
||||
// hashtag returns an inline parser function. This indirection is
|
||||
// required because we want to receive an array of hashtags found.
|
||||
// The hashtags in the array keep their case.
|
||||
func hashtag() (func(p *parser.Parser, data []byte, offset int) (int, ast.Node), *[]string) {
|
||||
hashtags := make([]string, 0)
|
||||
return func(p *parser.Parser, data []byte, offset int) (int, ast.Node) {
|
||||
if p.InsideLink {
|
||||
return 0, nil
|
||||
}
|
||||
data = data[offset:]
|
||||
i := 0
|
||||
n := len(data)
|
||||
@@ -91,7 +94,6 @@ func (p *Page) renderHtml() {
|
||||
parser, hashtags := wikiParser()
|
||||
renderer := wikiRenderer()
|
||||
maybeUnsafeHTML := markdown.ToHTML(p.Body, parser, renderer)
|
||||
p.Name = nameEscape(p.Name)
|
||||
p.Html = unsafeBytes(maybeUnsafeHTML)
|
||||
p.Hashtags = *hashtags
|
||||
}
|
||||
@@ -125,7 +127,7 @@ func (p *Page) plainText() string {
|
||||
|
||||
// images returns an array of ImageData.
|
||||
func (p *Page) images() []ImageData {
|
||||
dir := path.Dir(filepath.ToSlash(p.Name))
|
||||
dir := p.Dir()
|
||||
images := make([]ImageData, 0)
|
||||
parser := parser.New()
|
||||
doc := markdown.Parse(p.Body, parser)
|
||||
@@ -148,6 +150,13 @@ func (p *Page) images() []ImageData {
|
||||
return images
|
||||
}
|
||||
|
||||
// hashtags returns an array of hashtags
|
||||
func hashtags(s []byte) []string {
|
||||
parser, hashtags := wikiParser()
|
||||
markdown.Parse(s, parser)
|
||||
return *hashtags
|
||||
}
|
||||
|
||||
// toString for a node returns the text nodes' literals, concatenated. There is no whitespace added so the expectation
|
||||
// is that there is only one child node. Otherwise, there may be a space missing between the literals, depending on the
|
||||
// exact child nodes they belong to.
|
||||
|
||||
@@ -51,11 +51,13 @@ I am cold, alone</p>
|
||||
func TestPageHtmlHashtagCornerCases(t *testing.T) {
|
||||
p := &Page{Body: []byte(`#
|
||||
|
||||
ok # #o #ok`)}
|
||||
ok # #o #ok
|
||||
[oh #ok \#nok](ok)`)}
|
||||
p.renderHtml()
|
||||
r := `<p>#</p>
|
||||
|
||||
<p>ok # <a class="tag" href="/search/?q=%23o">#o</a> <a class="tag" href="/search/?q=%23ok">#ok</a></p>
|
||||
<p>ok # <a class="tag" href="/search/?q=%23o">#o</a> <a class="tag" href="/search/?q=%23ok">#ok</a>
|
||||
<a href="ok">oh #ok #nok</a></p>
|
||||
`
|
||||
assert.Equal(t, r, string(p.Html))
|
||||
}
|
||||
@@ -117,17 +119,17 @@ func TestAt(t *testing.T) {
|
||||
// prevent lookups
|
||||
accounts.Lock()
|
||||
accounts.uris = make(map[string]string)
|
||||
accounts.uris["alex@alexschroeder.ch"] = "https://social.alexschroeder.ch/@alex";
|
||||
accounts.uris["alex@alexschroeder.ch"] = "https://social.alexschroeder.ch/@alex"
|
||||
accounts.Unlock()
|
||||
// test account
|
||||
p := &Page{Body: []byte(`My fedi handle is @alex@alexschroeder.ch.`)}
|
||||
p.renderHtml()
|
||||
assert.Contains(t,string(p.Html),
|
||||
assert.Contains(t, string(p.Html),
|
||||
`My fedi handle is <a class="account" href="https://social.alexschroeder.ch/@alex" title="@alex@alexschroeder.ch">@alex</a>.`)
|
||||
// test escaped account
|
||||
p = &Page{Body: []byte(`My fedi handle is \@alex@alexschroeder.ch. \`)}
|
||||
p.renderHtml()
|
||||
assert.Contains(t,string(p.Html),
|
||||
assert.Contains(t, string(p.Html),
|
||||
`My fedi handle is @alex@alexschroeder.ch.`)
|
||||
// disable webfinger
|
||||
useWebfinger = false
|
||||
|
||||
@@ -10,8 +10,13 @@ import (
|
||||
// otherwise the rendered template has garbage bytes at the end. Note also that we need to remove the title from the
|
||||
// page so that the preview works as intended (and much like the "view.html" template) where as the editing requires the
|
||||
// page content including the header… which is why it needs to be added in the "preview.html" template. This makes me
|
||||
// sad.
|
||||
// sad. While viewing the preview, links will point to the /preview path. In order to handle this, regular GET requests
|
||||
// are passed on the the {viewHandler}.
|
||||
func previewHandler(w http.ResponseWriter, r *http.Request, path string) {
|
||||
if r.Method != http.MethodPost {
|
||||
http.Redirect(w, r, "/view/"+strings.TrimPrefix(path, "/preview/"), http.StatusFound)
|
||||
return
|
||||
}
|
||||
body := strings.ReplaceAll(r.FormValue("body"), "\r", "")
|
||||
p := &Page{Name: path, Body: []byte(body)}
|
||||
p.handleTitle(true)
|
||||
|
||||
22
preview.html
22
preview.html
@@ -3,18 +3,18 @@
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
<meta name="viewport" content="width=device-width">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
|
||||
<base href="/view/{{.Dir}}">
|
||||
<title>Preview: {{.Title}}</title>
|
||||
<style>
|
||||
html { max-width: 70ch; padding: 1ch; margin: auto; color: #111; background-color: #ffe; }
|
||||
body { hyphens: auto; }
|
||||
header a { margin-right: 1ch; }
|
||||
form { display: inline-block; }
|
||||
input#search { width: 12ch; }
|
||||
button { background-color: #eee; color: inherit; border-radius: 4px; border-width: 1px; }
|
||||
html { max-width: 70ch; padding: 1ch; margin: auto; color: #111; background-color: #ffe }
|
||||
body { hyphens: auto }
|
||||
header a { margin-right: 1ch }
|
||||
form { display: inline-block }
|
||||
input#search { width: 12ch }
|
||||
button { background-color: #eee; color: inherit; border-radius: 4px; border-width: 1px }
|
||||
footer { border-top: 1px solid #888 }
|
||||
img { max-width: 100%; }
|
||||
img { max-width: 100% }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
@@ -28,12 +28,12 @@ img { max-width: 100%; }
|
||||
<hr>
|
||||
<section id="edit">
|
||||
<h2>Editing {{.Title}}</h2>
|
||||
<form action="/save/{{.Name}}" method="POST">
|
||||
<form action="/save/{{.Path}}" method="POST">
|
||||
<textarea name="body" rows="20" cols="80" lang="{{.Language}}" autofocus>{{printf "# %s\n\n%s" .Title .Body}}</textarea>
|
||||
<p><label><input type="checkbox" name="notify" checked> Add link to <a href="changes">the list of changes</a>.</label></p>
|
||||
<p><input type="submit" value="Save">
|
||||
<button formaction="/preview/{{.Name}}" type="submit">Preview</button>
|
||||
<a href="/view/{{.Name}}"><button type="button">Cancel</button></a></p>
|
||||
<button formaction="/preview/{{.Path}}" type="submit">Preview</button>
|
||||
<a href="/view/{{.Path}}"><button type="button">Cancel</button></a></p>
|
||||
</form>
|
||||
</section>
|
||||
</body>
|
||||
|
||||
18
preview_test.go
Normal file
18
preview_test.go
Normal file
@@ -0,0 +1,18 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestPreview(t *testing.T) {
|
||||
cleanup(t, "testdata/preview")
|
||||
|
||||
data := url.Values{}
|
||||
data.Set("body", "**Hallo**!")
|
||||
|
||||
r := assert.HTTPBody(makeHandler(previewHandler, false, http.MethodGet), "POST", "/view/testdata/preview/alex", data)
|
||||
assert.Contains(t, r, "<strong>Hallo</strong>!")
|
||||
}
|
||||
@@ -55,12 +55,12 @@ func replaceCli(w io.Writer, isConfirmed bool, isRegexp bool, args []string) sub
|
||||
}
|
||||
repl := []byte(args[1])
|
||||
changes := 0
|
||||
err := filepath.Walk(".", func(path string, info fs.FileInfo, err error) error {
|
||||
err := filepath.Walk(".", func(fp string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// skip hidden directories and files
|
||||
if path != "." && strings.HasPrefix(filepath.Base(path), ".") {
|
||||
if fp != "." && strings.HasPrefix(filepath.Base(fp), ".") {
|
||||
if info.IsDir() {
|
||||
return filepath.SkipDir
|
||||
} else {
|
||||
@@ -68,10 +68,10 @@ func replaceCli(w io.Writer, isConfirmed bool, isRegexp bool, args []string) sub
|
||||
}
|
||||
}
|
||||
// skipp all but page files
|
||||
if !strings.HasSuffix(path, ".md") {
|
||||
if !strings.HasSuffix(fp, ".md") {
|
||||
return nil
|
||||
}
|
||||
body, err := os.ReadFile(path)
|
||||
body, err := os.ReadFile(fp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -79,15 +79,15 @@ func replaceCli(w io.Writer, isConfirmed bool, isRegexp bool, args []string) sub
|
||||
if !slices.Equal(result, body) {
|
||||
changes++
|
||||
if isConfirmed {
|
||||
fmt.Fprintln(w, path)
|
||||
_ = os.Rename(path, path+"~")
|
||||
err = os.WriteFile(path, result, 0644)
|
||||
fmt.Fprintln(w, fp)
|
||||
_ = os.Rename(fp, fp+"~")
|
||||
err = os.WriteFile(fp, result, 0644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
edits := myers.ComputeEdits(span.URIFromPath(path+"~"), string(body), string(result))
|
||||
diff := fmt.Sprint(gotextdiff.ToUnified(path+"~", path, string(body), edits))
|
||||
edits := myers.ComputeEdits(span.URIFromPath(fp+"~"), string(body), string(result))
|
||||
diff := fmt.Sprint(gotextdiff.ToUnified(fp+"~", fp, string(body), edits))
|
||||
fmt.Fprintln(w, diff)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -300,3 +300,9 @@ func searchHandler(w http.ResponseWriter, r *http.Request, dir string) {
|
||||
Results: len(items) > 0, More: more}
|
||||
renderTemplate(w, dir, "search", s)
|
||||
}
|
||||
|
||||
// Path returns the ImageData.Name with some characters escaped because html/template doesn't escape those. This is
|
||||
// suitable for use in HTML templates.
|
||||
func (img *ImageData) Path() string {
|
||||
return pathEncode(img.Name)
|
||||
}
|
||||
|
||||
26
search.html
26
search.html
@@ -3,24 +3,24 @@
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
<meta name="viewport" content="width=device-width">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
|
||||
<title>Search for {{.Query}}</title>
|
||||
<style>
|
||||
html { max-width: 70ch; padding: 2ch; margin: auto; color: #111; background-color: #ffe; }
|
||||
body { hyphens: auto; }
|
||||
header a { margin-right: 1ch; }
|
||||
form { display: inline-block; }
|
||||
input#search { width: 20ch; }
|
||||
button { background-color: #eee; color: inherit; border-radius: 4px; border-width: 1px; }
|
||||
html { max-width: 70ch; padding: 2ch; margin: auto; color: #111; background-color: #ffe }
|
||||
body { hyphens: auto }
|
||||
header a { margin-right: 1ch }
|
||||
form { display: inline-block }
|
||||
input#search { width: 20ch }
|
||||
button { background-color: #eee; color: inherit; border-radius: 4px; border-width: 1px }
|
||||
.result { font-size: larger }
|
||||
.score { font-size: smaller; opacity: 0.8; }
|
||||
.image { display: inline-block; margin-right: 1em; max-width: calc(20% - 1em); font-size: small; }
|
||||
.image img { max-width: 100%; }
|
||||
.score { font-size: smaller; opacity: 0.8 }
|
||||
.image { display: inline-block; margin-right: 1em; max-width: calc(20% - 1em); font-size: small }
|
||||
.image img { max-width: 100% }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<header>
|
||||
<a href="#main">Skip navigation</a>
|
||||
<a href="#main">Skip</a>
|
||||
<a href="/view/index">Home</a>
|
||||
<form role="search" action="/search/{{.Dir}}" method="GET">
|
||||
<label for="search">Search:</label>
|
||||
@@ -38,11 +38,11 @@ button { background-color: #eee; color: inherit; border-radius: 4px; border-widt
|
||||
{{if .More}}<a href="/search/{{.Dir}}?q={{.Query}}&page={{.Next}}">Next</a>{{end}}
|
||||
{{range .Items}}
|
||||
<article lang="{{.Language}}">
|
||||
<p><a class="result" href="/view/{{.Name}}">{{.Title}}</a>
|
||||
<p><a class="result" href="/view/{{.Path}}">{{.Title}}</a>
|
||||
<span class="score">{{.Score}}</span></p>
|
||||
<blockquote>{{.Html}}</blockquote>
|
||||
{{range .Images}}
|
||||
<p class="image"><a href="/view/{{.Name}}"><img loading="lazy" src="/view/{{.Name}}"></a><br/>{{.Html}}
|
||||
<p class="image"><a href="/view/{{.Path}}"><img loading="lazy" src="/view/{{.Path}}"></a><br/>{{.Html}}
|
||||
{{end}}
|
||||
</article>
|
||||
{{end}}
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"io"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
@@ -18,6 +19,8 @@ type searchCmd struct {
|
||||
page int
|
||||
all bool
|
||||
extract bool
|
||||
files bool
|
||||
quiet bool
|
||||
}
|
||||
|
||||
func (cmd *searchCmd) SetFlags(f *flag.FlagSet) {
|
||||
@@ -25,12 +28,14 @@ func (cmd *searchCmd) SetFlags(f *flag.FlagSet) {
|
||||
f.IntVar(&cmd.page, "page", 1, "the page in the search result set, default 1")
|
||||
f.BoolVar(&cmd.all, "all", false, "show all the pages and ignore -page")
|
||||
f.BoolVar(&cmd.extract, "extract", false, "print page extract instead of link list")
|
||||
f.BoolVar(&cmd.files, "files", false, "show just the filenames")
|
||||
f.BoolVar(&cmd.quiet, "quiet", false, "suppress summary line at the top")
|
||||
}
|
||||
|
||||
func (*searchCmd) Name() string { return "search" }
|
||||
func (*searchCmd) Synopsis() string { return "search pages and print a list of links" }
|
||||
func (*searchCmd) Usage() string {
|
||||
return `search [-dir string] [-page <n>|-all] [-extract] <terms>:
|
||||
return `search [-dir string] [-page <n>|-all] [-extract|-files] [-quiet] <terms>:
|
||||
Search for pages matching terms and print the result set as a
|
||||
Markdown list. Before searching, all the pages are indexed. Thus,
|
||||
startup is slow. The benefit is that the page order is exactly as
|
||||
@@ -39,24 +44,24 @@ func (*searchCmd) Usage() string {
|
||||
}
|
||||
|
||||
func (cmd *searchCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface{}) subcommands.ExitStatus {
|
||||
return searchCli(os.Stdout, cmd.dir, cmd.page, cmd.all, cmd.extract, false, f.Args())
|
||||
return searchCli(os.Stdout, cmd, f.Args())
|
||||
}
|
||||
|
||||
// searchCli runs the search command on the command line. It is used
|
||||
// here with an io.Writer for easy testing.
|
||||
func searchCli(w io.Writer, dir string, n int, all, extract bool, quiet bool, args []string) subcommands.ExitStatus {
|
||||
dir, err := checkDir(dir)
|
||||
func searchCli(w io.Writer, cmd *searchCmd, args []string) subcommands.ExitStatus {
|
||||
dir, err := checkDir(cmd.dir)
|
||||
if err != nil {
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
index.reset()
|
||||
index.load()
|
||||
q := strings.Join(args, " ")
|
||||
items, more := search(q, dir, "", n, true)
|
||||
if !quiet {
|
||||
items, more := search(q, dir, "", cmd.page, true)
|
||||
if !cmd.quiet {
|
||||
fmt.Fprint(os.Stderr, "Search for ", q)
|
||||
if !all {
|
||||
fmt.Fprint(os.Stderr, ", page ", n)
|
||||
if !cmd.all {
|
||||
fmt.Fprint(os.Stderr, ", page ", cmd.page)
|
||||
}
|
||||
fmt.Fprint(os.Stderr, ": ", len(items))
|
||||
if len(items) == 1 {
|
||||
@@ -65,8 +70,13 @@ func searchCli(w io.Writer, dir string, n int, all, extract bool, quiet bool, ar
|
||||
fmt.Fprint(os.Stderr, " results\n")
|
||||
}
|
||||
}
|
||||
if extract {
|
||||
if cmd.extract {
|
||||
searchExtract(w, items)
|
||||
} else if cmd.files {
|
||||
for _, p := range items {
|
||||
name := filepath.FromSlash(p.Name) + ".md\n"
|
||||
fmt.Fprintf(w, name)
|
||||
}
|
||||
} else {
|
||||
for _, p := range items {
|
||||
name := p.Name
|
||||
|
||||
@@ -9,11 +9,11 @@ import (
|
||||
|
||||
func TestSearchCmd(t *testing.T) {
|
||||
b := new(bytes.Buffer)
|
||||
s := searchCli(b, "", 1, false, false, true, []string{"oddµ"})
|
||||
s := searchCli(b, &searchCmd{quiet: true}, []string{"oddμ"})
|
||||
assert.Equal(t, subcommands.ExitSuccess, s)
|
||||
r := `* [Oddµ: A minimal wiki](README)
|
||||
r := `* [Oddμ: A minimal wiki](README)
|
||||
* [Themes](themes/index)
|
||||
* [Welcome to Oddµ](index)
|
||||
* [Welcome to Oddμ](index)
|
||||
`
|
||||
assert.Equal(t, r, b.String())
|
||||
}
|
||||
@@ -26,7 +26,7 @@ that before we type and speak
|
||||
we hear that moment`)}
|
||||
p.save()
|
||||
b := new(bytes.Buffer)
|
||||
s := searchCli(b, "testdata/search", 1, false, false, true, []string{"speak"})
|
||||
s := searchCli(b, &searchCmd{dir: "testdata/search", quiet: true}, []string{"speak"})
|
||||
assert.Equal(t, subcommands.ExitSuccess, s)
|
||||
r := `* [Wait](wait)
|
||||
`
|
||||
|
||||
@@ -3,6 +3,7 @@ package main
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"slices"
|
||||
"testing"
|
||||
@@ -61,17 +62,17 @@ func TestSearch(t *testing.T) {
|
||||
index.load()
|
||||
|
||||
data := url.Values{}
|
||||
data.Set("q", "oddµ")
|
||||
data.Set("q", "oddμ")
|
||||
|
||||
body := assert.HTTPBody(makeHandler(searchHandler, false), "GET", "/search/", data)
|
||||
body := assert.HTTPBody(makeHandler(searchHandler, false, http.MethodGet), "GET", "/search/", data)
|
||||
assert.Contains(t, body, "Welcome")
|
||||
assert.Contains(t, body, `<span class="score">5</span>`)
|
||||
|
||||
body = assert.HTTPBody(makeHandler(searchHandler, false), "GET", "/search/testdata", data)
|
||||
body = assert.HTTPBody(makeHandler(searchHandler, false, http.MethodGet), "GET", "/search/testdata", data)
|
||||
assert.NotContains(t, body, "Welcome")
|
||||
|
||||
data.Set("q", "'create a new page'")
|
||||
body = assert.HTTPBody(makeHandler(searchHandler, false), "GET", "/search/", data)
|
||||
body = assert.HTTPBody(makeHandler(searchHandler, false, http.MethodGet), "GET", "/search/", data)
|
||||
assert.Contains(t, body, "Welcome")
|
||||
}
|
||||
|
||||
@@ -158,16 +159,16 @@ Where is lady luck?`)}
|
||||
data := url.Values{}
|
||||
data.Set("q", "luck")
|
||||
|
||||
body := assert.HTTPBody(makeHandler(searchHandler, false), "GET", "/search/", data)
|
||||
body := assert.HTTPBody(makeHandler(searchHandler, false, http.MethodGet), "GET", "/search/", data)
|
||||
assert.Contains(t, body, "luck")
|
||||
|
||||
body = assert.HTTPBody(makeHandler(searchHandler, false), "GET", "/search/testdata", data)
|
||||
body = assert.HTTPBody(makeHandler(searchHandler, false, http.MethodGet), "GET", "/search/testdata", data)
|
||||
assert.Contains(t, body, "luck")
|
||||
|
||||
body = assert.HTTPBody(makeHandler(searchHandler, false), "GET", "/search/testdata/dir", data)
|
||||
body = assert.HTTPBody(makeHandler(searchHandler, false, http.MethodGet), "GET", "/search/testdata/dir", data)
|
||||
assert.Contains(t, body, "luck")
|
||||
|
||||
body = assert.HTTPBody(makeHandler(searchHandler, false), "GET", "/search/testdata/other", data)
|
||||
body = assert.HTTPBody(makeHandler(searchHandler, false, http.MethodGet), "GET", "/search/testdata/other", data)
|
||||
assert.Contains(t, body, "No results")
|
||||
}
|
||||
|
||||
@@ -182,7 +183,7 @@ func TestTitleSearch(t *testing.T) {
|
||||
|
||||
items, more = search("title:wel", "", "", 1, false) // README also contains "wel"
|
||||
assert.Equal(t, 1, len(items), "one page found")
|
||||
assert.Equal(t, "index", items[0].Name, "Welcome to Oddµ")
|
||||
assert.Equal(t, "index", items[0].Name, "Welcome to Oddμ")
|
||||
assert.Greater(t, items[0].Score, 0, "matches result in a score")
|
||||
assert.False(t, more)
|
||||
|
||||
@@ -277,7 +278,7 @@ The silence streches.`)}
|
||||
p.save()
|
||||
data := url.Values{}
|
||||
data.Set("q", "look")
|
||||
body := assert.HTTPBody(makeHandler(searchHandler, false), "GET", "/search/", data)
|
||||
body := assert.HTTPBody(makeHandler(searchHandler, false, http.MethodGet), "GET", "/search/testdata/question/", data)
|
||||
assert.Contains(t, body, "We <b>look</b>")
|
||||
assert.NotContains(t, body, "Odd?")
|
||||
assert.Contains(t, body, "Even?")
|
||||
|
||||
16
static.html
16
static.html
@@ -3,17 +3,17 @@
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="format-detection" content="telephone=no">
|
||||
<meta name="viewport" content="width=device-width">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
|
||||
<title>{{.Title}}</title>
|
||||
<style>
|
||||
html { max-width: 65ch; padding: 1ch; margin: auto; color: #111; background-color: #ffe; }
|
||||
body { hyphens: auto; }
|
||||
header a { margin-right: 1ch; }
|
||||
form { display: inline-block; }
|
||||
input#search { width: 12ch; }
|
||||
button { background-color: #eee; color: inherit; border-radius: 4px; border-width: 1px; }
|
||||
html { max-width: 65ch; padding: 1ch; margin: auto; color: #111; background-color: #ffe }
|
||||
body { hyphens: auto }
|
||||
header a { margin-right: 1ch }
|
||||
form { display: inline-block }
|
||||
input#search { width: 12ch }
|
||||
button { background-color: #eee; color: inherit; border-radius: 4px; border-width: 1px }
|
||||
footer { border-top: 1px solid #888 }
|
||||
img { max-width: 100%; }
|
||||
img { max-width: 100% }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
@@ -84,7 +84,7 @@ func staticCli(source, target string, jobs int, quiet bool) subcommands.ExitStat
|
||||
func staticWalk(source, target string, tasks chan (args), stop chan (error)) {
|
||||
// The error returned here is what's in the stop channel but at the very end, a worker might return an error
|
||||
// even though the walk is already done. This is why we cannot rely on the return value of the walk.
|
||||
filepath.Walk(source, func(path string, info fs.FileInfo, err error) error {
|
||||
filepath.Walk(source, func(fp string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -92,9 +92,8 @@ func staticWalk(source, target string, tasks chan (args), stop chan (error)) {
|
||||
case err := <-stop:
|
||||
return err
|
||||
default:
|
||||
base := filepath.Base(path)
|
||||
// skip hidden directories and files
|
||||
if path != "." && strings.HasPrefix(base, ".") {
|
||||
if fp != "." && strings.HasPrefix(filepath.Base(fp), ".") {
|
||||
if info.IsDir() {
|
||||
return filepath.SkipDir
|
||||
} else {
|
||||
@@ -102,28 +101,28 @@ func staticWalk(source, target string, tasks chan (args), stop chan (error)) {
|
||||
}
|
||||
}
|
||||
// skip backup files, avoid recursion
|
||||
if strings.HasSuffix(path, "~") || strings.HasPrefix(path, target) {
|
||||
if strings.HasSuffix(fp, "~") || strings.HasPrefix(fp, target) {
|
||||
return nil
|
||||
}
|
||||
// determine the actual target: if source is a/ and target is b/ and path is a/file, then the
|
||||
// target is b/file
|
||||
var actual_target string
|
||||
var actualTarget string
|
||||
if source == "." {
|
||||
actual_target = filepath.Join(target, path)
|
||||
actualTarget = filepath.Join(target, fp)
|
||||
} else {
|
||||
if !strings.HasPrefix(path, source) {
|
||||
return fmt.Errorf("%s is not a subdirectory of %s", path, source)
|
||||
if !strings.HasPrefix(fp, source) {
|
||||
return fmt.Errorf("%s is not a subdirectory of %s", fp, source)
|
||||
}
|
||||
actual_target = filepath.Join(target, path[len(source):])
|
||||
actualTarget = filepath.Join(target, fp[len(source):])
|
||||
}
|
||||
// recreate subdirectories
|
||||
if info.IsDir() {
|
||||
return os.Mkdir(actual_target, 0755)
|
||||
return os.Mkdir(actualTarget, 0755)
|
||||
}
|
||||
// do the task if the target file doesn't exist or if the source file is newer
|
||||
other, err := os.Stat(actual_target)
|
||||
other, err := os.Stat(actualTarget)
|
||||
if err != nil || info.ModTime().After(other.ModTime()) {
|
||||
tasks <- args{source: path, target: actual_target, info: info}
|
||||
tasks <- args{source: fp, target: actualTarget, info: info}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
@@ -211,7 +210,6 @@ func staticPage(source, target string) (*Page, error) {
|
||||
}
|
||||
renderer := html.NewRenderer(opts)
|
||||
maybeUnsafeHTML := markdown.Render(doc, renderer)
|
||||
p.Name = nameEscape(p.Name)
|
||||
p.Html = unsafeBytes(maybeUnsafeHTML)
|
||||
p.Hashtags = *hashtags
|
||||
return p, write(p, target, "", "static.html")
|
||||
@@ -221,9 +219,9 @@ func staticPage(source, target string) (*Page, error) {
|
||||
func staticFeed(source, target string, p *Page, ti time.Time) error {
|
||||
// render feed, maybe
|
||||
base := filepath.Base(source)
|
||||
_, ok := index.token["#"+strings.ToLower(base)]
|
||||
_, ok := index.token[strings.ToLower(base)]
|
||||
if base == "index" || ok {
|
||||
f := feed(p, ti)
|
||||
f := feed(p, ti, 0, 10)
|
||||
if len(f.Items) > 0 {
|
||||
return write(f, target, `<?xml version="1.0" encoding="UTF-8"?>`, "feed.html")
|
||||
}
|
||||
@@ -257,22 +255,22 @@ func staticLinks(node ast.Node, entering bool) ast.WalkStatus {
|
||||
}
|
||||
|
||||
// write a page or feed with an appropriate template to a specific destination, overwriting it.
|
||||
func write(data any, path, prefix, templateFile string) error {
|
||||
file, err := os.Create(path)
|
||||
func write(data any, fp, prefix, templateFile string) error {
|
||||
file, err := os.Create(fp)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot create %s: %s\n", path, err)
|
||||
fmt.Fprintf(os.Stderr, "Cannot create %s: %s\n", fp, err)
|
||||
return err
|
||||
}
|
||||
_, err = file.Write([]byte(prefix))
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot write prefix %s: %s\n", path, err)
|
||||
fmt.Fprintf(os.Stderr, "Cannot write prefix %s: %s\n", fp, err)
|
||||
return err
|
||||
}
|
||||
templates.RLock()
|
||||
defer templates.RUnlock()
|
||||
err = templates.template[templateFile].Execute(file, data)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot execute %s template for %s: %s\n", templateFile, path, err)
|
||||
fmt.Fprintf(os.Stderr, "Cannot execute %s template for %s: %s\n", templateFile, fp, err)
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
|
||||
17
templates.go
17
templates.go
@@ -5,7 +5,6 @@ import (
|
||||
"io/fs"
|
||||
"log"
|
||||
"net/http"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
@@ -16,17 +15,16 @@ import (
|
||||
// able to generate HTML output. This always requires a template.
|
||||
var templateFiles = []string{"edit.html", "add.html", "view.html", "preview.html",
|
||||
"diff.html", "search.html", "static.html", "upload.html", "feed.html",
|
||||
"list.html" }
|
||||
"list.html"}
|
||||
|
||||
// templateStore controls access to map of parsed HTML templates. Make sure to lock and unlock as appropriate. See
|
||||
// renderTemplate and loadTemplates.
|
||||
type templateStore struct {
|
||||
sync.RWMutex
|
||||
|
||||
// template is a map of parsed HTML templates. The key is their path name. By default, the map only contains
|
||||
// template is a map of parsed HTML templates. The key is their filepath name. By default, the map only contains
|
||||
// top-level templates like "view.html". Subdirectories may contain their own templates which override the
|
||||
// templates in the root directory. If so, they are paths like "dir/view.html", not filepaths. Use
|
||||
// filepath.ToSlash() if necessary.
|
||||
// templates in the root directory. If so, they are filepaths like "dir/view.html".
|
||||
template map[string]*template.Template
|
||||
}
|
||||
|
||||
@@ -58,8 +56,7 @@ func loadTemplate(fp string, info fs.FileInfo, err error) error {
|
||||
log.Println("Cannot parse template:", fp, err)
|
||||
// ignore error
|
||||
} else {
|
||||
// log.Println("Parse template:", path)
|
||||
templates.template[filepath.ToSlash(fp)] = t
|
||||
templates.template[fp] = t
|
||||
}
|
||||
}
|
||||
return nil
|
||||
@@ -75,7 +72,7 @@ func updateTemplate(fp string) {
|
||||
} else {
|
||||
templates.Lock()
|
||||
defer templates.Unlock()
|
||||
templates.template[filepath.ToSlash(fp)] = t
|
||||
templates.template[fp] = t
|
||||
log.Println("Parse template:", fp)
|
||||
}
|
||||
}
|
||||
@@ -87,7 +84,7 @@ func removeTemplate(fp string) {
|
||||
filepath.Dir(fp) != "." {
|
||||
templates.Lock()
|
||||
defer templates.Unlock()
|
||||
delete(templates.template, filepath.ToSlash(fp))
|
||||
delete(templates.template, fp)
|
||||
log.Println("Discard template:", fp)
|
||||
}
|
||||
}
|
||||
@@ -99,7 +96,7 @@ func renderTemplate(w http.ResponseWriter, dir, tmpl string, data any) {
|
||||
base := tmpl + ".html"
|
||||
templates.RLock()
|
||||
defer templates.RUnlock()
|
||||
t := templates.template[path.Join(dir, base)]
|
||||
t := templates.template[filepath.Join(dir, base)]
|
||||
if t == nil {
|
||||
t = templates.template[base]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"bytes"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"testing"
|
||||
)
|
||||
|
||||
@@ -18,13 +19,12 @@ Memories of cold
|
||||
`)}
|
||||
p.save()
|
||||
assert.Contains(t,
|
||||
assert.HTTPBody(makeHandler(viewHandler, false), "GET", "/view/testdata/templates/snow", nil),
|
||||
"Skip navigation")
|
||||
assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/templates/snow", nil), "Skip")
|
||||
// save a new view handler
|
||||
html := "<body><h1>{{.Title}}</h1>{{.Html}}"
|
||||
form := new(bytes.Buffer)
|
||||
writer := multipart.NewWriter(form)
|
||||
field, err := writer.CreateFormField("name")
|
||||
field, err := writer.CreateFormField("filename")
|
||||
assert.NoError(t, err)
|
||||
field.Write([]byte("view.html"))
|
||||
file, err := writer.CreateFormFile("file", "test.html")
|
||||
@@ -33,18 +33,17 @@ Memories of cold
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, len(html), n)
|
||||
writer.Close()
|
||||
HTTPUploadLocation(t, makeHandler(dropHandler, false), "/drop/testdata/templates/", writer.FormDataContentType(), form)
|
||||
HTTPUploadLocation(t, makeHandler(dropHandler, false, http.MethodPost), "/drop/testdata/templates/", writer.FormDataContentType(), form)
|
||||
assert.FileExists(t, "view.html", "original view.html still exists")
|
||||
assert.FileExists(t, "testdata/templates/view.html", "new view.html also exists")
|
||||
assert.Contains(t,
|
||||
assert.HTTPBody(makeHandler(viewHandler, false), "GET", "/view/testdata/templates/view.html", nil),
|
||||
assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/templates/view.html", nil),
|
||||
html)
|
||||
// verify that it works
|
||||
body := assert.HTTPBody(makeHandler(viewHandler, false), "GET", "/view/testdata/templates/snow", nil)
|
||||
body := assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/templates/snow", nil)
|
||||
assert.Contains(t, body, "<h1>Snow</h1>")
|
||||
assert.NotContains(t, body, "Skip")
|
||||
// verify that the top level still uses the old template
|
||||
assert.Contains(t,
|
||||
assert.HTTPBody(makeHandler(viewHandler, false), "GET", "/view/index", nil),
|
||||
"Skip navigation")
|
||||
assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/index", nil), "Skip")
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user