forked from mirror/oddmu
Compare commits
22 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3078d63890 | ||
|
|
143ecb8a0a | ||
|
|
d66aa03a2d | ||
|
|
64954ddf5d | ||
|
|
a1d6ebfdff | ||
|
|
db3a3f5009 | ||
|
|
ece9649e3d | ||
|
|
23074cdd58 | ||
|
|
06c07209a2 | ||
|
|
7b2a835729 | ||
|
|
d0fe534f8e | ||
|
|
ac7de17a87 | ||
|
|
84e6a757b2 | ||
|
|
2dfb2afbf5 | ||
|
|
2092b5777c | ||
|
|
f635cb738a | ||
|
|
da398a3315 | ||
|
|
7315abd5bb | ||
|
|
b39901b244 | ||
|
|
bb4843c2f4 | ||
|
|
816c981200 | ||
|
|
89d550a1a4 |
4
Makefile
4
Makefile
@@ -73,8 +73,8 @@ oddmu-windows-amd64.tar.gz: oddmu.exe
|
||||
$< *.md man/*.[157].{html,md} themes/
|
||||
|
||||
%.tar.gz: %
|
||||
tar --create --file $@ --transform='s/^$</oddmu/' --transform='s/^/oddmu\//' --exclude='*~' \
|
||||
$< *.html Makefile *.socket *.service *.md man/Makefile man/*.1 man/*.5 man/*.7 themes/
|
||||
tar --create --gzip --file $@ --transform='s/^$</oddmu/' --transform='s/^/oddmu\//' --exclude='*~' \
|
||||
$< *.html Makefile *.socket *.service *.md man/Makefile man/*.[157] themes/
|
||||
|
||||
priv:
|
||||
sudo setcap 'cap_net_bind_service=+ep' oddmu
|
||||
|
||||
@@ -108,6 +108,10 @@ Static site generator:
|
||||
This man page documents the "html" subcommand to generate HTML from
|
||||
Markdown pages from the command line.
|
||||
|
||||
[oddmu-feed(1)](https://alexschroeder.ch/view/oddmu/oddmu-feed.1):
|
||||
This man page documents the "feed" subcommand to generate a feed from
|
||||
Markdown pages from the command line.
|
||||
|
||||
[oddmu-static(1)](https://alexschroeder.ch/view/oddmu/oddmu-static.1):
|
||||
This man page documents the "static" subcommand to generate an entire
|
||||
static website from the command line, avoiding the need to run Oddmu
|
||||
|
||||
@@ -48,7 +48,7 @@ func appendHandler(w http.ResponseWriter, r *http.Request, name string) {
|
||||
return
|
||||
}
|
||||
}
|
||||
http.Redirect(w, r, "/view/" + nameEscape(name), http.StatusFound)
|
||||
http.Redirect(w, r, "/view/"+nameEscape(name), http.StatusFound)
|
||||
}
|
||||
|
||||
func (p *Page) append(body []byte) {
|
||||
|
||||
@@ -17,7 +17,7 @@ func TestAddLinkToPageWithNoList(t *testing.T) {
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(2025-08-08\)\n`)
|
||||
link := "* [2025-08-08](2025-08-08)\n"
|
||||
addLinkToPage(p, link, re)
|
||||
assert.Equal(t, title + "\n\n" + link, string(p.Body))
|
||||
assert.Equal(t, title+"\n\n"+link, string(p.Body))
|
||||
}
|
||||
|
||||
func TestAddLinkToPageWithOlderLink(t *testing.T) {
|
||||
@@ -28,7 +28,7 @@ func TestAddLinkToPageWithOlderLink(t *testing.T) {
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(2025-08-10\)\n`)
|
||||
link := "* [2025-08-10](2025-08-10)\n"
|
||||
addLinkToPage(p, link, re)
|
||||
assert.Equal(t, title + "\n" + link + old, string(p.Body))
|
||||
assert.Equal(t, title+"\n"+link+old, string(p.Body))
|
||||
}
|
||||
|
||||
func TestAddLinkToPageBetweenToExistingLinks(t *testing.T) {
|
||||
@@ -39,7 +39,7 @@ func TestAddLinkToPageBetweenToExistingLinks(t *testing.T) {
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(2025-08-09\)\n`)
|
||||
link := "* [2025-08-09](2025-08-09)\n"
|
||||
addLinkToPage(p, link, re)
|
||||
assert.Equal(t, title + new + link + old, string(p.Body))
|
||||
assert.Equal(t, title+new+link+old, string(p.Body))
|
||||
}
|
||||
|
||||
func TestAddLinkToPageBetweenToExistingLinks2(t *testing.T) {
|
||||
@@ -50,7 +50,7 @@ func TestAddLinkToPageBetweenToExistingLinks2(t *testing.T) {
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(2025-08-08\)\n`)
|
||||
link := "* [2025-08-08](2025-08-08)\n"
|
||||
addLinkToPage(p, link, re)
|
||||
assert.Equal(t, title + new + link + old, string(p.Body))
|
||||
assert.Equal(t, title+new+link+old, string(p.Body))
|
||||
}
|
||||
|
||||
func TestAddLinkToPageAtTheEnd(t *testing.T) {
|
||||
@@ -61,7 +61,7 @@ func TestAddLinkToPageAtTheEnd(t *testing.T) {
|
||||
re := regexp.MustCompile(`(?m)^\* \[[^\]]+\]\(2025-08-07\)\n`)
|
||||
link := "* [2025-08-07](2025-08-07)\n"
|
||||
addLinkToPage(p, link, re)
|
||||
assert.Equal(t, title + new + old + link, string(p.Body))
|
||||
assert.Equal(t, title+new+old+link, string(p.Body))
|
||||
}
|
||||
|
||||
func TestChanges(t *testing.T) {
|
||||
|
||||
@@ -41,5 +41,5 @@ func saveHandler(w http.ResponseWriter, r *http.Request, name string) {
|
||||
return
|
||||
}
|
||||
}
|
||||
http.Redirect(w, r, "/view/" + nameEscape(name), http.StatusFound)
|
||||
http.Redirect(w, r, "/view/"+nameEscape(name), http.StatusFound)
|
||||
}
|
||||
|
||||
37
feed.go
37
feed.go
@@ -34,19 +34,36 @@ type Feed struct {
|
||||
// Items are based on the pages linked in list items starting with an asterisk ("*"). Links in
|
||||
// list items starting with a minus ("-") are ignored!
|
||||
Items []Item
|
||||
|
||||
// From is where the item number where the feed starts. It defaults to 0. Prev and From are the item numbers of
|
||||
// the previous and the next page of the feed. N is the number of items per page.
|
||||
Prev, Next, From, N int
|
||||
|
||||
// Complete is set when there is no pagination.
|
||||
Complete bool
|
||||
}
|
||||
|
||||
// feed returns a RSS 2.0 feed for any page. The feed items it contains are the pages linked from in list items starting
|
||||
// with an asterisk ("*").
|
||||
func feed(p *Page, ti time.Time) *Feed {
|
||||
// with an asterisk ("*"). The feed starts from a certain item and contains n items. If n is 0, the feed is complete
|
||||
// (unpaginated).
|
||||
func feed(p *Page, ti time.Time, from, n int) *Feed {
|
||||
feed := new(Feed)
|
||||
feed.Name = p.Name
|
||||
feed.Title = p.Title
|
||||
feed.Date = ti.Format(time.RFC1123Z)
|
||||
feed.From = from
|
||||
feed.N = n
|
||||
if n == 0 {
|
||||
feed.Complete = true
|
||||
} else if from > n {
|
||||
feed.Prev = from - n
|
||||
}
|
||||
to := from + n
|
||||
parser, _ := wikiParser()
|
||||
doc := markdown.Parse(p.Body, parser)
|
||||
items := make([]Item, 0)
|
||||
inListItem := false
|
||||
i := 0
|
||||
ast.WalkFunc(doc, func(node ast.Node, entering bool) ast.WalkStatus {
|
||||
// set the flag if we're in a list item
|
||||
listItem, ok := node.(*ast.ListItem)
|
||||
@@ -58,11 +75,22 @@ func feed(p *Page, ti time.Time) *Feed {
|
||||
if !inListItem || !entering {
|
||||
return ast.GoToNext
|
||||
}
|
||||
// if we're in a link and it's local
|
||||
// if we're in a link and it's not local
|
||||
link, ok := node.(*ast.Link)
|
||||
if !ok || bytes.Contains(link.Destination, []byte("//")) {
|
||||
return ast.GoToNext
|
||||
}
|
||||
// if we're too early or too late
|
||||
i++
|
||||
if i <= from {
|
||||
return ast.GoToNext
|
||||
}
|
||||
if n > 0 && i > to {
|
||||
// set if it's likely that more items exist
|
||||
feed.Next = to
|
||||
return ast.Terminate
|
||||
}
|
||||
// i counts links, not actual existing pages
|
||||
name := path.Join(p.Dir(), string(link.Destination))
|
||||
fi, err := os.Stat(filepath.FromSlash(name) + ".md")
|
||||
if err != nil {
|
||||
@@ -80,9 +108,6 @@ func feed(p *Page, ti time.Time) *Feed {
|
||||
it.Html = template.HTML(template.HTMLEscaper(p2.Html))
|
||||
it.Hashtags = p2.Hashtags
|
||||
items = append(items, it)
|
||||
if len(items) >= 10 {
|
||||
return ast.Terminate
|
||||
}
|
||||
return ast.GoToNext
|
||||
})
|
||||
feed.Items = items
|
||||
|
||||
@@ -1,11 +1,15 @@
|
||||
<rss xmlns:atom="http://www.w3.org/2005/Atom" version="2.0">
|
||||
<rss xmlns:atom="http://www.w3.org/2005/Atom" version="2.0"
|
||||
xmlns:fh="http://purl.org/syndication/history/1.0">
|
||||
<channel>
|
||||
<docs>http://blogs.law.harvard.edu/tech/rss</docs>
|
||||
<title>{{.Title}}</title>
|
||||
<link>https://example.org/</link>
|
||||
<managingEditor>you@example.org (Your Name)</managingEditor>
|
||||
<webMaster>you@example.org (Your Name)</webMaster>
|
||||
<atom:link href="https://example.org/view/{{.Path}}.rss" rel="self" type="application/rss+xml"/>
|
||||
<atom:link href="https://example.org/view/{{.Path}}.rss" rel="self" type="application/rss+xml"/>{{if .From}}
|
||||
<atom:link href="https://example.org/view/{{.Path}}.rss?from={{.Prev}}&n={{.N}}" rel="previous" type="application/rss+xml"/>{{end}}{{if .Next}}
|
||||
<atom:link href="https://example.org/view/{{.Path}}.rss?from={{.Next}}&n={{.N}}" rel="next" type="application/rss+xml"/>{{end}}{{if .Complete}}
|
||||
<fh:complete/>{{end}}
|
||||
<description>This is the digital garden of Your Name.</description>
|
||||
<image>
|
||||
<url>https://example.org/view/logo.jpg</url>
|
||||
|
||||
89
feed_cmd.go
Normal file
89
feed_cmd.go
Normal file
@@ -0,0 +1,89 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"flag"
|
||||
"fmt"
|
||||
"github.com/google/subcommands"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type feedCmd struct {
|
||||
}
|
||||
|
||||
func (*feedCmd) Name() string { return "feed" }
|
||||
func (*feedCmd) Synopsis() string { return "render a page as feed" }
|
||||
func (*feedCmd) Usage() string {
|
||||
return `feed <page name> ...:
|
||||
Render one or more pages as a single feed.
|
||||
Use a single - to read Markdown from stdin.
|
||||
`
|
||||
}
|
||||
|
||||
func (cmd *feedCmd) SetFlags(f *flag.FlagSet) {
|
||||
}
|
||||
|
||||
func (cmd *feedCmd) Execute(_ context.Context, f *flag.FlagSet, _ ...interface{}) subcommands.ExitStatus {
|
||||
if len(f.Args()) == 0 {
|
||||
fmt.Fprint(os.Stderr, cmd.Usage())
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
return feedCli(os.Stdout, f.Args())
|
||||
}
|
||||
|
||||
func feedCli(w io.Writer, args []string) subcommands.ExitStatus {
|
||||
if len(args) == 1 && args[0] == "-" {
|
||||
body, err := io.ReadAll(os.Stdin)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot read from stdin: %s\n", err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
p := &Page{Name: "stdin", Body: body}
|
||||
return p.printFeed(w, time.Now())
|
||||
}
|
||||
for _, name := range args {
|
||||
if !strings.HasSuffix(name, ".md") {
|
||||
fmt.Fprintf(os.Stderr, "%s does not end in '.md'\n", name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
name = name[0 : len(name)-3]
|
||||
p, err := loadPage(name)
|
||||
p.handleTitle(false)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot load %s: %s\n", name, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
ti, _ := p.ModTime()
|
||||
status := p.printFeed(w, ti)
|
||||
if status != subcommands.ExitSuccess {
|
||||
return status
|
||||
}
|
||||
}
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
|
||||
// printFeed prints the complete feed for a page (unpaginated).
|
||||
func (p *Page) printFeed(w io.Writer, ti time.Time) subcommands.ExitStatus {
|
||||
f := feed(p, ti, 0, 0)
|
||||
if len(f.Items) == 0 {
|
||||
fmt.Fprintf(os.Stderr, "Empty feed for %s\n", p.Name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
_, err := w.Write([]byte(`<?xml version="1.0" encoding="UTF-8"?>`))
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot write prefix: %s\n", err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
loadTemplates()
|
||||
templates.RLock()
|
||||
defer templates.RUnlock()
|
||||
err = templates.template["feed.html"].Execute(w, f)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot execute template: %s\n", err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
22
feed_cmd_test.go
Normal file
22
feed_cmd_test.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"github.com/google/subcommands"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestFeedCmd(t *testing.T) {
|
||||
cleanup(t, "testdata/complete")
|
||||
p := &Page{Name: "testdata/complete/one", Body: []byte("# One\n")}; p.save()
|
||||
p = &Page{Name: "testdata/complete/index", Body: []byte(`# Index
|
||||
* [one](one)
|
||||
`)}
|
||||
p.save()
|
||||
|
||||
b := new(bytes.Buffer)
|
||||
s := feedCli(b, []string{"testdata/complete/index.md"})
|
||||
assert.Equal(t, subcommands.ExitSuccess, s)
|
||||
assert.Contains(t, b.String(), "<fh:complete/>")
|
||||
}
|
||||
93
feed_test.go
93
feed_test.go
@@ -4,6 +4,7 @@ import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"net/http"
|
||||
"testing"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
func TestFeed(t *testing.T) {
|
||||
@@ -19,7 +20,6 @@ func TestNoFeed(t *testing.T) {
|
||||
|
||||
func TestFeedItems(t *testing.T) {
|
||||
cleanup(t, "testdata/feed")
|
||||
index.load()
|
||||
|
||||
p1 := &Page{Name: "testdata/feed/cactus", Body: []byte(`# Cactus
|
||||
Green head and white hair
|
||||
@@ -53,3 +53,94 @@ Writing poems about plants.
|
||||
assert.Contains(t, body, "<category>Succulent</category>")
|
||||
assert.Contains(t, body, "<category>Palmtree</category>")
|
||||
}
|
||||
|
||||
|
||||
func TestFeedPagination(t *testing.T) {
|
||||
cleanup(t, "testdata/pagination")
|
||||
|
||||
p := &Page{Name: "testdata/pagination/one", Body: []byte("# One\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/two", Body: []byte("# Two\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/three", Body: []byte("# Three\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/four", Body: []byte("# Four\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/five", Body: []byte("# Five\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/six", Body: []byte("# Six\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/seven", Body: []byte("# Seven\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/eight", Body: []byte("# Eight\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/nine", Body: []byte("# Nine\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/ten", Body: []byte("# Ten\n")}; p.save()
|
||||
|
||||
p = &Page{Name: "testdata/pagination/index", Body: []byte(`# Index
|
||||
* [one](one)
|
||||
* [two](two)
|
||||
* [three](three)
|
||||
* [four](four)
|
||||
* [five](five)
|
||||
* [six](six)
|
||||
* [seven](seven)
|
||||
* [eight](eight)
|
||||
* [nine](nine)
|
||||
* [ten](ten)
|
||||
`)}
|
||||
p.save()
|
||||
|
||||
body := assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", nil)
|
||||
assert.Contains(t, body, "<title>One</title>")
|
||||
assert.Contains(t, body, "<title>Ten</title>")
|
||||
assert.NotContains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=10&n=10" rel="next" type="application/rss+xml"/>`)
|
||||
|
||||
p = &Page{Name: "testdata/pagination/eleven", Body: []byte("# Eleven\n")}; p.save()
|
||||
p = &Page{Name: "testdata/pagination/index", Body: []byte(`# Index
|
||||
* [one](one)
|
||||
* [two](two)
|
||||
* [three](three)
|
||||
* [four](four)
|
||||
* [five](five)
|
||||
* [six](six)
|
||||
* [seven](seven)
|
||||
* [eight](eight)
|
||||
* [nine](nine)
|
||||
* [ten](ten)
|
||||
* [eleven](eleven)
|
||||
`)}
|
||||
p.save()
|
||||
|
||||
body = assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", nil)
|
||||
assert.NotContains(t, body, "<title>Eleven</title>")
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=10&n=10" rel="next" type="application/rss+xml"/>`)
|
||||
|
||||
params := url.Values{}
|
||||
params.Set("n", "0")
|
||||
body = assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", params)
|
||||
assert.Contains(t, body, "<title>Eleven</title>")
|
||||
assert.Contains(t, body, `<fh:complete/>`)
|
||||
|
||||
params = url.Values{}
|
||||
params.Set("n", "3")
|
||||
body = assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", params)
|
||||
assert.Contains(t, body, "<title>One</title>")
|
||||
assert.Contains(t, body, "<title>Three</title>")
|
||||
assert.NotContains(t, body, "<title>Four</title>")
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=3&n=3" rel="next" type="application/rss+xml"/>`)
|
||||
|
||||
params = url.Values{}
|
||||
params.Set("from", "3")
|
||||
params.Set("n", "3")
|
||||
body = assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", params)
|
||||
assert.NotContains(t, body, "<title>Three</title>")
|
||||
assert.Contains(t, body, "<title>Four</title>")
|
||||
assert.Contains(t, body, "<title>Six</title>")
|
||||
assert.NotContains(t, body, "<title>Seven</title>")
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=0&n=3" rel="previous" type="application/rss+xml"/>`)
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=6&n=3" rel="next" type="application/rss+xml"/>`)
|
||||
|
||||
params = url.Values{}
|
||||
params.Set("from", "2")
|
||||
params.Set("n", "3")
|
||||
body = assert.HTTPBody(makeHandler(viewHandler, false, http.MethodGet), "GET", "/view/testdata/pagination/index.rss", params)
|
||||
assert.NotContains(t, body, "<title>Two</title>")
|
||||
assert.Contains(t, body, "<title>Three</title>")
|
||||
assert.Contains(t, body, "<title>Five</title>")
|
||||
assert.NotContains(t, body, "<title>Six</title>")
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=0&n=3" rel="previous" type="application/rss+xml"/>`)
|
||||
assert.Contains(t, body, `<atom:link href="https://example.org/view/testdata/pagination/index.rss?from=5&n=3" rel="next" type="application/rss+xml"/>`)
|
||||
}
|
||||
|
||||
2
go.mod
2
go.mod
@@ -1,4 +1,4 @@
|
||||
module alexschroeder.ch/cgit/oddmu
|
||||
module src.alexschroeder.ch/oddmu
|
||||
|
||||
go 1.22
|
||||
|
||||
|
||||
@@ -4,9 +4,9 @@ import (
|
||||
"context"
|
||||
"flag"
|
||||
"fmt"
|
||||
"github.com/google/subcommands"
|
||||
"github.com/gomarkdown/markdown"
|
||||
"github.com/gomarkdown/markdown/ast"
|
||||
"github.com/google/subcommands"
|
||||
"github.com/hexops/gotextdiff"
|
||||
"github.com/hexops/gotextdiff/myers"
|
||||
"github.com/hexops/gotextdiff/span"
|
||||
@@ -86,7 +86,7 @@ func hashtagsUpdateCli(w io.Writer, dryRun bool) subcommands.ExitStatus {
|
||||
continue
|
||||
}
|
||||
title, ok := namesMap[hashtag]
|
||||
if (!ok) {
|
||||
if !ok {
|
||||
title = hashtagName(namesMap, hashtag, docids)
|
||||
namesMap[hashtag] = title
|
||||
}
|
||||
@@ -133,7 +133,7 @@ func hashtagsUpdateCli(w io.Writer, dryRun bool) subcommands.ExitStatus {
|
||||
}
|
||||
fn := h.Name + ".md"
|
||||
edits := myers.ComputeEdits(span.URIFromPath(fn), original, string(h.Body))
|
||||
diff := fmt.Sprint(gotextdiff.ToUnified(fn + "~", fn, original, edits))
|
||||
diff := fmt.Sprint(gotextdiff.ToUnified(fn+"~", fn, original, edits))
|
||||
fmt.Fprint(w, diff)
|
||||
} else {
|
||||
err = h.save()
|
||||
@@ -149,7 +149,7 @@ func hashtagsUpdateCli(w io.Writer, dryRun bool) subcommands.ExitStatus {
|
||||
|
||||
// Go through all the documents in the same directory and look for hashtag matches in the rendered HTML in order to
|
||||
// determine the most likely capitalization.
|
||||
func hashtagName (namesMap map[string]string, hashtag string, docids []docid) string {
|
||||
func hashtagName(namesMap map[string]string, hashtag string, docids []docid) string {
|
||||
candidate := make(map[string]int)
|
||||
var mostPopular string
|
||||
for _, docid := range docids {
|
||||
|
||||
36
html_cmd.go
36
html_cmd.go
@@ -5,6 +5,7 @@ import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"github.com/google/subcommands"
|
||||
"html/template"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
@@ -47,7 +48,7 @@ func htmlCli(w io.Writer, template string, args []string) subcommands.ExitStatus
|
||||
fmt.Fprintf(os.Stderr, "%s does not end in '.md'\n", name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
name = name[0:len(name)-3]
|
||||
name = name[0 : len(name)-3]
|
||||
p, err := loadPage(name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot load %s: %s\n", name, err)
|
||||
@@ -61,21 +62,28 @@ func htmlCli(w io.Writer, template string, args []string) subcommands.ExitStatus
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
|
||||
func (p *Page) printHtml(w io.Writer, template string) subcommands.ExitStatus {
|
||||
if len(template) > 0 {
|
||||
t := template
|
||||
loadTemplates()
|
||||
p.handleTitle(true)
|
||||
p.renderHtml()
|
||||
err := templates.template[t].Execute(w, p)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot execute %s template for %s: %s\n", t, p.Name, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
} else {
|
||||
func (p *Page) printHtml(w io.Writer, fn string) subcommands.ExitStatus {
|
||||
if fn == "" {
|
||||
// do not handle title
|
||||
p.renderHtml()
|
||||
fmt.Fprintln(w, p.Html)
|
||||
_, err := fmt.Fprintln(w, p.Html)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot write to stdout: %s\n", err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
p.handleTitle(true)
|
||||
p.renderHtml()
|
||||
t, err := template.ParseFiles(fn)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot parse template %s for %s: %s\n", fn, p.Name, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
err = t.Execute(w, p)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cannot execute template %s for %s: %s\n", fn, p.Name, err)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
return subcommands.ExitSuccess
|
||||
}
|
||||
|
||||
@@ -48,7 +48,7 @@ func linksCli(w io.Writer, args []string) subcommands.ExitStatus {
|
||||
fmt.Fprintf(os.Stderr, "%s does not end in '.md'\n", name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
name = name[0:len(name)-3]
|
||||
name = name[0 : len(name)-3]
|
||||
p, err := loadPage(name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(w, "Loading %s: %s\n", name, err)
|
||||
|
||||
@@ -43,7 +43,7 @@ README.md: ../README.md
|
||||
< $< > $@
|
||||
|
||||
upload: ${MD} README.md
|
||||
rsync --itemize-changes --archive *.md sibirocobombus:alexschroeder.ch/wiki/oddmu/
|
||||
rsync --itemize-changes --archive *.md ../README.md sibirocobombus:alexschroeder.ch/wiki/oddmu/
|
||||
make clean
|
||||
|
||||
clean:
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-EXPORT" "1" "2024-08-29"
|
||||
.TH "ODDMU-EXPORT" "1" "2025-08-31"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -22,8 +22,8 @@ You probably want to redirect this into a file so that you can upload and import
|
||||
it somewhere.\&
|
||||
.PP
|
||||
Note that this only handles pages (Markdown files).\& All other files (images,
|
||||
PDFs, whatever else you uploaded) are not part of the feed and has to be
|
||||
uploaded to the new platform in some other way.\&
|
||||
PDFs, whatever else you uploaded) are not part of the feed and have to be
|
||||
uploaded to the new platform using some other way.\&
|
||||
.PP
|
||||
The \fB-template\fR option specifies the template to use.\& If the template filename
|
||||
ends in \fI.\&xml\fR, \fI.\&html\fR or \fI.\&rss\fR, it is assumed to contain XML and the optional
|
||||
|
||||
@@ -15,8 +15,8 @@ You probably want to redirect this into a file so that you can upload and import
|
||||
it somewhere.
|
||||
|
||||
Note that this only handles pages (Markdown files). All other files (images,
|
||||
PDFs, whatever else you uploaded) are not part of the feed and has to be
|
||||
uploaded to the new platform in some other way.
|
||||
PDFs, whatever else you uploaded) are not part of the feed and have to be
|
||||
uploaded to the new platform using some other way.
|
||||
|
||||
The *-template* option specifies the template to use. If the template filename
|
||||
ends in _.xml_, _.html_ or _.rss_, it is assumed to contain XML and the optional
|
||||
|
||||
53
man/oddmu-feed.1
Normal file
53
man/oddmu-feed.1
Normal file
@@ -0,0 +1,53 @@
|
||||
.\" Generated by scdoc 1.11.3
|
||||
.\" Complete documentation for this program is not available as a GNU info page
|
||||
.ie \n(.g .ds Aq \(aq
|
||||
.el .ds Aq '
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-FEED" "1" "2025-08-31"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
oddmu-feed - render Oddmu page feed
|
||||
.PP
|
||||
.SH SYNOPSIS
|
||||
.PP
|
||||
\fBoddmu feed\fR \fIpage-name\fR .\&.\&.\&
|
||||
.PP
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
The "feed" subcommand opens the given Markdown files and writes the resulting
|
||||
RSS files without item limit (ordinarily, this default is 10 items per feed).\&
|
||||
This uses the "feed.\&html" template.\& Use "-" as the page name if you want to read
|
||||
Markdown from \fBstdin\fR.\&
|
||||
.PP
|
||||
Unlike the feeds generated by the \fBstatic\fR subcommand, the \fBfeed\fR command does
|
||||
not limit the feed to the ten most recent items.\& Instead, all items on the list
|
||||
are turned into feed items.\&
|
||||
.PP
|
||||
.SH EXAMPLES
|
||||
.PP
|
||||
Generate "emacs.\&rss" from "emacs.\&md":
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
oddmu feed emacs\&.md
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
Alternatively:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
oddmu feed - < emacs\&.md > emacs\&.rss
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
.SH SEE ALSO
|
||||
.PP
|
||||
\fIoddmu\fR(1), \fIoddmu-export\fR(1), \fIoddmu-static\fR(1)
|
||||
.PP
|
||||
.SH AUTHORS
|
||||
.PP
|
||||
Maintained by Alex Schroeder <alex@gnu.\&org>.\&
|
||||
42
man/oddmu-feed.1.txt
Normal file
42
man/oddmu-feed.1.txt
Normal file
@@ -0,0 +1,42 @@
|
||||
ODDMU-FEED(1)
|
||||
|
||||
# NAME
|
||||
|
||||
oddmu-feed - render Oddmu page feed
|
||||
|
||||
# SYNOPSIS
|
||||
|
||||
*oddmu feed* _page-name_ ...
|
||||
|
||||
# DESCRIPTION
|
||||
|
||||
The "feed" subcommand opens the given Markdown files and writes the resulting
|
||||
RSS files without item limit (ordinarily, this default is 10 items per feed).
|
||||
This uses the "feed.html" template. Use "-" as the page name if you want to read
|
||||
Markdown from *stdin*.
|
||||
|
||||
Unlike the feeds generated by the *static* subcommand, the *feed* command does
|
||||
not limit the feed to the ten most recent items. Instead, all items on the list
|
||||
are turned into feed items.
|
||||
|
||||
# EXAMPLES
|
||||
|
||||
Generate "emacs.rss" from "emacs.md":
|
||||
|
||||
```
|
||||
oddmu feed emacs.md
|
||||
```
|
||||
|
||||
Alternatively:
|
||||
|
||||
```
|
||||
oddmu feed - < emacs.md > emacs.rss
|
||||
```
|
||||
|
||||
# SEE ALSO
|
||||
|
||||
_oddmu_(1), _oddmu-export_(1), _oddmu-static_(1)
|
||||
|
||||
# AUTHORS
|
||||
|
||||
Maintained by Alex Schroeder <alex@gnu.org>.
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-LIST" "1" "2024-08-29"
|
||||
.TH "ODDMU-LIST" "1" "2025-08-31"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-RELEASES" "7" "2025-08-10"
|
||||
.TH "ODDMU-RELEASES" "7" "2025-09-28"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -15,6 +15,38 @@ oddmu-releases - what'\&s new?\&
|
||||
.PP
|
||||
This page lists user-visible features and template changes to consider.\&
|
||||
.PP
|
||||
.SS 1.19 (2025)
|
||||
.PP
|
||||
Add \fIfeed\fR subcommand.\& This produces a "complete" feed.\&
|
||||
.PP
|
||||
Add feed pagination for the \fIfeed\fR action.\& This produces a "paginated" feed.\&
|
||||
.PP
|
||||
See RFC 5005 for more information.\&
|
||||
.PP
|
||||
If you like the idea of feed pagination (not a given since that also helps bots
|
||||
scrape your site!\&) you need to add the necessary links to the feed template
|
||||
("feed.\&html").\& See \fIoddmu-templates\fR(5) for more.\&
|
||||
.PP
|
||||
Example, adding the feed history namespace:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
<rss xmlns:atom="http://www\&.w3\&.org/2005/Atom" version="2\&.0"
|
||||
xmlns:fh="http://purl\&.org/syndication/history/1\&.0">
|
||||
…
|
||||
{{if \&.From}}
|
||||
<atom:link rel="previous" type="application/rss+xml"
|
||||
href="https://example\&.org/view/{{\&.Path}}\&.rss?from={{\&.Prev}}&n={{\&.N}}"/>
|
||||
{{end}}
|
||||
{{if \&.Next}}
|
||||
<atom:link rel="next" type="application/rss+xml"
|
||||
href="https://example\&.org/view/{{\&.Path}}\&.rss?from={{\&.Next}}&n={{\&.N}}"/>
|
||||
{{end}}
|
||||
{{if \&.Complete}}<fh:complete/>{{end}}
|
||||
…
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
.SS 1.18 (2025)
|
||||
.PP
|
||||
The \fIhashtags\fR gained the option of checking and fixing the hashtag pages by
|
||||
|
||||
@@ -8,6 +8,36 @@ oddmu-releases - what's new?
|
||||
|
||||
This page lists user-visible features and template changes to consider.
|
||||
|
||||
## 1.19 (2025)
|
||||
|
||||
Add _feed_ subcommand. This produces a "complete" feed.
|
||||
|
||||
Add feed pagination for the _feed_ action. This produces a "paginated" feed.
|
||||
|
||||
See RFC 5005 for more information.
|
||||
|
||||
If you like the idea of feed pagination (not a given since that also helps bots
|
||||
scrape your site!) you need to add the necessary links to the feed template
|
||||
("feed.html"). See _oddmu-templates_(5) for more.
|
||||
|
||||
Example, adding the feed history namespace:
|
||||
|
||||
```
|
||||
<rss xmlns:atom="http://www.w3.org/2005/Atom" version="2.0"
|
||||
xmlns:fh="http://purl.org/syndication/history/1.0">
|
||||
…
|
||||
{{if .From}}
|
||||
<atom:link rel="previous" type="application/rss+xml"
|
||||
href="https://example.org/view/{{.Path}}.rss?from={{.Prev}}&n={{.N}}"/>
|
||||
{{end}}
|
||||
{{if .Next}}
|
||||
<atom:link rel="next" type="application/rss+xml"
|
||||
href="https://example.org/view/{{.Path}}.rss?from={{.Next}}&n={{.N}}"/>
|
||||
{{end}}
|
||||
{{if .Complete}}<fh:complete/>{{end}}
|
||||
…
|
||||
```
|
||||
|
||||
## 1.18 (2025)
|
||||
|
||||
The _hashtags_ gained the option of checking and fixing the hashtag pages by
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-STATIC" "1" "2024-08-29"
|
||||
.TH "ODDMU-STATIC" "1" "2025-08-31"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -28,7 +28,8 @@ pages get ".\&html" appended.\&
|
||||
If a page has a name case-insensitively matching a hashtag, a feed file is
|
||||
generated (ending with ".\&rss") if any suitable links are found.\& A suitable link
|
||||
for a feed item must appear in a bullet list item using an asterisk ("*").\& If
|
||||
no feed items are found, no feed is written.\&
|
||||
no feed items are found, no feed is written.\& The feed is limited to the ten most
|
||||
recent items.\&
|
||||
.PP
|
||||
Hidden files and directories (starting with a ".\&") and backup files (ending with
|
||||
a "~") are skipped.\&
|
||||
@@ -89,7 +90,11 @@ speed language determination up.\&
|
||||
.PP
|
||||
.SH SEE ALSO
|
||||
.PP
|
||||
\fIoddmu\fR(1), \fIoddmu-templates\fR(5)
|
||||
See \fIoddmu\fR(1) and \fIoddmu-templates\fR(5) for general information.\&
|
||||
.PP
|
||||
See \fIoddmu-html\fR(1) for a subcommand that converts individual pages file to HTML
|
||||
and see \fIoddmu-feed\fR(1) for a subcommand that generates feeds for individual
|
||||
files.\&
|
||||
.PP
|
||||
.SH AUTHORS
|
||||
.PP
|
||||
|
||||
@@ -21,7 +21,8 @@ pages get ".html" appended.
|
||||
If a page has a name case-insensitively matching a hashtag, a feed file is
|
||||
generated (ending with ".rss") if any suitable links are found. A suitable link
|
||||
for a feed item must appear in a bullet list item using an asterisk ("\*"). If
|
||||
no feed items are found, no feed is written.
|
||||
no feed items are found, no feed is written. The feed is limited to the ten most
|
||||
recent items.
|
||||
|
||||
Hidden files and directories (starting with a ".") and backup files (ending with
|
||||
a "~") are skipped.
|
||||
@@ -80,7 +81,11 @@ speed language determination up.
|
||||
|
||||
# SEE ALSO
|
||||
|
||||
_oddmu_(1), _oddmu-templates_(5)
|
||||
See _oddmu_(1) and _oddmu-templates_(5) for general information.
|
||||
|
||||
See _oddmu-html_(1) for a subcommand that converts individual page files to HTML
|
||||
and see _oddmu-feed_(1) for a subcommand that generates feeds for individual
|
||||
files.
|
||||
|
||||
# AUTHORS
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU-TEMPLATES" "5" "2025-04-26" "File Formats Manual"
|
||||
.TH "ODDMU-TEMPLATES" "5" "2025-09-24" "File Formats Manual"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
@@ -132,6 +132,26 @@ An item is a page plus a date.\& All the properties of a page can be used (see
|
||||
.PP
|
||||
\fI{{.\&Date}}\fR is the date of the last update to the page, in RFC 822 format.\&
|
||||
.PP
|
||||
In order to paginate feeds, the following attributes are also available in the
|
||||
feed:
|
||||
.PP
|
||||
\fI{{.\&From}}\fR is the item number where the feed starts.\& The first page starts at
|
||||
0.\& This can be passed to Oddmu via the query parameter \fIfrom\fR.\&
|
||||
.PP
|
||||
\fI{{.\&N}}\fR is the number items per page.\& The default is 10.\& This can be passed to
|
||||
Oddmu via the query parameter \fIn\fR.\& If this is set to 0, the feed is not
|
||||
paginated.\&
|
||||
.PP
|
||||
\fI{{.\&Complete}}\fR is a boolean that is true if the feed is not paginated.\& Such a
|
||||
feed cannot have a previous or next page.\&
|
||||
.PP
|
||||
\fI{{.\&Prev}}\fR is the item number where the previous page of the feed starts.\& On
|
||||
the first page, it'\&s value is 0 instead of -10.\& You need to test if \fI{{.\&From}}\fR
|
||||
is non-zero (in which case this is not the first page) before using \fI{{.\&Prev}}\fR.\&
|
||||
.PP
|
||||
\fI{{.\&Next}}\fR is the item number where the next feed starts, if there are any
|
||||
items left.\& If there are none, it'\&s value is 0.\&
|
||||
.PP
|
||||
.SS List
|
||||
.PP
|
||||
The list contains a directory name and an array of files.\&
|
||||
|
||||
@@ -106,6 +106,26 @@ An item is a page plus a date. All the properties of a page can be used (see
|
||||
|
||||
_{{.Date}}_ is the date of the last update to the page, in RFC 822 format.
|
||||
|
||||
In order to paginate feeds, the following attributes are also available in the
|
||||
feed:
|
||||
|
||||
_{{.From}}_ is the item number where the feed starts. The first page starts at
|
||||
0. This can be passed to Oddmu via the query parameter _from_.
|
||||
|
||||
_{{.N}}_ is the number of items per page. The default is 10. This can be passed to
|
||||
Oddmu via the query parameter _n_. If this is set to 0, the feed is not
|
||||
paginated.
|
||||
|
||||
_{{.Complete}}_ is a boolean that is true if the feed is not paginated. Such a
|
||||
feed cannot have a previous or next page.
|
||||
|
||||
_{{.Prev}}_ is the item number where the previous page of the feed starts. On
|
||||
the first page, it's value is 0 instead of -10. You need to test if _{{.From}}_
|
||||
is non-zero (in which case this is not the first page) before using _{{.Prev}}_.
|
||||
|
||||
_{{.Next}}_ is the item number where the next feed starts, if there are any
|
||||
items left. If there are none, its value is 0.
|
||||
|
||||
## List
|
||||
|
||||
The list contains a directory name and an array of files.
|
||||
|
||||
419
man/oddmu.1
419
man/oddmu.1
@@ -1,419 +0,0 @@
|
||||
.\" Generated by scdoc 1.11.3
|
||||
.\" Complete documentation for this program is not available as a GNU info page
|
||||
.ie \n(.g .ds Aq \(aq
|
||||
.el .ds Aq '
|
||||
.nh
|
||||
.ad l
|
||||
.\" Begin generated content:
|
||||
.TH "ODDMU" "1" "2025-08-09"
|
||||
.PP
|
||||
.SH NAME
|
||||
.PP
|
||||
oddmu - a wiki server
|
||||
.PP
|
||||
Oddmu is sometimes written Oddμ because μ is the letter mu.\&
|
||||
.PP
|
||||
.SH SYNOPSIS
|
||||
.PP
|
||||
\fBoddmu\fR
|
||||
.PP
|
||||
\fBoddmu\fR \fIsubcommand\fR [\fIarguments\fR.\&.\&.\&]
|
||||
.PP
|
||||
.SH DESCRIPTION
|
||||
.PP
|
||||
Oddmu can be used as a static site generator, turning Markdown files into HTML
|
||||
files, or it can be used as a public or a private wiki server.\& If it runs as a
|
||||
public wiki server, a regular webserver should be used as reverse proxy.\&
|
||||
.PP
|
||||
Run Oddmu without any arguments to serve the current working directory as a wiki
|
||||
on port 8080.\& Point your browser to http://localhost:8080/ to use it.\& This
|
||||
redirects you to http://localhost:8080/view/index – the first page you'\&ll
|
||||
create, most likely.\&
|
||||
.PP
|
||||
See \fIoddmu\fR(5) for details about the page formatting.\&
|
||||
.PP
|
||||
If you request a page that doesn'\&t exist, Oddmu tries to find a matching
|
||||
Markdown file by appending the extension ".\&md" to the page name.\& In the example
|
||||
above, the page name requested is "index" and the file name Oddmu tries to read
|
||||
is "index.\&md".\& If no such file exists, Oddmu offers you to create the page.\&
|
||||
.PP
|
||||
If your files don'\&t provide their own title ("# title"), the file name (without
|
||||
".\&md") is used for the page title.\&
|
||||
.PP
|
||||
Every file can be viewed as feed by using the extension ".\&rss".\& The
|
||||
feed items are based on links in bullet lists using the asterisk
|
||||
("*").\&
|
||||
.PP
|
||||
Subdirectories are created as necessary.\&
|
||||
.PP
|
||||
The wiki knows the following actions for a given page name and (optional)
|
||||
directory:
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
\fI/\fR redirects to /view/index
|
||||
.IP \(bu 4
|
||||
\fI/view/dir/\fR redirects to /view/dir/index
|
||||
.IP \(bu 4
|
||||
\fI/view/dir/name\fR shows a page
|
||||
.IP \(bu 4
|
||||
\fI/view/dir/name.\&md\fR shows the source text of a page
|
||||
.IP \(bu 4
|
||||
\fI/view/dir/name.\&rss\fR shows the RSS feed for the pages linked
|
||||
.IP \(bu 4
|
||||
\fI/diff/dir/name\fR shows the last change to a page
|
||||
.IP \(bu 4
|
||||
\fI/edit/dir/name\fR shows a form to edit a page
|
||||
.IP \(bu 4
|
||||
\fI/preview/dir/name\fR shows a preview of a page edit and the form to edit it
|
||||
.IP \(bu 4
|
||||
\fI/save/dir/name\fR saves an edit
|
||||
.IP \(bu 4
|
||||
\fI/add/dir/name\fR shows a form to add to a page
|
||||
.IP \(bu 4
|
||||
\fI/append/dir/name\fR appends an addition to a page
|
||||
.IP \(bu 4
|
||||
\fI/upload/dir/name\fR shows a form to upload a file
|
||||
.IP \(bu 4
|
||||
\fI/drop/dir/name\fR saves an upload
|
||||
.IP \(bu 4
|
||||
\fI/search/dir/?\&q=term\fR to search for a term
|
||||
.IP \(bu 4
|
||||
\fI/archive/dir/name.\&zip\fR to download a zip file of a directory
|
||||
.PD
|
||||
.PP
|
||||
When calling the \fIsave\fR and \fIappend\fR action, the page name is taken from the URL
|
||||
path and the page content is taken from the \fIbody\fR form parameter.\& To
|
||||
illustrate, here'\&s how to edit the "welcome" page using \fIcurl\fR:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
curl --form body="Did you bring a towel?"
|
||||
http://localhost:8080/save/welcome
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
When calling the \fIdrop\fR action, the query parameters used are \fIname\fR for the
|
||||
target filename and \fIfile\fR for the file to upload.\& If the query parameter
|
||||
\fImaxwidth\fR is set, an attempt is made to decode and resize the image.\& JPG, PNG,
|
||||
WEBP and HEIC files can be decoded.\& Only JPG and PNG files can be encoded,
|
||||
however.\& If the target name ends in \fI.\&jpg\fR, the \fIquality\fR query parameter is
|
||||
also taken into account.\& To upload some thumbnails:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
for f in *\&.jpg; do
|
||||
curl --form name="$f" --form file=@"$f" --form maxwidth=100
|
||||
http://localhost:8080/drop/
|
||||
done
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
When calling the \fIsearch\fR action, the search terms are taken from the query
|
||||
parameter \fIq\fR.\&
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
curl \&'http://localhost:8080/search/?q=towel\&'
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
The page name to act upon is optionally taken from the query parameter \fIid\fR.\& In
|
||||
this case, the directory must also be part of the query parameter and not of the
|
||||
URL path.\&
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
curl \&'http://localhost:8080/view/?id=man/oddmu\&.1\&.txt\&'
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
The base name for the \fIarchive\fR action is used by the browser to save the
|
||||
downloaded file.\& For Oddmu, only the directory is important.\& The following zips
|
||||
the \fIman\fR directory and saves it as \fIman.\&zip\fR.\&
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
curl --remote-name \&'http://localhost:8080/archive/man/man\&.zip\&'
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
.SH CONFIGURATION
|
||||
.PP
|
||||
The template files are the HTML files in the working directory.\& Please change
|
||||
these templates!\&
|
||||
.PP
|
||||
The first change you should make is to replace the name and email address in the
|
||||
footer of \fIview.\&html\fR.\& Look for "Your Name" and "example.\&org".\&
|
||||
.PP
|
||||
The second change you should make is to replace the name, email address and
|
||||
domain name in "feed.\&html".\& Look for "Your Name" and "example.\&org".\&
|
||||
.PP
|
||||
See \fIoddmu-templates\fR(5) for more.\&
|
||||
.PP
|
||||
.SH ENVIRONMENT
|
||||
.PP
|
||||
You can change the port served by setting the ODDMU_PORT environment variable.\&
|
||||
.PP
|
||||
You can change the address served by setting the ODDMU_ADDRESS environment
|
||||
variable to either an IPv4 address or an IPv6 address.\& If ODDMU_ADDRESS is
|
||||
unset, then the program listens on all available unicast addresses, both IPv4
|
||||
and IPv6.\& Here are a few example addresses:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
ODDMU_ADDRESS=127\&.0\&.0\&.1 # The loopback IPv4 address\&.
|
||||
ODDMU_ADDRESS=2001:db8::3:1 # An IPv6 address\&.
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
See the Socket Activation section for an alternative method of listening which
|
||||
supports Unix-domain sockets.\&
|
||||
.PP
|
||||
In order to limit language-detection to the languages you actually use, set the
|
||||
environment variable ODDMU_LANGUAGES to a comma-separated list of ISO 639-1
|
||||
codes, e.\&g.\& "en" or "en,de,fr,pt".\&
|
||||
.PP
|
||||
You can enable webfinger to link fediverse accounts to their correct profile
|
||||
pages by setting ODDMU_WEBFINGER to "1".\& See \fIoddmu\fR(5).\&
|
||||
.PP
|
||||
If you use secret subdirectories, you cannot rely on the web server to hide
|
||||
those pages because some actions such as searching and archiving include
|
||||
subdirectories.\& They act upon a whole tree of pages, not just a single page.\& The
|
||||
ODDMU_FILTER can be used to exclude subdirectories from such tree actions.\& See
|
||||
\fIoddmu-filter\fR(7) and \fIoddmu-apache\fR(5).\&
|
||||
.PP
|
||||
.SH Socket Activation
|
||||
.PP
|
||||
Instead of specifying ODDMU_ADDRESS or ODDMU_PORT, you can start the service
|
||||
through socket activation.\& The advantage of this method is that you can use a
|
||||
Unix-domain socket instead of a TCP socket, and the permissions and ownership of
|
||||
the socket are set before the program starts.\& See \fIoddmu.\&service\fR(5),
|
||||
\fIoddmu-apache\fR(5) and \fIoddmu-nginx\fR(5) for an example of how to use socket
|
||||
activation with a Unix-domain socket under systemd and Apache.\&
|
||||
.PP
|
||||
.SH SECURITY
|
||||
.PP
|
||||
If the machine you are running Oddmu on is accessible from the Internet, you
|
||||
must secure your installation.\& The best way to do this is use a regular web
|
||||
server as a reverse proxy.\& See \fIoddmu-apache\fR(5) and \fIoddmu-nginx\fR(5) for
|
||||
example configurations.\&
|
||||
.PP
|
||||
Oddmu assumes that all the users that can edit pages or upload files are trusted
|
||||
users and therefore their content is trusted.\& Oddmu does not perform HTML
|
||||
sanitization!\&
|
||||
.PP
|
||||
For an extra dose of security, consider using a Unix-domain socket.\&
|
||||
.PP
|
||||
.SH OPTIONS
|
||||
.PP
|
||||
Oddmu can be run on the command-line using various subcommands.\&
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
to generate the HTML for a single page, see \fIoddmu-html\fR(1)
|
||||
.IP \(bu 4
|
||||
to generate the HTML for the entire site, using Oddmu as a static site
|
||||
generator, see \fIoddmu-static\fR(1)
|
||||
.IP \(bu 4
|
||||
to export the HTML for the entire site in one big feed, see \fIoddmu-export\fR(1)
|
||||
.IP \(bu 4
|
||||
to emulate a search of the files, see \fIoddmu-search\fR(1); to understand how the
|
||||
search engine indexes pages and how it sorts and scores results, see
|
||||
\fIoddmu-search\fR(7)
|
||||
.IP \(bu 4
|
||||
to search a regular expression and replace it across all files, see
|
||||
\fIoddmu-replace\fR(1)
|
||||
.IP \(bu 4
|
||||
to learn what the most popular hashtags are, see \fIoddmu-hashtags\fR(1)
|
||||
.IP \(bu 4
|
||||
to print a table of contents (TOC) for a page, see \fIoddmu-toc\fR(1)
|
||||
.IP \(bu 4
|
||||
to list the outgoing links for a page, see \fIoddmu-links\fR(1)
|
||||
.IP \(bu 4
|
||||
to find missing pages (local links that go nowhere), see \fIoddmu-missing\fR(1)
|
||||
.IP \(bu 4
|
||||
to list all the pages with name and title, see \fIoddmu-list\fR(1)
|
||||
.IP \(bu 4
|
||||
to add links to changes, index and hashtag pages to pages you created locally,
|
||||
see \fIoddmu-notify\fR(1)
|
||||
.IP \(bu 4
|
||||
to display build information, see \fIoddmu-version\fR(1)
|
||||
.PD
|
||||
.PP
|
||||
.SH EXAMPLES
|
||||
.PP
|
||||
When saving a page, the page name is taken from the URL and the page content is
|
||||
taken from the "body" form parameter.\& To illustrate, here'\&s how to edit a page
|
||||
using \fIcurl\fR(1):
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
curl --form body="Did you bring a towel?"
|
||||
http://localhost:8080/save/welcome
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
To compute the space used by your setup, use regular tools:
|
||||
.PP
|
||||
.nf
|
||||
.RS 4
|
||||
du --exclude=\&'*/.*\&' --exclude \&'*~\&' --block-size=M
|
||||
.fi
|
||||
.RE
|
||||
.PP
|
||||
.SH DESIGN
|
||||
.PP
|
||||
This is a minimal wiki.\& There is no version history.\& It'\&s well suited as a
|
||||
\fIsecondary\fR medium: collaboration and conversation happens elsewhere, in chat,
|
||||
on social media.\& The wiki serves as the text repository that results from these
|
||||
discussions.\&
|
||||
.PP
|
||||
The idea is that the webserver handles as many tasks as possible.\& It logs
|
||||
requests, does rate limiting, handles encryption, gets the certificates, and so
|
||||
on.\& The web server acts as a reverse proxy and the wiki ends up being a content
|
||||
management system with almost no structure – or endless malleability, depending
|
||||
on your point of view.\& See \fIoddmu-apache\fR(5).\&
|
||||
.PP
|
||||
.SH NOTES
|
||||
.PP
|
||||
Page names are filenames with ".\&md" appended.\& If your filesystem cannot handle
|
||||
it, it can'\&t be a page name.\& Filenames can contain slashes and Oddmu creates
|
||||
subdirectories as necessary.\&
|
||||
.PP
|
||||
Files may not end with a tilde ('\&~'\&) – these are backup files.\& When saving pages
|
||||
and file uploads, the old file is renamed to the backup file unless the backup
|
||||
file is less than an hour old, thus collapsing all edits made in an hour into a
|
||||
single diff when comparing backup and current version.\& The backup also gets an
|
||||
updated timestamp so that subsequent edits don'\&t immediately overwrite it.\&
|
||||
.PP
|
||||
The \fBindex\fR page is the default page.\& People visiting the "root" of the site are
|
||||
redirected to "/view/index".\&
|
||||
.PP
|
||||
The \fBchanges\fR page is where links to new and changed files are added.\& As an
|
||||
author, you can prevent this from happening by deselecting the checkbox "Add
|
||||
link to the list of changes.\&" The changes page can be edited like every other
|
||||
page, so it'\&s easy to undo mistakes.\&
|
||||
.PP
|
||||
Links on the changes page are grouped by date.\& When new links are added, the
|
||||
current date of the machine Oddmu is running on is used.\& If a link already
|
||||
exists on the changes page, it is moved up to the current date.\& If that leaves
|
||||
an old date without any links, that date heading is removed.\&
|
||||
.PP
|
||||
If you want to link to the changes page, you need to do this yourself.\& Add a
|
||||
link from the index, for example.\& The "view.\&html" template currently doesn'\&t do
|
||||
it.\& See \fIoddmu-templates\fR(5) if you want to add the link to the template.\&
|
||||
.PP
|
||||
A page whose name starts with an ISO date (YYYY-MM-DD, e.\&g.\& "2023-10-28") is
|
||||
called a \fBblog\fR page.\& When creating or editing blog pages, links to it are added
|
||||
from other pages.\&
|
||||
.PP
|
||||
If the blog page name starts with the current year, a link is created from the
|
||||
index page back to the blog page being created or edited.\& Again, you can prevent
|
||||
this from happening by deselecting the checkbox "Add link to the list of
|
||||
changes.\&" The index page can be edited like every other page, so it'\&s easy to
|
||||
undo mistakes.\&
|
||||
.PP
|
||||
For every \fBhashtag\fR used, another link might be created.\& If a page named like
|
||||
the hashtag exists, a backlink is added to it, linking to the new or edited blog
|
||||
page.\&
|
||||
.PP
|
||||
If a link to the new or edited blog page already exists but its title is no
|
||||
longer correct, it is updated.\&
|
||||
.PP
|
||||
New links added for blog pages are added at the top of the first unnumbered list
|
||||
using the asterisk ('\&*'\&).\& If no such list exists, a new one is started at the
|
||||
bottom of the page.\& This allows you to have a different unnumbered list further
|
||||
up on the page, as long as it uses the minus for items ('\&-'\&).\&
|
||||
.PP
|
||||
Changes made locally do not create any links on the changes page, the index page
|
||||
or on any hashtag pages.\& See \fIoddmu-notify\fR(1) for a way to add the necessary
|
||||
links to the changes page and possibly to the index and hashtag pages.\&
|
||||
.PP
|
||||
A hashtag consists of a number sign ('\&#'\&) followed by Unicode letters, numbers
|
||||
or the underscore ('\&_'\&).\& Thus, a hashtag ends with punctuation or whitespace.\&
|
||||
.PP
|
||||
The page names, titles and hashtags are loaded into memory when the server
|
||||
starts.\& If you have a lot of pages, this takes a lot of memory.\&
|
||||
.PP
|
||||
Oddmu watches the working directory and any subdirectories for changes made
|
||||
directly.\& Thus, in theory, it'\&s not necessary to restart it after making such
|
||||
changes.\&
|
||||
.PP
|
||||
You cannot edit uploaded files.\& If you upload a file called "hello.\&txt" and
|
||||
attempt to edit it by using "/edit/hello.\&txt" you create a page with the name
|
||||
"hello.\&txt.\&md" instead.\&
|
||||
.PP
|
||||
In order to delete uploaded files via the web, create an empty file and upload
|
||||
it.\& In order to delete a wiki page, save an empty page.\&
|
||||
.PP
|
||||
Note that some HTML file names are special: they act as templates.\& See
|
||||
\fIoddmu-templates\fR(5) for their names and their use.\&
|
||||
.PP
|
||||
.SH SEE ALSO
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
\fIoddmu\fR(5), about the markup syntax and how feeds are generated based on link
|
||||
lists
|
||||
.IP \(bu 4
|
||||
\fIoddmu-releases\fR(7), on what features are part of the latest release
|
||||
.IP \(bu 4
|
||||
\fIoddmu-filter\fR(7), on how to treat subdirectories as separate sites
|
||||
.IP \(bu 4
|
||||
\fIoddmu-search\fR(7), on how search works
|
||||
.IP \(bu 4
|
||||
\fIoddmu-templates\fR(5), on how to write the HTML templates
|
||||
.PD
|
||||
.PP
|
||||
If you run Oddmu as a web server:
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
\fIoddmu-apache\fR(5), on how to set up Apache as a reverse proxy
|
||||
.IP \(bu 4
|
||||
\fIoddmu-nginx\fR(5), on how to set up freenginx as a reverse proxy
|
||||
.IP \(bu 4
|
||||
\fIoddmu-webdav\fR(5), on how to set up Apache as a Web-DAV server
|
||||
.IP \(bu 4
|
||||
\fIoddmu.\&service\fR(5), on how to run the service under systemd
|
||||
.PD
|
||||
.PP
|
||||
If you run Oddmu as a static site generator or pages offline and sync them with
|
||||
Oddmu running as a webserver:
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
\fIoddmu-hashtags\fR(1), on working with hashtags
|
||||
.IP \(bu 4
|
||||
\fIoddmu-html\fR(1), on how to render a page
|
||||
.IP \(bu 4
|
||||
\fIoddmu-list\fR(1), on how to list pages and titles
|
||||
.IP \(bu 4
|
||||
\fIoddmu-links\fR(1), on how to list the outgoing links for a page
|
||||
.IP \(bu 4
|
||||
\fIoddmu-missing\fR(1), on how to find broken local links
|
||||
.IP \(bu 4
|
||||
\fIoddmu-notify\fR(1), on updating index, changes and hashtag pages
|
||||
.IP \(bu 4
|
||||
\fIoddmu-replace\fR(1), on how to search and replace text
|
||||
.IP \(bu 4
|
||||
\fIoddmu-search\fR(1), on how to run a search
|
||||
.IP \(bu 4
|
||||
\fIoddmu-static\fR(1), on generating a static site
|
||||
.IP \(bu 4
|
||||
\fIoddmu-toc\fR(1), on how to list the table of contents (toc) of a page
|
||||
.IP \(bu 4
|
||||
\fIoddmu-version\fR(1), on how to get all the build information from the binary
|
||||
.PD
|
||||
.PP
|
||||
If you want to stop using Oddmu:
|
||||
.PP
|
||||
.PD 0
|
||||
.IP \(bu 4
|
||||
\fIoddmu-export\fR(1), on how to export all the files as one big RSS file
|
||||
.PD
|
||||
.PP
|
||||
.SH AUTHORS
|
||||
.PP
|
||||
Maintained by Alex Schroeder <alex@gnu.\&org>.\&
|
||||
|
||||
@@ -317,6 +317,7 @@ Oddmu running as a webserver:
|
||||
|
||||
- _oddmu-hashtags_(1), on working with hashtags
|
||||
- _oddmu-html_(1), on how to render a page
|
||||
- _oddmu-feed_(1), on how to render a feed
|
||||
- _oddmu-list_(1), on how to list pages and titles
|
||||
- _oddmu-links_(1), on how to list the outgoing links for a page
|
||||
- _oddmu-missing_(1), on how to find broken local links
|
||||
|
||||
34
man_test.go
34
man_test.go
@@ -60,6 +60,40 @@ func TestManTemplates(t *testing.T) {
|
||||
assert.Greater(t, count, 0, "no templates were found")
|
||||
}
|
||||
|
||||
// Does oddmu-templates(5) mention all the templates?
|
||||
func TestManTemplateAttributess(t *testing.T) {
|
||||
mfp := "man/oddmu-templates.5.txt"
|
||||
b, err := os.ReadFile(mfp)
|
||||
man := string(b)
|
||||
assert.NoError(t, err)
|
||||
re := regexp.MustCompile(`{{(?:(?:if|range) )?(\.[A-Z][a-z]*)}}`)
|
||||
filepath.Walk(".", func(fp string, info fs.FileInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if fp != "." && info.IsDir() {
|
||||
return filepath.SkipDir
|
||||
}
|
||||
if !strings.HasSuffix(fp, ".html") {
|
||||
return nil
|
||||
}
|
||||
h, err := os.ReadFile(fp)
|
||||
matches := re.FindAllSubmatch(h, -1)
|
||||
assert.Greater(t, len(matches), 0, "%s contains no attributes", fp)
|
||||
seen := make(map[string]bool)
|
||||
for _, m := range matches {
|
||||
attr := string(m[1])
|
||||
if seen[attr] {
|
||||
continue
|
||||
}
|
||||
seen[attr] = true
|
||||
assert.Contains(t, man, "_{{"+attr+"}}_", "%s does not mention _{{%s}}_", mfp, attr)
|
||||
}
|
||||
assert.NoError(t, err)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// Does oddmu(1) mention all the actions? We're not going to parse the go file and make sure to catch them all. I tried
|
||||
// it, and it's convoluted.
|
||||
func TestManActions(t *testing.T) {
|
||||
|
||||
@@ -37,7 +37,7 @@ func notifyCli(w io.Writer, args []string) subcommands.ExitStatus {
|
||||
fmt.Fprintf(os.Stderr, "%s does not end in '.md'\n", name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
name = name[0:len(name)-3]
|
||||
name = name[0 : len(name)-3]
|
||||
p, err := loadPage(name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(w, "Loading %s: %s\n", name, err)
|
||||
|
||||
11
page.go
11
page.go
@@ -88,6 +88,15 @@ func (p *Page) save() error {
|
||||
return os.WriteFile(fp, s, 0644)
|
||||
}
|
||||
|
||||
func (p *Page) ModTime() (time.Time, error) {
|
||||
fp := filepath.FromSlash(p.Name) + ".md"
|
||||
fi, err := os.Stat(fp)
|
||||
if err != nil {
|
||||
return time.Now(), err
|
||||
}
|
||||
return fi.ModTime(), nil
|
||||
}
|
||||
|
||||
// backup a file by renaming it unless the existing backup is less than an hour old. A backup gets a tilde appended to
|
||||
// it ("~"). This is true even if the file refers to a binary file like "image.png" and most applications don't know
|
||||
// what to do with a file called "image.png~". This expects a filepath. The backup file gets its modification time set
|
||||
@@ -163,7 +172,7 @@ func pathEncode(s string) string {
|
||||
if n == 0 {
|
||||
return s
|
||||
}
|
||||
t := make([]byte, len(s) + 2*n)
|
||||
t := make([]byte, len(s)+2*n)
|
||||
j := 0
|
||||
for i := 0; i < len(s); i++ {
|
||||
switch s[i] {
|
||||
|
||||
@@ -119,17 +119,17 @@ func TestAt(t *testing.T) {
|
||||
// prevent lookups
|
||||
accounts.Lock()
|
||||
accounts.uris = make(map[string]string)
|
||||
accounts.uris["alex@alexschroeder.ch"] = "https://social.alexschroeder.ch/@alex";
|
||||
accounts.uris["alex@alexschroeder.ch"] = "https://social.alexschroeder.ch/@alex"
|
||||
accounts.Unlock()
|
||||
// test account
|
||||
p := &Page{Body: []byte(`My fedi handle is @alex@alexschroeder.ch.`)}
|
||||
p.renderHtml()
|
||||
assert.Contains(t,string(p.Html),
|
||||
assert.Contains(t, string(p.Html),
|
||||
`My fedi handle is <a class="account" href="https://social.alexschroeder.ch/@alex" title="@alex@alexschroeder.ch">@alex</a>.`)
|
||||
// test escaped account
|
||||
p = &Page{Body: []byte(`My fedi handle is \@alex@alexschroeder.ch. \`)}
|
||||
p.renderHtml()
|
||||
assert.Contains(t,string(p.Html),
|
||||
assert.Contains(t, string(p.Html),
|
||||
`My fedi handle is @alex@alexschroeder.ch.`)
|
||||
// disable webfinger
|
||||
useWebfinger = false
|
||||
|
||||
@@ -14,7 +14,7 @@ import (
|
||||
// are passed on the the {viewHandler}.
|
||||
func previewHandler(w http.ResponseWriter, r *http.Request, path string) {
|
||||
if r.Method != http.MethodPost {
|
||||
http.Redirect(w, r, "/view/" + strings.TrimPrefix(path, "/preview/"), http.StatusFound)
|
||||
http.Redirect(w, r, "/view/"+strings.TrimPrefix(path, "/preview/"), http.StatusFound)
|
||||
return
|
||||
}
|
||||
body := strings.ReplaceAll(r.FormValue("body"), "\r", "")
|
||||
|
||||
@@ -2,8 +2,8 @@ package main
|
||||
|
||||
import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"net/url"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"testing"
|
||||
)
|
||||
|
||||
|
||||
@@ -80,14 +80,14 @@ func replaceCli(w io.Writer, isConfirmed bool, isRegexp bool, args []string) sub
|
||||
changes++
|
||||
if isConfirmed {
|
||||
fmt.Fprintln(w, fp)
|
||||
_ = os.Rename(fp, fp + "~")
|
||||
_ = os.Rename(fp, fp+"~")
|
||||
err = os.WriteFile(fp, result, 0644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
edits := myers.ComputeEdits(span.URIFromPath(fp + "~"), string(body), string(result))
|
||||
diff := fmt.Sprint(gotextdiff.ToUnified(fp + "~", fp, string(body), edits))
|
||||
edits := myers.ComputeEdits(span.URIFromPath(fp+"~"), string(body), string(result))
|
||||
diff := fmt.Sprint(gotextdiff.ToUnified(fp+"~", fp, string(body), edits))
|
||||
fmt.Fprintln(w, diff)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -221,7 +221,7 @@ func staticFeed(source, target string, p *Page, ti time.Time) error {
|
||||
base := filepath.Base(source)
|
||||
_, ok := index.token[strings.ToLower(base)]
|
||||
if base == "index" || ok {
|
||||
f := feed(p, ti)
|
||||
f := feed(p, ti, 0, 10)
|
||||
if len(f.Items) > 0 {
|
||||
return write(f, target, `<?xml version="1.0" encoding="UTF-8"?>`, "feed.html")
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@ import (
|
||||
// able to generate HTML output. This always requires a template.
|
||||
var templateFiles = []string{"edit.html", "add.html", "view.html", "preview.html",
|
||||
"diff.html", "search.html", "static.html", "upload.html", "feed.html",
|
||||
"list.html" }
|
||||
"list.html"}
|
||||
|
||||
// templateStore controls access to map of parsed HTML templates. Make sure to lock and unlock as appropriate. See
|
||||
// renderTemplate and loadTemplates.
|
||||
|
||||
@@ -52,7 +52,7 @@ func tocCli(w io.Writer, args []string) subcommands.ExitStatus {
|
||||
fmt.Fprintf(os.Stderr, "%s does not end in '.md'\n", name)
|
||||
return subcommands.ExitFailure
|
||||
}
|
||||
name = name[0:len(name)-3]
|
||||
name = name[0 : len(name)-3]
|
||||
p, err := loadPage(name)
|
||||
if err != nil {
|
||||
fmt.Fprintf(w, "Loading %s: %s\n", name, err)
|
||||
|
||||
@@ -6,12 +6,12 @@ package main
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
_ "github.com/gen2brain/heic"
|
||||
"github.com/disintegration/imaging"
|
||||
"github.com/edwvee/exiffix"
|
||||
_ "github.com/gen2brain/heic"
|
||||
"github.com/gen2brain/webp"
|
||||
"image/png"
|
||||
"image/jpeg"
|
||||
"image/png"
|
||||
"io"
|
||||
"log"
|
||||
"mime"
|
||||
@@ -36,8 +36,8 @@ type Upload struct {
|
||||
}
|
||||
|
||||
type FileUpload struct {
|
||||
Name string
|
||||
Image bool
|
||||
Name string
|
||||
Image bool
|
||||
}
|
||||
|
||||
var lastRe = regexp.MustCompile(`^(.*?)([0-9]+)([^0-9]*)$`)
|
||||
@@ -86,7 +86,7 @@ func uploadHandler(w http.ResponseWriter, r *http.Request, dir string) {
|
||||
data.Uploads[i].Name = s
|
||||
mimeType := mime.TypeByExtension(path.Ext(s))
|
||||
data.Uploads[i].Image = strings.HasPrefix(mimeType, "image/")
|
||||
|
||||
|
||||
}
|
||||
renderTemplate(w, dir, "upload", data)
|
||||
}
|
||||
@@ -229,7 +229,7 @@ func dropHandler(w http.ResponseWriter, r *http.Request, dir string) {
|
||||
// do not use imaging.Decode(file, imaging.AutoOrientation(true)) because that only works for JPEG files
|
||||
img, fmt, err := exiffix.Decode(file)
|
||||
if err != nil {
|
||||
http.Error(w, "The image could not be decoded from " + from + " format", http.StatusBadRequest)
|
||||
http.Error(w, "The image could not be decoded from "+from+" format", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
log.Println("Decoded", fmt, "file")
|
||||
@@ -241,7 +241,7 @@ func dropHandler(w http.ResponseWriter, r *http.Request, dir string) {
|
||||
}
|
||||
}
|
||||
// images are always reencoded, so image quality goes down
|
||||
switch (to) {
|
||||
switch to {
|
||||
case ".png":
|
||||
err = png.Encode(dst, img)
|
||||
case ".jpg", ".jpeg":
|
||||
@@ -287,7 +287,7 @@ func dropHandler(w http.ResponseWriter, r *http.Request, dir string) {
|
||||
}
|
||||
updateTemplate(fp)
|
||||
}
|
||||
http.Redirect(w, r, "/upload/" + nameEscape(dir) + "?" + data.Encode(), http.StatusFound)
|
||||
http.Redirect(w, r, "/upload/"+nameEscape(dir)+"?"+data.Encode(), http.StatusFound)
|
||||
}
|
||||
|
||||
// basename returns a name matching the uploaded file but with no extension and no appended number. Given an uploaded
|
||||
|
||||
11
view.go
11
view.go
@@ -9,6 +9,7 @@ import (
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
@@ -132,7 +133,15 @@ func viewHandler(w http.ResponseWriter, r *http.Request, name string) {
|
||||
}
|
||||
p.handleTitle(true)
|
||||
if t == rss {
|
||||
it := feed(p, fi.ModTime())
|
||||
from, err := strconv.Atoi(r.FormValue("from"))
|
||||
if err != nil {
|
||||
from = 0
|
||||
}
|
||||
n, err := strconv.Atoi(r.FormValue("n"))
|
||||
if err != nil {
|
||||
n = 10
|
||||
}
|
||||
it := feed(p, fi.ModTime(), from, n)
|
||||
w.Write([]byte(`<?xml version="1.0" encoding="UTF-8"?>`))
|
||||
renderTemplate(w, p.Dir(), "feed", it)
|
||||
return
|
||||
|
||||
@@ -14,7 +14,7 @@ form { display: inline-block }
|
||||
input#search { width: 12ch }
|
||||
button { background-color: #eee; color: inherit; border-radius: 4px; border-width: 1px }
|
||||
footer { border-top: 1px solid #888 }
|
||||
img { max-width: 100% }
|
||||
img, video { max-width: 100% }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
|
||||
5
wiki.go
5
wiki.go
@@ -29,7 +29,7 @@ var validPath = regexp.MustCompile("^/([^/]+)/(.*)$")
|
||||
var titleRegexp = regexp.MustCompile("(?m)^#\\s*(.*)\n+")
|
||||
|
||||
// isHiddenName returns true if any path segment starts with a dot. This also catches '..' segments.
|
||||
func isHiddenName (name string) bool {
|
||||
func isHiddenName(name string) bool {
|
||||
for _, segment := range strings.Split(name, "/") {
|
||||
if strings.HasPrefix(segment, ".") {
|
||||
return true
|
||||
@@ -211,7 +211,7 @@ func serve() {
|
||||
ReadTimeout: 2 * time.Minute,
|
||||
WriteTimeout: 5 * time.Minute,
|
||||
IdleTimeout: 2 * time.Minute,
|
||||
Handler: mux,
|
||||
Handler: mux,
|
||||
}
|
||||
err = srv.Serve(listener)
|
||||
if err != nil {
|
||||
@@ -227,6 +227,7 @@ func commands() {
|
||||
subcommands.Register(subcommands.CommandsCommand(), "")
|
||||
subcommands.Register(&exportCmd{}, "")
|
||||
subcommands.Register(&hashtagsCmd{}, "")
|
||||
subcommands.Register(&feedCmd{}, "")
|
||||
subcommands.Register(&htmlCmd{}, "")
|
||||
subcommands.Register(&listCmd{}, "")
|
||||
subcommands.Register(&linksCmd{}, "")
|
||||
|
||||
@@ -45,7 +45,7 @@ func HTTPRedirectTo(t *testing.T, handler http.HandlerFunc, method, url string,
|
||||
handler(w, req)
|
||||
code := w.Code
|
||||
isRedirectCode := code >= http.StatusMultipleChoices && code <= http.StatusTemporaryRedirect
|
||||
if (values != nil) {
|
||||
if values != nil {
|
||||
url += "?" + values.Encode()
|
||||
}
|
||||
assert.True(t, isRedirectCode, "Expected HTTP redirect status code for %q but received %d", url, code)
|
||||
|
||||
Reference in New Issue
Block a user