aboutsummaryrefslogtreecommitdiff
path: root/cmd/server/feed.go
diff options
context:
space:
mode:
authorMitchell Riedstra <mitch@riedstra.dev>2021-07-12 23:08:58 -0400
committerMitchell Riedstra <mitch@riedstra.dev>2021-07-12 23:18:44 -0400
commit904e37a88a6a2eab3919f7f2c40bbb2c07544a7c (patch)
tree07ec38801bf572a2933d51d272fc4cd3ab74b61c /cmd/server/feed.go
parente6d53f71c9718ecdb9fde16a924d75a71aadd2d2 (diff)
downloadgo-website-904e37a88a6a2eab3919f7f2c40bbb2c07544a7c.tar.gz
go-website-904e37a88a6a2eab3919f7f2c40bbb2c07544a7c.tar.xz
Add atom feed to the go website
Diffstat (limited to 'cmd/server/feed.go')
-rw-r--r--cmd/server/feed.go216
1 files changed, 216 insertions, 0 deletions
diff --git a/cmd/server/feed.go b/cmd/server/feed.go
new file mode 100644
index 0000000..2d4a75b
--- /dev/null
+++ b/cmd/server/feed.go
@@ -0,0 +1,216 @@
+package main
+
+import (
+ "bytes"
+ "encoding/xml"
+ "errors"
+ "fmt"
+ "log"
+ "net/http"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/gorilla/mux"
+ "riedstra.dev/mitch/go-website/page"
+)
+
// Author is an atom:person construct identifying the author of a feed or
// of an individual entry (RFC 4287 section 3.2). Only Name is required;
// Uri and Email are omitted from the output when empty.
type Author struct {
	Name  string `xml:"name"` // Required
	Uri   string `xml:"uri,omitempty"`
	Email string `xml:"email,omitempty"`
}
+
+type Link struct {
+ Href string `xml:"href,attr,omitempty"`
+ Rel string `xml:"rel,attr,omitempty"`
+ Type string `xml:"type,attr,omitempty"`
+ Title string `xml:"Title,attr,omitempty"`
+ Length string `xml:"Length,attr,omitempty"`
+}
+
// Content is an atom:content element. Type describes how Data is encoded
// (this file always uses "html"); Data is written as character data inside
// the element.
type Content struct {
	Type string `xml:"type,attr"`
	Data string `xml:",chardata"`
}
+
// Entry is a single atom:entry element. Title and Updated are required by
// the Atom spec and are validated in MarshalXML; the optional fields use
// pointers/slices so that omitempty suppresses them entirely when unset.
type Entry struct {
	// Spec requires this, autogenerated from Title and updated if otherwise
	// left empty
	Id string `xml:"id"`

	Title     string     `xml:"title"`   // Required
	Updated   *time.Time `xml:"updated"` // Required
	Author    *Author    `xml:"author,omitempty"`
	Published *time.Time `xml:"published,omitempty"`
	Links     []Link     `xml:"link,omitempty"`
	Content   *Content   `xml:"content,omitempty"`
}
+
+func (i Entry) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
+ type Alias Entry
+
+ errs := []string{}
+ if i.Title == "" {
+ errs = append(errs, "Title Cannot be empty")
+ }
+ if i.Updated == nil {
+ errs = append(errs, "Updated cannot be nil")
+ }
+
+ if len(errs) > 0 {
+ return errors.New(strings.Join(errs, ","))
+ }
+
+ if i.Id == "" {
+ i.Id = fmt.Sprintf("%s::%d", i.Title, i.Updated.Unix())
+ }
+
+ i2 := (*Alias)(&i)
+
+ return e.EncodeElement(i2, start)
+}
+
// Atom is the root atom:feed document. Id, Author.Name and Updated are
// validated in MarshalXML; Ns is filled in automatically there with the
// Atom XML namespace.
type Atom struct {
	Ns    string `xml:"xmlns,attr"`
	Title string `xml:"title"` // Required
	Id    string `xml:"id"`    // Required
	// NOTE(review): omitempty never applies to struct-typed fields in
	// encoding/xml, so author is always emitted — harmless here since
	// Author is required anyway.
	Author    Author     `xml:"author,omitempty"` // Required
	Updated   *time.Time `xml:"updated"`          // Required
	Published *time.Time `xml:"published,omitempty"`
	Subtitle  string     `xml:"subtitle,omitempty"`
	Entries   []Entry    `xml:"entry"`
}
+
+func (a Atom) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
+ type Alias Atom
+
+ a.Ns = "http://www.w3.org/2005/Atom"
+ errs := []string{}
+ if a.Id == "" {
+ errs = append(errs, "ID Cannot be empty")
+ }
+ if a.Author.Name == "" {
+ errs = append(errs, "Author Name cannot be empty")
+ }
+ if a.Updated == nil {
+ errs = append(errs, "Updated cannot be empty")
+ }
+
+ if len(errs) > 0 {
+ return errors.New(strings.Join(errs, ","))
+ }
+
+ start.Name = xml.Name{Local: "feed"}
+
+ a2 := (*Alias)(&a)
+
+ return e.EncodeElement(a2, start)
+}
+
+// FeedHandler takes care of pulling from the index all of the relevant posts
+// and dumping them into an Atom feed.
+//
+// Relevant query parameters are:
+//
+// "content" if unset, or set to false content is omitted from the feed
+// "limit=n" stop at "n" and return the feed
+//
+func (a *App) FeedHandler(w http.ResponseWriter, r *http.Request) {
+ vars := mux.Vars(r)
+ var addContent bool
+ var limit int
+
+ if _, ok := r.URL.Query()["content"]; ok {
+ if r.URL.Query().Get("content") != "false" {
+ addContent = true
+ }
+ }
+
+ if l := r.URL.Query().Get("limit"); l != "" {
+ i, err := strconv.Atoi(l)
+ if err == nil {
+ limit = i
+ }
+ }
+
+ tag, ok := vars["tag"]
+ if !ok {
+ http.Error(w, "Tag not found or supplied", http.StatusNotFound)
+ return
+ }
+
+ p := page.NewPage("index")
+ index, err := p.Index()
+ if err != nil {
+ log.Println(err)
+ http.Error(w, "Internal server error", http.StatusInternalServerError)
+ return
+ }
+
+ pages, ok := index[tag]
+ if !ok {
+ http.Error(w, "Invalid tag", http.StatusNotFound)
+ return
+ }
+
+ pages, dateless := pages.RemoveDateless()
+ for _, p := range dateless {
+ log.Printf("Warning, page %s has no Date field. Skipping inclusion on feed", p)
+ }
+ pages.SortDate()
+
+ feed := &Atom{
+ Author: a.Author,
+ Title: a.Title,
+ Id: a.FeedId,
+ Updated: &a.Updated.Time,
+ Subtitle: a.Description,
+ }
+
+ entries := []Entry{}
+
+ for n, p := range pages {
+ if limit != 0 && n >= limit {
+ break
+ }
+
+ content := &bytes.Buffer{}
+ err := p.Render(content)
+ if err != nil {
+ log.Println(err)
+ http.Error(w, "Internal server error", http.StatusInternalServerError)
+ return
+ }
+
+ entry := Entry{
+ Title: p.Title,
+ Updated: &p.Date.Time,
+ Links: []Link{Link{Href: strings.Join([]string{a.SiteURL, p.Path()}, "/")}},
+ }
+
+ if addContent {
+ entry.Content = &Content{Type: "html", Data: content.String()}
+ }
+
+ entries = append(entries, entry)
+
+ }
+
+ feed.Entries = entries
+
+ w.Header().Add("Content-type", "application/xml")
+ w.Write([]byte(xml.Header))
+
+ enc := xml.NewEncoder(w)
+ enc.Indent("", " ")
+
+ err = enc.Encode(feed)
+ if err != nil {
+ log.Println(err)
+ // Headers probably already sent, but we'll try anyway
+ http.Error(w, "Internal server error", http.StatusInternalServerError)
+ }
+
+ return
+}