author    Mitchell Riedstra <mitch@riedstra.dev>  2023-01-06 01:22:38 -0500
committer Mitchell Riedstra <mitch@riedstra.dev>  2023-01-06 01:27:48 -0500
commit    97dd660925434be537cd9a49a1d0c893b223e357 (patch)
tree      21d521b08f3a08eb2398a47893eb1543000387b8 /cmd/server/feed.go
parent    1d01acca36b78eeba99da1adb10e72d186433b39 (diff)
Refactor routing and handlers
We were building a new gorilla/mux router on each connection; change that to an *http.ServeMux built once for the lifetime of the application. Tell Redis to cache only GET requests.
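
A minimal sketch of that shape, not this repo's actual wiring: feedHandler, onlyCacheGET, the /feed/ route, and the no-op cache below are placeholders for App.FeedHandler and the Redis-backed middleware.

package main

import (
	"log"
	"net/http"
)

// feedHandler stands in for App.FeedHandler; like the refactored handler,
// it returns an http.Handler so it can be registered once at startup.
func feedHandler() http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Add("Content-type", "application/xml")
		// ... build and encode the Atom feed here ...
	})
}

// onlyCacheGET routes GET requests through a caching middleware and lets
// every other method bypass the cache entirely.
func onlyCacheGET(cache func(http.Handler) http.Handler, next http.Handler) http.Handler {
	cached := cache(next)
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.Method == http.MethodGet {
			cached.ServeHTTP(w, r)
			return
		}
		next.ServeHTTP(w, r)
	})
}

func main() {
	// The mux is built once and reused for every request, instead of
	// being rebuilt per connection.
	mux := http.NewServeMux()
	mux.Handle("/feed/", feedHandler())

	// noopCache stands in for the Redis-backed caching layer.
	noopCache := func(next http.Handler) http.Handler { return next }

	log.Fatal(http.ListenAndServe(":8080", onlyCacheGET(noopCache, mux)))
}

Keeping the cache decision in a wrapper like onlyCacheGET means the handlers themselves stay unaware of Redis.
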
Diffstat (limited to 'cmd/server/feed.go')
-rw-r--r--  cmd/server/feed.go  184
1 file changed, 88 insertions(+), 96 deletions(-)
diff --git a/cmd/server/feed.go b/cmd/server/feed.go
index 212e5da..7e36cb3 100644
--- a/cmd/server/feed.go
+++ b/cmd/server/feed.go
@@ -11,7 +11,6 @@ import (
 	"strings"
 	"time"
 
-	"github.com/gorilla/mux"
 	"riedstra.dev/mitch/go-website/page"
 )
 
@@ -120,128 +119,121 @@ func (a Atom) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
 //
 // "content" if unset, or set to false content is omitted from the feed
 // "limit=n" stop at "n" and return the feed.
-//
-func (a *App) FeedHandler(w http.ResponseWriter, r *http.Request) { //nolint:funlen
-	vars := mux.Vars(r)
-
-	var (
-		addContent bool
-		limit      int
-	)
-
-	if _, ok := r.URL.Query()["content"]; ok {
-		if r.URL.Query().Get("content") != "false" {
-			addContent = true
+func (a *App) FeedHandler() http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { //nolint:funlen
+		var (
+			addContent bool
+			limit      int
+		)
+
+		if _, ok := r.URL.Query()["content"]; ok {
+			if r.URL.Query().Get("content") != "false" {
+				addContent = true
+			}
 		}
-	}
-	if l := r.URL.Query().Get("limit"); l != "" {
-		i, err := strconv.Atoi(l)
-		if err == nil {
-			limit = i
+		if l := r.URL.Query().Get("limit"); l != "" {
+			i, err := strconv.Atoi(l)
+			if err == nil {
+				limit = i
+			}
 		}
-	}
-
-	tag, ok := vars["tag"]
-	if !ok {
-		http.Error(w, "Tag not found or supplied", http.StatusNotFound)
-
-		return
-	}
-	p := page.NewPage("index")
+		tag := r.URL.Path
+		p := page.NewPage("index")
 
-	index, err := p.Index()
-	if err != nil {
-		log.Println(err)
-		http.Error(w, "Internal server error", http.StatusInternalServerError)
+		index, err := p.Index()
+		if err != nil {
+			log.Println(err)
+			http.Error(w, "Internal server error", http.StatusInternalServerError)
 
-		return
-	}
+			return
+		}
 
-	pages, ok := index[tag]
-	if !ok {
-		http.Error(w, "Invalid tag", http.StatusNotFound)
+		pages, ok := index[tag]
+		if !ok {
+			http.Error(w, "Invalid tag", http.StatusNotFound)
 
-		return
-	}
+			return
+		}
 
-	pages, dateless := pages.RemoveDateless()
-	for _, p := range dateless {
-		log.Printf("Warning, page %s has no Date field. Skipping inclusion on feed", p)
-	}
+		pages, dateless := pages.RemoveDateless()
+		for _, p := range dateless {
+			log.Printf("Warning, page %s has no Date field. Skipping inclusion on feed", p)
+		}
 
-	pages.SortDate()
+		pages.SortDate()
 
-	feed := &Atom{
-		Author:   a.Author,
-		Title:    a.Title,
-		Id:       a.FeedId,
-		Updated:  &a.Updated.Time,
-		Subtitle: a.Description,
-	}
+		feed := &Atom{
+			Author:   a.Author,
+			Title:    a.Title,
+			Id:       a.FeedId,
+			Updated:  &a.Updated.Time,
+			Subtitle: a.Description,
+		}
 
-	entries := []Entry{}
+		entries := []Entry{}
 
-	for n, p := range pages {
-		if limit != 0 && n >= limit {
-			break
-		}
+		for n, p := range pages {
+			if limit != 0 && n >= limit {
+				break
+			}
 
-		if !p.Published {
-			continue
-		}
+			if !p.Published {
+				continue
+			}
 
-		content := &bytes.Buffer{}
+			content := &bytes.Buffer{}
 
-		err := p.Render(content)
-		if err != nil {
-			log.Println(err)
-			http.Error(w, "Internal server error", http.StatusInternalServerError)
+			err := p.Render(content)
+			if err != nil {
+				log.Println(err)
+				http.Error(w, "Internal server error", http.StatusInternalServerError)
 
-			return
-		}
+				return
+			}
 
-		entry := Entry{
-			Title:   p.Title(),
-			Updated: &p.Date.Time,
-			Links:   []Link{{Href: strings.Join([]string{a.SiteURL, p.Path()}, "/")}},
-		}
+			entry := Entry{
+				Title:   p.Title(),
+				Updated: &p.Date.Time,
+				Links:   []Link{{Href: strings.Join([]string{a.SiteURL, p.Path()}, "/")}},
+			}
 
-		if p.AuthorName != "" {
-			entry.Author = &Author{
-				Name: p.AuthorName,
+			if p.AuthorName != "" {
+				entry.Author = &Author{
+					Name: p.AuthorName,
+				}
+				if p.AuthorEmail != "" {
+					entry.Author.Email = p.AuthorEmail
+				}
 			}
-			if p.AuthorEmail != "" {
-				entry.Author.Email = p.AuthorEmail
+
+			if addContent {
+				entry.Content = &Content{Type: "html", Data: content.String()}
 			}
-		}
-		if addContent {
-			entry.Content = &Content{Type: "html", Data: content.String()}
+			entries = append(entries, entry)
 		}
-		entries = append(entries, entry)
-	}
-
-	feed.Entries = entries
+		feed.Entries = entries
 
-	w.Header().Add("Content-type", "application/xml")
+		w.Header().Add("Content-type", "application/xml")
 
-	_, err = w.Write([]byte(xml.Header))
-	if err != nil {
-		log.Println("Writing xml: ", err)
+		_, err = w.Write([]byte(xml.Header))
+		if err != nil {
+			log.Println("Writing xml: ", err)
 
-		return
-	}
+			return
+		}
 
-	enc := xml.NewEncoder(w)
-	enc.Indent("", " ")
+		enc := xml.NewEncoder(w)
+		enc.Indent("", " ")
 
-	err = enc.Encode(feed)
-	if err != nil {
-		log.Println(err)
-		// Headers probably already sent, but we'll try anyway
-		http.Error(w, "Internal server error", http.StatusInternalServerError)
-	}
+		err = enc.Encode(feed)
+		if err != nil {
+			log.Println(err)
+			// Headers probably already sent, but we'll try anyway
+			http.Error(w, "Internal server error", http.StatusInternalServerError)
+		}
+	})
 }
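
For reference, the "content" and "limit" query parameters documented in the handler's comment could be exercised roughly as below. This is a sketch, not a test from this repo: the zero-value App, the StripPrefix wiring, and the "posts" tag are assumptions about how the route is registered, since the handler now reads the tag from r.URL.Path.

package main

import (
	"net/http"
	"net/http/httptest"
	"testing"
)

// TestFeedQuery exercises the feed's query parameters; a real test would
// populate App's fields (Author, Title, FeedId, SiteURL, ...) first.
func TestFeedQuery(t *testing.T) {
	app := &App{}

	// Assumed registration: mux.Handle("/feed/", http.StripPrefix("/feed/", app.FeedHandler())),
	// so r.URL.Path inside the handler carries only the tag.
	h := http.StripPrefix("/feed/", app.FeedHandler())

	// content=false omits <content> elements; limit=5 caps the feed at five entries.
	req := httptest.NewRequest(http.MethodGet, "/feed/posts?content=false&limit=5", nil)
	rec := httptest.NewRecorder()
	h.ServeHTTP(rec, req)

	if ct := rec.Header().Get("Content-type"); rec.Code == http.StatusOK && ct != "application/xml" {
		t.Errorf("expected application/xml, got %q", ct)
	}
}
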