package main

import (
	"bytes"
	"encoding/xml"
	"errors"
	"fmt"
	"log"
	"net/http"
	"strconv"
	"strings"
	"time"

	"github.com/gorilla/mux"

	"riedstra.dev/mitch/go-website/page"
)

type Author struct {
	Name  string `xml:"name"` // Required
	Uri   string `xml:"uri,omitempty"`
	Email string `xml:"email,omitempty"`
}

type Link struct {
	Href   string `xml:"href,attr,omitempty"`
	Rel    string `xml:"rel,attr,omitempty"`
	Type   string `xml:"type,attr,omitempty"`
	Title  string `xml:"title,attr,omitempty"`
	Length string `xml:"length,attr,omitempty"`
}

type Content struct {
	Type string `xml:"type,attr"`
	Data string `xml:",chardata"`
}

type Entry struct {
	// The spec requires this; it is auto-generated from Title and Updated if
	// otherwise left empty.
	Id        string     `xml:"id"`
	Title     string     `xml:"title"`   // Required
	Updated   *time.Time `xml:"updated"` // Required
	Author    *Author    `xml:"author,omitempty"`
	Published *time.Time `xml:"published,omitempty"`
	Links     []Link     `xml:"link,omitempty"`
	Content   *Content   `xml:"content,omitempty"`
}

func (i Entry) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
	// Alias shares Entry's layout but not its methods, so EncodeElement
	// below does not recurse back into this MarshalXML.
	type Alias Entry

	errs := []string{}
	if i.Title == "" {
		errs = append(errs, "Title cannot be empty")
	}
	if i.Updated == nil {
		errs = append(errs, "Updated cannot be nil")
	}
	if len(errs) > 0 {
		return errors.New(strings.Join(errs, ","))
	}

	if i.Id == "" {
		i.Id = fmt.Sprintf("%s::%d", i.Title, i.Updated.Unix())
	}

	i2 := (*Alias)(&i)
	return e.EncodeElement(i2, start)
}

type Atom struct {
	Ns        string     `xml:"xmlns,attr"`
	Title     string     `xml:"title"`            // Required
	Id        string     `xml:"id"`               // Required
	Author    Author     `xml:"author,omitempty"` // Required
	Updated   *time.Time `xml:"updated"`          // Required
	Published *time.Time `xml:"published,omitempty"`
	Subtitle  string     `xml:"subtitle,omitempty"`
	Entries   []Entry    `xml:"entry"`
}

func (a Atom) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
	// Alias shares Atom's layout but not its methods, avoiding recursion.
	type Alias Atom

	a.Ns = "http://www.w3.org/2005/Atom"

	errs := []string{}
	if a.Id == "" {
		errs = append(errs, "Id cannot be empty")
	}
	if a.Author.Name == "" {
		errs = append(errs, "Author Name cannot be empty")
	}
	if a.Updated == nil {
		errs = append(errs, "Updated cannot be empty")
	}
	if len(errs) > 0 {
		return errors.New(strings.Join(errs, ","))
	}

	start.Name = xml.Name{Local: "feed"}
	a2 := (*Alias)(&a)
	return e.EncodeElement(a2, start)
}
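// A minimal usage sketch of the types above (illustrative only; the values
// are placeholders and not part of this package's handler wiring). Both
// MarshalXML implementations validate the required Atom fields at encode
// time, so a missing Id, Author.Name, Updated, or entry Title is reported as
// an error instead of producing an invalid feed:
//
//	now := time.Now()
//	feed := Atom{
//		Title:   "Example feed",
//		Id:      "https://example.com/feed",
//		Author:  Author{Name: "Jane Doe"},
//		Updated: &now,
//		Entries: []Entry{{Title: "First post", Updated: &now}},
//	}
//	out, err := xml.MarshalIndent(feed, "", "  ")
//	if err != nil {
//		// Validation errors from MarshalXML surface here.
//	}
//	_ = out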
// FeedHandler pulls all of the relevant posts from the index and renders
// them into an Atom feed.
//
// Relevant query parameters are:
//
//	"content" - if unset, or set to "false", entry content is omitted from the feed
//	"limit=n" - include at most n entries in the feed
func (a *App) FeedHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	var addContent bool
	var limit int

	// Include rendered content only when "content" is present and not
	// explicitly set to "false".
	if _, ok := r.URL.Query()["content"]; ok {
		if r.URL.Query().Get("content") != "false" {
			addContent = true
		}
	}
	// limit == 0 means no limit.
	if l := r.URL.Query().Get("limit"); l != "" {
		i, err := strconv.Atoi(l)
		if err == nil {
			limit = i
		}
	}

	tag, ok := vars["tag"]
	if !ok {
		http.Error(w, "Tag not found or supplied", http.StatusNotFound)
		return
	}

	p := page.NewPage("index")
	index, err := p.Index()
	if err != nil {
		log.Println(err)
		http.Error(w, "Internal server error", http.StatusInternalServerError)
		return
	}

	pages, ok := index[tag]
	if !ok {
		http.Error(w, "Invalid tag", http.StatusNotFound)
		return
	}

	pages, dateless := pages.RemoveDateless()
	for _, p := range dateless {
		log.Printf("Warning: page %s has no Date field; skipping inclusion in the feed", p)
	}
	pages.SortDate()

	feed := &Atom{
		Author:   a.Author,
		Title:    a.Title,
		Id:       a.FeedId,
		Updated:  &a.Updated.Time,
		Subtitle: a.Description,
	}

	entries := []Entry{}
	for n, p := range pages {
		if limit != 0 && n >= limit {
			break
		}

		content := &bytes.Buffer{}
		err := p.Render(content)
		if err != nil {
			log.Println(err)
			http.Error(w, "Internal server error", http.StatusInternalServerError)
			return
		}

		entry := Entry{
			Title:   p.Title,
			Updated: &p.Date.Time,
			Links:   []Link{{Href: strings.Join([]string{a.SiteURL, p.Path()}, "/")}},
		}
		if p.AuthorName != "" {
			entry.Author = &Author{
				Name: p.AuthorName,
			}
			if p.AuthorEmail != "" {
				entry.Author.Email = p.AuthorEmail
			}
		}
		if addContent {
			entry.Content = &Content{Type: "html", Data: content.String()}
		}
		entries = append(entries, entry)
	}
	feed.Entries = entries

	// Emit the XML declaration before handing the writer to the encoder.
	w.Header().Add("Content-Type", "application/xml")
	w.Write([]byte(xml.Header))

	enc := xml.NewEncoder(w)
	enc.Indent("", " ")
	err = enc.Encode(feed)
	if err != nil {
		log.Println(err)
		// Headers are probably already sent, but we'll try anyway.
		http.Error(w, "Internal server error", http.StatusInternalServerError)
	}
}
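// FeedHandler reads the {tag} route variable through mux.Vars, so it has to
// be mounted on a gorilla/mux route that supplies one. The actual router
// setup lives elsewhere in this package; a hypothetical registration sketch
// (the path pattern and the app variable, an *App, are assumptions) would
// look like:
//
//	r := mux.NewRouter()
//	r.HandleFunc("/feed/{tag}", app.FeedHandler)
//	// e.g. GET /feed/golang?content=true&limit=10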