diff --git a/.gitignore b/.gitignore
index adf8f72..7b4a7d0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,3 +21,4 @@
 
 # Go workspace file
 go.work
+datastore.json
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..36790d1
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,22 @@
+FROM golang:1.22-alpine AS backend-build
+
+WORKDIR /build
+
+COPY *.go .
+COPY go.mod .
+
+RUN go build -ldflags "-s -w" -o /tmp/rss
+
+FROM alpine:3.20 AS base
+
+WORKDIR /app
+
+COPY --from=backend-build /tmp/rss /app/rss
+COPY go.mod .
+COPY templates templates
+COPY feeds.txt .
+COPY static static
+
+RUN chmod +x /app/rss
+
+CMD ["/app/rss"]
diff --git a/api.go b/api.go
new file mode 100644
index 0000000..90a6404
--- /dev/null
+++ b/api.go
@@ -0,0 +1,42 @@
+package main
+
+import (
+	"fmt"
+	"log/slog"
+	"net/http"
+	"time"
+)
+
+type Route struct {
+	Path    string
+	Handler http.HandlerFunc
+}
+
+type API struct {
+	Routes     []Route
+	StaticRoot string
+}
+
+func LoggingMiddleware(f http.HandlerFunc) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		start := time.Now()
+		f(w, r)
+		slog.Info(fmt.Sprintf("%s - %s (%dms)", r.Method, r.URL, time.Now().Sub(start).Milliseconds()))
+	}
+}
+
+func (a API) Start(addr string) {
+	for _, route := range a.Routes {
+		http.HandleFunc(route.Path, LoggingMiddleware(route.Handler))
+	}
+
+	if a.StaticRoot != "" {
+		http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir(a.StaticRoot))))
+	}
+
+	http.ListenAndServe(addr, nil)
+}
+
+func (a *API) AddRoute(path string, handler http.HandlerFunc) {
+	a.Routes = append(a.Routes, Route{Path: path, Handler: handler})
+}
diff --git a/datastore.go b/datastore.go
new file mode 100644
index 0000000..95c65fb
--- /dev/null
+++ b/datastore.go
@@ -0,0 +1,60 @@
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"log/slog"
+	"os"
+	"strings"
+	"time"
+)
+
+type Datastore struct {
+	Data       map[string]string `json:"data"`
+	LastUpdate time.Time         `json:"updated"`
+}
+
+func FromFile(cachePath string) *Datastore {
+	bytes, err := ioutil.ReadFile(cachePath)
+
+	if err != nil {
+		return nil
+	}
+
+	var store Datastore
+
+	json.Unmarshal(bytes, &store)
+
+	return &store
+}
+
+func (d Datastore) List(namespace string) map[string]string {
+	slog.Debug(fmt.Sprintf("Listing entries with namespace: %s", namespace))
+	withinNamespace := map[string]string{}
+
+	for key, value := range d.Data {
+		if strings.HasPrefix(key, namespace) {
+			withinNamespace[key] = value
+		}
+	}
+
+	return withinNamespace
+}
+
+func (d Datastore) Get(key string) string {
+	slog.Debug(fmt.Sprintf("Getting entry with key: %s", key))
+	return d.Data[key]
+}
+
+func (d *Datastore) Set(key string, value string) {
+	slog.Debug(fmt.Sprintf("Setting entry for key %s", key))
+	d.Data[key] = value
+	os.WriteFile("datastore.json", []byte(d.Serialize()), 0755)
+}
+
+func (d Datastore) Serialize() string {
+	slog.Debug("Serialized state")
+	b, _ := json.Marshal(d)
+	return string(b)
+}
diff --git a/feed_fetch.go b/feed_fetch.go
new file mode 100644
index 0000000..33e9913
--- /dev/null
+++ b/feed_fetch.go
@@ -0,0 +1,34 @@
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+	"io"
+	"log/slog"
+	"net/http"
+)
+
+func fetchFeed(url string) {
+	slog.Info((fmt.Sprintf("Fetching %s\n", url)))
+	re, err := http.Get(url)
+
+	if err != nil {
+		fmt.Printf("%+v", err)
+		return
+	}
+
+	defer re.Body.Close()
+	b, _ := io.ReadAll(re.Body)
+
+	d := ParseFeed(b)
+
+	b, _ = json.Marshal(d.Channel.ItemsToLinks())
fmt.Sprintf("feeds:%s", url) + SharedCache.Set(cacheKey, string(b)) +} + +func refreshFeeds() { + for _, url := range SharedCache.List("feedurl") { + fetchFeed(url) + } +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..6570b09 --- /dev/null +++ b/go.mod @@ -0,0 +1,3 @@ +module rss + +go 1.22.2 diff --git a/main.go b/main.go new file mode 100644 index 0000000..f239d4f --- /dev/null +++ b/main.go @@ -0,0 +1,100 @@ +package main + +import ( + "encoding/json" + "fmt" + "net/http" + "net/url" + "text/template" + "time" +) + +var SharedCache *Datastore + +type Link struct { + Url string `json:"url"` + PublishedDate string `json:"publishedDate"` + Title string `json:"title"` +} + +func healthcheck(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(200) +} + +func about(w http.ResponseWriter, r *http.Request) { + tmpl, _ := template.New("about.html.tmpl").ParseFiles("templates/about.html.tmpl") + tmpl.Execute(w, nil) +} + +func listContent(w http.ResponseWriter, r *http.Request) { + links := []Link{} + for _, feed := range SharedCache.List("feeds") { + var formattedItems []Link + json.Unmarshal([]byte(feed), &formattedItems) + links = append(links, formattedItems...) + } + + tmpl, _ := template.New("index.html.tmpl").ParseFiles("templates/index.html.tmpl") + tmpl.Execute(w, links) +} + +func manageContent(w http.ResponseWriter, r *http.Request) { + if r.Method == "POST" { + r.ParseForm() + + urlValue := r.PostFormValue("url") + + if _, err := url.Parse(urlValue); err != nil { + w.WriteHeader(400) + return + } + + cacheKey := fmt.Sprintf("feedurl:%s", urlValue) + SharedCache.Set(cacheKey, urlValue) + + go fetchFeed(urlValue) + + w.WriteHeader(201) + } + + allFeeds := SharedCache.List("feedurl") + + type ManageTmplData struct { + Feeds map[string]string + } + + tmpl, _ := template.New("manage.html.tmpl").ParseFiles("templates/manage.html.tmpl") + tmpl.Execute(w, ManageTmplData{Feeds: allFeeds}) +} + +var routeMap = map[string]http.HandlerFunc{ + "/": listContent, + "/about": about, + "/manage": manageContent, + "/ping": healthcheck, +} + +func main() { + SharedCache = &Datastore{ + Data: map[string]string{}, + } + + if existingStore := FromFile("datastore.json"); existingStore != nil { + SharedCache = existingStore + } + + api := API{StaticRoot: "./static"} + + for route, handler := range routeMap { + api.AddRoute(route, handler) + } + + go func() { + for { + refreshFeeds() + time.Sleep(10 * time.Minute) + } + }() + + api.Start(":9000") +} diff --git a/rss.go b/rss.go new file mode 100644 index 0000000..a0b5e6e --- /dev/null +++ b/rss.go @@ -0,0 +1,42 @@ +package main + +import ( + "encoding/xml" +) + +type Document struct { + Channel Channel `xml:"channel"` +} + +type Item struct { + Title string `xml:"title"` + Link string `xml:"link"` + Description string `xml:"description"` + PublishedDate string `xml:"pubDate"` +} + +type Channel struct { + Title string `xml:"title"` + Description string `xml:"description"` + Items []Item `xml:"item"` +} + +func (c Channel) ItemsToLinks() []Link { + formattedItems := []Link{} + + for _, feedItem := range c.Items { + formattedItems = append(formattedItems, Link{Url: feedItem.Link, Title: feedItem.Title, PublishedDate: feedItem.PublishedDate}) + } + + return formattedItems +} + +func ParseFeed(raw []byte) Document { + var doc Document + + if err := xml.Unmarshal(raw, &doc); err != nil { + return Document{} + } + + return doc +} diff --git a/static/main.css b/static/main.css new file mode 100644 index 0000000..66b2b68 --- /dev/null +++ 
@@ -0,0 +1,34 @@
+body {
+    margin: 0;
+}
+
+header {
+    padding: 10px;
+}
+
+header > h1 {
+    margin: 0;
+    font-size: 1.2em;
+}
+
+header ul {
+    list-style: none;
+    display: flex;
+    gap: 5px;
+    margin: 0;
+    padding: 5px;
+}
+
+#items {
+    list-style: none;
+    padding-left: 0;
+    margin: 5px;
+}
+
+#items > li {
+    padding: 5px;
+    margin: 2px;
+    background-color: #eef0ff;
+    display: flex;
+    flex-direction: column;
+}
diff --git a/templates/about.html.tmpl b/templates/about.html.tmpl
new file mode 100644
index 0000000..5b1728d
--- /dev/null
+++ b/templates/about.html.tmpl
@@ -0,0 +1,22 @@
+
+
+