feat: equivalent CF worker implementation for proxy + deployment
fix: ensure CORS headers
This commit is contained in:
parent
74446bd0cd
commit
3aa4b1e3bc
3 changed files with 80 additions and 0 deletions
20
.github/workflows/main.yml
vendored
20
.github/workflows/main.yml
vendored
|
@@ -93,6 +93,26 @@ jobs:
|
||||||
with:
|
with:
|
||||||
name: build-artifacts
|
name: build-artifacts
|
||||||
path: dist
|
path: dist
|
||||||
|
deploy-functions:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
name: Deploy functions
|
||||||
|
needs: build-app
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Yarn cache
|
||||||
|
uses: actions/cache@v4
|
||||||
|
id: yarn-cache-restore
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
.yarn
|
||||||
|
key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}-${{ env.NODE_VERSION }}
|
||||||
|
- run: |
|
||||||
|
. script/bootstrap.sh
|
||||||
|
- uses: cloudflare/wrangler-action@v3
|
||||||
|
with:
|
||||||
|
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
command: deploy --name=rss-reader-proxy --minify ./functions/rss-proxy/rss-proxy-cloudflare.mts
|
||||||
preview:
|
preview:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
name: Deploy preview
|
name: Deploy preview
|
||||||
|
|
59
functions/rss-proxy/rss-proxy-cloudflare.mts
Executable file
59
functions/rss-proxy/rss-proxy-cloudflare.mts
Executable file
|
@@ -0,0 +1,59 @@
|
||||||
|
/*
|
||||||
|
* Fetches and parses RSS feeds.
|
||||||
|
*
|
||||||
|
* This handles the fetching, XML parsing and formatting of
|
||||||
|
 * RSS feed data so that the frontend clients do not have to.
|
||||||
|
*
|
||||||
|
* This is operating on a "by-feed" basis such that each
|
||||||
|
* run only processes one feed, and the clients are expected
|
||||||
|
* to make multiple requests if they have a list of feeds to
|
||||||
|
* follow.
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { parseFeed } from "htmlparser2";
|
||||||
|
|
||||||
|
// Default response headers: CORS access is locked to the production
// frontend origin (see the commit's "ensure CORS headers" intent).
// NOTE(review): no Access-Control-Allow-Methods / -Headers are set —
// presumably only simple GET requests are expected so no preflight
// occurs; confirm against the client code.
const defaultHeaders = {
  "Access-Control-Allow-Origin": "https://rss-reader.karnov.club",
};
|
||||||
|
|
||||||
|
function processFeedXML(feed) {
|
||||||
|
return {
|
||||||
|
title: feed.title,
|
||||||
|
lastPull: String(Date.now()),
|
||||||
|
items: feed.items.reduce((items, feedItem) => {
|
||||||
|
items.push({
|
||||||
|
title: feedItem.title,
|
||||||
|
url: feedItem.link,
|
||||||
|
published: new Date(feedItem.pubDate),
|
||||||
|
});
|
||||||
|
|
||||||
|
return items;
|
||||||
|
}, []),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async function fetchHandler(request: Request) {
|
||||||
|
const requestUrl = new URL(request.url);
|
||||||
|
const proxiedUrl = requestUrl.searchParams.get("url");
|
||||||
|
try {
|
||||||
|
const responseData = await fetch(proxiedUrl);
|
||||||
|
const data = await responseData.text();
|
||||||
|
const newFeedData = parseFeed(data);
|
||||||
|
const newFeed = processFeedXML(newFeedData);
|
||||||
|
const mergedFeeds = newFeed;
|
||||||
|
|
||||||
|
return new Response(JSON.stringify(mergedFeeds), {
|
||||||
|
status: 200,
|
||||||
|
headers: defaultHeaders,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
return new Response(error.toString(), {
|
||||||
|
status: 500,
|
||||||
|
headers: defaultHeaders,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cloudflare Workers module-syntax entry point: incoming requests are
// dispatched to the fetch handler defined above.
export default {
  fetch: fetchHandler,
};
|
1
wrangler.toml
Normal file
1
wrangler.toml
Normal file
|
@@ -0,0 +1 @@
|
||||||
|
compatibility_date = "2024-03-18"
|
Loading…
Reference in a new issue