aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorChristine Dodrill <me@christine.website>2021-01-14 22:36:34 -0500
committerGitHub <noreply@github.com>2021-01-14 22:36:34 -0500
commitd2455aa1c1bfc599a07966a7d717c1380d41bbc0 (patch)
treec2b206aa41cd6f0e13d61b5455861f09ab5d1304
parenta359f54a91f4aeb914c69f59a02afabccd72450e (diff)
downloadxesite-d2455aa1c1bfc599a07966a7d717c1380d41bbc0.tar.xz
xesite-d2455aa1c1bfc599a07966a7d717c1380d41bbc0.zip
Cache better (#296)
* Many improvements around bandwidth use - Use ETags for RSS/Atom feeds - Use cache-control headers - Update to rust nightly (for rust-analyzer and faster builds) - Limit feeds to the last 20 posts: https://twitter.com/theprincessxena/status/1349891678857998339 - Use if-none-match to limit bandwidth further Also does this: - bump go_vanity to 0.3.0 and lets users customize the branch name - fix formatting on jsonfeed - remove last vestige of kubernetes/docker support Signed-off-by: Christine Dodrill <me@christine.website> * expire cache quicker for dynamic pages Signed-off-by: Christine Dodrill <me@christine.website> * add rss ttl Signed-off-by: Christine Dodrill <me@christine.website> * add blogpost Signed-off-by: Christine Dodrill <me@christine.website>
-rw-r--r--Cargo.lock13
-rw-r--r--Cargo.toml3
-rw-r--r--blog/site-update-rss-bandwidth-2021-01-14.markdown69
-rw-r--r--default.nix7
-rw-r--r--docker.nix23
-rw-r--r--lib/go_vanity/Cargo.toml2
-rw-r--r--lib/go_vanity/src/lib.rs10
-rw-r--r--lib/go_vanity/templates/gitea.rs.html4
-rw-r--r--lib/go_vanity/templates/github.rs.html4
-rw-r--r--lib/jsonfeed/src/builder.rs11
-rw-r--r--lib/jsonfeed/src/errors.rs3
-rw-r--r--lib/jsonfeed/src/feed.rs36
-rw-r--r--lib/jsonfeed/src/item.rs116
-rw-r--r--lib/jsonfeed/src/lib.rs66
-rw-r--r--nix/rust.nix4
-rw-r--r--shell.nix2
-rw-r--r--src/app/mod.rs2
-rw-r--r--src/handlers/feeds.rs39
-rw-r--r--src/main.rs57
-rw-r--r--src/post/frontmatter.rs7
-rw-r--r--src/post/mod.rs1
-rw-r--r--templates/blog_rss.rs.xml1
-rw-r--r--templates/footer.rs.html2
23 files changed, 307 insertions, 175 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 4370ea7..5a1e9e9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -644,7 +644,7 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
[[package]]
name = "go_vanity"
-version = "0.1.0"
+version = "0.2.0"
dependencies = [
"mime",
"ructe",
@@ -2365,6 +2365,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05e42f7c18b8f902290b009cde6d651262f956c98bc51bca4cd1d511c9cd85c7"
[[package]]
+name = "uuid"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
+dependencies = [
+ "getrandom 0.2.1",
+ "serde",
+]
+
+[[package]]
name = "vcpkg"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2626,6 +2636,7 @@ dependencies = [
"tracing-futures",
"tracing-subscriber",
"url",
+ "uuid",
"warp",
"xml-rs",
]
diff --git a/Cargo.toml b/Cargo.toml
index 48b0eee..f398181 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "xesite"
-version = "2.1.0"
+version = "2.2.0"
authors = ["Christine Dodrill <me@christine.website>"]
edition = "2018"
build = "src/build.rs"
@@ -34,6 +34,7 @@ tracing-subscriber = { version = "0.2", features = ["fmt"] }
warp = "0.2"
xml-rs = "0.8"
url = "2"
+uuid = { version = "0.8", features = ["serde", "v4"] }
# workspace dependencies
go_vanity = { path = "./lib/go_vanity" }
diff --git a/blog/site-update-rss-bandwidth-2021-01-14.markdown b/blog/site-update-rss-bandwidth-2021-01-14.markdown
new file mode 100644
index 0000000..ce68c48
--- /dev/null
+++ b/blog/site-update-rss-bandwidth-2021-01-14.markdown
@@ -0,0 +1,69 @@
+---
+title: "Site Update: RSS Bandwidth Fixes"
+date: 2021-01-14
+tags:
+ - devops
+ - optimization
+---
+
+# Site Update: RSS Bandwidth Fixes
+
+Well, so I think I found out where my Kubernetes cluster cost came from. For
+context, this blog gets a lot of traffic. Since the last deploy, my blog has
+served its RSS feed over 19,000 times. I have some pretty naive code powering
+the RSS feed. It basically looked something like this:
+
+- Write RSS feed content-type and beginning of feed
+- For every post I have ever made, include its metadata and content
+- Write end of RSS feed
+
+This code was _fantastically simple_ to develop, however it was very expensive
+in terms of bandwidth. When you add all this up, my RSS feed used to be more
+than a _one megabyte_ response. It was also only getting larger as I posted more
+content.
+
+This is unsustainable, so I have taken multiple actions to try and fix this from
+several angles.
+
+<blockquote class="twitter-tweet"><p lang="en" dir="ltr">Rationale: this is my
+most commonly hit and largest endpoint. I want to try and cut down its size.
+<br><br>current feed (everything): 1356706 bytes<br>20 posts: 177931 bytes<br>10
+posts: 53004 bytes<br>5 posts: 29318 bytes <a
+href="https://t.co/snjnn8RFh8">pic.twitter.com/snjnn8RFh8</a></p>&mdash; Cadey
+A. Ratio (@theprincessxena) <a
+href="https://twitter.com/theprincessxena/status/1349892662871150594?ref_src=twsrc%5Etfw">January
+15, 2021</a></blockquote> <script async
+src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
+
+[Yes, that graph is showing in _gigabytes_. We're so lucky that bandwidth is
+free on Hetzner.](conversation://Mara/hacker)
+
+First I finally set up the site to run behind Cloudflare. The Cloudflare
+settings are set very permissively, so your RSS feed reading bots or whatever
+should NOT be affected by this change. If you run into any side effects as a
+result of this change, [contact me](/contact) and I can fix it.
+
+Second, I also now set cache control headers on every response. By default the
+"static" pages are cached for a day and the "dynamic" pages are cached for 5
+minutes. This should allow new posts to show up quickly as they have previously.
+
+Thirdly, I set up
+[ETags](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag) for the
+feeds. Each of my feeds will send an ETag in a response header. Please use this
+tag in future requests to ensure that you don't ask for content you already
+have. From what I recall most RSS readers should already support this, however
+I'll monitor the situation as reality demands.
+
+Lastly, I adjusted the
+[ttl](https://cyber.harvard.edu/rss/rss.html#ltttlgtSubelementOfLtchannelgt) of
+the RSS feed so that compliant feed readers should only check once per day. I've
+seen some feed readers request the feed up to every 5 minutes, which is very
+excessive. Hopefully this setting will gently nudge them into behaving.
+
+As a nice side effect I should have slightly lower RAM usage on the blog server
+too! Right now it's sitting at about 58 and a half MB of RAM, however with fewer
+copies of my posts sitting in memory this should fall by a significant amount.
+
+If you have any feedback about this, please [contact me](/contact) or mention me
+on Twitter. I read my email frequently and am notified about Twitter mentions
+very quickly.
diff --git a/default.nix b/default.nix
index 68536a1..0fa38c5 100644
--- a/default.nix
+++ b/default.nix
@@ -2,12 +2,17 @@
with pkgs;
let
+ rust = pkgs.callPackage ./nix/rust.nix { };
+
srcNoTarget = dir:
builtins.filterSource
(path: type: type != "directory" || builtins.baseNameOf path != "target")
dir;
- naersk = pkgs.callPackage sources.naersk { };
+ naersk = pkgs.callPackage sources.naersk {
+ rustc = rust;
+ cargo = rust;
+ };
dhallpkgs = import sources.easy-dhall-nix { inherit pkgs; };
src = srcNoTarget ./.;
diff --git a/docker.nix b/docker.nix
deleted file mode 100644
index a49bb18..0000000
--- a/docker.nix
+++ /dev/null
@@ -1,23 +0,0 @@
-{ system ? builtins.currentSystem }:
-
-let
- sources = import ./nix/sources.nix;
- pkgs = import sources.nixpkgs { inherit system; };
- callPackage = pkgs.lib.callPackageWith pkgs;
- site = callPackage ./default.nix { };
-
- dockerImage = pkg:
- pkgs.dockerTools.buildLayeredImage {
- name = "xena/christinewebsite";
- tag = "latest";
-
- contents = [ pkgs.cacert pkg ];
-
- config = {
- Cmd = [ "${pkg}/bin/xesite" ];
- Env = [ "CONFIG_FNAME=${pkg}/config.dhall" "RUST_LOG=info" ];
- WorkingDir = "/";
- };
- };
-
-in dockerImage site
diff --git a/lib/go_vanity/Cargo.toml b/lib/go_vanity/Cargo.toml
index 90fa4f2..f4e5432 100644
--- a/lib/go_vanity/Cargo.toml
+++ b/lib/go_vanity/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "go_vanity"
-version = "0.1.0"
+version = "0.2.0"
authors = ["Christine Dodrill <me@christine.website>"]
edition = "2018"
build = "src/build.rs"
diff --git a/lib/go_vanity/src/lib.rs b/lib/go_vanity/src/lib.rs
index e4a11e2..756c555 100644
--- a/lib/go_vanity/src/lib.rs
+++ b/lib/go_vanity/src/lib.rs
@@ -1,12 +1,12 @@
+use crate::templates::RenderRucte;
use warp::{http::Response, Rejection, Reply};
-use crate::templates::{RenderRucte};
include!(concat!(env!("OUT_DIR"), "/templates.rs"));
-pub async fn gitea(pkg_name: &str, git_repo: &str) -> Result<impl Reply, Rejection> {
- Response::builder().html(|o| templates::gitea_html(o, pkg_name, git_repo))
+pub async fn gitea(pkg_name: &str, git_repo: &str, branch: &str) -> Result<impl Reply, Rejection> {
+ Response::builder().html(|o| templates::gitea_html(o, pkg_name, git_repo, branch))
}
-pub async fn github(pkg_name: &str, git_repo: &str) -> Result<impl Reply, Rejection> {
- Response::builder().html(|o| templates::github_html(o, pkg_name, git_repo))
+pub async fn github(pkg_name: &str, git_repo: &str, branch: &str) -> Result<impl Reply, Rejection> {
+ Response::builder().html(|o| templates::github_html(o, pkg_name, git_repo, branch))
}
diff --git a/lib/go_vanity/templates/gitea.rs.html b/lib/go_vanity/templates/gitea.rs.html
index b20985b..f062d91 100644
--- a/lib/go_vanity/templates/gitea.rs.html
+++ b/lib/go_vanity/templates/gitea.rs.html
@@ -1,11 +1,11 @@
-@(pkg_name: &str, git_repo: &str)
+@(pkg_name: &str, git_repo: &str, branch: &str)
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
<meta name="go-import" content="@pkg_name git @git_repo">
- <meta name="go-source" content="@pkg_name @git_repo @git_repo/src/master@{/dir@} @git_repo/src/master@{/dir@}/@{file@}#L@{line@}">
+ <meta name="go-source" content="@pkg_name @git_repo @git_repo/src/@branch@{/dir@} @git_repo/src/@branch@{/dir@}/@{file@}#L@{line@}">
<meta http-equiv="refresh" content="0; url=https://godoc.org/@pkg_name">
</head>
<body>
diff --git a/lib/go_vanity/templates/github.rs.html b/lib/go_vanity/templates/github.rs.html
index 61f42e5..9782b1c 100644
--- a/lib/go_vanity/templates/github.rs.html
+++ b/lib/go_vanity/templates/github.rs.html
@@ -1,11 +1,11 @@
-@(pkg_name: &str, git_repo: &str)
+@(pkg_name: &str, git_repo: &str, branch: &str)
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
<meta name="go-import" content="@pkg_name git @git_repo">
- <meta name="go-source" content="@pkg_name @git_repo @git_repo/tree/master@{/dir@} @git_repo/blob/master@{/dir@}/@{file@}#L@{line@}">
+ <meta name="go-source" content="@pkg_name @git_repo @git_repo/tree/@branch@{/dir@} @git_repo/blob/@branch@{/dir@}/@{file@}#L@{line@}">
<meta http-equiv="refresh" content="0; url=https://godoc.org/@pkg_name">
</head>
<body>
diff --git a/lib/jsonfeed/src/builder.rs b/lib/jsonfeed/src/builder.rs
index f17740f..640a280 100644
--- a/lib/jsonfeed/src/builder.rs
+++ b/lib/jsonfeed/src/builder.rs
@@ -1,7 +1,7 @@
use std::default::Default;
use errors::*;
-use feed::{Feed, Author, Attachment};
+use feed::{Attachment, Author, Feed};
use item::{Content, Item};
/// Feed Builder
@@ -160,7 +160,7 @@ impl ItemBuilder {
match self.content {
Some(Content::Text(t)) => {
self.content = Some(Content::Both(i.into(), t));
- },
+ }
_ => {
self.content = Some(Content::Html(i.into()));
}
@@ -172,10 +172,10 @@ impl ItemBuilder {
match self.content {
Some(Content::Html(s)) => {
self.content = Some(Content::Both(s, i.into()));
- },
+ }
_ => {
self.content = Some(Content::Text(i.into()));
- },
+ }
}
self
}
@@ -197,8 +197,7 @@ impl ItemBuilder {
date_modified: self.date_modified,
author: self.author,
tags: self.tags,
- attachments: self.attachments
+ attachments: self.attachments,
})
}
}
-
diff --git a/lib/jsonfeed/src/errors.rs b/lib/jsonfeed/src/errors.rs
index 936b7ec..b94779c 100644
--- a/lib/jsonfeed/src/errors.rs
+++ b/lib/jsonfeed/src/errors.rs
@@ -1,7 +1,6 @@
use serde_json;
-error_chain!{
+error_chain! {
foreign_links {
Serde(serde_json::Error);
}
}
-
diff --git a/lib/jsonfeed/src/feed.rs b/lib/jsonfeed/src/feed.rs
index 8b5b5ce..320feb6 100644
--- a/lib/jsonfeed/src/feed.rs
+++ b/lib/jsonfeed/src/feed.rs
@@ -1,7 +1,7 @@
use std::default::Default;
-use item::Item;
use builder::Builder;
+use item::Item;
const VERSION_1: &'static str = "https://jsonfeed.org/version/1";
@@ -145,9 +145,9 @@ pub struct Hub {
#[cfg(test)]
mod tests {
+ use super::*;
use serde_json;
use std::default::Default;
- use super::*;
#[test]
fn serialize_feed() {
@@ -168,18 +168,16 @@ mod tests {
#[test]
fn deserialize_feed() {
- let json = r#"{"version":"https://jsonfeed.org/version/1","title":"some title","items":[]}"#;
+ let json =
+ r#"{"version":"https://jsonfeed.org/version/1","title":"some title","items":[]}"#;
let feed: Feed = serde_json::from_str(&json).unwrap();
let expected = Feed {
version: "https://jsonfeed.org/version/1".to_string(),
- title: "some title".to_string(),
- items: vec![],
- ..Default::default()
+ title: "some title".to_string(),
+ items: vec![],
+ ..Default::default()
};
- assert_eq!(
- feed,
- expected
- );
+ assert_eq!(feed, expected);
}
#[test]
@@ -208,10 +206,7 @@ mod tests {
size_in_bytes: Some(1),
duration_in_seconds: Some(1),
};
- assert_eq!(
- attachment,
- expected
- );
+ assert_eq!(attachment, expected);
}
#[test]
@@ -229,17 +224,15 @@ mod tests {
#[test]
fn deserialize_author() {
- let json = r#"{"name":"bob jones","url":"http://example.com","avatar":"http://img.com/blah"}"#;
+ let json =
+ r#"{"name":"bob jones","url":"http://example.com","avatar":"http://img.com/blah"}"#;
let author: Author = serde_json::from_str(&json).unwrap();
let expected = Author {
name: Some("bob jones".to_string()),
url: Some("http://example.com".to_string()),
avatar: Some("http://img.com/blah".to_string()),
};
- assert_eq!(
- author,
- expected
- );
+ assert_eq!(author, expected);
}
#[test]
@@ -262,10 +255,7 @@ mod tests {
type_: "some-type".to_string(),
url: "http://example.com".to_string(),
};
- assert_eq!(
- hub,
- expected
- );
+ assert_eq!(hub, expected);
}
#[test]
diff --git a/lib/jsonfeed/src/item.rs b/lib/jsonfeed/src/item.rs
index 605525b..0f7d6ab 100644
--- a/lib/jsonfeed/src/item.rs
+++ b/lib/jsonfeed/src/item.rs
@@ -1,11 +1,11 @@
-use std::fmt;
use std::default::Default;
+use std::fmt;
-use feed::{Author, Attachment};
use builder::ItemBuilder;
+use feed::{Attachment, Author};
-use serde::ser::{Serialize, Serializer, SerializeStruct};
-use serde::de::{self, Deserialize, Deserializer, Visitor, MapAccess};
+use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor};
+use serde::ser::{Serialize, SerializeStruct, Serializer};
/// Represents the `content_html` and `content_text` attributes of an item
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
@@ -61,7 +61,8 @@ impl Default for Item {
impl Serialize for Item {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
- where S: Serializer
+ where
+ S: Serializer,
{
let mut state = serializer.serialize_struct("Item", 14)?;
state.serialize_field("id", &self.id)?;
@@ -78,15 +79,15 @@ impl Serialize for Item {
Content::Html(ref s) => {
state.serialize_field("content_html", s)?;
state.serialize_field("content_text", &None::<Option<&str>>)?;
- },
+ }
Content::Text(ref s) => {
state.serialize_field("content_html", &None::<Option<&str>>)?;
state.serialize_field("content_text", s)?;
- },
+ }
Content::Both(ref s, ref t) => {
state.serialize_field("content_html", s)?;
state.serialize_field("content_text", t)?;
- },
+ }
};
if self.summary.is_some() {
state.serialize_field("summary", &self.summary)?;
@@ -117,8 +118,9 @@ impl Serialize for Item {
}
impl<'de> Deserialize<'de> for Item {
- fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where D: Deserializer<'de>
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
{
enum Field {
Id,
@@ -135,11 +137,12 @@ impl<'de> Deserialize<'de> for Item {
Author,
Tags,
Attachments,
- };
+ }
impl<'de> Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
- where D: Deserializer<'de>
+ where
+ D: Deserializer<'de>,
{
struct FieldVisitor;
@@ -151,7 +154,8 @@ impl<'de> Deserialize<'de> for Item {
}
fn visit_str<E>(self, value: &str) -> Result<Field, E>
- where E: de::Error
+ where
+ E: de::Error,
{
match value {
"id" => Ok(Field::Id),
@@ -186,7 +190,8 @@ impl<'de> Deserialize<'de> for Item {
}
fn visit_map<V>(self, mut map: V) -> Result<Item, V::Error>
- where V: MapAccess<'de>
+ where
+ V: MapAccess<'de>,
{
let mut id = None;
let mut url = None;
@@ -210,99 +215,93 @@ impl<'de> Deserialize<'de> for Item {
return Err(de::Error::duplicate_field("id"));
}
id = Some(map.next_value()?);
- },
+ }
Field::Url => {
if url.is_some() {
return Err(de::Error::duplicate_field("url"));
}
url = map.next_value()?;
- },
+ }
Field::ExternalUrl => {
if external_url.is_some() {
return Err(de::Error::duplicate_field("external_url"));
}
external_url = map.next_value()?;
- },
+ }
Field::Title => {
if title.is_some() {
return Err(de::Error::duplicate_field("title"));
}
title = map.next_value()?;
- },
+ }
Field::ContentHtml => {
if content_html.is_some() {
return Err(de::Error::duplicate_field("content_html"));
}
content_html = map.next_value()?;
- },
+ }
Field::ContentText => {
if content_text.is_some() {
return Err(de::Error::duplicate_field("content_text"));
}
content_text = map.next_value()?;
- },
+ }
Field::Summary => {
if summary.is_some() {
return Err(de::Error::duplicate_field("summary"));
}
summary = map.next_value()?;
- },
+ }
Field::Image => {
if image.is_some() {
return Err(de::Error::duplicate_field("image"));
}
image = map.next_value()?;
- },
+ }
Field::BannerImage => {
if banner_image.is_some() {
return Err(de::Error::duplicate_field("banner_image"));
}
banner_image = map.next_value()?;
- },
+ }
Field::DatePublished => {
if date_published.is_some() {
return Err(de::Error::duplicate_field("date_published"));
}
date_published = map.next_value()?;
- },
+ }
Field::DateModified => {
if date_modified.is_some() {
return Err(de::Error::duplicate_field("date_modified"));
}
date_modified = map.next_value()?;
- },
+ }
Field::Author => {
if author.is_some() {
return Err(de::Error::duplicate_field("author"));
}
author = map.next_value()?;
- },
+ }
Field::Tags => {
if tags.is_some() {
return Err(de::Error::duplicate_field("tags"));
}
tags = map.next_value()?;
- },
+ }
Field::Attachments => {
if attachments.is_some() {
return Err(de::Error::duplicate_field("attachments"));
}
attachments = map.next_value()?;
- },
+ }
}
}
let id = id.ok_or_else(|| de::Error::missing_field("id"))?;
let content = match (content_html, content_text) {
- (Some(s), Some(t)) => {
- Content::Both(s.to_string(), t.to_string())
- },
- (Some(s), _) => {
- Content::Html(s.to_string())
- },
- (_, Some(t)) => {
- Content::Text(t.to_string())
- },
+ (Some(s), Some(t)) => Content::Both(s.to_string(), t.to_string()),
+ (Some(s), _) => Content::Html(s.to_string()),
+ (_, Some(t)) => Content::Text(t.to_string()),
_ => return Err(de::Error::missing_field("content_html or content_text")),
};
@@ -363,7 +362,12 @@ mod tests {
banner_image: Some("http://img.com/blah".into()),
date_published: Some("2017-01-01 10:00:00".into()),
date_modified: Some("2017-01-01 10:00:00".into()),
- author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+ author: Some(
+ Author::new()
+ .name("bob jones")
+ .url("http://example.com")
+ .avatar("http://img.com/blah"),
+ ),
tags: Some(vec!["json".into(), "feed".into()]),
attachments: Some(vec![]),
};
@@ -387,7 +391,12 @@ mod tests {
banner_image: Some("http://img.com/blah".into()),
date_published: Some("2017-01-01 10:00:00".into()),
date_modified: Some("2017-01-01 10:00:00".into()),
- author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+ author: Some(
+ Author::new()
+ .name("bob jones")
+ .url("http://example.com")
+ .avatar("http://img.com/blah"),
+ ),
tags: Some(vec!["json".into(), "feed".into()]),
attachments: Some(vec![]),
};
@@ -411,7 +420,12 @@ mod tests {
banner_image: Some("http://img.com/blah".into()),
date_published: Some("2017-01-01 10:00:00".into()),
date_modified: Some("2017-01-01 10:00:00".into()),
- author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+ author: Some(
+ Author::new()
+ .name("bob jones")
+ .url("http://example.com")
+ .avatar("http://img.com/blah"),
+ ),
tags: Some(vec!["json".into(), "feed".into()]),
attachments: Some(vec![]),
};
@@ -437,7 +451,12 @@ mod tests {
banner_image: Some("http://img.com/blah".into()),
date_published: Some("2017-01-01 10:00:00".into()),
date_modified: Some("2017-01-01 10:00:00".into()),
- author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+ author: Some(
+ Author::new()
+ .name("bob jones")
+ .url("http://example.com")
+ .avatar("http://img.com/blah"),
+ ),
tags: Some(vec!["json".into(), "feed".into()]),
attachments: Some(vec![]),
};
@@ -460,7 +479,12 @@ mod tests {
banner_image: Some("http://img.com/blah".into()),
date_published: Some("2017-01-01 10:00:00".into()),
date_modified: Some("2017-01-01 10:00:00".into()),
- author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+ author: Some(
+ Author::new()
+ .name("bob jones")
+ .url("http://example.com")
+ .avatar("http://img.com/blah"),
+ ),
tags: Some(vec!["json".into(), "feed".into()]),
attachments: Some(vec![]),
};
@@ -483,11 +507,15 @@ mod tests {
banner_image: Some("http://img.com/blah".into()),
date_published: Some("2017-01-01 10:00:00".into()),
date_modified: Some("2017-01-01 10:00:00".into()),
- author: Some(Author::new().name("bob jones").url("http://example.com").avatar("http://img.com/blah")),
+ author: Some(
+ Author::new()
+ .name("bob jones")
+ .url("http://example.com")
+ .avatar("http://img.com/blah"),
+ ),
tags: Some(vec!["json".into(), "feed".into()]),
attachments: Some(vec![]),
};
assert_eq!(item, expected);
}
}
-
diff --git a/lib/jsonfeed/src/lib.rs b/lib/jsonfeed/src/lib.rs
index bc1d94e..812083e 100644
--- a/lib/jsonfeed/src/lib.rs
+++ b/lib/jsonfeed/src/lib.rs
@@ -2,7 +2,7 @@
//! instead of XML
//!
//! This crate can serialize and deserialize between JSON Feed strings
-//! and Rust data structures. It also allows for programmatically building
+//! and Rust data structures. It also allows for programmatically building
//! a JSON Feed
//!
//! Example:
@@ -40,18 +40,20 @@
//! ```
extern crate serde;
-#[macro_use] extern crate error_chain;
-#[macro_use] extern crate serde_derive;
+#[macro_use]
+extern crate error_chain;
+#[macro_use]
+extern crate serde_derive;
extern crate serde_json;
+mod builder;
mod errors;
-mod item;
mod feed;
-mod builder;
+mod item;
pub use errors::*;
+pub use feed::{Attachment, Author, Feed};
pub use item::*;
-pub use feed::{Feed, Author, Attachment};
use std::io::Write;
@@ -116,14 +118,16 @@ pub fn to_vec_pretty(value: &Feed) -> Result<Vec<u8>> {
/// Serialize a Feed to JSON and output to an IO stream
pub fn to_writer<W>(writer: W, value: &Feed) -> Result<()>
- where W: Write
+where
+ W: Write,
{
Ok(serde_json::to_writer(writer, value)?)