| author | Xe Iaso <me@xeiaso.net> 2025-04-26 19:45:45 -0400 |
|---|---|
| committer | Xe Iaso <me@xeiaso.net> 2025-04-26 19:45:45 -0400 |
| commit | 929e2debb8b9a63c44e3bb02387a6774821ccb99 (patch) |
| tree | b593e0beeaf17de681403a96c3d3bc84cfeefd62 |
| parent | 08dd2fce0f46670d146da5748d46ddaa88897098 (diff) |
| download | x-929e2debb8b9a63c44e3bb02387a6774821ccb99.tar.xz, x-929e2debb8b9a63c44e3bb02387a6774821ccb99.zip |
feat(anubis): replace with tombstone
Signed-off-by: Xe Iaso <me@xeiaso.net>
32 files changed, 3 insertions, 2115 deletions
````diff
diff --git a/cmd/anubis/.gitignore b/cmd/anubis/.gitignore
deleted file mode 100644
index 061bf12..0000000
--- a/cmd/anubis/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-*.rpm
-anubis
diff --git a/cmd/anubis/CHANGELOG.md b/cmd/anubis/CHANGELOG.md
deleted file mode 100644
index 612bec1..0000000
--- a/cmd/anubis/CHANGELOG.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# CHANGELOG
-
-## 2025-01-24
-
-- Added support for custom bot policy documentation, allowing administrators to change how Anubis works to meet their needs.
diff --git a/cmd/anubis/README.md b/cmd/anubis/README.md
index 523cfc3..da0425e 100644
--- a/cmd/anubis/README.md
+++ b/cmd/anubis/README.md
@@ -1,299 +1,5 @@
-# Anubis
+# Moved to [TecharoHQ/anubis](https://github.com/TecharoHQ/anubis)
-
-> [!IMPORTANT]
-> Anubis has moved to [TecharoHQ/anubis](https://github.com/TecharoHQ/anubis). Please update your configuration to change Docker images from `ghcr.io/xe/x/anubis` to `ghcr.io/techarohq/anubis`.
+
+Anubis used to be here, but is not any more. Please go here:
-
-
-
-
-
-
-Anubis [weighs the soul of your connection](https://en.wikipedia.org/wiki/Weighing_of_souls) using a sha256 proof-of-work challenge in order to protect upstream resources from scraper bots.
-
-Installing and using this will likely result in your website not being indexed by some search engines. This is considered a feature of Anubis, not a bug.
-
-This is a bit of a nuclear response, but AI scraper bots scraping so aggressively have forced my hand. I hate that I have to do this, but this is what we get for the modern Internet because bots don't conform to standards like robots.txt, even when they claim to.
-
-In most cases, you should not need this and can probably get by using Cloudflare to protect a given origin. However, for circumstances where you can't or won't use Cloudflare, Anubis is there for you.
-
-If you want to try this out, connect to [git.xeserv.us](https://git.xeserv.us).
-
-## Support
-
-If you run into any issues running Anubis, please [open an issue](https://github.com/TecharoHQ/anubis/issues/new?template=Blank+issue) and tag it with the Anubis tag. Please include all the information I would need to diagnose your issue.
-
-For live chat, please join the [Patreon](https://patreon.com/cadey) and ask in the Patron Discord in the channel `#anubis`.
-
-## How Anubis works
-
-Anubis uses a proof-of-work challenge to ensure that clients are using a modern browser and are able to calculate SHA-256 checksums. Anubis has a customizable difficulty for this proof-of-work challenge, but defaults to 5 leading zeroes.
````
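The proof-of-work loop the deleted README describes is easy to sketch: hash the challenge string with a counter until the hex digest has the required number of leading zero characters. A minimal Go sketch, assuming string challenges and a hex-digit difficulty (illustrative, not the deleted implementation):

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"strconv"
	"strings"
)

// solve brute-forces a nonce such that the hex digest of
// sha256(challenge + nonce) starts with `difficulty` zero characters.
func solve(challenge string, difficulty int) (nonce int, digest string) {
	prefix := strings.Repeat("0", difficulty)
	for n := 0; ; n++ {
		sum := sha256.Sum256([]byte(challenge + strconv.Itoa(n)))
		d := hex.EncodeToString(sum[:])
		if strings.HasPrefix(d, prefix) {
			return n, d
		}
	}
}

func main() {
	// Difficulty 4 keeps the demo fast; Anubis defaults to 5.
	nonce, digest := solve("example-challenge", 4)
	fmt.Printf("nonce=%d digest=%s\n", nonce, digest)
}
```

Each extra zero multiplies the expected work by 16, so the default difficulty of 5 costs about 16^5 (roughly one million) hashes per visitor: trivial for one browser, expensive at scraper volume.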
````diff
-
-```mermaid
----
-title: Challenge generation and validation
----
-
-flowchart TD
-    Backend("Backend")
-    Fail("Fail")
-
-    style PresentChallenge color:#FFFFFF, fill:#AA00FF, stroke:#AA00FF
-    style ValidateChallenge color:#FFFFFF, fill:#AA00FF, stroke:#AA00FF
-    style Backend color:#FFFFFF, stroke:#00C853, fill:#00C853
-    style Fail color:#FFFFFF, stroke:#FF2962, fill:#FF2962
-
-    subgraph Server
-        PresentChallenge("Present Challenge")
-        ValidateChallenge("Validate Challenge")
-    end
-
-    subgraph Client
-        Main("main.mjs")
-        Worker("Worker")
-    end
-
-    Main -- Request challenge --> PresentChallenge
-    PresentChallenge -- Return challenge & difficulty --> Main
-    Main -- Spawn worker --> Worker
-    Worker -- Successful challenge --> Main
-    Main -- Validate challenge --> ValidateChallenge
-    ValidateChallenge -- Return cookie --> Backend
-    ValidateChallenge -- If anything is wrong --> Fail
-```
-
-### Challenge presentation
-
-Anubis decides to present a challenge using this logic:
-
-- User-Agent contains `"Mozilla"`
-- Request path is not in `/.well-known`, `/robots.txt`, or `/favicon.ico`
-- Request path is not obviously an RSS feed (ends with `.rss`, `.xml`, or `.atom`)
-
-This should ensure that git clients, RSS readers, and other low-harm clients can get through without issue, but high-risk clients such as browsers and AI scraper bots will get blocked.
-
-```mermaid
----
-title: Challenge presentation logic
----
-
-flowchart LR
-    Request("Request")
-    Backend("Backend")
-    %%Fail("Fail")
-    PresentChallenge("Present
-challenge")
-    HasMozilla{"Is browser
-or scraper?"}
-    HasCookie{"Has cookie?"}
-    HasExpired{"Cookie expired?"}
-    HasSignature{"Has valid
-signature?"}
-    RandomJitter{"Secondary
-screening?"}
-    POWPass{"Proof of
-work valid?"}
-
-    style PresentChallenge color:#FFFFFF, fill:#AA00FF, stroke:#AA00FF
-    style Backend color:#FFFFFF, stroke:#00C853, fill:#00C853
-    %%style Fail color:#FFFFFF, stroke:#FF2962, fill:#FF2962
-
-    Request --> HasMozilla
-    HasMozilla -- Yes --> HasCookie
-    HasMozilla -- No --> Backend
-    HasCookie -- Yes --> HasExpired
-    HasCookie -- No --> PresentChallenge
-    HasExpired -- Yes --> PresentChallenge
-    HasExpired -- No --> HasSignature
-    HasSignature -- Yes --> RandomJitter
-    HasSignature -- No --> PresentChallenge
-    RandomJitter -- Yes --> POWPass
-    RandomJitter -- No --> Backend
-    POWPass -- Yes --> Backend
-    POWPass -- No --> PresentChallenge
-    PresentChallenge -- Back again for another cycle --> Request
-```
-
-### Proof of passing challenges
-
-When a client passes a challenge, Anubis sets an HTTP cookie named `"within.website-x-cmd-anubis-auth"` containing a signed [JWT](https://jwt.io/) (JSON Web Token). This JWT contains the following claims:
-
-- `challenge`: The challenge string derived from user request metadata
-- `nonce`: The nonce / iteration number used to generate the passing response
-- `response`: The hash that passed Anubis' checks
-- `iat`: When the token was issued
-- `nbf`: One minute prior to when the token was issued
-- `exp`: The token's expiry, one week after the token was issued
-
-This ensures that the token carries enough metadata to prove its validity (via its signature), and that the server can independently verify it. This cookie is allowed to be set without triggering an EU cookie banner notification, but depending on facts and circumstances, you may wish to disclose this to your users.
````
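For illustration, here is how a token with those claims could be minted using the widely used `github.com/golang-jwt/jwt/v5` library. The claim names and lifetimes follow the list above; the library choice and the `mintToken` helper are assumptions, not necessarily what Anubis shipped:

```go
package main

import (
	"crypto/ed25519"
	"crypto/rand"
	"fmt"
	"time"

	"github.com/golang-jwt/jwt/v5"
)

// mintToken builds a JWT carrying the claims listed above and signs it
// with the server's ed25519 private key.
func mintToken(priv ed25519.PrivateKey, challenge, response string, nonce int) (string, error) {
	now := time.Now()
	claims := jwt.MapClaims{
		"challenge": challenge,                          // derived from request metadata
		"nonce":     nonce,                              // iteration that produced the hash
		"response":  response,                           // the hash that passed the check
		"iat":       now.Unix(),                         // issued at
		"nbf":       now.Add(-time.Minute).Unix(),       // valid from one minute before issuance
		"exp":       now.Add(7 * 24 * time.Hour).Unix(), // expires a week later
	}
	return jwt.NewWithClaims(jwt.SigningMethodEdDSA, claims).SignedString(priv)
}

func main() {
	_, priv, _ := ed25519.GenerateKey(rand.Reader)
	tok, err := mintToken(priv, "example-challenge", "0000deadbeef", 81374)
	if err != nil {
		panic(err)
	}
	fmt.Println(tok)
}
```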
````diff
-
-### Challenge format
-
-Challenges are formed by taking some user request metadata and using that to generate a SHA-256 checksum. The following inputs are used:
-
-- `Accept-Encoding`: The content encodings that the requestor supports, such as gzip.
-- `Accept-Language`: The language that the requestor would prefer the server respond in, such as English.
-- `X-Real-Ip`: The IP address of the requestor, as set by a reverse proxy server.
-- `User-Agent`: The user agent string of the requestor.
-- The current time in UTC rounded to the nearest week.
-- The fingerprint (checksum) of Anubis' private ED25519 key.
-
-This forms a fingerprint of the requestor using metadata that any requestor already is sending. It also uses time as an input, which is known to both the server and requestor due to the nature of linear timelines. Depending on facts and circumstances, you may wish to disclose this to your users.
-
-### JWT signing
-
-Anubis uses an ed25519 keypair to sign the JWTs issued when challenges are passed. Anubis will generate a new ed25519 keypair every time it starts. At this time, there is no way to share this keypair between instances of Anubis, but that will be addressed in future versions.
-
-## Setting up Anubis
-
-Anubis is meant to sit between your reverse proxy (such as Nginx or Caddy) and your target service. One instance of Anubis must be used per service you are protecting.
-
-Anubis is shipped in the Docker image [`ghcr.io/xe/x/anubis:latest`](https://github.com/Xe/x/pkgs/container/x%2Fanubis). Other methods to install Anubis may exist, but the Docker image is currently the only supported method.
-
-The Docker image runs Anubis as user ID 1000 and group ID 1000. If you are mounting external volumes into Anubis' container, please be sure they are owned by or writable by this user/group.
-
-Anubis has very minimal system requirements. I suspect that 128Mi of RAM may be sufficient for a large number of concurrent clients. Anubis may be a poor fit for apps that use WebSockets and maintain open connections, but I don't have enough real-world experience to know one way or another.
-
-Anubis uses these environment variables for configuration:
-
-| Environment Variable | Default value              | Explanation |
-| :------------------- | :------------------------- | :---------- |
-| `BIND`               | `:8923`                    | The TCP port that Anubis listens on. |
-| `DIFFICULTY`         | `5`                        | The difficulty of the challenge, or the number of leading zeroes that must be in successful responses. |
-| `METRICS_BIND`       | `:9090`                    | The TCP port that Anubis serves Prometheus metrics on. |
-| `POLICY_FNAME`       | `/data/cfg/botPolicy.json` | The file containing [bot policy configuration](./docs/policies.md). See the bot policy documentation for more details. |
-| `SERVE_ROBOTS_TXT`   | `false`                    | If set `true`, Anubis will serve a default `robots.txt` file that disallows all known AI scrapers by name and then additionally disallows every scraper. This is useful if facts and circumstances make it difficult to change the underlying service to serve such a `robots.txt` file. |
-| `TARGET`             | `http://localhost:3923`    | The URL of the service that Anubis should forward valid requests to. |
````
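A hedged sketch of that fingerprint construction in Go; the field order, separator, and week-rounding details here are assumptions, not the deleted implementation:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"net/http"
	"net/http/httptest"
	"strings"
	"time"
)

// challengeFor derives a challenge string from the request metadata listed
// above, the time rounded to the nearest week, and a fingerprint of the
// server's signing key.
func challengeFor(r *http.Request, keyFingerprint string) string {
	week := time.Now().UTC().Round(7 * 24 * time.Hour).Unix()
	payload := strings.Join([]string{
		r.Header.Get("Accept-Encoding"),
		r.Header.Get("Accept-Language"),
		r.Header.Get("X-Real-Ip"),
		r.Header.Get("User-Agent"),
		fmt.Sprint(week),
		keyFingerprint,
	}, ",")
	sum := sha256.Sum256([]byte(payload))
	return hex.EncodeToString(sum[:])
}

func main() {
	r := httptest.NewRequest("GET", "/", nil)
	r.Header.Set("User-Agent", "Mozilla/5.0 (example)")
	r.Header.Set("Accept-Language", "en-US")
	fmt.Println(challengeFor(r, "ed25519-key-fingerprint"))
}
```

Because the time input only changes weekly, the same client produces the same challenge for up to a week, which is what lets the one-week `exp` claim above line up with challenge validity.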
````diff
-
-### Policies
-
-Anubis has support for custom bot policies, matched by User-Agent string and request path. Check the [bot policy documentation](./docs/policies.md) for more information.
-
-### Docker compose
-
-Add Anubis to your compose file pointed at your service:
-
-```yaml
-services:
-  anubis-nginx:
-    image: ghcr.io/techarohq/anubis:latest
-    environment:
-      BIND: ":8080"
-      DIFFICULTY: "5"
-      METRICS_BIND: ":9090"
-      SERVE_ROBOTS_TXT: "true"
-      TARGET: "http://nginx"
-    ports:
-      - 8080:8080
-  nginx:
-    image: nginx
-    volumes:
-      - "./www:/usr/share/nginx/html"
-```
-
-### Kubernetes
-
-This example makes the following assumptions:
-
-- Your target service is listening on TCP port `5000`.
-- Anubis will be listening on port `8080`.
-
-Attach Anubis to your Deployment:
-
-```yaml
-containers:
-  # ...
-  - name: anubis
-    image: ghcr.io/techarohq/anubis:latest
-    imagePullPolicy: Always
-    env:
-      - name: "BIND"
-        value: ":8080"
-      - name: "DIFFICULTY"
-        value: "5"
-      - name: "METRICS_BIND"
-        value: ":9090"
-      - name: "SERVE_ROBOTS_TXT"
-        value: "true"
-      - name: "TARGET"
-        value: "http://localhost:5000"
-    resources:
-      limits:
-        cpu: 500m
-        memory: 128Mi
-      requests:
-        cpu: 250m
-        memory: 128Mi
-    securityContext:
-      runAsUser: 1000
-      runAsGroup: 1000
-      runAsNonRoot: true
-      allowPrivilegeEscalation: false
-      capabilities:
-        drop:
-          - ALL
-      seccompProfile:
-        type: RuntimeDefault
-```
-
-Then add a Service entry for Anubis:
-
-```diff
-# ...
- spec:
-   ports:
-+    - protocol: TCP
-+      port: 8080
-+      targetPort: 8080
-+      name: anubis
-```
-
-Then point your Ingress to the Anubis port:
-
-```diff
- rules:
-   - host: git.xeserv.us
-     http:
-       paths:
-         - pathType: Prefix
-           path: "/"
-           backend:
-             service:
-               name: git
-               port:
--                name: http
-+                name: anubis
-```
-
-## Known caveats
-
-Anubis works with most programs without any issues as long as they're configured to trust `127.0.0.0/8` and `::1/128` as "valid proxy servers". Some combinations of reverse proxy and target application can have issues. This section documents them so that you can pattern-match and fix them.
-
-### Caddy + Gitea/Forgejo
-
-Gitea/Forgejo relies on the reverse proxy setting the `X-Real-Ip` header. Caddy does not do this by default. Modify your Caddyfile like this:
-
-```diff
- ellenjoe.int.within.lgbt {
-   # ...
--  reverse_proxy http://localhost:3000
-+  reverse_proxy http://localhost:3000 {
-+    header_up X-Real-Ip {remote_host}
-+  }
-   # ...
- }
-```
-
-Ensure that Gitea/Forgejo have `[security].REVERSE_PROXY_TRUSTED_PROXIES` set to the IP ranges that Anubis will appear from. Typically this is sufficient:
-
-```ini
-[security]
-REVERSE_PROXY_TRUSTED_PROXIES = 127.0.0.0/8,::1/128
-```
-
-However, if you are running Anubis in a separate Pod/Deployment in Kubernetes, you may have to adjust this to the IP range of the Pod space in your Container Networking Interface plugin:
-
-```ini
-[security]
-REVERSE_PROXY_TRUSTED_PROXIES = 10.192.0.0/12
-```
+
+https://github.com/TecharoHQ/anubis
diff --git a/cmd/anubis/anubis.env.default b/cmd/anubis/anubis.env.default
deleted file mode 100644
index b72eddd..0000000
--- a/cmd/anubis/anubis.env.default
+++ /dev/null
@@ -1,5 +0,0 @@
-BIND=:8923
-DIFFICULTY=3
-METRICS_BIND=:9090
-SERVE_ROBOTS_TXT=0
-TARGET=http://localhost:3000
\ No newline at end of file
````
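The defaults in this env file mirror the environment-variable table in the deleted README (note it ships `DIFFICULTY=3` where the README documents a default of `5`). Reading such settings in Go takes one small helper; `envOr` is a hypothetical name for illustration, not Anubis' actual code:

```go
package main

import (
	"fmt"
	"os"
)

// envOr reads an environment variable, falling back to a default when unset.
func envOr(key, fallback string) string {
	if v, ok := os.LookupEnv(key); ok {
		return v
	}
	return fallback
}

func main() {
	bind := envOr("BIND", ":8923")
	target := envOr("TARGET", "http://localhost:3000")
	fmt.Printf("listening on %s, proxying to %s\n", bind, target)
}
```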
````diff
diff --git a/cmd/anubis/anubis@.service b/cmd/anubis/anubis@.service
deleted file mode 100644
index 102775f..0000000
--- a/cmd/anubis/anubis@.service
+++ /dev/null
@@ -1,12 +0,0 @@
-[Unit]
-Description="Anubis HTTP defense proxy (instance %i)"
-
-[Service]
-ExecStart=/usr/bin/anubis
-Restart=always
-RestartSec=30s
-EnvironmentFile=/etc/anubis/anubis-%i.env
-LimitNOFILE=infinity
-
-[Install]
-WantedBy=multi-user.target
\ No newline at end of file
````
````diff
diff --git a/cmd/anubis/botPolicies.json b/cmd/anubis/botPolicies.json
deleted file mode 100644
index 6e04a11..0000000
--- a/cmd/anubis/botPolicies.json
+++ /dev/null
@@ -1,70 +0,0 @@
-{
-  "bots": [
-    {
-      "name": "amazonbot",
-      "user_agent_regex": "Amazonbot",
-      "action": "DENY"
-    },
-    {
-      "name": "googlebot",
-      "user_agent_regex": "\\+http\\:\\/\\/www\\.google\\.com/bot\\.html",
-      "action": "ALLOW"
-    },
-    {
-      "name": "bingbot",
-      "user_agent_regex": "\\+http\\:\\/\\/www\\.bing\\.com/bingbot\\.htm",
-      "action": "ALLOW"
-    },
-    {
-      "name": "qwantbot",
-      "user_agent_regex": "\\+https\\:\\/\\/help\\.qwant\\.com/bot/",
-      "action": "ALLOW"
-    },
-    {
-      "name": "us-artificial-intelligence-scraper",
-      "user_agent_regex": "\\+https\\:\\/\\/github\\.com\\/US-Artificial-Intelligence\\/scraper",
-      "action": "DENY"
-    },
-    {
-      "name": "well-known",
-      "path_regex": "^/.well-known/.*$",
-      "action": "ALLOW"
-    },
-    {
-      "name": "favicon",
-      "path_regex": "^/favicon.ico$",
-      "action": "ALLOW"
-    },
-    {
-      "name": "robots-txt",
-      "path_regex": "^/robots.txt$",
-      "action": "ALLOW"
-    },
-    {
-      "name": "rss-readers",
-      "path_regex": ".*\\.(rss|xml|atom|json)$",
-      "action": "ALLOW"
-    },
-    {
-      "name": "lightpanda",
-      "user_agent_regex": "^Lightpanda/.*$",
-      "action": "DENY"
-    },
-    {
-      "name": "headless-chrome",
-      "user_agent_regex": "HeadlessChrome",
-      "action": "DENY"
-    },
-    {
-      "name": "headless-chromium",
-      "user_agent_regex": "HeadlessChromium",
-      "action": "DENY"
-    },
-    {
-      "name": "generic-browser",
-      "user_agent_regex": "Mozilla",
-      "action": "CHALLENGE"
-    }
-  ],
-  "dnsbl": true
-}
diff --git a/cmd/anubis/decaymap.go b/cmd/anubis/decaymap.go
deleted file mode 100644
index f97c4c6..0000000
--- a/cmd/anubis/decaymap.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package main
-
-import (
-    "sync"
-    "time"
-)
-
-func zilch[T any]() T {
-    var zero T
-    return zero
-}
-
-type DecayMap[K, V comparable] struct {
-    data map[K]DecayMapEntry[V]
-    lock sync.RWMutex
-}
-
-type DecayMapEntry[V comparable] struct {
-    Value  V
-    expiry time.Time
-}
-
-func NewDecayMap[K, V comparable]() *DecayMap[K, V] {
-    return &DecayMap[K, V]{
-        data: make(map[K]DecayMapEntry[V]),
-    }
-}
-
-func (m *DecayMap[K, V]) Get(key K) (V, bool) {
-    m.lock.RLock()
-    value, ok := m.data[key]
-    m.lock.RUnlock()
-
-    if !ok {
-        return zilch[V](), false
-    }
-
-    if time.Now().After(value.expiry) {
-        m.lock.Lock()
-        // Since previously reading m.data[key], the value may have been updated.
-        // Delete the entry only if the expiry time is still the same.
-        if m.data[key].expiry == value.expiry {
-            delete(m.data, key)
-        }
-        m.lock.Unlock()
-
-        return zilch[V](), false
-    }
-
-    return value.Value, true
-}
-
-func (m *DecayMap[K, V]) Set(key K, value V, ttl time.Duration) {
-    m.lock.Lock()
-    defer m.lock.Unlock()
-
-    m.data[key] = DecayMapEntry[V]{
-        Value:  value,
-        expiry: time.Now().Add(ttl),
-    }
-}
````
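The deleted `decaymap.go` above is a small generic TTL map: `Get` lazily evicts entries whose deadline has passed, and `Set` stamps each value with an expiry. A short usage sketch, assuming it sits in the same `main` package as the code above:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	m := NewDecayMap[string, int]()
	m.Set("clients-seen", 42, 50*time.Millisecond)

	if v, ok := m.Get("clients-seen"); ok {
		fmt.Println("fresh value:", v) // prints 42
	}

	time.Sleep(100 * time.Millisecond)

	// The TTL has elapsed, so Get evicts the entry and reports a miss.
	if _, ok := m.Get("clients-seen"); !ok {
		fmt.Println("entry expired")
	}
}
```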
````diff
diff --git a/cmd/anubis/docs/policies.md b/cmd/anubis/docs/policies.md
deleted file mode 100644
index 1e1b911..0000000
--- a/cmd/anubis/docs/policies.md
+++ /dev/null
@@ -1,77 +0,0 @@
-# Policies
-
-Out of the box, Anubis is pretty heavy-handed. It will aggressively challenge everything that might be a browser (usually indicated by having `Mozilla` in its user agent). However, some bots are smart enough to get past the challenge. Some things that look like bots may actually be fine (e.g. RSS readers). Some resources need to be visible no matter what. Some resources and remotes are fine to begin with.
-
-Bot policies let you customize the rules that Anubis uses to allow, deny, or challenge incoming requests. Currently you can set policies by the following matches:
-
-- Request path
-- User agent string
-
-Here's an example rule that denies [Amazonbot](https://developer.amazon.com/en/amazonbot):
-
-```json
-{
-  "name": "amazonbot",
-  "user_agent_regex": "Amazonbot",
-  "action": "DENY"
-}
-```
-
-When this rule is evaluated, Anubis will check the `User-Agent` string of the request. If it contains `Amazonbot`, Anubis will send an error page to the user saying that access is denied, but in such a way that makes scrapers think they have correctly loaded the webpage.
-
-Right now the only kinds of policies you can write are bot policies. Other forms of policies will be added in the future.
-
-Here is a minimal policy file that will protect against most scraper bots:
-
-```json
-{
-  "bots": [
-    {
-      "name": "well-known",
-      "path_regex": "^/.well-known/.*$",
-      "action": "ALLOW"
-    },
-    {
-      "name": "favicon",
-      "path_regex": "^/favicon.ico$",
-      "action": "ALLOW"
-    },
-    {
-      "name": "robots-txt",
-      "path_regex": "^/robots.txt$",
-      "action": "ALLOW"
-    },
-    {
-      "name": "generic-browser",
-      "user_agent_regex": "Mozilla",
-      "action": "CHALLENGE"
-    }
-  ]
-}
-```
-
-This allows requests to [`/.well-known`](https://en.wikipedia.org/wiki/Well-known_URI), `/favicon.ico`, and `/robots.txt`, and challenges any request that has the word `Mozilla` in its User-Agent string. The [default policy file](../botPolicies.json) is a bit more cohesive, but this should be more than enough for most users.
-
-If no rules match the request, it is allowed through.
-
-## Writing your own rules
-
-There are three actions that can be returned from a rule:
-
-| Action      | Effects |
-| :---------- | :------ |
-| `ALLOW`     | Bypass all further checks and send the request to the backend. |
-| `DENY`      | Deny the request and send back an error message that scrapers think is a success. |
-| `CHALLENGE` | Show a challenge page and/or validate that clients have passed a challenge. |
-
-Name your rules in lower case using kebab-case. Rule names will be exposed in Prometheus metrics.
-
-In case your service needs it for risk calculation reasons, Anubis exposes information about the rules that any requests match using a few headers:
-
-| Header            | Explanation | Example |
-| :---------------- | :---------- | :------ |
-| `X-Anubis-Rule`   | The name of the rule that was matched | `bot/lightpanda` |
-| `X-Anubis-Action` | The action that Anubis took in response to that rule | `CHALLENGE` |
-| `X-Anubis-Status` | The status and how strict Anubis was in its checks | `PASS-FULL` |
-
-Policy rules are matched using [Go's standard library regular expressions package](https://pkg.go.dev/regexp). You can mess around with the syntax at [regex101.com](https://regex101.com); make sure to select the Golang option.
````
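Since rules are plain Go regular expressions, a rule evaluator reduces to one `regexp.MatchString` call per populated field. A hedged sketch of that matching logic (the `Rule` shape mirrors `botPolicies.json`; this is not the deleted evaluator):

```go
package main

import (
	"fmt"
	"regexp"
)

// Rule mirrors one entry in botPolicies.json.
type Rule struct {
	Name           string
	UserAgentRegex string
	PathRegex      string
	Action         string // ALLOW, DENY, or CHALLENGE
}

// match reports whether a rule applies to a request's User-Agent and path.
// Empty fields are treated as wildcards, matching the optional fields in
// the JSON policy format.
func (r Rule) match(userAgent, path string) bool {
	if r.UserAgentRegex != "" {
		if ok, _ := regexp.MatchString(r.UserAgentRegex, userAgent); !ok {
			return false
		}
	}
	if r.PathRegex != "" {
		if ok, _ := regexp.MatchString(r.PathRegex, path); !ok {
			return false
		}
	}
	return true
}

func main() {
	rule := Rule{Name: "amazonbot", UserAgentRegex: "Amazonbot", Action: "DENY"}
	fmt.Println(rule.match("Mozilla/5.0 (compatible) Amazonbot/0.1", "/")) // true
}
```

In production you would compile each regexp once at policy-load time rather than on every request; `regexp.MatchString` recompiles the pattern per call.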
````diff
diff --git a/cmd/anubis/index.templ b/cmd/anubis/index.templ
deleted file mode 100644
index 304e5f5..0000000
--- a/cmd/anubis/index.templ
+++ /dev/null
@@ -1,159 +0,0 @@
-package main
-
-import (
-    "within.website/x"
-    "within.website/x/xess"
-)
-
-templ base(title string, body templ.Component) {
-    <!DOCTYPE html>
-    <html>
-      <head>
-        <title>{ title }</title>
-        <link rel="stylesheet" href={ xess.URL }/>
-        <meta name="viewport" content="width=device-width, initial-scale=1.0"/>
-        <style>
-          body,
-          html {
-            height: 100%;
-            display: flex;
-            justify-content: center;
-            align-items: center;
-            width: 65ch;
-            margin-left: auto;
-            margin-right: auto;
-          }
-
-          .centered-div {
-            text-align: center;
-          }
-
-          .lds-roller,
-          .lds-roller div,
-          .lds-roller div:after {
-            box-sizing: border-box;
-          }
-          .lds-roller {
-            display: inline-block;
-            position: relative;
-            width: 80px;
-            height: 80px;
-          }
-          .lds-roller div {
-            animation: lds-roller 1.2s cubic-bezier(0.5, 0, 0.5, 1) infinite;
-            transform-origin: 40px 40px;
-          }
-          .lds-roller div:after {
-            content: " ";
-            display: block;
-            position: absolute;
-            width: 7.2px;
-            height: 7.2px;
-            border-radius: 50%;
-            background: currentColor;
-            margin: -3.6px 0 0 -3.6px;
-          }
-          .lds-roller div:nth-child(1) {
-            animation-delay: -0.036s;
-          }
-          .lds-roller div:nth-child(1):after {
-            top: 62.62742px;
-            left: 62.62742px;
-          }
-          .lds-roller div:nth-child(2) {
-            animation-delay: -0.072s;
-          }
-          .lds-roller div:nth-child(2):after {
-            top: 67.71281px;
-            left: 56px;
-          }
-          .lds-roller div:nth-child(3) {
-            animation-delay: -0.108s;
-          }
-          .lds-roller div:nth-child(3):after {
-            top: 70.90963px;
-            left: 48.28221px;
-          }
-          .lds-roller div:nth-child(4) {
-            animation-delay: -0.144s;
-          }
-          .lds-roller div:nth-child(4):after {
-            top: 72px;
-            left: 40px;
-          }
-          .lds-roller div:nth-child(5) {
-            animation-delay: -0.18s;
-          }
-          .lds-roller div:nth-child(5):after {
-            top: 70.90963px;
-            left: 31.71779px;
-          }
-          .lds-roller div:nth-child(6) {
-            animation-delay: -0.216s;
-          }
-          .lds-roller div:nth-child(6):after {
-            top: 67.71281px;
-            left: 24px;
-          }
-          .lds-roller div:nth-child(7) {
-            animation-delay: -0.252s;
-          }
-          .lds-roller div:nth-child(7):after {
-            top: 62.62742px;
-            left: 17.37258px;
-          }
-          .lds-roller div:nth-child(8) {
-            animation-delay: -0.288s;
-          }
-          .lds-roller div:nth-child(8):after {
-            top: 56px;
-            left: 12.28719px;
-          }
-          @keyframes lds-roller {
-            0% {
-              transform: rotate(0deg);
-            }
-            100% {
-              transform: rotate(360deg);
-            }
-          }
-        </style>
-      </head>
-      <body id="top">
-        <main>
-          <center>
-            <h1 id="title" class=".centered-div">{ title }</h1>
-          </center>
-          @body
-          <footer>
-            <center>
-              <p>Protected by <a href="https://xeiaso.net/blog/2025/anubis">Anubis</a> from <a href="https://within.website">Within</a>.</p>
-            </center>
-          </footer>
-        </main>
-      </body>
-    </html>
-}
-
-templ index() {
-    <div class="centered-div">
-      <img id="image" width="256" src={ "/.within.website/x/cmd/anubis/static/img/pensive.webp?cacheBuster=" + x.Version }/>
-      <img style="display:none;" width="256" src={ "/.within.website/x/cmd/anubis/static/img/happy.webp?cacheBuster=" + x.Version }/>
-      <p id="status">Loading...</p>
-      <script async type="module" src={ "/.within.website/x/cmd/anubis/static/js/main.mjs?cacheBuster=" + x.Version }></script>
-      <div id="spinner" class="lds-roller"><div></div><div></div><div></div><div></div><div></div><div></div><div></div><div></div></div>
-      <noscript>
-        <p>Sadly, you must enable JavaScript to get past this challenge. This is required because AI companies have changed the social contract around how website hosting works. A no-JS solution is a work-in-progress.</p>
-      </noscript>
-      <div id="testarea"></div>
-    </div>
-}
-
-templ errorPage(message string) {
-    <div class="centered-div">
-      <img id="image" width="256" src={ "/.within.website/x/cmd/anubis/static/img/sad.webp?cacheBuster=" + x.Version }/>
-      <p>{ message }.</p>
-      <button onClick="window.location.reload();">Try again</button>
-      <p><a href="/">Go home</a></p>
-    </div>
-}
diff --git a/cmd/anubis/index_templ.go b/cmd/anubis/index_templ.go
deleted file mode 100644
index 491641f..0000000
--- a/cmd/anubis/index_templ.go
+++ /dev/null
@@ -1,215 +0,0 @@
-// Code generated by templ - DO NOT EDIT.
-
-// templ: version: v0.3.833
-package main
-
-//lint:file-ignore SA4006 This context is only used if a nested component is present.
-
-import "github.com/a-h/templ"
-import templruntime "github.com/a-h/templ/runtime"
-
-import (
-    "within.website/x"
-    "within.website/x/xess"
-)
-
-func base(title string, body templ.Component) templ.Component {
-    return templruntime.GeneratedTemplate(func(templ_7745c5c3_Input templruntime.GeneratedComponentInput) (templ_7745c5c3_Err error) {
-        templ_7745c5c3_W, ctx := templ_7745c5c3_Input.Writer, templ_7745c5c3_Input.Context
-        if templ_7745c5c3_CtxErr := ctx.Err(); templ_7745c5c3_CtxErr != nil {
-            return templ_7745c5c3_CtxErr
-        }
-        templ_7745c5c3_Buffer, templ_7745c5c3_IsBuffer := templruntime.GetBuffer(templ_7745c5c3_W)
-        if !templ_7745c5c3_IsBuffer {
-            defer func() {
-                templ_7745c5c3_BufErr := templruntime.ReleaseBuffer(templ_7745c5c3_Buffer)
-                if templ_7745c5c3_Err == nil {
-                    templ_7745c5c3_Err = templ_7745c5c3_BufErr
-                }
-            }()
-        }
-        ctx = templ.InitializeContext(ctx)
-        templ_7745c5c3_Var1 := templ.GetChildren(ctx)
-        if templ_7745c5c3_Var1 == nil {
-            templ_7745c5c3_Var1 = templ.NopComponent
-        }
-        ctx = templ.ClearChildren(ctx)
-        templ_7745c5c3_Err = templruntime.WriteString(templ_7745c5c3_Buffer, 1, "<!doctype html><html><head><title>")
-        if templ_7745c5c3_Err != nil {
-            return templ_7745c5c3_Err
-        }
-        var templ_7745c5c3_Var2 string
-        templ_7745c5c3_Var2, templ_7745c5c3_Err = templ.JoinStringErrs(title)
-        if templ_7745c5c3_Err != nil {
-            return templ.Error{Err: templ_7745c5c3_Err, FileName: `index.templ`, Line: 12, Col: 17}
-        }
-        _, templ_7745c5c3_Err = templ_7745c5c3_B
````
