Compare commits
No commits in common. "main" and "askama" have entirely different histories.
52 changed files with 632 additions and 7777 deletions
|
@@ -1,95 +0,0 @@
|
|||
;;; SPDX-License-Identifier: GPL-3.0-or-later
|
||||
;; Per-directory local variables for GNU Emacs 23 and later.
|
||||
|
||||
((nil
|
||||
. ((fill-column . 78)
|
||||
(tab-width . 8)
|
||||
(sentence-end-double-space . t)))
|
||||
(c-mode . ((c-file-style . "gnu")))
|
||||
(scheme-mode
|
||||
.
|
||||
((indent-tabs-mode . nil)
|
||||
(eval . (put 'eval-when 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-prompt 'scheme-indent-function 1))
|
||||
(eval . (put 'test-assert 'scheme-indent-function 1))
|
||||
(eval . (put 'test-assertm 'scheme-indent-function 1))
|
||||
(eval . (put 'test-equalm 'scheme-indent-function 1))
|
||||
(eval . (put 'test-equal 'scheme-indent-function 1))
|
||||
(eval . (put 'test-eq 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-input-string 'scheme-indent-function 1))
|
||||
(eval . (put 'guard 'scheme-indent-function 1))
|
||||
(eval . (put 'lambda* 'scheme-indent-function 1))
|
||||
(eval . (put 'substitute* 'scheme-indent-function 1))
|
||||
(eval . (put 'match-record 'scheme-indent-function 2))
|
||||
|
||||
;; 'modify-phases' and its keywords.
|
||||
(eval . (put 'modify-phases 'scheme-indent-function 1))
|
||||
(eval . (put 'replace 'scheme-indent-function 1))
|
||||
(eval . (put 'add-before 'scheme-indent-function 2))
|
||||
(eval . (put 'add-after 'scheme-indent-function 2))
|
||||
|
||||
(eval . (put 'modify-services 'scheme-indent-function 1))
|
||||
(eval . (put 'with-directory-excursion 'scheme-indent-function 1))
|
||||
(eval . (put 'package 'scheme-indent-function 0))
|
||||
(eval . (put 'origin 'scheme-indent-function 0))
|
||||
(eval . (put 'build-system 'scheme-indent-function 0))
|
||||
(eval . (put 'bag 'scheme-indent-function 0))
|
||||
(eval . (put 'graft 'scheme-indent-function 0))
|
||||
(eval . (put 'operating-system 'scheme-indent-function 0))
|
||||
(eval . (put 'file-system 'scheme-indent-function 0))
|
||||
(eval . (put 'manifest-entry 'scheme-indent-function 0))
|
||||
(eval . (put 'manifest-pattern 'scheme-indent-function 0))
|
||||
(eval . (put 'substitute-keyword-arguments 'scheme-indent-function 1))
|
||||
(eval . (put 'with-store 'scheme-indent-function 1))
|
||||
(eval . (put 'with-external-store 'scheme-indent-function 1))
|
||||
(eval . (put 'with-error-handling 'scheme-indent-function 0))
|
||||
(eval . (put 'with-mutex 'scheme-indent-function 1))
|
||||
(eval . (put 'with-atomic-file-output 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-compressed-output-port 'scheme-indent-function 2))
|
||||
(eval . (put 'call-with-decompressed-port 'scheme-indent-function 2))
|
||||
(eval . (put 'call-with-gzip-input-port 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-gzip-output-port 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-lzip-input-port 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-lzip-output-port 'scheme-indent-function 1))
|
||||
(eval . (put 'signature-case 'scheme-indent-function 1))
|
||||
(eval . (put 'emacs-batch-eval 'scheme-indent-function 0))
|
||||
(eval . (put 'emacs-batch-edit-file 'scheme-indent-function 1))
|
||||
(eval . (put 'emacs-substitute-sexps 'scheme-indent-function 1))
|
||||
(eval . (put 'emacs-substitute-variables 'scheme-indent-function 1))
|
||||
(eval . (put 'with-derivation-narinfo 'scheme-indent-function 1))
|
||||
(eval . (put 'with-derivation-substitute 'scheme-indent-function 2))
|
||||
(eval . (put 'with-status-report 'scheme-indent-function 1))
|
||||
(eval . (put 'with-status-verbosity 'scheme-indent-function 1))
|
||||
|
||||
(eval . (put 'mlambda 'scheme-indent-function 1))
|
||||
(eval . (put 'mlambdaq 'scheme-indent-function 1))
|
||||
(eval . (put 'syntax-parameterize 'scheme-indent-function 1))
|
||||
(eval . (put 'with-monad 'scheme-indent-function 1))
|
||||
(eval . (put 'mbegin 'scheme-indent-function 1))
|
||||
(eval . (put 'mwhen 'scheme-indent-function 1))
|
||||
(eval . (put 'munless 'scheme-indent-function 1))
|
||||
(eval . (put 'mlet* 'scheme-indent-function 2))
|
||||
(eval . (put 'mlet 'scheme-indent-function 2))
|
||||
(eval . (put 'run-with-store 'scheme-indent-function 1))
|
||||
(eval . (put 'run-with-state 'scheme-indent-function 1))
|
||||
(eval . (put 'wrap-program 'scheme-indent-function 1))
|
||||
(eval . (put 'with-imported-modules 'scheme-indent-function 1))
|
||||
(eval . (put 'with-extensions 'scheme-indent-function 1))
|
||||
|
||||
(eval . (put 'with-database 'scheme-indent-function 2))
|
||||
(eval . (put 'call-with-transaction 'scheme-indent-function 2))
|
||||
|
||||
(eval . (put 'call-with-container 'scheme-indent-function 1))
|
||||
(eval . (put 'container-excursion 'scheme-indent-function 1))
|
||||
(eval . (put 'eventually 'scheme-indent-function 1))
|
||||
|
||||
(eval . (put 'call-with-progress-reporter 'scheme-indent-function 1))
|
||||
|
||||
;; This notably allows '(' in Paredit to not insert a space when the
|
||||
;; preceding symbol is one of these.
|
||||
(eval . (modify-syntax-entry ?~ "'"))
|
||||
(eval . (modify-syntax-entry ?$ "'"))
|
||||
(eval . (modify-syntax-entry ?+ "'"))))
|
||||
(emacs-lisp-mode . ((indent-tabs-mode . nil)))
|
||||
(texinfo-mode . ((indent-tabs-mode . nil)
|
||||
(fill-column . 72))))
|
1 .gitignore (vendored)
|
@@ -1,2 +1 @@
|
|||
/target
|
||||
names.json
|
||||
|
|
|
@@ -1,9 +0,0 @@
|
|||
(channel
|
||||
(version 0)
|
||||
(directory ".guix/modules"))
|
||||
|
||||
|
||||
;;; Local Variables:
|
||||
;;; mode: scheme
|
||||
;;; End:
|
||||
|
File diff suppressed because it is too large
|
@@ -1,42 +0,0 @@
|
|||
(load "crates-io.scm")
|
||||
(define-module (meowy-webring)
|
||||
#:use-module (crates-io)
|
||||
#:use-module (guix)
|
||||
#:use-module (guix build-system cargo)
|
||||
#:use-module (guix git-download)
|
||||
#:use-module ((guix licenses) #:prefix license:)
|
||||
#:use-module (gnu packages)
|
||||
#:use-module (gnu packages crates-io)
|
||||
#:use-module (srfi srfi-1))
|
||||
|
||||
(define (keep-file? file stat)
|
||||
(or (git-predicate (current-source-directory))
|
||||
(const #t)))
|
||||
|
||||
(define-public meowy-webring
|
||||
(package
|
||||
(name "meowy-webring")
|
||||
(version "0.1.0-git")
|
||||
(source (local-file "../.." "meowy-webring-checkout"
|
||||
#:recursive? #t
|
||||
#:select? keep-file?))
|
||||
(build-system cargo-build-system)
|
||||
(arguments (list #:cargo-inputs `(("rust-askama-rocket" ,rust-askama-rocket-0.12)
|
||||
("rust-askama" ,rust-askama-0.12)
|
||||
("rust-directories" ,rust-directories-5)
|
||||
("rust-embed" ,rust-embed-6)
|
||||
("rust-hex" ,rust-hex-0.4)
|
||||
("rust-notify" ,rust-notify-6)
|
||||
("rust-rocket" ,rust-rocket-0.5)
|
||||
("rust-serde" ,rust-serde-1)
|
||||
("rust-serde-json" ,rust-serde-json-1)
|
||||
("rust-simple-logger" ,rust-simple-logger-4))
|
||||
#:install-source? #f
|
||||
#:phases #~(modify-phases %standard-phases
|
||||
(delete 'package))))
|
||||
(synopsis "")
|
||||
(description "")
|
||||
(home-page "")
|
||||
(license license:cc0)))
|
||||
|
||||
meowy-webring
|
81 .vscode/launch.json (vendored)
|
@@ -1,67 +1,16 @@
|
|||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "meowy-cli (Print)",
|
||||
"program": "${workspaceFolder}/target/debug/meowy-cli",
|
||||
"args": [
|
||||
"print"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"sourceMap": {},
|
||||
"sourceLanguages": [
|
||||
"rust"
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "meowy-cli (Print) json",
|
||||
"program": "${workspaceFolder}/target/debug/meowy-cli",
|
||||
"args": [
|
||||
"print",
|
||||
"--json"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"sourceMap": {},
|
||||
"sourceLanguages": [
|
||||
"rust"
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "meowy-cli (Print) only url",
|
||||
"program": "${workspaceFolder}/target/debug/meowy-cli",
|
||||
"args": [
|
||||
"print",
|
||||
"--url"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"sourceMap": {},
|
||||
"sourceLanguages": [
|
||||
"rust"
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "meowy-cli (Print) only name",
|
||||
"program": "${workspaceFolder}/target/debug/meowy-cli",
|
||||
"args": [
|
||||
"print",
|
||||
"--name"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"sourceMap": {},
|
||||
"sourceLanguages": [
|
||||
"rust"
|
||||
],
|
||||
}
|
||||
]
|
||||
}
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Debug",
|
||||
"program": "${workspaceFolder}/<executable file>",
|
||||
"args": [],
|
||||
"cwd": "${workspaceFolder}"
|
||||
}
|
||||
]
|
||||
}
|
1168 Cargo.lock (generated)
File diff suppressed because it is too large
44 Cargo.toml
|
@@ -1,39 +1,27 @@
|
|||
[workspace]
|
||||
members = ["crates/*"]
|
||||
|
||||
[package]
|
||||
name = "meowy-webring"
|
||||
version = "0.2.0"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
rust-version = "1.70"
|
||||
|
||||
[profile.release]
|
||||
lto = "thin"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies.rocket]
|
||||
version = "0.5"
|
||||
default-features = false
|
||||
version = "=0.5.0-rc.3"
|
||||
features = ["json"]
|
||||
|
||||
[dependencies.serde]
|
||||
version = "1.0"
|
||||
[dependencies.rust-embed]
|
||||
version = "6.7.0"
|
||||
features = ["debug-embed"]
|
||||
|
||||
[dependencies.serde_json]
|
||||
version = "1.0"
|
||||
[dependencies.askama_rocket]
|
||||
git = "https://github.com/djc/askama.git"
|
||||
package = "askama_rocket"
|
||||
rev = "b9e51601560398766eac445517fb17c35090a952"
|
||||
|
||||
[dependencies.shared]
|
||||
path = "./crates/shared"
|
||||
|
||||
[dependencies.simple_logger]
|
||||
version = "4"
|
||||
default-features = false
|
||||
|
||||
[dependencies.notify]
|
||||
version = "6"
|
||||
default-features = false
|
||||
features = ["macos_fsevent"]
|
||||
|
||||
[dependencies.meowy-assets]
|
||||
path = "./crates/meowy-assets"
|
||||
[dependencies.askama]
|
||||
git = "https://github.com/djc/askama.git"
|
||||
package = "askama"
|
||||
rev = "b9e51601560398766eac445517fb17c35090a952"
|
||||
version = "0.12"
|
||||
default-features = true
|
||||
features = ["with-rocket", "mime", "mime_guess"]
|
||||
|
|
64 LICENSE
|
@@ -2,14 +2,14 @@ Creative Commons Legal Code
|
|||
|
||||
CC0 1.0 Universal
|
||||
|
||||
CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
|
||||
LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
|
||||
ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
|
||||
INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
|
||||
REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
|
||||
PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
|
||||
THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
|
||||
HEREUNDER.
|
||||
CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
|
||||
LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
|
||||
ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
|
||||
INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
|
||||
REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
|
||||
PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
|
||||
THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
|
||||
HEREUNDER.
|
||||
|
||||
Statement of Purpose
|
||||
|
||||
|
@@ -43,22 +43,22 @@ Related Rights"). Copyright and Related Rights include, but are not
|
|||
limited to, the following:
|
||||
|
||||
i. the right to reproduce, adapt, distribute, perform, display,
|
||||
communicate, and translate a Work;
|
||||
communicate, and translate a Work;
|
||||
ii. moral rights retained by the original author(s) and/or performer(s);
|
||||
iii. publicity and privacy rights pertaining to a person's image or
|
||||
likeness depicted in a Work;
|
||||
likeness depicted in a Work;
|
||||
iv. rights protecting against unfair competition in regards to a Work,
|
||||
subject to the limitations in paragraph 4(a), below;
|
||||
subject to the limitations in paragraph 4(a), below;
|
||||
v. rights protecting the extraction, dissemination, use and reuse of data
|
||||
in a Work;
|
||||
in a Work;
|
||||
vi. database rights (such as those arising under Directive 96/9/EC of the
|
||||
European Parliament and of the Council of 11 March 1996 on the legal
|
||||
protection of databases, and under any national implementation
|
||||
thereof, including any amended or successor version of such
|
||||
directive); and
|
||||
European Parliament and of the Council of 11 March 1996 on the legal
|
||||
protection of databases, and under any national implementation
|
||||
thereof, including any amended or successor version of such
|
||||
directive); and
|
||||
vii. other similar, equivalent or corresponding rights throughout the
|
||||
world based on applicable law or treaty, and any national
|
||||
implementations thereof.
|
||||
world based on applicable law or treaty, and any national
|
||||
implementations thereof.
|
||||
|
||||
2. Waiver. To the greatest extent permitted by, but not in contravention
|
||||
of, applicable law, Affirmer hereby overtly, fully, permanently,
|
||||
|
@@ -102,20 +102,20 @@ express Statement of Purpose.
|
|||
4. Limitations and Disclaimers.
|
||||
|
||||
a. No trademark or patent rights held by Affirmer are waived, abandoned,
|
||||
surrendered, licensed or otherwise affected by this document.
|
||||
surrendered, licensed or otherwise affected by this document.
|
||||
b. Affirmer offers the Work as-is and makes no representations or
|
||||
warranties of any kind concerning the Work, express, implied,
|
||||
statutory or otherwise, including without limitation warranties of
|
||||
title, merchantability, fitness for a particular purpose, non
|
||||
infringement, or the absence of latent or other defects, accuracy, or
|
||||
the present or absence of errors, whether or not discoverable, all to
|
||||
the greatest extent permissible under applicable law.
|
||||
warranties of any kind concerning the Work, express, implied,
|
||||
statutory or otherwise, including without limitation warranties of
|
||||
title, merchantability, fitness for a particular purpose, non
|
||||
infringement, or the absence of latent or other defects, accuracy, or
|
||||
the present or absence of errors, whether or not discoverable, all to
|
||||
the greatest extent permissible under applicable law.
|
||||
c. Affirmer disclaims responsibility for clearing rights of other persons
|
||||
that may apply to the Work or any use thereof, including without
|
||||
limitation any person's Copyright and Related Rights in the Work.
|
||||
Further, Affirmer disclaims responsibility for obtaining any necessary
|
||||
consents, permissions or other rights required for any use of the
|
||||
Work.
|
||||
that may apply to the Work or any use thereof, including without
|
||||
limitation any person's Copyright and Related Rights in the Work.
|
||||
Further, Affirmer disclaims responsibility for obtaining any necessary
|
||||
consents, permissions or other rights required for any use of the
|
||||
Work.
|
||||
d. Affirmer understands and acknowledges that Creative Commons is not a
|
||||
party to this document and has no duty or obligation with respect to
|
||||
this CC0 or use of the Work.
|
||||
party to this document and has no duty or obligation with respect to
|
||||
this CC0 or use of the Work.
|
||||
|
|
|
@@ -1,25 +0,0 @@
|
|||
[package]
|
||||
name = "cli"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
serde = "1.0"
|
||||
serde_json = "1.0"
|
||||
log = "0.4"
|
||||
|
||||
[dependencies.clap]
|
||||
version = "4"
|
||||
features = ["derive"]
|
||||
|
||||
[dependencies.shared]
|
||||
path = "../shared"
|
||||
|
||||
[dependencies.simple_logger]
|
||||
version = "4"
|
||||
default-features = false
|
||||
features = ["stderr"]
|
||||
|
||||
[[bin]]
|
||||
name = "meowy-cli"
|
||||
path = "src/main.rs"
|
|
@@ -1,71 +0,0 @@
|
|||
use clap::{arg, command, Args, Parser, Subcommand};
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(author, version, about, long_about = None)]
|
||||
pub(crate) struct Arguments {
|
||||
#[arg(help = "the path to the names.json file", long, short)]
|
||||
pub(crate) path: Option<String>,
|
||||
#[arg(
|
||||
long,
|
||||
short,
|
||||
help = "a separator string to seperate the url from the name. defaults to : with a space after that."
|
||||
)]
|
||||
pub(crate) separator: Option<String>,
|
||||
#[command(subcommand)]
|
||||
pub(crate) command: Commands,
|
||||
}
|
||||
|
||||
#[derive(Subcommand, Debug)]
|
||||
pub(crate) enum Commands {
|
||||
#[command(about = "print the current webring sites and their names")]
|
||||
Print {
|
||||
#[arg(help = "url you want to filter to")]
|
||||
filter: Option<String>,
|
||||
#[command(flatten)]
|
||||
group: PrintGroup,
|
||||
#[arg(
|
||||
long,
|
||||
short,
|
||||
conflicts_with = "url",
|
||||
conflicts_with = "name",
|
||||
help = "print the data out as a json string"
|
||||
)]
|
||||
json: bool,
|
||||
},
|
||||
#[command(about = "add a site to the webring")]
|
||||
Add {
|
||||
#[arg(
|
||||
long,
|
||||
short,
|
||||
required = true,
|
||||
help = "the url of the site you want to add. example: \"example.com\"."
|
||||
)]
|
||||
url: String,
|
||||
#[arg(
|
||||
long,
|
||||
short,
|
||||
required = false,
|
||||
help = "the personal name of the site. this is not required."
|
||||
)]
|
||||
name: Option<String>,
|
||||
},
|
||||
#[command(about = "remove a site from the webring")]
|
||||
Remove {
|
||||
#[arg(
|
||||
long,
|
||||
short,
|
||||
required = true,
|
||||
help = "the url of the site you want to remove."
|
||||
)]
|
||||
url: String,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Args, Debug)]
|
||||
#[group(required = false)]
|
||||
pub struct PrintGroup {
|
||||
#[arg(long, short, action = clap::ArgAction::SetTrue, help = "print the url only")]
|
||||
pub(crate) url: bool,
|
||||
#[arg(long, short, action = clap::ArgAction::SetTrue, help = "print the name only")]
|
||||
pub(crate) name: bool,
|
||||
}
|
|
@@ -1,43 +0,0 @@
|
|||
use shared::{
|
||||
errors::{Error, ErrorStatus},
|
||||
names::{self, Site},
|
||||
};
|
||||
use std::path::Path;
|
||||
|
||||
use crate::commands::utils::{site_string, PrintOptions};
|
||||
|
||||
pub(crate) fn add(
|
||||
path: &Path,
|
||||
url: &String,
|
||||
name: &Option<String>,
|
||||
separator: &String,
|
||||
) -> Result<(), Error> {
|
||||
let names_file = names::read_names_file(path)?;
|
||||
let mut names = names::load_names(&names_file)?;
|
||||
|
||||
if names.iter().any(|site| site.url.contains(url)) {
|
||||
return Err(Error {
|
||||
status: ErrorStatus::AlreadyExistsError,
|
||||
data:
|
||||
"this url already exists in names.json. you can't have more then 1 of the same url."
|
||||
.into(),
|
||||
});
|
||||
}
|
||||
|
||||
let site = Site {
|
||||
url: url.to_string(),
|
||||
name: name.to_owned(),
|
||||
};
|
||||
|
||||
log::debug!("adding {:?} to {}", site, path.display());
|
||||
names.push(site.clone());
|
||||
|
||||
let json = serde_json::to_string(&names).unwrap();
|
||||
std::fs::write(path, json).unwrap();
|
||||
println!(
|
||||
"added {} to names.json",
|
||||
site_string(&site, PrintOptions::All, separator)
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
|
@@ -1,8 +0,0 @@
|
|||
mod add;
|
||||
mod print;
|
||||
mod remove;
|
||||
mod utils;
|
||||
|
||||
pub(crate) use add::add;
|
||||
pub(crate) use print::print;
|
||||
pub(crate) use remove::remove;
|
|
@@ -1,88 +0,0 @@
|
|||
use crate::{arguments::PrintGroup, commands::utils::site_string};
|
||||
use shared::{
|
||||
errors::{Error, ErrorStatus},
|
||||
names::{self, Site},
|
||||
};
|
||||
use std::path::Path;
|
||||
|
||||
use super::utils::PrintOptions;
|
||||
|
||||
pub(crate) fn print(
|
||||
path: &Path,
|
||||
filter: &Option<String>,
|
||||
group: &PrintGroup,
|
||||
separator: &String,
|
||||
json: bool,
|
||||
) -> Result<(), Error> {
|
||||
let names_file = names::read_names_file(path)?;
|
||||
let mut names = names::load_names(&names_file)?;
|
||||
|
||||
if let Some(filter) = filter {
|
||||
names.retain(|f| &f.url == filter);
|
||||
if names.is_empty() {
|
||||
return Err(Error {
|
||||
status: ErrorStatus::NotFoundError,
|
||||
data: "this url was not found in names.json".into(),
|
||||
});
|
||||
}
|
||||
return filter_site(&names[0], json, separator, group);
|
||||
}
|
||||
|
||||
for site in names {
|
||||
if json {
|
||||
json_printing(&site)?;
|
||||
continue;
|
||||
}
|
||||
|
||||
printing(separator, &site, group);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn filter_site(
|
||||
site: &Site,
|
||||
json: bool,
|
||||
separator: &String,
|
||||
group: &PrintGroup,
|
||||
) -> Result<(), Error> {
|
||||
if json {
|
||||
json_printing(site)?;
|
||||
return Ok(());
|
||||
}
|
||||
printing(separator, site, group);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn json_printing(site: &Site) -> Result<(), Error> {
|
||||
match serde_json::to_string(&site) {
|
||||
Ok(json) => {
|
||||
println!("{}", json);
|
||||
Ok(())
|
||||
}
|
||||
Err(err) => Err(Error {
|
||||
status: ErrorStatus::ParsingError,
|
||||
data: err.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn printing(separator: &String, site: &Site, group: &PrintGroup) {
|
||||
let string = site_string(site, print_group_to_options(group), separator);
|
||||
|
||||
println!("{}", string);
|
||||
}
|
||||
|
||||
fn print_group_to_options(group: &PrintGroup) -> PrintOptions {
|
||||
match group {
|
||||
PrintGroup {
|
||||
url: true,
|
||||
name: false,
|
||||
} => PrintOptions::Url,
|
||||
PrintGroup {
|
||||
url: false,
|
||||
name: true,
|
||||
} => PrintOptions::Name,
|
||||
_ => PrintOptions::All,
|
||||
}
|
||||
}
|
|
@@ -1,23 +0,0 @@
|
|||
use shared::{errors::Error, names};
|
||||
use std::path::Path;
|
||||
|
||||
use crate::commands::utils::{site_string, PrintOptions};
|
||||
|
||||
pub(crate) fn remove(path: &Path, url: &String, separator: &String) -> Result<(), Error> {
|
||||
let names_file = names::read_names_file(path)?;
|
||||
let mut names = names::load_names(&names_file)?;
|
||||
|
||||
names.retain(|site| {
|
||||
if &site.url == url {
|
||||
println!(
|
||||
"removing {} from names.json",
|
||||
site_string(site, PrintOptions::All, separator)
|
||||
);
|
||||
}
|
||||
&site.url != url
|
||||
});
|
||||
let json = serde_json::to_string(&names).unwrap();
|
||||
std::fs::write(path, json).unwrap();
|
||||
|
||||
Ok(())
|
||||
}
|
|
@@ -1,28 +0,0 @@
|
|||
use shared::names::Site;
|
||||
|
||||
pub(super) enum PrintOptions {
|
||||
Url,
|
||||
Name,
|
||||
All,
|
||||
}
|
||||
|
||||
pub(super) fn site_string(site: &Site, options: PrintOptions, separator: &String) -> String {
|
||||
let mut string = String::new();
|
||||
|
||||
if matches!(options, PrintOptions::Url) || matches!(options, PrintOptions::All) {
|
||||
string += &site.url;
|
||||
}
|
||||
|
||||
if let Some(name) = &site.name {
|
||||
if matches!(options, PrintOptions::Url) {
|
||||
return string;
|
||||
}
|
||||
if !string.is_empty() {
|
||||
string += &format!("{}{}", separator, name)
|
||||
} else {
|
||||
string += name;
|
||||
}
|
||||
}
|
||||
|
||||
string
|
||||
}
|
|
@@ -1,18 +0,0 @@
|
|||
use log::LevelFilter;
|
||||
use shared::errors::{Error, ErrorStatus};
|
||||
use simple_logger::SimpleLogger;
|
||||
|
||||
pub fn initialize_logger() -> Result<(), Error> {
|
||||
if let Err(err) = SimpleLogger::new()
|
||||
.with_level(LevelFilter::Info)
|
||||
.env()
|
||||
.init()
|
||||
{
|
||||
return Err(Error {
|
||||
status: ErrorStatus::LoggerInitializationError,
|
||||
data: err.to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
|
@@ -1,34 +0,0 @@
|
|||
use arguments::{Arguments, Commands};
|
||||
use clap::Parser;
|
||||
use commands::{add, print, remove};
|
||||
use shared::{directories, errors::Error};
|
||||
use std::path::Path;
|
||||
|
||||
mod arguments;
|
||||
mod commands;
|
||||
mod logging;
|
||||
|
||||
fn main() -> Result<(), Error> {
|
||||
logging::initialize_logger()?;
|
||||
|
||||
let default_path = directories::get_names_path()?;
|
||||
let args = Arguments::parse();
|
||||
let separator = args.separator.unwrap_or(": ".into());
|
||||
|
||||
let path = match &args.path {
|
||||
Some(path) => Path::new(path),
|
||||
None => &default_path,
|
||||
};
|
||||
|
||||
match &args.command {
|
||||
Commands::Print {
|
||||
filter,
|
||||
group,
|
||||
json,
|
||||
} => print(path, filter, group, &separator, *json)?,
|
||||
Commands::Add { url, name } => add(path, url, name, &separator)?,
|
||||
Commands::Remove { url } => remove(path, url, &separator)?,
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
|
@@ -1,37 +0,0 @@
|
|||
[package]
|
||||
name = "meowy-assets"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
hex = "0.4"
|
||||
|
||||
[dependencies.rocket]
|
||||
version = "0.5"
|
||||
default-features = false
|
||||
|
||||
[dependencies.askama_rocket]
|
||||
package = "askama_rocket"
|
||||
version = "0.12"
|
||||
default-features = false
|
||||
|
||||
[dependencies.askama]
|
||||
package = "askama"
|
||||
version = "0.12"
|
||||
default-features = false
|
||||
|
||||
[dependencies.rust-embed]
|
||||
version = "6"
|
||||
features = ["debug-embed"]
|
||||
|
||||
[dependencies.sha2]
|
||||
version = "0.10"
|
||||
features = ["asm"]
|
||||
|
||||
[dependencies.shared]
|
||||
path = "../shared"
|
||||
|
||||
[dependencies.proc_macros]
|
||||
path = "../proc-macros"
|
|
@@ -1,64 +0,0 @@
|
|||
@media (prefers-color-scheme: light) {
|
||||
:root {
|
||||
--background-color: #f6f5f4;
|
||||
--text-color: black;
|
||||
--link-color: darkblue;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
:root {
|
||||
--background-color: #191919;
|
||||
--text-color: #E9E9E9;
|
||||
--link-color: cyan;
|
||||
}
|
||||
}
|
||||
|
||||
:root {
|
||||
--h1-font-size: 3.225rem;
|
||||
--h2-font-size: 2.825rem;
|
||||
--h3-font-size: 2.225rem;
|
||||
--h4-font-size: 1.665rem;
|
||||
--default-font-size: 1.375rem;
|
||||
--h6-font-size: 1.185rem;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: "Atkinson Hyperlegible", sans-serif;
|
||||
text-align: center;
|
||||
font-size: var(--default-font-size);
|
||||
max-width: 600px;
|
||||
margin: auto;
|
||||
background-color: var(--background-color);
|
||||
color: var(--text-color)
|
||||
}
|
||||
|
||||
|
||||
a {
|
||||
font-size: var(--default-font-size);
|
||||
color: var(--link-color);
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: var(--h1-font-size);
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: var(--h2-font-size);
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: var(--h3-font-size);
|
||||
}
|
||||
|
||||
h4 {
|
||||
font-size: var(--h4-font-size);
|
||||
}
|
||||
|
||||
h5 {
|
||||
font-size: var(--default-font-size);
|
||||
}
|
||||
|
||||
h6 {
|
||||
font-size: var(--h6-font-size);
|
||||
}
|
|
@@ -1,160 +0,0 @@
|
|||
use super::templates::HyperlegibleTemplate;
|
||||
use askama::Template;
|
||||
use rocket::http::Status;
|
||||
use rust_embed::RustEmbed;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::{collections::HashMap, sync::OnceLock};
|
||||
|
||||
#[derive(RustEmbed)]
|
||||
#[folder = "public/"]
|
||||
struct Assets;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Files {
|
||||
file_vector: Vec<File>,
|
||||
hash_filename_dictionary: HashMap<String, usize>,
|
||||
filename_dictionary: HashMap<String, usize>,
|
||||
}
|
||||
|
||||
impl Files {
|
||||
pub fn hash_filename_get(&self, name: &str) -> Option<&File> {
|
||||
match self.hash_filename_dictionary.get(name) {
|
||||
Some(index) => Some(&self.file_vector[*index]),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
pub fn filename_get(&self, name: &str) -> Option<&File> {
|
||||
match self.filename_dictionary.get(name) {
|
||||
Some(index) => Some(&self.file_vector[*index]),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
pub fn insert_file_into_hashmap(&mut self, file: File) {
|
||||
let index = self.file_vector.len();
|
||||
self.hash_filename_dictionary
|
||||
.insert(file.metadata.get_hash_filename(), index);
|
||||
self.filename_dictionary
|
||||
.insert(file.metadata.filename.clone(), index);
|
||||
self.file_vector.push(file);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct FileMetadata {
|
||||
pub filename: String,
|
||||
pub extension: String,
|
||||
pub hash: String,
|
||||
}
|
||||
|
||||
impl FileMetadata {
|
||||
pub fn get_hash_filename(&self) -> String {
|
||||
let mut hash = self.hash.clone();
|
||||
hash.truncate(8);
|
||||
format!("{}.{}.{}", self.filename, hash, self.extension)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct File {
|
||||
pub metadata: FileMetadata,
|
||||
data: Box<[u8]>,
|
||||
}
|
||||
|
||||
impl File {
|
||||
fn get(filename: &str, extension: &str) -> Option<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
match Assets::get(&format!("{}.{}", filename, extension)) {
|
||||
Some(file) => {
|
||||
let metadata = FileMetadata {
|
||||
filename: filename.into(),
|
||||
extension: extension.into(),
|
||||
hash: hex::encode(file.metadata.sha256_hash()),
|
||||
};
|
||||
Some(File {
|
||||
data: file.data.into(),
|
||||
metadata,
|
||||
})
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_data(&self) -> &[u8] {
|
||||
&self.data
|
||||
}
|
||||
|
||||
pub fn get_text(&self) -> Option<String> {
|
||||
match std::str::from_utf8(&self.data) {
|
||||
Ok(str) => Some(str.into()),
|
||||
Err(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub static FILES: OnceLock<Files> = OnceLock::new();
|
||||
|
||||
pub fn get_file_wrapper() -> Result<&'static Files, Status> {
|
||||
match FILES.get() {
|
||||
Some(files) => Ok(files),
|
||||
None => Err(Status::InternalServerError),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_sha256_hash(string: &String) -> String {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(string);
|
||||
let result = hasher.finalize();
|
||||
hex::encode(result)
|
||||
}
|
||||
|
||||
fn get_hyperlegible(
|
||||
latin_woff2_filename: String,
|
||||
latin_ext_woff2_filename: String,
|
||||
all_woff_filename: String,
|
||||
) -> File {
|
||||
let hyperlegible_template = HyperlegibleTemplate {
|
||||
atkinson_latin_woff2_filename: latin_woff2_filename,
|
||||
atkinson_latin_ext_woff2_filename: latin_ext_woff2_filename,
|
||||
atkinson_all_woff_filename: all_woff_filename,
|
||||
};
|
||||
|
||||
let rendered_template = hyperlegible_template.render().unwrap();
|
||||
let hash = get_sha256_hash(&rendered_template);
|
||||
|
||||
let metadata = FileMetadata {
|
||||
filename: "hyperlegible".into(),
|
||||
extension: "css".into(),
|
||||
hash,
|
||||
};
|
||||
|
||||
File {
|
||||
data: rendered_template.as_bytes().into(),
|
||||
metadata,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn initialize_files() -> Result<(), Files> {
|
||||
let atkinson_latin_woff2 =
|
||||
File::get("atkinson-hyperlegible-latin-400-normal", "woff2").unwrap();
|
||||
let atkinson_latin_ext_woff2 =
|
||||
File::get("atkinson-hyperlegible-latin-ext-400-normal", "woff2").unwrap();
|
||||
let atkinson_all_woff = File::get("atkinson-hyperlegible-all-400-normal", "woff").unwrap();
|
||||
|
||||
let mut files = Files {
|
||||
file_vector: Vec::new(),
|
||||
hash_filename_dictionary: HashMap::new(),
|
||||
filename_dictionary: HashMap::new(),
|
||||
};
|
||||
files.insert_file_into_hashmap(File::get("style", "css").unwrap());
|
||||
files.insert_file_into_hashmap(get_hyperlegible(
|
||||
atkinson_latin_woff2.metadata.get_hash_filename(),
|
||||
atkinson_latin_ext_woff2.metadata.get_hash_filename(),
|
||||
atkinson_all_woff.metadata.get_hash_filename(),
|
||||
));
|
||||
files.insert_file_into_hashmap(atkinson_latin_woff2);
|
||||
files.insert_file_into_hashmap(atkinson_latin_ext_woff2);
|
||||
files.insert_file_into_hashmap(atkinson_all_woff);
|
||||
return FILES.set(files);
|
||||
}
|
|
@@ -1,11 +0,0 @@
|
|||
pub mod files;
|
||||
mod responders;
|
||||
mod routes;
|
||||
pub mod templates;
|
||||
|
||||
#[macro_use]
|
||||
extern crate rocket;
|
||||
|
||||
pub use routes::style;
|
||||
pub use routes::woff2_font;
|
||||
pub use routes::woff_font;
|
|
@@ -1,43 +0,0 @@
|
|||
use super::templates::ErrorTemplate;
|
||||
use rocket::{
|
||||
http::Header,
|
||||
response::{self, Responder},
|
||||
Response,
|
||||
};
|
||||
|
||||
#[derive(Responder)]
|
||||
#[response(status = 200, content_type = "font/woff2")]
|
||||
pub struct RawWoff2Font(pub &'static [u8]);
|
||||
|
||||
#[derive(Responder)]
|
||||
#[response(status = 200, content_type = "font/woff")]
|
||||
pub struct RawWoffFont(pub &'static [u8]);
|
||||
|
||||
#[derive(Responder)]
|
||||
pub struct ErrorTemplateResponder<'a> {
|
||||
template: ErrorTemplate<'a>,
|
||||
}
|
||||
|
||||
pub struct CachedResponse<T> {
|
||||
inner: T,
|
||||
}
|
||||
|
||||
impl<'r, T> Responder<'r, 'static> for CachedResponse<T>
|
||||
where
|
||||
T: Responder<'r, 'static>,
|
||||
{
|
||||
fn respond_to(self, request: &'r rocket::Request<'_>) -> response::Result<'static> {
|
||||
Response::build_from(self.inner.respond_to(request)?)
|
||||
.header(Header::new("Cache-Control", "max-age=31536000, immutable"))
|
||||
.ok()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'r, T> From<T> for CachedResponse<T>
|
||||
where
|
||||
T: Responder<'r, 'static>,
|
||||
{
|
||||
fn from(value: T) -> Self {
|
||||
Self { inner: value }
|
||||
}
|
||||
}
|
|
@@ -1,50 +0,0 @@
|
|||
use crate::{
|
||||
files::get_file_wrapper,
|
||||
responders::{CachedResponse, RawWoff2Font, RawWoffFont},
|
||||
};
|
||||
use rocket::{http::Status, response::content::RawCss};
|
||||
|
||||
#[get("/css/<style>")]
|
||||
pub fn style(style: &str) -> Result<CachedResponse<RawCss<String>>, Status> {
|
||||
let file_wrapper = get_file_wrapper()?;
|
||||
match file_wrapper.hash_filename_get(style) {
|
||||
Some(style) => {
|
||||
if style.metadata.extension != "css" {
|
||||
return Err(Status::NotFound);
|
||||
}
|
||||
match style.get_text() {
|
||||
Some(text) => Ok(RawCss::<String>(text).into()),
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/woff2/<font>")]
|
||||
pub fn woff2_font(font: &str) -> Result<CachedResponse<RawWoff2Font>, Status> {
|
||||
let file_wrapper = get_file_wrapper()?;
|
||||
match file_wrapper.hash_filename_get(font) {
|
||||
Some(font) => {
|
||||
if font.metadata.extension != "woff2" {
|
||||
return Err(Status::NotFound);
|
||||
}
|
||||
Ok(RawWoff2Font(font.get_data()).into())
|
||||
}
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/woff/<font>")]
|
||||
pub fn woff_font(font: &str) -> Result<CachedResponse<RawWoffFont>, Status> {
|
||||
let file_wrapper = get_file_wrapper()?;
|
||||
match file_wrapper.hash_filename_get(font) {
|
||||
Some(font) => {
|
||||
if font.metadata.extension != "woff" {
|
||||
return Err(Status::NotFound);
|
||||
}
|
||||
Ok(RawWoffFont(font.get_data()).into())
|
||||
}
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
|
@@ -1,31 +0,0 @@
|
|||
use askama::Template;
|
||||
use proc_macros::uses_base_template;
|
||||
use shared::names::Site;
|
||||
|
||||
pub struct BaseTemplate {
|
||||
pub hyperlegible_filename: String,
|
||||
pub style_filename: String,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "hyperlegible.css", escape = "none")]
|
||||
pub(super) struct HyperlegibleTemplate {
|
||||
pub atkinson_latin_woff2_filename: String,
|
||||
pub atkinson_latin_ext_woff2_filename: String,
|
||||
pub atkinson_all_woff_filename: String,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "error.html")]
|
||||
#[uses_base_template]
|
||||
pub struct ErrorTemplate<'a> {
|
||||
pub error: &'a str,
|
||||
pub error_description: &'a str,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "index.html")]
|
||||
#[uses_base_template]
|
||||
pub struct IndexTemplate {
|
||||
pub sites: Vec<Site>,
|
||||
}
|
|
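As a side note on the removed template structs above: each derives askama's Template, so rendering is a plain render() call. A minimal self-contained sketch, assuming askama 0.12 and using an inline source string so it compiles without the crate's template files (ErrorSketch is a hypothetical stand-in for ErrorTemplate):

use askama::Template;

// Inline-source variant of the crate's ErrorTemplate, for illustration only.
#[derive(Template)]
#[template(source = "<h1>{{ error }}</h1><p>{{ error_description }}</p>", ext = "html")]
struct ErrorSketch<'a> {
    error: &'a str,
    error_description: &'a str,
}

fn main() {
    let page = ErrorSketch {
        error: "Not Found",
        error_description: "this URL could not be found on the webring.",
    };
    // render() comes from the derived Template impl.
    println!("{}", page.render().unwrap());
}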
@@ -1,16 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<title>Meowy Webring{% block title %}{% endblock %}</title>
|
||||
{% block head %}
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width">
|
||||
<link rel="stylesheet" href="/public/css/{{ base_template.hyperlegible_filename }}" />
|
||||
<link rel="stylesheet" href="/public/css/{{ base_template.style_filename }}" />
|
||||
{% endblock %}
|
||||
</head>
|
||||
<body>
|
||||
{% block content %}
|
||||
{% endblock %}
|
||||
</body>
|
||||
</html>
|
|
@@ -1,10 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %} - {{ error }}{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<main>
|
||||
<h1>{{ error }}</h1>
|
||||
<p>{{ error_description }}</p>
|
||||
</main>
|
||||
{% endblock %}
|
|
@@ -1,43 +0,0 @@
|
|||
@font-face {
|
||||
font-family: Atkinson Hyperlegible;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
font-weight: 400;
|
||||
src: url("/public/woff2/{{ atkinson_latin_ext_woff2_filename }}") format("woff2"),
|
||||
url("/public/woff/{{ atkinson_all_woff_filename }}") format("woff");
|
||||
unicode-range: U+0100-024F,
|
||||
U+0259,
|
||||
U+1E00-1EFF,
|
||||
U+2020,
|
||||
U+20A0-20AB,
|
||||
U+20AD-20CF,
|
||||
U+2113,
|
||||
U+2C60-2C7F,
|
||||
U+A720-A7FF
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Atkinson Hyperlegible;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
font-weight: 400;
|
||||
src: url("/public/woff2/{{ atkinson_latin_woff2_filename }}") format("woff2"),
|
||||
url("/public/woff/{{ atkinson_all_woff_filename }}") format("woff");
|
||||
unicode-range: U+0000-00FF,
|
||||
U+0131,
|
||||
U+0152-0153,
|
||||
U+02BB-02BC,
|
||||
U+02C6,
|
||||
U+02DA,
|
||||
U+02DC,
|
||||
U+2000-206F,
|
||||
U+2074,
|
||||
U+20AC,
|
||||
U+2122,
|
||||
U+2191,
|
||||
U+2193,
|
||||
U+2212,
|
||||
U+2215,
|
||||
U+FEFF,
|
||||
U+FFFD
|
||||
}
|
|
@@ -1,16 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block content %}
|
||||
<main>
|
||||
<h1>Meowy Webring</h1>
|
||||
<h2>Sites</h2>
|
||||
{% for site in sites %}
|
||||
{% match site.name %}
|
||||
{% when Some with (value) %}
|
||||
<p><a href="https://{{ site.url }}">{{ value }}</a></p>
|
||||
{% when None %}
|
||||
<p><a href="https://{{ site.url }}">{{ site.url }}</a></p>
|
||||
{% endmatch %}
|
||||
{% endfor %}
|
||||
</main>
|
||||
{% endblock %}
|
|
@@ -1,13 +0,0 @@
|
|||
[package]
|
||||
name = "proc_macros"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
syn = "2.0"
|
||||
quote = "1.0"
|
|
@@ -1,26 +0,0 @@
|
|||
use proc_macro::TokenStream;
|
||||
use quote::quote;
|
||||
use syn::{parse::Parser, parse_macro_input, DeriveInput};
|
||||
|
||||
#[proc_macro_attribute]
|
||||
pub fn uses_base_template(_attr: TokenStream, item: TokenStream) -> TokenStream {
|
||||
let mut input = parse_macro_input!(item as DeriveInput);
|
||||
|
||||
let base_template_field = syn::Field::parse_named
|
||||
.parse2(quote! {
|
||||
pub base_template: BaseTemplate
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
if let syn::Data::Struct(ref mut struct_data) = &mut input.data {
|
||||
if let syn::Fields::Named(fields) = &mut struct_data.fields {
|
||||
fields.named.push(base_template_field);
|
||||
}
|
||||
quote! {
|
||||
#input
|
||||
}
|
||||
.into()
|
||||
} else {
|
||||
panic!("bad")
|
||||
}
|
||||
}
|
|
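For context on the removed attribute macro above: uses_base_template appends a pub base_template: BaseTemplate field to the annotated struct. A rough sketch of the resulting shape, assuming the BaseTemplate from the removed templates.rs (Vec<String> stands in for the crate's Vec<Site> purely so the example is self-contained):

// What `#[uses_base_template] pub struct IndexTemplate { pub sites: Vec<Site> }`
// roughly expands to: the macro pushes one extra named field onto the struct.
pub struct BaseTemplate {
    pub hyperlegible_filename: String,
    pub style_filename: String,
}

pub struct IndexTemplate {
    pub sites: Vec<String>,          // Vec<Site> in the real crate
    pub base_template: BaseTemplate, // field appended by the attribute macro
}

fn main() {
    let template = IndexTemplate {
        sites: vec!["example.com".into()],
        base_template: BaseTemplate {
            hyperlegible_filename: "hyperlegible.css".into(),
            style_filename: "style.css".into(),
        },
    };
    assert_eq!(template.base_template.style_filename, "style.css");
}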
@@ -1,13 +0,0 @@
|
|||
[package]
|
||||
name = "shared"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
serde_json = "1.0"
|
||||
directories = "5.0"
|
||||
log = "0.4"
|
||||
|
||||
[dependencies.serde]
|
||||
version = "1.0"
|
||||
features = ["derive"]
|
|
@@ -1,45 +0,0 @@
|
|||
use std::path::{Path, PathBuf};
|
||||
|
||||
use directories::ProjectDirs;
|
||||
|
||||
use crate::errors::{Error, ErrorStatus, DIRECTORIES_ERROR_MESSAGE};
|
||||
|
||||
pub fn get_project_dir() -> Result<ProjectDirs, Error> {
|
||||
match ProjectDirs::from("moe", "solarpunk", "meowy-webring") {
|
||||
Some(project) => Ok(project),
|
||||
None => Err(Error {
|
||||
status: ErrorStatus::DirectoriesError,
|
||||
data: DIRECTORIES_ERROR_MESSAGE.into(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_file_from_directory(path: &Path, filename: &str) -> Result<PathBuf, Error> {
|
||||
if !path.exists() {
|
||||
create_directory(path)?;
|
||||
}
|
||||
Ok(path.join(filename))
|
||||
}
|
||||
|
||||
pub fn get_names_path() -> Result<PathBuf, Error> {
|
||||
let directory = get_project_dir()?;
|
||||
return get_file_from_directory(directory.data_dir(), "names.json");
|
||||
}
|
||||
|
||||
pub fn get_names_project_path() -> Result<PathBuf, Error> {
|
||||
let directory = get_project_dir()?;
|
||||
return Ok(directory.data_dir().to_path_buf());
|
||||
}
|
||||
|
||||
fn create_directory(path: &Path) -> Result<(), Error> {
|
||||
match std::fs::create_dir_all(path) {
|
||||
Ok(_) => {
|
||||
log::debug!("created the directory {}", path.display());
|
||||
Ok(())
|
||||
}
|
||||
Err(err) => Err(Error {
|
||||
status: ErrorStatus::IOError,
|
||||
data: err.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
|
@@ -1,41 +0,0 @@
|
|||
#[derive(Debug)]
|
||||
pub enum ErrorStatus {
|
||||
IOError,
|
||||
ParsingError,
|
||||
DirectoriesError,
|
||||
LoggerInitializationError,
|
||||
NotFoundError,
|
||||
AlreadyExistsError,
|
||||
GenericError,
|
||||
}
|
||||
|
||||
pub struct Error {
|
||||
pub status: ErrorStatus,
|
||||
pub data: String,
|
||||
}
|
||||
|
||||
impl Error {
|
||||
fn error_fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"A {:?} error has occured.\nDetails: {}",
|
||||
self.status, self.data
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
pub(crate) static DIRECTORIES_ERROR_MESSAGE: &str = "could not retrieve a valid home path from the operating system. maybe try to define the HOME environment variable if you\'re on a unix or unix-like operating system.";
|
||||
|
||||
impl std::fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.error_fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Error {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.error_fmt(f)
|
||||
}
|
||||
}
|
|
@@ -1,5 +0,0 @@
|
|||
pub mod directories;
|
||||
pub mod errors;
|
||||
pub mod names;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
|
@@ -1,59 +0,0 @@
|
|||
use crate::errors::{Error, ErrorStatus};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::Path;
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct Site {
|
||||
pub url: String,
|
||||
pub name: Option<String>,
|
||||
}
|
||||
|
||||
pub fn load_names(names: &str) -> Result<Vec<Site>, Error> {
|
||||
match serde_json::from_str::<Vec<Site>>(&names) {
|
||||
Ok(content) => {
|
||||
log::debug!("successfully parsed names.json.");
|
||||
Ok(content)
|
||||
}
|
||||
Err(err) => Err(Error {
|
||||
status: ErrorStatus::ParsingError,
|
||||
data: err.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn read_names_file(path: &Path) -> Result<String, Error> {
|
||||
if !path.exists() {
|
||||
log::debug!(
|
||||
"the names.json file does not exist at {}. creating names.json",
|
||||
path.display()
|
||||
);
|
||||
create_names_file(path)?
|
||||
}
|
||||
|
||||
match std::fs::read_to_string(path) {
|
||||
Ok(data) => {
|
||||
log::debug!(
|
||||
"successfully read the names.json file at {}",
|
||||
path.display()
|
||||
);
|
||||
Ok(data)
|
||||
}
|
||||
Err(err) => Err(Error {
|
||||
status: ErrorStatus::IOError,
|
||||
data: err.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn create_names_file(path: &Path) -> Result<(), Error> {
|
||||
match std::fs::write(path, "[]") {
|
||||
Ok(_) => {
|
||||
log::debug!("created a names.json file at {}", path.display());
|
||||
Ok(())
|
||||
}
|
||||
Err(err) => Err(Error {
|
||||
status: ErrorStatus::IOError,
|
||||
data: err.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
|
@@ -1,39 +0,0 @@
|
|||
use crate::{
|
||||
errors::Error,
|
||||
names::{load_names, read_names_file},
|
||||
};
|
||||
use std::{env, error};
|
||||
|
||||
#[test]
|
||||
fn test_name_parsing() -> Result<(), Error> {
|
||||
let names =
|
||||
load_names(r#"[{"url": "sus.com", "name": "sussy"}, {"url": "sussy.com", "name": null}]"#)?;
|
||||
|
||||
assert_eq!(names.len(), 2);
|
||||
assert_eq!(names[0].url, "sus.com");
|
||||
assert_eq!(names[0].name.as_ref().unwrap(), "sussy");
|
||||
assert_eq!(names[1].url, "sussy.com");
|
||||
assert!(names[1].name.is_none());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_parsing() {
|
||||
let no_url_field = load_names(r#"[{"name":""}]"#);
|
||||
let no_fields = load_names(r#"[{}]"#);
|
||||
let trailing_array = load_names("[");
|
||||
|
||||
assert!(no_url_field.is_err());
|
||||
assert!(no_fields.is_err());
|
||||
assert!(trailing_array.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reading_a_non_existent_names_file() -> Result<(), Box<dyn error::Error>> {
|
||||
let temp_file = env::temp_dir().join("meowy-test-names.json");
|
||||
let contents = read_names_file(&temp_file)?;
|
||||
assert_eq!(contents, "[]");
|
||||
std::fs::remove_file(temp_file)?;
|
||||
Ok(())
|
||||
}
|
|
@@ -1,42 +0,0 @@
|
|||
.\" Manpage for meowy-cli
|
||||
|
||||
.TH MEOWY-CLI 1 "05 May 2024" "0.1.0" "meowy-cli man page"
|
||||
.SH NAME
|
||||
meowy-cli \- configure the entries in the meowy-webring sites list
|
||||
.SH SYNOPSIS
|
||||
meowy-cli [options] <command>
|
||||
.SH DESCRIPTION
|
||||
meowy-cli is a program for adding and removing entries in the list of URLs used by the meowy-webring webring software
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B -p, --path <PATH>
|
||||
The path to the names.json file
|
||||
.TP
|
||||
.B -s, --separator <SEPARATOR>
|
||||
A separator string to separate the URL from the name. Defaults to ": ".
|
||||
.TP
|
||||
.B -h, --help
|
||||
Print help
|
||||
.TP
|
||||
.B -v, --version
|
||||
Print version
|
||||
|
||||
.SH SUBCOMMANDS
|
||||
.TP
|
||||
.B print
|
||||
Print the current webring sites and their names
|
||||
.TP
|
||||
.B add
|
||||
Add a site to the webring
|
||||
.TP
|
||||
.B remove
|
||||
Remove a site from the webring
|
||||
.TP
|
||||
.B help
|
||||
Print the output of -h or the help of the given subcommand(s)
|
||||
|
||||
.SH EXIT STATUS
|
||||
.TP
|
||||
.B
|
||||
0
|
||||
Success
|
1 guix.scm
|
@@ -1 +0,0 @@
|
|||
.guix/modules/meowy-webring.scm
|
10 manifest.scm
|
@@ -1,10 +0,0 @@
|
|||
;; What follows is a "manifest" equivalent to the command line you gave.
|
||||
;; You can store it in a file that you may then pass to any 'guix' command
|
||||
;; that accepts a '--manifest' (or '-m') option.
|
||||
|
||||
(specifications->manifest
|
||||
(list "gcc-toolchain"
|
||||
"git"
|
||||
"git-lfs"
|
||||
"rust-cargo"
|
||||
"rust"))
|
72 public/style.css (Normal file)
|
@@ -0,0 +1,72 @@
|
|||
@media (prefers-color-scheme: light) {
|
||||
:root {
|
||||
--background-color: #f6f5f4;
|
||||
--text-color: black;
|
||||
--link-color: darkblue;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
:root {
|
||||
--background-color: #191919;
|
||||
--text-color: #E9E9E9;
|
||||
--link-color: cyan;
|
||||
}
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: "Atkinson Hyperlegible", sans-serif;
|
||||
text-align: center;
|
||||
max-width: 600px;
|
||||
margin: auto;
|
||||
background-color: var(--background-color);
|
||||
color: var(--text-color)
|
||||
}
|
||||
|
||||
p {
|
||||
font-size: 22px;
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Atkinson Hyperlegible;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
font-weight: 400;
|
||||
src: url(/public/woff2/atkinson-hyperlegible-latin-ext-400-normal.woff2) format("woff2"),
|
||||
url(/public/woff/atkinson-hyperlegible-all-400-normal.woff) format("woff");
|
||||
unicode-range: U+0100-024F,
|
||||
U+0259,
|
||||
U+1E00-1EFF,
|
||||
U+2020,
|
||||
U+20A0-20AB,
|
||||
U+20AD-20CF,
|
||||
U+2113,
|
||||
U+2C60-2C7F,
|
||||
U+A720-A7FF
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Atkinson Hyperlegible;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
font-weight: 400;
|
||||
src: url(/public/woff2/atkinson-hyperlegible-latin-400-normal.woff2) format("woff2"),
|
||||
url(/public/woff/atkinson-hyperlegible-all-400-normal.woff) format("woff");
|
||||
unicode-range: U+0000-00FF,
|
||||
U+0131,
|
||||
U+0152-0153,
|
||||
U+02BB-02BC,
|
||||
U+02C6,
|
||||
U+02DA,
|
||||
U+02DC,
|
||||
U+2000-206F,
|
||||
U+2074,
|
||||
U+20AC,
|
||||
U+2122,
|
||||
U+2191,
|
||||
U+2193,
|
||||
U+2212,
|
||||
U+2215,
|
||||
U+FEFF,
|
||||
U+FFFD
|
||||
}
|
63 src/assets.rs (Normal file)
|
@@ -0,0 +1,63 @@
|
|||
use std::borrow::Cow;
|
||||
|
||||
use askama_rocket::Template;
|
||||
use rocket::http::Status;
|
||||
use rust_embed::RustEmbed;
|
||||
|
||||
#[derive(RustEmbed)]
|
||||
#[folder = "public/"]
|
||||
pub struct PublicAssets;
|
||||
|
||||
#[derive(Responder)]
|
||||
#[response(status = 200, content_type = "font/woff2")]
|
||||
pub struct RawWoff2Font(pub Cow<'static, [u8]>);
|
||||
|
||||
#[derive(Responder)]
|
||||
#[response(status = 200, content_type = "font/woff")]
|
||||
pub struct RawWoffFont(pub Cow<'static, [u8]>);
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "error.html")]
|
||||
pub struct ErrorTemplate<'a> {
|
||||
pub error: &'a str,
|
||||
pub error_description: &'a str
|
||||
}
|
||||
|
||||
#[derive(Responder)]
|
||||
pub struct ErrorTemplateResponder<'a> {
|
||||
template: ErrorTemplate<'a>
|
||||
}
|
||||
|
||||
#[get("/style.css")]
|
||||
pub fn style() -> Result<rocket::response::content::RawCss<String>, Status> {
|
||||
let style = PublicAssets::get("style.css").unwrap();
|
||||
match std::str::from_utf8(&style.data) {
|
||||
Ok(style) => Ok(rocket::response::content::RawCss::<String>(style.to_string())),
|
||||
Err(_) => Err(Status::InternalServerError),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/woff2/<font>")]
|
||||
pub fn woff2_font(font: &str) -> Result<RawWoff2Font, Status> {
|
||||
let latin = "atkinson-hyperlegible-latin-400-normal.woff2";
|
||||
let latin_ext = "atkinson-hyperlegible-latin-ext-400-normal.woff2";
|
||||
|
||||
if font == latin {
|
||||
Ok(RawWoff2Font(PublicAssets::get(latin).unwrap().data))
|
||||
} else if font == latin_ext {
|
||||
Ok(RawWoff2Font(PublicAssets::get(latin_ext).unwrap().data))
|
||||
} else {
|
||||
Err(Status::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/woff/<font>")]
|
||||
pub fn woff_font(font: &str) -> Result<RawWoffFont, Status> {
|
||||
let all = "atkinson-hyperlegible-all-400-normal.woff";
|
||||
|
||||
if font == all {
|
||||
Ok(RawWoffFont(PublicAssets::get(all).unwrap().data))
|
||||
} else {
|
||||
Err(Status::NotFound)
|
||||
}
|
||||
}
|
57 src/links.rs
|
@@ -1,57 +1,18 @@
|
|||
use shared::names::Site;
|
||||
static NAMES: [&str; 3] = ["mossfet.xyz", "fries.gay", "ta-kev.digital"];
|
||||
|
||||
trait IndexArithmetic {
|
||||
fn index_add(self, length: usize, num: usize) -> usize;
|
||||
fn index_subtract(self, length: usize, num: usize) -> usize;
|
||||
}
|
||||
|
||||
impl IndexArithmetic for usize {
|
||||
fn index_add(self, length: usize, num: usize) -> usize {
|
||||
if self > (length - 1) {
|
||||
return 0;
|
||||
}
|
||||
return self + num;
|
||||
}
|
||||
|
||||
fn index_subtract(self, length: usize, num: usize) -> usize {
|
||||
match self.checked_sub(num) {
|
||||
Some(num) => num,
|
||||
None => length - 1,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn previous_url(source_url: &String, names: &Vec<Site>) -> Option<String> {
|
||||
match names.iter().position(|r| &r.url == source_url) {
|
||||
Some(index) => Some(names[index.index_subtract(names.len(), 1)].url.clone()),
|
||||
pub fn previous_url(source_url: &String) -> Option<String> {
|
||||
match NAMES.iter().position(|&r| r == source_url) {
|
||||
Some(index) if index == 0 => Some(NAMES[NAMES.len() - 1].to_string()),
|
||||
Some(index) => Some(NAMES[index - 1].to_string()),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn next_url(source_url: &String, names: &Vec<Site>) -> Option<String> {
|
||||
pub fn next_url(source_url: &String) -> Option<String> {
|
||||
// this is gay
|
||||
match names.iter().position(|r| &r.url == source_url) {
|
||||
Some(index) => Some(names[index.index_add(names.len(), 1)].url.clone()),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn previous_name(source_url: &String, names: &Vec<Site>) -> Option<String> {
|
||||
match names.iter().position(|r| &r.url == source_url) {
|
||||
Some(index) => match &names[index.index_subtract(names.len(), 1)].name {
|
||||
Some(name) => Some(name.clone()),
|
||||
None => previous_url(source_url, names),
|
||||
},
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn next_name(source_url: &String, names: &Vec<Site>) -> Option<String> {
|
||||
match names.iter().position(|r| &r.url == source_url) {
|
||||
Some(index) => match &names[index.index_add(names.len(), 1)].name {
|
||||
Some(name) => Some(name.clone()),
|
||||
None => next_url(source_url, names),
|
||||
},
|
||||
match NAMES.iter().position(|&r| r == source_url) {
|
||||
Some(index) if index == NAMES.len() - 1 => Some(NAMES[0].to_string()),
|
||||
Some(index) => Some(NAMES[index + 1].to_string()),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
|
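To make the behaviour of the rewritten links.rs above concrete: the askama branch drops the names.json-backed Site list in favour of a hard-coded NAMES array and open-codes the wrap-around at both ends. A minimal sketch of that neighbour lookup, written against a local slice rather than the crate's static (the unknown.example URL is invented for the demonstration):

// Previous/next lookup with wrap-around, mirroring the askama-branch logic.
fn previous_url(source_url: &str, names: &[&str]) -> Option<String> {
    let index = names.iter().position(|&r| r == source_url)?;
    // Wrap from the first entry back to the last one.
    let prev = if index == 0 { names.len() - 1 } else { index - 1 };
    Some(names[prev].to_string())
}

fn next_url(source_url: &str, names: &[&str]) -> Option<String> {
    let index = names.iter().position(|&r| r == source_url)?;
    // Wrap from the last entry forward to the first one.
    Some(names[(index + 1) % names.len()].to_string())
}

fn main() {
    let names = ["mossfet.xyz", "fries.gay", "ta-kev.digital"];
    assert_eq!(previous_url("mossfet.xyz", &names).as_deref(), Some("ta-kev.digital"));
    assert_eq!(next_url("ta-kev.digital", &names).as_deref(), Some("mossfet.xyz"));
    assert_eq!(next_url("unknown.example", &names), None);
}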
21 src/main.rs
|
@@ -1,23 +1,12 @@
|
|||
use crate::watcher::hot_reloading;
|
||||
use meowy_assets::files::initialize_files;
|
||||
use rocket::tokio;
|
||||
use sites::init_names;
|
||||
|
||||
#[macro_use]
|
||||
extern crate rocket;
|
||||
|
||||
mod assets;
|
||||
mod links;
|
||||
mod responders;
|
||||
mod routes;
|
||||
mod sites;
|
||||
mod watcher;
|
||||
|
||||
#[launch]
|
||||
async fn rocket() -> _ {
|
||||
init_names().unwrap();
|
||||
initialize_files().unwrap();
|
||||
tokio::task::spawn_blocking(hot_reloading);
|
||||
|
||||
fn rocket() -> _ {
|
||||
rocket::build()
|
||||
.mount(
|
||||
"/",
|
||||
|
@@ -26,10 +15,6 @@ async fn rocket() -> _ {
|
|||
.register("/", catchers![routes::not_found])
|
||||
.mount(
|
||||
"/public",
|
||||
routes![
|
||||
meowy_assets::style,
|
||||
meowy_assets::woff2_font,
|
||||
meowy_assets::woff_font
|
||||
],
|
||||
routes![assets::style, assets::woff2_font, assets::woff_font],
|
||||
)
|
||||
}
|
||||
|
|
|
@@ -1,25 +0,0 @@
|
|||
use rocket::{http::Header, response::Responder, Response};
|
||||
|
||||
pub struct CorsResponse<T> {
|
||||
pub inner: T,
|
||||
}
|
||||
|
||||
impl<'r, T> Responder<'r, 'static> for CorsResponse<T>
|
||||
where
|
||||
T: Responder<'r, 'static>,
|
||||
{
|
||||
fn respond_to(self, request: &'r rocket::Request<'_>) -> rocket::response::Result<'static> {
|
||||
Response::build_from(self.inner.respond_to(request)?)
|
||||
.header(Header::new("Access-Control-Allow-Origin", "*"))
|
||||
.ok()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'r, T> From<T> for CorsResponse<T>
|
||||
where
|
||||
T: Responder<'r, 'static>,
|
||||
{
|
||||
fn from(value: T) -> Self {
|
||||
Self { inner: value }
|
||||
}
|
||||
}
|
|
@@ -1,48 +1,14 @@
|
|||
use crate::{
|
||||
links::{next_name, next_url, previous_name, previous_url},
|
||||
responders::CorsResponse,
|
||||
sites::get_global_names,
|
||||
};
|
||||
use meowy_assets::{
|
||||
files::{get_file_wrapper, File},
|
||||
templates::{BaseTemplate, ErrorTemplate, IndexTemplate},
|
||||
};
|
||||
use crate::{links::{next_url, previous_url}, assets::ErrorTemplate};
|
||||
use rocket::{
|
||||
http::Status,
|
||||
response::Redirect,
|
||||
serde::{json::Json, Serialize},
|
||||
};
|
||||
|
||||
fn get_file(filename: &str) -> Result<&File, Status> {
|
||||
let files = get_file_wrapper()?;
|
||||
|
||||
match files.filename_get(filename) {
|
||||
Some(file) => Ok(file),
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_base_template() -> Result<BaseTemplate, Status> {
|
||||
let hyperlegible_filename = get_file("hyperlegible")?.metadata.get_hash_filename();
|
||||
let style_filename = get_file("style")?.metadata.get_hash_filename();
|
||||
|
||||
let template = BaseTemplate {
|
||||
hyperlegible_filename,
|
||||
style_filename,
|
||||
};
|
||||
|
||||
Ok(template)
|
||||
}
|
||||
|
||||
fn not_found_error() -> Result<ErrorTemplate<'static>, Status> {
|
||||
let base_template = get_base_template()?;
|
||||
let template = ErrorTemplate {
|
||||
error: "Not Found",
|
||||
error_description: "this URL could not be found on the webring.",
|
||||
base_template,
|
||||
};
|
||||
Ok(template)
|
||||
}
|
||||
const NOT_FOUND_ERROR: ErrorTemplate = ErrorTemplate {
|
||||
error: "Not Found",
|
||||
error_description: "this URL could not be found on the webring.",
|
||||
};
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
|
@@ -52,35 +18,30 @@ pub struct JsonResponse {
|
|||
}
|
||||
|
||||
#[get("/")]
|
||||
pub async fn index() -> Result<IndexTemplate, Status> {
|
||||
let base_template = get_base_template()?;
|
||||
let template = IndexTemplate {
|
||||
sites: get_global_names().await,
|
||||
base_template,
|
||||
};
|
||||
Ok(template)
|
||||
pub fn index() -> &'static str {
|
||||
"Like, this is a webring, meow!"
|
||||
}
|
||||
|
||||
#[get("/previous?<source_url>")]
|
||||
pub async fn previous(source_url: String) -> Result<Redirect, Status> {
|
||||
match previous_url(&source_url, &get_global_names().await) {
|
||||
pub fn previous(source_url: String) -> Result<Redirect, Status> {
|
||||
match previous_url(&source_url) {
|
||||
Some(url) => Ok(Redirect::to(format!("https://{}", url))),
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/next?<source_url>")]
|
||||
pub async fn next(source_url: String) -> Result<Redirect, Status> {
|
||||
match next_url(&source_url, &get_global_names().await) {
|
||||
pub fn next(source_url: String) -> Result<Redirect, Status> {
|
||||
match next_url(&source_url) {
|
||||
Some(url) => Ok(Redirect::to(format!("https://{}", url))),
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/name?<source_url>")]
|
||||
pub async fn name(source_url: String) -> Result<CorsResponse<Json<JsonResponse>>, Status> {
|
||||
let previous_site_name = previous_name(&source_url, &get_global_names().await);
|
||||
let next_site_name = next_name(&source_url, &get_global_names().await);
|
||||
pub fn name(source_url: String) -> Result<Json<JsonResponse>, Status> {
|
||||
let previous_site_name = previous_url(&source_url);
|
||||
let next_site_name = next_url(&source_url);
|
||||
|
||||
if previous_site_name.is_none() && next_site_name.is_none() {
|
||||
return Err(Status::NotFound);
|
||||
|
@@ -89,11 +50,10 @@ pub async fn name(source_url: String) -> Result<CorsResponse<Json<JsonResponse>>
|
|||
Ok(Json(JsonResponse {
|
||||
previous_site_name,
|
||||
next_site_name,
|
||||
})
|
||||
.into())
|
||||
}))
|
||||
}
|
||||
|
||||
#[catch(404)]
|
||||
pub fn not_found() -> Result<ErrorTemplate<'static>, Status> {
|
||||
not_found_error()
|
||||
pub fn not_found() -> ErrorTemplate<'static> {
|
||||
NOT_FOUND_ERROR
|
||||
}
|
||||
|
|
40 src/sites.rs
|
@@ -1,40 +0,0 @@
|
|||
use rocket::tokio::sync::Mutex;
|
||||
use shared::{
|
||||
directories,
|
||||
errors::{Error, ErrorStatus},
|
||||
names::{self, Site},
|
||||
};
|
||||
use std::sync::OnceLock;
|
||||
|
||||
pub async fn get_global_names() -> Vec<Site> {
|
||||
NAMES.get().unwrap().lock().await.clone()
|
||||
}
|
||||
|
||||
pub fn set_names() {
|
||||
match get_names() {
|
||||
Ok(names) => *NAMES.get().unwrap().blocking_lock() = names,
|
||||
Err(err) => println!("{:?}", err),
|
||||
}
|
||||
}
|
||||
|
||||
static NAMES: OnceLock<Mutex<Vec<Site>>> = OnceLock::new();
|
||||
|
||||
pub fn init_names() -> Result<(), Error> {
|
||||
println!(
|
||||
"names.json path: {}",
|
||||
directories::get_names_path().unwrap().display()
|
||||
);
|
||||
match NAMES.set(Mutex::new(get_names().unwrap())) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(_) => Err(Error {
|
||||
status: ErrorStatus::GenericError,
|
||||
data: "an error has occured while trying to get the names.json file".into(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_names() -> Result<Vec<Site>, Error> {
|
||||
let names_path = directories::get_names_path()?;
|
||||
let names_file = names::read_names_file(&names_path)?;
|
||||
names::load_names(&names_file)
|
||||
}
|
|
@@ -1,32 +0,0 @@
|
|||
use notify::{
|
||||
event::{DataChange, ModifyKind},
|
||||
EventKind, Result, Watcher,
|
||||
};
|
||||
use shared::directories;
|
||||
|
||||
use crate::sites;
|
||||
|
||||
pub(crate) fn hot_reloading() {
|
||||
let (tx, rx) = std::sync::mpsc::channel();
|
||||
let names_path = directories::get_names_project_path().unwrap();
|
||||
let mut watcher = notify::recommended_watcher(tx).unwrap();
|
||||
|
||||
watcher
|
||||
.watch(&names_path, notify::RecursiveMode::NonRecursive)
|
||||
.unwrap();
|
||||
|
||||
for res in rx {
|
||||
watch(res);
|
||||
}
|
||||
}
|
||||
|
||||
fn watch(res: Result<notify::Event>) {
|
||||
match res {
|
||||
Ok(event) => {
|
||||
if event.kind == EventKind::Modify(ModifyKind::Data(DataChange::Any)) {
|
||||
sites::set_names();
|
||||
}
|
||||
}
|
||||
Err(err) => println!("Error: {}", err),
|
||||
}
|
||||
}
|
15 templates/error.html (Normal file)
|
@@ -0,0 +1,15 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width">
|
||||
<title>Meowy Webring - {{ error }}</title>
|
||||
<link rel="stylesheet" href="/public/style.css" />
|
||||
</head>
|
||||
<body>
|
||||
<main>
|
||||
<h1>{{ error }}</h1>
|
||||
<p>{{ error_description }}</p>
|
||||
</main>
|
||||
</body>
|
||||
</html>
|