Compare commits
No commits in common. "main" and "portability" have entirely different histories.
main
...
portability
52 changed files with 816 additions and 7278 deletions
|
@ -1,95 +0,0 @@
|
|||
;;; SPDX-License-Identifier: GPL-3.0-or-later
|
||||
;; Per-directory local variables for GNU Emacs 23 and later.
|
||||
|
||||
((nil
|
||||
. ((fill-column . 78)
|
||||
(tab-width . 8)
|
||||
(sentence-end-double-space . t)))
|
||||
(c-mode . ((c-file-style . "gnu")))
|
||||
(scheme-mode
|
||||
.
|
||||
((indent-tabs-mode . nil)
|
||||
(eval . (put 'eval-when 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-prompt 'scheme-indent-function 1))
|
||||
(eval . (put 'test-assert 'scheme-indent-function 1))
|
||||
(eval . (put 'test-assertm 'scheme-indent-function 1))
|
||||
(eval . (put 'test-equalm 'scheme-indent-function 1))
|
||||
(eval . (put 'test-equal 'scheme-indent-function 1))
|
||||
(eval . (put 'test-eq 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-input-string 'scheme-indent-function 1))
|
||||
(eval . (put 'guard 'scheme-indent-function 1))
|
||||
(eval . (put 'lambda* 'scheme-indent-function 1))
|
||||
(eval . (put 'substitute* 'scheme-indent-function 1))
|
||||
(eval . (put 'match-record 'scheme-indent-function 2))
|
||||
|
||||
;; 'modify-phases' and its keywords.
|
||||
(eval . (put 'modify-phases 'scheme-indent-function 1))
|
||||
(eval . (put 'replace 'scheme-indent-function 1))
|
||||
(eval . (put 'add-before 'scheme-indent-function 2))
|
||||
(eval . (put 'add-after 'scheme-indent-function 2))
|
||||
|
||||
(eval . (put 'modify-services 'scheme-indent-function 1))
|
||||
(eval . (put 'with-directory-excursion 'scheme-indent-function 1))
|
||||
(eval . (put 'package 'scheme-indent-function 0))
|
||||
(eval . (put 'origin 'scheme-indent-function 0))
|
||||
(eval . (put 'build-system 'scheme-indent-function 0))
|
||||
(eval . (put 'bag 'scheme-indent-function 0))
|
||||
(eval . (put 'graft 'scheme-indent-function 0))
|
||||
(eval . (put 'operating-system 'scheme-indent-function 0))
|
||||
(eval . (put 'file-system 'scheme-indent-function 0))
|
||||
(eval . (put 'manifest-entry 'scheme-indent-function 0))
|
||||
(eval . (put 'manifest-pattern 'scheme-indent-function 0))
|
||||
(eval . (put 'substitute-keyword-arguments 'scheme-indent-function 1))
|
||||
(eval . (put 'with-store 'scheme-indent-function 1))
|
||||
(eval . (put 'with-external-store 'scheme-indent-function 1))
|
||||
(eval . (put 'with-error-handling 'scheme-indent-function 0))
|
||||
(eval . (put 'with-mutex 'scheme-indent-function 1))
|
||||
(eval . (put 'with-atomic-file-output 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-compressed-output-port 'scheme-indent-function 2))
|
||||
(eval . (put 'call-with-decompressed-port 'scheme-indent-function 2))
|
||||
(eval . (put 'call-with-gzip-input-port 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-gzip-output-port 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-lzip-input-port 'scheme-indent-function 1))
|
||||
(eval . (put 'call-with-lzip-output-port 'scheme-indent-function 1))
|
||||
(eval . (put 'signature-case 'scheme-indent-function 1))
|
||||
(eval . (put 'emacs-batch-eval 'scheme-indent-function 0))
|
||||
(eval . (put 'emacs-batch-edit-file 'scheme-indent-function 1))
|
||||
(eval . (put 'emacs-substitute-sexps 'scheme-indent-function 1))
|
||||
(eval . (put 'emacs-substitute-variables 'scheme-indent-function 1))
|
||||
(eval . (put 'with-derivation-narinfo 'scheme-indent-function 1))
|
||||
(eval . (put 'with-derivation-substitute 'scheme-indent-function 2))
|
||||
(eval . (put 'with-status-report 'scheme-indent-function 1))
|
||||
(eval . (put 'with-status-verbosity 'scheme-indent-function 1))
|
||||
|
||||
(eval . (put 'mlambda 'scheme-indent-function 1))
|
||||
(eval . (put 'mlambdaq 'scheme-indent-function 1))
|
||||
(eval . (put 'syntax-parameterize 'scheme-indent-function 1))
|
||||
(eval . (put 'with-monad 'scheme-indent-function 1))
|
||||
(eval . (put 'mbegin 'scheme-indent-function 1))
|
||||
(eval . (put 'mwhen 'scheme-indent-function 1))
|
||||
(eval . (put 'munless 'scheme-indent-function 1))
|
||||
(eval . (put 'mlet* 'scheme-indent-function 2))
|
||||
(eval . (put 'mlet 'scheme-indent-function 2))
|
||||
(eval . (put 'run-with-store 'scheme-indent-function 1))
|
||||
(eval . (put 'run-with-state 'scheme-indent-function 1))
|
||||
(eval . (put 'wrap-program 'scheme-indent-function 1))
|
||||
(eval . (put 'with-imported-modules 'scheme-indent-function 1))
|
||||
(eval . (put 'with-extensions 'scheme-indent-function 1))
|
||||
|
||||
(eval . (put 'with-database 'scheme-indent-function 2))
|
||||
(eval . (put 'call-with-transaction 'scheme-indent-function 2))
|
||||
|
||||
(eval . (put 'call-with-container 'scheme-indent-function 1))
|
||||
(eval . (put 'container-excursion 'scheme-indent-function 1))
|
||||
(eval . (put 'eventually 'scheme-indent-function 1))
|
||||
|
||||
(eval . (put 'call-with-progress-reporter 'scheme-indent-function 1))
|
||||
|
||||
;; This notably allows '(' in Paredit to not insert a space when the
|
||||
;; preceding symbol is one of these.
|
||||
(eval . (modify-syntax-entry ?~ "'"))
|
||||
(eval . (modify-syntax-entry ?$ "'"))
|
||||
(eval . (modify-syntax-entry ?+ "'"))))
|
||||
(emacs-lisp-mode . ((indent-tabs-mode . nil)))
|
||||
(texinfo-mode . ((indent-tabs-mode . nil)
|
||||
(fill-column . 72))))
|
@@ -1,9 +0,0 @@
(channel
 (version 0)
 (directory ".guix/modules"))

;;; Local Variables:
;;; mode: scheme
;;; End:

File diff suppressed because it is too large
@@ -1,42 +0,0 @@
(load "crates-io.scm")
(define-module (meowy-webring)
  #:use-module (crates-io)
  #:use-module (guix)
  #:use-module (guix build-system cargo)
  #:use-module (guix git-download)
  #:use-module ((guix licenses) #:prefix license:)
  #:use-module (gnu packages)
  #:use-module (gnu packages crates-io)
  #:use-module (srfi srfi-1))

(define (keep-file? file stat)
  (or (git-predicate (current-source-directory))
      (const #t)))

(define-public meowy-webring
  (package
    (name "meowy-webring")
    (version "0.1.0-git")
    (source (local-file "../.." "meowy-webring-checkout"
                        #:recursive? #t
                        #:select? keep-file?))
    (build-system cargo-build-system)
    (arguments (list #:cargo-inputs `(("rust-askama-rocket" ,rust-askama-rocket-0.12)
                                      ("rust-askama" ,rust-askama-0.12)
                                      ("rust-directories" ,rust-directories-5)
                                      ("rust-embed" ,rust-embed-6)
                                      ("rust-hex" ,rust-hex-0.4)
                                      ("rust-notify" ,rust-notify-6)
                                      ("rust-rocket" ,rust-rocket-0.5)
                                      ("rust-serde" ,rust-serde-1)
                                      ("rust-serde-json" ,rust-serde-json-1)
                                      ("rust-simple-logger" ,rust-simple-logger-4))
                     #:install-source? #f
                     #:phases #~(modify-phases %standard-phases
                                  (delete 'package))))
    (synopsis "")
    (description "")
    (home-page "")
    (license license:cc0)))

meowy-webring
61 .vscode/launch.json vendored
@ -1,62 +1,15 @@
|
|||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "meowy-cli (Print)",
|
||||
"name": "run meowy-cli",
|
||||
"program": "${workspaceFolder}/target/debug/meowy-cli",
|
||||
"args": [
|
||||
"print"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"sourceMap": {},
|
||||
"sourceLanguages": [
|
||||
"rust"
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "meowy-cli (Print) json",
|
||||
"program": "${workspaceFolder}/target/debug/meowy-cli",
|
||||
"args": [
|
||||
"print",
|
||||
"--json"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"sourceMap": {},
|
||||
"sourceLanguages": [
|
||||
"rust"
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "meowy-cli (Print) only url",
|
||||
"program": "${workspaceFolder}/target/debug/meowy-cli",
|
||||
"args": [
|
||||
"print",
|
||||
"--url"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"sourceMap": {},
|
||||
"sourceLanguages": [
|
||||
"rust"
|
||||
],
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "meowy-cli (Print) only name",
|
||||
"program": "${workspaceFolder}/target/debug/meowy-cli",
|
||||
"args": [
|
||||
"print",
|
||||
"--name"
|
||||
],
|
||||
"args": ["print"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"sourceMap": {},
|
||||
"sourceLanguages": [
|
||||
|
978 Cargo.lock generated
File diff suppressed because it is too large
42 Cargo.toml
@@ -1,39 +1,47 @@
[workspace]
members = ["crates/*"]
members = ["cli", "shared"]

[package]
name = "meowy-webring"
version = "0.2.0"
version = "0.1.0"
edition = "2021"
rust-version = "1.70"

[profile.release]
lto = "thin"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
log = "0.4"

[dependencies.rocket]
version = "0.5"
default-features = false
version = "=0.5.0-rc.3"
default_features = false
features = ["json"]

[dependencies.rust-embed]
version = "6"
features = ["debug-embed"]

[dependencies.serde]
version = "1.0"

[dependencies.serde_json]
version = "1.0"

[dependencies.askama_rocket]
git = "https://github.com/djc/askama.git"
package = "askama_rocket"
rev = "b9e51601560398766eac445517fb17c35090a952"
default-features = false

[dependencies.askama]
git = "https://github.com/djc/askama.git"
package = "askama"
rev = "b9e51601560398766eac445517fb17c35090a952"
version = "0.12"
default-features = false

[dependencies.shared]
path = "./crates/shared"
path = "./shared"

[dependencies.simple_logger]
version = "4"
default-features = false

[dependencies.notify]
version = "6"
default-features = false
features = ["macos_fsevent"]

[dependencies.meowy-assets]
path = "./crates/meowy-assets"
64 LICENSE
@ -2,14 +2,14 @@ Creative Commons Legal Code
|
|||
|
||||
CC0 1.0 Universal
|
||||
|
||||
CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
|
||||
LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
|
||||
ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
|
||||
INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
|
||||
REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
|
||||
PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
|
||||
THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
|
||||
HEREUNDER.
|
||||
CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
|
||||
LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
|
||||
ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
|
||||
INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
|
||||
REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
|
||||
PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
|
||||
THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
|
||||
HEREUNDER.
|
||||
|
||||
Statement of Purpose
|
||||
|
||||
|
@ -43,22 +43,22 @@ Related Rights"). Copyright and Related Rights include, but are not
|
|||
limited to, the following:
|
||||
|
||||
i. the right to reproduce, adapt, distribute, perform, display,
|
||||
communicate, and translate a Work;
|
||||
communicate, and translate a Work;
|
||||
ii. moral rights retained by the original author(s) and/or performer(s);
|
||||
iii. publicity and privacy rights pertaining to a person's image or
|
||||
likeness depicted in a Work;
|
||||
likeness depicted in a Work;
|
||||
iv. rights protecting against unfair competition in regards to a Work,
|
||||
subject to the limitations in paragraph 4(a), below;
|
||||
subject to the limitations in paragraph 4(a), below;
|
||||
v. rights protecting the extraction, dissemination, use and reuse of data
|
||||
in a Work;
|
||||
in a Work;
|
||||
vi. database rights (such as those arising under Directive 96/9/EC of the
|
||||
European Parliament and of the Council of 11 March 1996 on the legal
|
||||
protection of databases, and under any national implementation
|
||||
thereof, including any amended or successor version of such
|
||||
directive); and
|
||||
European Parliament and of the Council of 11 March 1996 on the legal
|
||||
protection of databases, and under any national implementation
|
||||
thereof, including any amended or successor version of such
|
||||
directive); and
|
||||
vii. other similar, equivalent or corresponding rights throughout the
|
||||
world based on applicable law or treaty, and any national
|
||||
implementations thereof.
|
||||
world based on applicable law or treaty, and any national
|
||||
implementations thereof.
|
||||
|
||||
2. Waiver. To the greatest extent permitted by, but not in contravention
|
||||
of, applicable law, Affirmer hereby overtly, fully, permanently,
|
||||
|
@ -102,20 +102,20 @@ express Statement of Purpose.
|
|||
4. Limitations and Disclaimers.
|
||||
|
||||
a. No trademark or patent rights held by Affirmer are waived, abandoned,
|
||||
surrendered, licensed or otherwise affected by this document.
|
||||
surrendered, licensed or otherwise affected by this document.
|
||||
b. Affirmer offers the Work as-is and makes no representations or
|
||||
warranties of any kind concerning the Work, express, implied,
|
||||
statutory or otherwise, including without limitation warranties of
|
||||
title, merchantability, fitness for a particular purpose, non
|
||||
infringement, or the absence of latent or other defects, accuracy, or
|
||||
the present or absence of errors, whether or not discoverable, all to
|
||||
the greatest extent permissible under applicable law.
|
||||
warranties of any kind concerning the Work, express, implied,
|
||||
statutory or otherwise, including without limitation warranties of
|
||||
title, merchantability, fitness for a particular purpose, non
|
||||
infringement, or the absence of latent or other defects, accuracy, or
|
||||
the present or absence of errors, whether or not discoverable, all to
|
||||
the greatest extent permissible under applicable law.
|
||||
c. Affirmer disclaims responsibility for clearing rights of other persons
|
||||
that may apply to the Work or any use thereof, including without
|
||||
limitation any person's Copyright and Related Rights in the Work.
|
||||
Further, Affirmer disclaims responsibility for obtaining any necessary
|
||||
consents, permissions or other rights required for any use of the
|
||||
Work.
|
||||
that may apply to the Work or any use thereof, including without
|
||||
limitation any person's Copyright and Related Rights in the Work.
|
||||
Further, Affirmer disclaims responsibility for obtaining any necessary
|
||||
consents, permissions or other rights required for any use of the
|
||||
Work.
|
||||
d. Affirmer understands and acknowledges that Creative Commons is not a
|
||||
party to this document and has no duty or obligation with respect to
|
||||
this CC0 or use of the Work.
|
||||
party to this document and has no duty or obligation with respect to
|
||||
this CC0 or use of the Work.
|
||||
|
@@ -9,7 +9,7 @@ serde_json = "1.0"
log = "0.4"

[dependencies.clap]
version = "4"
version = "4"
features = ["derive"]

[dependencies.shared]
@@ -18,7 +18,6 @@ path = "../shared"
[dependencies.simple_logger]
version = "4"
default-features = false
features = ["stderr"]

[[bin]]
name = "meowy-cli"
@@ -5,12 +5,6 @@ use clap::{arg, command, Args, Parser, Subcommand};
pub(crate) struct Arguments {
    #[arg(help = "the path to the names.json file", long, short)]
    pub(crate) path: Option<String>,
    #[arg(
        long,
        short,
        help = "a separator string to seperate the url from the name. defaults to : with a space after that."
    )]
    pub(crate) separator: Option<String>,
    #[command(subcommand)]
    pub(crate) command: Commands,
}

@@ -23,11 +17,20 @@ pub(crate) enum Commands {
        filter: Option<String>,
        #[command(flatten)]
        group: PrintGroup,
        #[arg(
            long,
            short,
            help = "a seperator character to seperate the url from the name. defaults to ,",
            requires = "url",
            requires = "name"
        )]
        seperator: Option<char>,
        #[arg(
            long,
            short,
            conflicts_with = "url",
            conflicts_with = "name",
            conflicts_with = "seperator",
            help = "print the data out as a json string"
        )]
        json: bool,
141 cli/src/commands.rs Normal file
@ -0,0 +1,141 @@
|
|||
use std::path::Path;
|
||||
|
||||
use shared::errors::ErrorStatus;
|
||||
use shared::names;
|
||||
use shared::{errors::Error, names::Site};
|
||||
|
||||
use crate::arguments::PrintGroup;
|
||||
|
||||
fn group_printing(seperator: &Option<char>, site: &Site, group: &PrintGroup) {
|
||||
let mut string = String::new();
|
||||
let delimiter = seperator.unwrap_or(',');
|
||||
|
||||
if group.url {
|
||||
string += &site.url;
|
||||
}
|
||||
|
||||
if group.name {
|
||||
if !string.is_empty() {
|
||||
string += &format!(
|
||||
"{}{}",
|
||||
delimiter,
|
||||
site.name.as_ref().unwrap_or(&"None".into())
|
||||
)
|
||||
} else {
|
||||
string += &site.name.as_ref().unwrap_or(&"None".into());
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("{}", string);
|
||||
}
|
||||
|
||||
fn json_printing(site: &Site) -> Result<(), Error> {
|
||||
match serde_json::to_string(&site) {
|
||||
Ok(json) => {
|
||||
log::info!("{}", json);
|
||||
Ok(())
|
||||
}
|
||||
Err(err) => Err(Error {
|
||||
status: ErrorStatus::ParsingError,
|
||||
data: err.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn filter_site(
|
||||
site: &Site,
|
||||
json: bool,
|
||||
seperator: &Option<char>,
|
||||
group: &PrintGroup,
|
||||
) -> Result<(), Error> {
|
||||
if json {
|
||||
json_printing(site)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if !group.url && !group.name {
|
||||
log::info!("{:?}", site);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
return Ok(group_printing(seperator, site, group));
|
||||
}
|
||||
|
||||
pub(crate) fn print(
|
||||
path: &Path,
|
||||
filter: &Option<String>,
|
||||
group: &PrintGroup,
|
||||
seperator: &Option<char>,
|
||||
json: bool,
|
||||
) -> Result<(), Error> {
|
||||
let names_file = names::read_names_file(path)?;
|
||||
let mut names = names::load_names(names_file)?;
|
||||
|
||||
if let Some(filter) = filter {
|
||||
names.retain(|f| &f.url == filter);
|
||||
if names.len() == 0 {
|
||||
return Err(Error {
|
||||
status: ErrorStatus::NotFoundError,
|
||||
data: "this url was not found in names.json".into(),
|
||||
});
|
||||
}
|
||||
return filter_site(&names[0], json, seperator, group);
|
||||
}
|
||||
|
||||
for site in names {
|
||||
if json {
|
||||
json_printing(&site)?;
|
||||
continue;
|
||||
}
|
||||
|
||||
if !group.url && !group.name {
|
||||
log::info!("{:?}", site);
|
||||
continue;
|
||||
}
|
||||
|
||||
group_printing(seperator, &site, group);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn add(path: &Path, url: &String, name: &Option<String>) -> Result<(), Error> {
|
||||
let names_file = names::read_names_file(path)?;
|
||||
let mut names = names::load_names(names_file)?;
|
||||
|
||||
if names.iter().any(|site| site.url.contains(url)) {
|
||||
return Err(Error {
|
||||
status: ErrorStatus::AlreadyExistsError,
|
||||
data:
|
||||
"this url already exists in names.json. you can't have more then 1 of the same url."
|
||||
.into(),
|
||||
});
|
||||
}
|
||||
|
||||
let site = Site {
|
||||
url: url.to_string(),
|
||||
name: name.to_owned(),
|
||||
};
|
||||
log::debug!("adding {:?} to {}", site, path.display());
|
||||
names.push(site.clone());
|
||||
let json = serde_json::to_string(&names).unwrap();
|
||||
std::fs::write(path, json).unwrap();
|
||||
log::info!("added {:?} to names.json", site);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn remove(path: &Path, url: &String) -> Result<(), Error> {
|
||||
let names_file = names::read_names_file(path)?;
|
||||
let mut names = names::load_names(names_file)?;
|
||||
|
||||
names.retain(|site| {
|
||||
if &site.url == url {
|
||||
log::info!("removing {:?} from names.json", site);
|
||||
}
|
||||
&site.url != url
|
||||
});
|
||||
let json = serde_json::to_string(&names).unwrap();
|
||||
std::fs::write(path, json).unwrap();
|
||||
|
||||
Ok(())
|
||||
}
|
@@ -1,8 +1,8 @@
use std::path::Path;
use arguments::{Arguments, Commands};
use clap::Parser;
use commands::{add, print, remove};
use shared::{directories, errors::Error};
use std::path::Path;

mod arguments;
mod commands;
@@ -13,7 +13,6 @@ fn main() -> Result<(), Error> {

    let default_path = directories::get_names_path()?;
    let args = Arguments::parse();
    let separator = args.separator.unwrap_or(": ".into());

    let path = match &args.path {
        Some(path) => Path::new(path),
@@ -21,13 +20,9 @@ fn main() -> Result<(), Error> {
    };

    match &args.command {
        Commands::Print {
            filter,
            group,
            json,
        } => print(path, filter, group, &separator, *json)?,
        Commands::Add { url, name } => add(path, url, name, &separator)?,
        Commands::Remove { url } => remove(path, url, &separator)?,
        Commands::Print { filter, group, seperator, json, } => print(path, filter, group, seperator, *json)?,
        Commands::Add { url, name } => add(path, url, name)?,
        Commands::Remove { url } => remove(path, url)?,
    };

    Ok(())
@@ -1,43 +0,0 @@
use shared::{
    errors::{Error, ErrorStatus},
    names::{self, Site},
};
use std::path::Path;

use crate::commands::utils::{site_string, PrintOptions};

pub(crate) fn add(
    path: &Path,
    url: &String,
    name: &Option<String>,
    separator: &String,
) -> Result<(), Error> {
    let names_file = names::read_names_file(path)?;
    let mut names = names::load_names(&names_file)?;

    if names.iter().any(|site| site.url.contains(url)) {
        return Err(Error {
            status: ErrorStatus::AlreadyExistsError,
            data:
                "this url already exists in names.json. you can't have more then 1 of the same url."
                    .into(),
        });
    }

    let site = Site {
        url: url.to_string(),
        name: name.to_owned(),
    };

    log::debug!("adding {:?} to {}", site, path.display());
    names.push(site.clone());

    let json = serde_json::to_string(&names).unwrap();
    std::fs::write(path, json).unwrap();
    println!(
        "added {} to names.json",
        site_string(&site, PrintOptions::All, separator)
    );

    Ok(())
}
@@ -1,8 +0,0 @@
mod add;
mod print;
mod remove;
mod utils;

pub(crate) use add::add;
pub(crate) use print::print;
pub(crate) use remove::remove;
|
@ -1,88 +0,0 @@
|
|||
use crate::{arguments::PrintGroup, commands::utils::site_string};
|
||||
use shared::{
|
||||
errors::{Error, ErrorStatus},
|
||||
names::{self, Site},
|
||||
};
|
||||
use std::path::Path;
|
||||
|
||||
use super::utils::PrintOptions;
|
||||
|
||||
pub(crate) fn print(
|
||||
path: &Path,
|
||||
filter: &Option<String>,
|
||||
group: &PrintGroup,
|
||||
separator: &String,
|
||||
json: bool,
|
||||
) -> Result<(), Error> {
|
||||
let names_file = names::read_names_file(path)?;
|
||||
let mut names = names::load_names(&names_file)?;
|
||||
|
||||
if let Some(filter) = filter {
|
||||
names.retain(|f| &f.url == filter);
|
||||
if names.is_empty() {
|
||||
return Err(Error {
|
||||
status: ErrorStatus::NotFoundError,
|
||||
data: "this url was not found in names.json".into(),
|
||||
});
|
||||
}
|
||||
return filter_site(&names[0], json, separator, group);
|
||||
}
|
||||
|
||||
for site in names {
|
||||
if json {
|
||||
json_printing(&site)?;
|
||||
continue;
|
||||
}
|
||||
|
||||
printing(separator, &site, group);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn filter_site(
|
||||
site: &Site,
|
||||
json: bool,
|
||||
separator: &String,
|
||||
group: &PrintGroup,
|
||||
) -> Result<(), Error> {
|
||||
if json {
|
||||
json_printing(site)?;
|
||||
return Ok(());
|
||||
}
|
||||
printing(separator, site, group);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn json_printing(site: &Site) -> Result<(), Error> {
|
||||
match serde_json::to_string(&site) {
|
||||
Ok(json) => {
|
||||
println!("{}", json);
|
||||
Ok(())
|
||||
}
|
||||
Err(err) => Err(Error {
|
||||
status: ErrorStatus::ParsingError,
|
||||
data: err.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn printing(separator: &String, site: &Site, group: &PrintGroup) {
|
||||
let string = site_string(site, print_group_to_options(group), separator);
|
||||
|
||||
println!("{}", string);
|
||||
}
|
||||
|
||||
fn print_group_to_options(group: &PrintGroup) -> PrintOptions {
|
||||
match group {
|
||||
PrintGroup {
|
||||
url: true,
|
||||
name: false,
|
||||
} => PrintOptions::Url,
|
||||
PrintGroup {
|
||||
url: false,
|
||||
name: true,
|
||||
} => PrintOptions::Name,
|
||||
_ => PrintOptions::All,
|
||||
}
|
||||
}
|
@@ -1,23 +0,0 @@
use shared::{errors::Error, names};
use std::path::Path;

use crate::commands::utils::{site_string, PrintOptions};

pub(crate) fn remove(path: &Path, url: &String, separator: &String) -> Result<(), Error> {
    let names_file = names::read_names_file(path)?;
    let mut names = names::load_names(&names_file)?;

    names.retain(|site| {
        if &site.url == url {
            println!(
                "removing {} from names.json",
                site_string(site, PrintOptions::All, separator)
            );
        }
        &site.url != url
    });
    let json = serde_json::to_string(&names).unwrap();
    std::fs::write(path, json).unwrap();

    Ok(())
}
@@ -1,28 +0,0 @@
use shared::names::Site;

pub(super) enum PrintOptions {
    Url,
    Name,
    All,
}

pub(super) fn site_string(site: &Site, options: PrintOptions, separator: &String) -> String {
    let mut string = String::new();

    if matches!(options, PrintOptions::Url) || matches!(options, PrintOptions::All) {
        string += &site.url;
    }

    if let Some(name) = &site.name {
        if matches!(options, PrintOptions::Url) {
            return string;
        }
        if !string.is_empty() {
            string += &format!("{}{}", separator, name)
        } else {
            string += name;
        }
    }

    string
}
|
@ -1,37 +0,0 @@
|
|||
[package]
|
||||
name = "meowy-assets"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
hex = "0.4"
|
||||
|
||||
[dependencies.rocket]
|
||||
version = "0.5"
|
||||
default-features = false
|
||||
|
||||
[dependencies.askama_rocket]
|
||||
package = "askama_rocket"
|
||||
version = "0.12"
|
||||
default-features = false
|
||||
|
||||
[dependencies.askama]
|
||||
package = "askama"
|
||||
version = "0.12"
|
||||
default-features = false
|
||||
|
||||
[dependencies.rust-embed]
|
||||
version = "6"
|
||||
features = ["debug-embed"]
|
||||
|
||||
[dependencies.sha2]
|
||||
version = "0.10"
|
||||
features = ["asm"]
|
||||
|
||||
[dependencies.shared]
|
||||
path = "../shared"
|
||||
|
||||
[dependencies.proc_macros]
|
||||
path = "../proc-macros"
|
|
@ -1,64 +0,0 @@
|
|||
@media (prefers-color-scheme: light) {
|
||||
:root {
|
||||
--background-color: #f6f5f4;
|
||||
--text-color: black;
|
||||
--link-color: darkblue;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
:root {
|
||||
--background-color: #191919;
|
||||
--text-color: #E9E9E9;
|
||||
--link-color: cyan;
|
||||
}
|
||||
}
|
||||
|
||||
:root {
|
||||
--h1-font-size: 3.225rem;
|
||||
--h2-font-size: 2.825rem;
|
||||
--h3-font-size: 2.225rem;
|
||||
--h4-font-size: 1.665rem;
|
||||
--default-font-size: 1.375rem;
|
||||
--h6-font-size: 1.185rem;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: "Atkinson Hyperlegible", sans-serif;
|
||||
text-align: center;
|
||||
font-size: var(--default-font-size);
|
||||
max-width: 600px;
|
||||
margin: auto;
|
||||
background-color: var(--background-color);
|
||||
color: var(--text-color)
|
||||
}
|
||||
|
||||
|
||||
a {
|
||||
font-size: var(--default-font-size);
|
||||
color: var(--link-color);
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: var(--h1-font-size);
|
||||
}
|
||||
|
||||
h2 {
|
||||
font-size: var(--h2-font-size);
|
||||
}
|
||||
|
||||
h3 {
|
||||
font-size: var(--h3-font-size);
|
||||
}
|
||||
|
||||
h4 {
|
||||
font-size: var(--h4-font-size);
|
||||
}
|
||||
|
||||
h5 {
|
||||
font-size: var(--default-font-size);
|
||||
}
|
||||
|
||||
h6 {
|
||||
font-size: var(--h6-font-size);
|
||||
}
|
|
@ -1,160 +0,0 @@
|
|||
use super::templates::HyperlegibleTemplate;
|
||||
use askama::Template;
|
||||
use rocket::http::Status;
|
||||
use rust_embed::RustEmbed;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::{collections::HashMap, sync::OnceLock};
|
||||
|
||||
#[derive(RustEmbed)]
|
||||
#[folder = "public/"]
|
||||
struct Assets;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Files {
|
||||
file_vector: Vec<File>,
|
||||
hash_filename_dictionary: HashMap<String, usize>,
|
||||
filename_dictionary: HashMap<String, usize>,
|
||||
}
|
||||
|
||||
impl Files {
|
||||
pub fn hash_filename_get(&self, name: &str) -> Option<&File> {
|
||||
match self.hash_filename_dictionary.get(name) {
|
||||
Some(index) => Some(&self.file_vector[*index]),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
pub fn filename_get(&self, name: &str) -> Option<&File> {
|
||||
match self.filename_dictionary.get(name) {
|
||||
Some(index) => Some(&self.file_vector[*index]),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
pub fn insert_file_into_hashmap(&mut self, file: File) {
|
||||
let index = self.file_vector.len();
|
||||
self.hash_filename_dictionary
|
||||
.insert(file.metadata.get_hash_filename(), index);
|
||||
self.filename_dictionary
|
||||
.insert(file.metadata.filename.clone(), index);
|
||||
self.file_vector.push(file);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct FileMetadata {
|
||||
pub filename: String,
|
||||
pub extension: String,
|
||||
pub hash: String,
|
||||
}
|
||||
|
||||
impl FileMetadata {
|
||||
pub fn get_hash_filename(&self) -> String {
|
||||
let mut hash = self.hash.clone();
|
||||
hash.truncate(8);
|
||||
format!("{}.{}.{}", self.filename, hash, self.extension)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct File {
|
||||
pub metadata: FileMetadata,
|
||||
data: Box<[u8]>,
|
||||
}
|
||||
|
||||
impl File {
|
||||
fn get(filename: &str, extension: &str) -> Option<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
match Assets::get(&format!("{}.{}", filename, extension)) {
|
||||
Some(file) => {
|
||||
let metadata = FileMetadata {
|
||||
filename: filename.into(),
|
||||
extension: extension.into(),
|
||||
hash: hex::encode(file.metadata.sha256_hash()),
|
||||
};
|
||||
Some(File {
|
||||
data: file.data.into(),
|
||||
metadata,
|
||||
})
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_data(&self) -> &[u8] {
|
||||
&self.data
|
||||
}
|
||||
|
||||
pub fn get_text(&self) -> Option<String> {
|
||||
match std::str::from_utf8(&self.data) {
|
||||
Ok(str) => Some(str.into()),
|
||||
Err(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub static FILES: OnceLock<Files> = OnceLock::new();
|
||||
|
||||
pub fn get_file_wrapper() -> Result<&'static Files, Status> {
|
||||
match FILES.get() {
|
||||
Some(files) => Ok(files),
|
||||
None => Err(Status::InternalServerError),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_sha256_hash(string: &String) -> String {
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(string);
|
||||
let result = hasher.finalize();
|
||||
hex::encode(result)
|
||||
}
|
||||
|
||||
fn get_hyperlegible(
|
||||
latin_woff2_filename: String,
|
||||
latin_ext_woff2_filename: String,
|
||||
all_woff_filename: String,
|
||||
) -> File {
|
||||
let hyperlegible_template = HyperlegibleTemplate {
|
||||
atkinson_latin_woff2_filename: latin_woff2_filename,
|
||||
atkinson_latin_ext_woff2_filename: latin_ext_woff2_filename,
|
||||
atkinson_all_woff_filename: all_woff_filename,
|
||||
};
|
||||
|
||||
let rendered_template = hyperlegible_template.render().unwrap();
|
||||
let hash = get_sha256_hash(&rendered_template);
|
||||
|
||||
let metadata = FileMetadata {
|
||||
filename: "hyperlegible".into(),
|
||||
extension: "css".into(),
|
||||
hash,
|
||||
};
|
||||
|
||||
File {
|
||||
data: rendered_template.as_bytes().into(),
|
||||
metadata,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn initialize_files() -> Result<(), Files> {
|
||||
let atkinson_latin_woff2 =
|
||||
File::get("atkinson-hyperlegible-latin-400-normal", "woff2").unwrap();
|
||||
let atkinson_latin_ext_woff2 =
|
||||
File::get("atkinson-hyperlegible-latin-ext-400-normal", "woff2").unwrap();
|
||||
let atkinson_all_woff = File::get("atkinson-hyperlegible-all-400-normal", "woff").unwrap();
|
||||
|
||||
let mut files = Files {
|
||||
file_vector: Vec::new(),
|
||||
hash_filename_dictionary: HashMap::new(),
|
||||
filename_dictionary: HashMap::new(),
|
||||
};
|
||||
files.insert_file_into_hashmap(File::get("style", "css").unwrap());
|
||||
files.insert_file_into_hashmap(get_hyperlegible(
|
||||
atkinson_latin_woff2.metadata.get_hash_filename(),
|
||||
atkinson_latin_ext_woff2.metadata.get_hash_filename(),
|
||||
atkinson_all_woff.metadata.get_hash_filename(),
|
||||
));
|
||||
files.insert_file_into_hashmap(atkinson_latin_woff2);
|
||||
files.insert_file_into_hashmap(atkinson_latin_ext_woff2);
|
||||
files.insert_file_into_hashmap(atkinson_all_woff);
|
||||
return FILES.set(files);
|
||||
}
|
@@ -1,11 +0,0 @@
pub mod files;
mod responders;
mod routes;
pub mod templates;

#[macro_use]
extern crate rocket;

pub use routes::style;
pub use routes::woff2_font;
pub use routes::woff_font;
|
@ -1,43 +0,0 @@
|
|||
use super::templates::ErrorTemplate;
|
||||
use rocket::{
|
||||
http::Header,
|
||||
response::{self, Responder},
|
||||
Response,
|
||||
};
|
||||
|
||||
#[derive(Responder)]
|
||||
#[response(status = 200, content_type = "font/woff2")]
|
||||
pub struct RawWoff2Font(pub &'static [u8]);
|
||||
|
||||
#[derive(Responder)]
|
||||
#[response(status = 200, content_type = "font/woff")]
|
||||
pub struct RawWoffFont(pub &'static [u8]);
|
||||
|
||||
#[derive(Responder)]
|
||||
pub struct ErrorTemplateResponder<'a> {
|
||||
template: ErrorTemplate<'a>,
|
||||
}
|
||||
|
||||
pub struct CachedResponse<T> {
|
||||
inner: T,
|
||||
}
|
||||
|
||||
impl<'r, T> Responder<'r, 'static> for CachedResponse<T>
|
||||
where
|
||||
T: Responder<'r, 'static>,
|
||||
{
|
||||
fn respond_to(self, request: &'r rocket::Request<'_>) -> response::Result<'static> {
|
||||
Response::build_from(self.inner.respond_to(request)?)
|
||||
.header(Header::new("Cache-Control", "max-age=31536000, immutable"))
|
||||
.ok()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'r, T> From<T> for CachedResponse<T>
|
||||
where
|
||||
T: Responder<'r, 'static>,
|
||||
{
|
||||
fn from(value: T) -> Self {
|
||||
Self { inner: value }
|
||||
}
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
use crate::{
|
||||
files::get_file_wrapper,
|
||||
responders::{CachedResponse, RawWoff2Font, RawWoffFont},
|
||||
};
|
||||
use rocket::{http::Status, response::content::RawCss};
|
||||
|
||||
#[get("/css/<style>")]
|
||||
pub fn style(style: &str) -> Result<CachedResponse<RawCss<String>>, Status> {
|
||||
let file_wrapper = get_file_wrapper()?;
|
||||
match file_wrapper.hash_filename_get(style) {
|
||||
Some(style) => {
|
||||
if style.metadata.extension != "css" {
|
||||
return Err(Status::NotFound);
|
||||
}
|
||||
match style.get_text() {
|
||||
Some(text) => Ok(RawCss::<String>(text).into()),
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/woff2/<font>")]
|
||||
pub fn woff2_font(font: &str) -> Result<CachedResponse<RawWoff2Font>, Status> {
|
||||
let file_wrapper = get_file_wrapper()?;
|
||||
match file_wrapper.hash_filename_get(font) {
|
||||
Some(font) => {
|
||||
if font.metadata.extension != "woff2" {
|
||||
return Err(Status::NotFound);
|
||||
}
|
||||
Ok(RawWoff2Font(font.get_data()).into())
|
||||
}
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/woff/<font>")]
|
||||
pub fn woff_font(font: &str) -> Result<CachedResponse<RawWoffFont>, Status> {
|
||||
let file_wrapper = get_file_wrapper()?;
|
||||
match file_wrapper.hash_filename_get(font) {
|
||||
Some(font) => {
|
||||
if font.metadata.extension != "woff" {
|
||||
return Err(Status::NotFound);
|
||||
}
|
||||
Ok(RawWoffFont(font.get_data()).into())
|
||||
}
|
||||
None => Err(Status::NotFound),
|
||||
}
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
use askama::Template;
|
||||
use proc_macros::uses_base_template;
|
||||
use shared::names::Site;
|
||||
|
||||
pub struct BaseTemplate {
|
||||
pub hyperlegible_filename: String,
|
||||
pub style_filename: String,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "hyperlegible.css", escape = "none")]
|
||||
pub(super) struct HyperlegibleTemplate {
|
||||
pub atkinson_latin_woff2_filename: String,
|
||||
pub atkinson_latin_ext_woff2_filename: String,
|
||||
pub atkinson_all_woff_filename: String,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "error.html")]
|
||||
#[uses_base_template]
|
||||
pub struct ErrorTemplate<'a> {
|
||||
pub error: &'a str,
|
||||
pub error_description: &'a str,
|
||||
}
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "index.html")]
|
||||
#[uses_base_template]
|
||||
pub struct IndexTemplate {
|
||||
pub sites: Vec<Site>,
|
||||
}
|
@@ -1,16 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <title>Meowy Webring{% block title %}{% endblock %}</title>
    {% block head %}
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width">
    <link rel="stylesheet" href="/public/css/{{ base_template.hyperlegible_filename }}" />
    <link rel="stylesheet" href="/public/css/{{ base_template.style_filename }}" />
    {% endblock %}
</head>
<body>
    {% block content %}
    {% endblock %}
</body>
</html>
@@ -1,10 +0,0 @@
{% extends "base.html" %}

{% block title %} - {{ error }}{% endblock %}

{% block content %}
<main>
    <h1>{{ error }}</h1>
    <p>{{ error_description }}</p>
</main>
{% endblock %}
|
@ -1,43 +0,0 @@
|
|||
@font-face {
|
||||
font-family: Atkinson Hyperlegible;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
font-weight: 400;
|
||||
src: url("/public/woff2/{{ atkinson_latin_ext_woff2_filename }}") format("woff2"),
|
||||
url("/public/woff/{{ atkinson_all_woff_filename }}") format("woff");
|
||||
unicode-range: U+0100-024F,
|
||||
U+0259,
|
||||
U+1E00-1EFF,
|
||||
U+2020,
|
||||
U+20A0-20AB,
|
||||
U+20AD-20CF,
|
||||
U+2113,
|
||||
U+2C60-2C7F,
|
||||
U+A720-A7FF
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Atkinson Hyperlegible;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
font-weight: 400;
|
||||
src: url("/public/woff2/{{ atkinson_latin_woff2_filename }}") format("woff2"),
|
||||
url("/public/woff/{{ atkinson_all_woff_filename }}") format("woff");
|
||||
unicode-range: U+0000-00FF,
|
||||
U+0131,
|
||||
U+0152-0153,
|
||||
U+02BB-02BC,
|
||||
U+02C6,
|
||||
U+02DA,
|
||||
U+02DC,
|
||||
U+2000-206F,
|
||||
U+2074,
|
||||
U+20AC,
|
||||
U+2122,
|
||||
U+2191,
|
||||
U+2193,
|
||||
U+2212,
|
||||
U+2215,
|
||||
U+FEFF,
|
||||
U+FFFD
|
||||
}
|
@@ -1,16 +0,0 @@
{% extends "base.html" %}

{% block content %}
<main>
    <h1>Meowy Webring</h1>
    <h2>Sites</h2>
    {% for site in sites %}
    {% match site.name %}
    {% when Some with (value) %}
    <p><a href="https://{{ site.url }}">{{ value }}</a></p>
    {% when None %}
    <p><a href="https://{{ site.url }}">{{ site.url }}</a></p>
    {% endmatch %}
    {% endfor %}
</main>
{% endblock %}
@@ -1,13 +0,0 @@
[package]
name = "proc_macros"
version = "0.1.0"
edition = "2021"

[lib]
proc-macro = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
syn = "2.0"
quote = "1.0"
@@ -1,26 +0,0 @@
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse::Parser, parse_macro_input, DeriveInput};

#[proc_macro_attribute]
pub fn uses_base_template(_attr: TokenStream, item: TokenStream) -> TokenStream {
    let mut input = parse_macro_input!(item as DeriveInput);

    let base_template_field = syn::Field::parse_named
        .parse2(quote! {
            pub base_template: BaseTemplate
        })
        .unwrap();

    if let syn::Data::Struct(ref mut struct_data) = &mut input.data {
        if let syn::Fields::Named(fields) = &mut struct_data.fields {
            fields.named.push(base_template_field);
        }
        quote! {
            #input
        }
        .into()
    } else {
        panic!("bad")
    }
}
|
@ -1,39 +0,0 @@
|
|||
use crate::{
|
||||
errors::Error,
|
||||
names::{load_names, read_names_file},
|
||||
};
|
||||
use std::{env, error};
|
||||
|
||||
#[test]
|
||||
fn test_name_parsing() -> Result<(), Error> {
|
||||
let names =
|
||||
load_names(r#"[{"url": "sus.com", "name": "sussy"}, {"url": "sussy.com", "name": null}]"#)?;
|
||||
|
||||
assert_eq!(names.len(), 2);
|
||||
assert_eq!(names[0].url, "sus.com");
|
||||
assert_eq!(names[0].name.as_ref().unwrap(), "sussy");
|
||||
assert_eq!(names[1].url, "sussy.com");
|
||||
assert!(names[1].name.is_none());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_parsing() {
|
||||
let no_url_field = load_names(r#"[{"name":""}]"#);
|
||||
let no_fields = load_names(r#"[{}]"#);
|
||||
let trailing_array = load_names("[");
|
||||
|
||||
assert!(no_url_field.is_err());
|
||||
assert!(no_fields.is_err());
|
||||
assert!(trailing_array.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reading_a_non_existent_names_file() -> Result<(), Box<dyn error::Error>> {
|
||||
let temp_file = env::temp_dir().join("meowy-test-names.json");
|
||||
let contents = read_names_file(&temp_file)?;
|
||||
assert_eq!(contents, "[]");
|
||||
std::fs::remove_file(temp_file)?;
|
||||
Ok(())
|
||||
}
|
|
@ -1,42 +0,0 @@
|
|||
.\" Manpage for meowy-cli
|
||||
|
||||
.TH man 1 "05 May 2024" "0.1.0" "meowy-cli man page"
|
||||
.SH NAME
|
||||
meowy-cli \- configure the entries in the meowy-webring sites list
|
||||
.SH SYNOPSIS
|
||||
meowy-cli [options] <command>
|
||||
.SH DESCRIPTION
|
||||
meowy-cli is a program for adding and removing entries in the list of URLs used by the meowy-webring webring software
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B -p, --path <PATH>
|
||||
The path to the names.json file
|
||||
.TP
|
||||
.B -s, --separator <SEPARATOR>
|
||||
A separator string to separate the URL from the name. Defaults to ": ".
|
||||
.TP
|
||||
.B -h, --help
|
||||
Print help
|
||||
.TP
|
||||
.B -v, --version
|
||||
Print version
|
||||
|
||||
.SH SUBCOMMANDS
|
||||
.TP
|
||||
.B print
|
||||
Print the current webring sites and their names
|
||||
.TP
|
||||
.B add
|
||||
Add a site to the webring
|
||||
.TP
|
||||
.B remove
|
||||
Remove a site from the webring
|
||||
.TP
|
||||
.B help
|
||||
Print the output of -h or the help of the given subcommand(s)
|
||||
|
||||
.SH EXIT STATUS
|
||||
.TP
|
||||
.B
|
||||
0
|
||||
Success
|
1 guix.scm
@@ -1 +0,0 @@
.guix/modules/meowy-webring.scm
10 manifest.scm
@@ -1,10 +0,0 @@
;; What follows is a "manifest" equivalent to the command line you gave.
;; You can store it in a file that you may then pass to any 'guix' command
;; that accepts a '--manifest' (or '-m') option.

(specifications->manifest
 (list "gcc-toolchain"
       "git"
       "git-lfs"
       "rust-cargo"
       "rust"))
72 public/style.css Normal file
@ -0,0 +1,72 @@
|
|||
@media (prefers-color-scheme: light) {
|
||||
:root {
|
||||
--background-color: #f6f5f4;
|
||||
--text-color: black;
|
||||
--link-color: darkblue;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
:root {
|
||||
--background-color: #191919;
|
||||
--text-color: #E9E9E9;
|
||||
--link-color: cyan;
|
||||
}
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: "Atkinson Hyperlegible", sans-serif;
|
||||
text-align: center;
|
||||
max-width: 600px;
|
||||
margin: auto;
|
||||
background-color: var(--background-color);
|
||||
color: var(--text-color)
|
||||
}
|
||||
|
||||
p {
|
||||
font-size: 22px;
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Atkinson Hyperlegible;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
font-weight: 400;
|
||||
src: url(/public/woff2/atkinson-hyperlegible-latin-ext-400-normal.woff2) format("woff2"),
|
||||
url(/public/woff/atkinson-hyperlegible-all-400-normal.woff) format("woff");
|
||||
unicode-range: U+0100-024F,
|
||||
U+0259,
|
||||
U+1E00-1EFF,
|
||||
U+2020,
|
||||
U+20A0-20AB,
|
||||
U+20AD-20CF,
|
||||
U+2113,
|
||||
U+2C60-2C7F,
|
||||
U+A720-A7FF
|
||||
}
|
||||
|
||||
@font-face {
|
||||
font-family: Atkinson Hyperlegible;
|
||||
font-style: normal;
|
||||
font-display: swap;
|
||||
font-weight: 400;
|
||||
src: url(/public/woff2/atkinson-hyperlegible-latin-400-normal.woff2) format("woff2"),
|
||||
url(/public/woff/atkinson-hyperlegible-all-400-normal.woff) format("woff");
|
||||
unicode-range: U+0000-00FF,
|
||||
U+0131,
|
||||
U+0152-0153,
|
||||
U+02BB-02BC,
|
||||
U+02C6,
|
||||
U+02DA,
|
||||
U+02DC,
|
||||
U+2000-206F,
|
||||
U+2074,
|
||||
U+20AC,
|
||||
U+2122,
|
||||
U+2191,
|
||||
U+2193,
|
||||
U+2212,
|
||||
U+2215,
|
||||
U+FEFF,
|
||||
U+FFFD
|
||||
}
|
@@ -26,11 +26,6 @@ pub fn get_names_path() -> Result<PathBuf, Error> {
    return get_file_from_directory(directory.data_dir(), "names.json");
}

pub fn get_names_project_path() -> Result<PathBuf, Error> {
    let directory = get_project_dir()?;
    return Ok(directory.data_dir().to_path_buf());
}

fn create_directory(path: &Path) -> Result<(), Error> {
    match std::fs::create_dir_all(path) {
        Ok(_) => {
@@ -5,8 +5,7 @@ pub enum ErrorStatus {
    DirectoriesError,
    LoggerInitializationError,
    NotFoundError,
    AlreadyExistsError,
    GenericError,
    AlreadyExistsError
}

pub struct Error {
@@ -14,8 +13,10 @@ pub struct Error {
    pub data: String,
}

impl Error {
    fn error_fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
pub(crate) static DIRECTORIES_ERROR_MESSAGE: &str = "could not retreive a valid home path from the operating system. maybe try to define the HOME enviroment variable if you\'re on a unix or unix like operating system.";

impl core::fmt::Debug for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "A {:?} error has occured.\nDetails: {}",
@@ -23,19 +24,3 @@ impl Error {
        )
    }
}

impl std::error::Error for Error {}

pub(crate) static DIRECTORIES_ERROR_MESSAGE: &str = "could not retreive a valid home path from the operating system. maybe try to define the HOME enviroment variable if you\'re on a unix or unix like operating system.";

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.error_fmt(f)
    }
}

impl std::fmt::Debug for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.error_fmt(f)
    }
}
@@ -1,5 +1,3 @@
pub mod directories;
pub mod errors;
pub mod names;
#[cfg(test)]
mod tests;
pub mod errors;
pub mod directories;
@@ -8,7 +8,7 @@ pub struct Site {
    pub name: Option<String>,
}

pub fn load_names(names: &str) -> Result<Vec<Site>, Error> {
pub fn load_names(names: String) -> Result<Vec<Site>, Error> {
    match serde_json::from_str::<Vec<Site>>(&names) {
        Ok(content) => {
            log::debug!("successfully parsed names.json.");
63 src/assets.rs Normal file
@ -0,0 +1,63 @@
|
|||
use std::borrow::Cow;
|
||||
|
||||
use askama_rocket::Template;
|
||||
use rocket::http::Status;
|
||||
use rust_embed::RustEmbed;
|
||||
|
||||
#[derive(RustEmbed)]
|
||||
#[folder = "public/"]
|
||||
pub struct PublicAssets;
|
||||
|
||||
#[derive(Responder)]
|
||||
#[response(status = 200, content_type = "font/woff2")]
|
||||
pub struct RawWoff2Font(pub Cow<'static, [u8]>);
|
||||
|
||||
#[derive(Responder)]
|
||||
#[response(status = 200, content_type = "font/woff")]
|
||||
pub struct RawWoffFont(pub Cow<'static, [u8]>);
|
||||
|
||||
#[derive(Template)]
|
||||
#[template(path = "error.html")]
|
||||
pub struct ErrorTemplate<'a> {
|
||||
pub error: &'a str,
|
||||
pub error_description: &'a str
|
||||
}
|
||||
|
||||
#[derive(Responder)]
|
||||
pub struct ErrorTemplateResponder<'a> {
|
||||
template: ErrorTemplate<'a>
|
||||
}
|
||||
|
||||
#[get("/style.css")]
|
||||
pub fn style() -> Result<rocket::response::content::RawCss<String>, Status> {
|
||||
let style = PublicAssets::get("style.css").unwrap();
|
||||
match std::str::from_utf8(&style.data) {
|
||||
Ok(style) => Ok(rocket::response::content::RawCss::<String>(style.to_string())),
|
||||
Err(_) => Err(Status::InternalServerError),
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/woff2/<font>")]
|
||||
pub fn woff2_font(font: &str) -> Result<RawWoff2Font, Status> {
|
||||
let latin = "atkinson-hyperlegible-latin-400-normal.woff2";
|
||||
let latin_ext = "atkinson-hyperlegible-latin-ext-400-normal.woff2";
|
||||
|
||||
if font == latin {
|
||||
Ok(RawWoff2Font(PublicAssets::get(latin).unwrap().data))
|
||||
} else if font == latin_ext {
|
||||
Ok(RawWoff2Font(PublicAssets::get(latin_ext).unwrap().data))
|
||||
} else {
|
||||
Err(Status::NotFound)
|
||||
}
|
||||
}
|
||||
|
||||
#[get("/woff/<font>")]
|
||||
pub fn woff_font(font: &str) -> Result<RawWoffFont, Status> {
|
||||
let all = "atkinson-hyperlegible-all-400-normal.woff";
|
||||
|
||||
if font == all {
|
||||
Ok(RawWoffFont(PublicAssets::get(all).unwrap().data))
|
||||
} else {
|
||||
Err(Status::NotFound)
|
||||
}
|
||||
}
|
47 src/links.rs
@ -1,29 +1,9 @@
|
|||
use shared::names::Site;
|
||||
|
||||
trait IndexArithmetic {
|
||||
fn index_add(self, length: usize, num: usize) -> usize;
|
||||
fn index_subtract(self, length: usize, num: usize) -> usize;
|
||||
}
|
||||
|
||||
impl IndexArithmetic for usize {
|
||||
fn index_add(self, length: usize, num: usize) -> usize {
|
||||
if self > (length - 1) {
|
||||
return 0;
|
||||
}
|
||||
return self + num;
|
||||
}
|
||||
|
||||
fn index_subtract(self, length: usize, num: usize) -> usize {
|
||||
match self.checked_sub(num) {
|
||||
Some(num) => num,
|
||||
None => length - 1,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn previous_url(source_url: &String, names: &Vec<Site>) -> Option<String> {
|
||||
match names.iter().position(|r| &r.url == source_url) {
|
||||
Some(index) => Some(names[index.index_subtract(names.len(), 1)].url.clone()),
|
||||
Some(index) if index == 0 => Some(names[names.len() - 1].url.to_string()),
|
||||
Some(index) => Some(names[index - 1].url.to_string()),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
@ -31,27 +11,10 @@ pub fn previous_url(source_url: &String, names: &Vec<Site>) -> Option<String> {
|
|||
pub fn next_url(source_url: &String, names: &Vec<Site>) -> Option<String> {
|
||||
// this is gay
|
||||
match names.iter().position(|r| &r.url == source_url) {
|
||||
Some(index) => Some(names[index.index_add(names.len(), 1)].url.clone()),
|
||||
Some(index) if index == names.len() - 1 => Some(names[0].url.to_string()),
|
||||
Some(index) => Some(names[index + 1].url.to_string()),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn previous_name(source_url: &String, names: &Vec<Site>) -> Option<String> {
|
||||
match names.iter().position(|r| &r.url == source_url) {
|
||||
Some(index) => match &names[index.index_subtract(names.len(), 1)].name {
|
||||
Some(name) => Some(name.clone()),
|
||||
None => previous_url(source_url, names),
|
||||
},
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn next_name(source_url: &String, names: &Vec<Site>) -> Option<String> {
|
||||
match names.iter().position(|r| &r.url == source_url) {
|
||||
Some(index) => match &names[index.index_add(names.len(), 1)].name {
|
||||
Some(name) => Some(name.clone()),
|
||||
None => next_url(source_url, names),
|
||||
},
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
// TODO: previous_name, next_name
|
||||
|
25 src/main.rs
@ -1,24 +1,21 @@
|
|||
use crate::watcher::hot_reloading;
|
||||
use meowy_assets::files::initialize_files;
|
||||
use rocket::tokio;
|
||||
use sites::init_names;
|
||||
use shared::{directories, names};
|
||||
|
||||
#[macro_use]
|
||||
extern crate rocket;
|
||||
|
||||
mod assets;
|
||||
mod links;
|
||||
mod responders;
|
||||
mod routes;
|
||||
mod sites;
|
||||
mod watcher;
|
||||
|
||||
#[launch]
|
||||
async fn rocket() -> _ {
|
||||
init_names().unwrap();
|
||||
initialize_files().unwrap();
|
||||
tokio::task::spawn_blocking(hot_reloading);
|
||||
fn rocket() -> _ {
|
||||
let names_path = directories::get_names_path().unwrap();
|
||||
println!("names.json path: {}", names_path.display());
|
||||
let names_file = names::read_names_file(&names_path).unwrap();
|
||||
let names = names::load_names(names_file).unwrap();
|
||||
|
||||
rocket::build()
|
||||
.manage(names)
|
||||
.mount(
|
||||
"/",
|
||||
routes![routes::index, routes::previous, routes::next, routes::name],
|
||||
|
@ -26,10 +23,6 @@ async fn rocket() -> _ {
|
|||
.register("/", catchers![routes::not_found])
|
||||
.mount(
|
||||
"/public",
|
||||
routes![
|
||||
meowy_assets::style,
|
||||
meowy_assets::woff2_font,
|
||||
meowy_assets::woff_font
|
||||
],
|
||||
routes![assets::style, assets::woff2_font, assets::woff_font],
|
||||
)
|
||||
}
|
||||
|
|
|
@@ -1,25 +0,0 @@

use rocket::{http::Header, response::Responder, Response};

pub struct CorsResponse<T> {
    pub inner: T,
}

impl<'r, T> Responder<'r, 'static> for CorsResponse<T>
where
    T: Responder<'r, 'static>,
{
    fn respond_to(self, request: &'r rocket::Request<'_>) -> rocket::response::Result<'static> {
        Response::build_from(self.inner.respond_to(request)?)
            .header(Header::new("Access-Control-Allow-Origin", "*"))
            .ok()
    }
}

impl<'r, T> From<T> for CorsResponse<T>
where
    T: Responder<'r, 'static>,
{
    fn from(value: T) -> Self {
        Self { inner: value }
    }
}
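Because the From<T> impl covers every responder, a handler on the main branch can opt into the CORS header simply by converting its usual return value. A small usage sketch, assuming the module is reachable as crate::responders; the /ping route below is illustrative and not part of the diff:

use rocket::serde::json::Json;

use crate::responders::CorsResponse;

// Illustrative route: wrapping the Json responder adds
// "Access-Control-Allow-Origin: *" to the response headers.
#[get("/ping")]
fn ping() -> CorsResponse<Json<&'static str>> {
    // CorsResponse::from(Json("pong")) and .into() are equivalent here
    Json("pong").into()
}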
@@ -1,48 +1,17 @@

use crate::{
    links::{next_name, next_url, previous_name, previous_url},
    responders::CorsResponse,
    sites::get_global_names,
};
use meowy_assets::{
    files::{get_file_wrapper, File},
    templates::{BaseTemplate, ErrorTemplate, IndexTemplate},
};
use crate::{links::{next_url, previous_url}, assets::ErrorTemplate};

use rocket::{
    http::Status,
    response::Redirect,
    serde::{json::Json, Serialize},
    State,
};
use shared::names::Site;

fn get_file(filename: &str) -> Result<&File, Status> {
    let files = get_file_wrapper()?;

    match files.filename_get(filename) {
        Some(file) => Ok(file),
        None => Err(Status::NotFound),
    }
}

fn get_base_template() -> Result<BaseTemplate, Status> {
    let hyperlegible_filename = get_file("hyperlegible")?.metadata.get_hash_filename();
    let style_filename = get_file("style")?.metadata.get_hash_filename();

    let template = BaseTemplate {
        hyperlegible_filename,
        style_filename,
    };

    Ok(template)
}

fn not_found_error() -> Result<ErrorTemplate<'static>, Status> {
    let base_template = get_base_template()?;
    let template = ErrorTemplate {
        error: "Not Found",
        error_description: "this URL could not be found on the webring.",
        base_template,
    };
    Ok(template)
}
const NOT_FOUND_ERROR: ErrorTemplate = ErrorTemplate {
    error: "Not Found",
    error_description: "this URL could not be found on the webring.",
};

#[derive(Serialize)]
#[serde(crate = "rocket::serde")]

@@ -52,35 +21,32 @@ pub struct JsonResponse {

}

#[get("/")]
pub async fn index() -> Result<IndexTemplate, Status> {
    let base_template = get_base_template()?;
    let template = IndexTemplate {
        sites: get_global_names().await,
        base_template,
    };
    Ok(template)
pub fn index() -> &'static str {
    "Like, this is a webring, meow!"
}

#[get("/previous?<source_url>")]
pub async fn previous(source_url: String) -> Result<Redirect, Status> {
    match previous_url(&source_url, &get_global_names().await) {
pub fn previous(source_url: String, names: &State<Vec<Site>>) -> Result<Redirect, Status> {
    let names = names.inner();
    match previous_url(&source_url, names) {
        Some(url) => Ok(Redirect::to(format!("https://{}", url))),
        None => Err(Status::NotFound),
    }
}

#[get("/next?<source_url>")]
pub async fn next(source_url: String) -> Result<Redirect, Status> {
    match next_url(&source_url, &get_global_names().await) {
pub fn next(source_url: String, names: &State<Vec<Site>>) -> Result<Redirect, Status> {
    let names = names.inner();
    match next_url(&source_url, names) {
        Some(url) => Ok(Redirect::to(format!("https://{}", url))),
        None => Err(Status::NotFound),
    }
}

#[get("/name?<source_url>")]
pub async fn name(source_url: String) -> Result<CorsResponse<Json<JsonResponse>>, Status> {
    let previous_site_name = previous_name(&source_url, &get_global_names().await);
    let next_site_name = next_name(&source_url, &get_global_names().await);
pub fn name(source_url: String, names: &State<Vec<Site>>) -> Result<Json<JsonResponse>, Status> {
    let previous_site_name = previous_url(&source_url, names);
    let next_site_name = next_url(&source_url, names);

    if previous_site_name.is_none() && next_site_name.is_none() {
        return Err(Status::NotFound);

@@ -89,11 +55,10 @@ pub async fn name(source_url: String) -> Result<CorsResponse<Json<JsonResponse>>

    Ok(Json(JsonResponse {
        previous_site_name,
        next_site_name,
    })
    .into())
    }))
}

#[catch(404)]
pub fn not_found() -> Result<ErrorTemplate<'static>, Status> {
    not_found_error()
pub fn not_found() -> ErrorTemplate<'static> {
    NOT_FOUND_ERROR
}

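A quick way to exercise these handlers is Rocket's local blocking client. A sketch of a test module that could sit at the bottom of src/main.rs, assuming the portability-side synchronous rocket() builder is in scope; "example.com" is a placeholder for a URL that actually appears in names.json:

#[cfg(test)]
mod tests {
    use rocket::http::Status;
    use rocket::local::blocking::Client;

    #[test]
    fn next_redirects_for_a_known_url() {
        // build the application the same way the launch entry point does
        let client = Client::tracked(super::rocket()).expect("valid rocket instance");

        // placeholder source_url; substitute one that exists in names.json
        let response = client.get("/next?source_url=example.com").dispatch();

        // Redirect::to(...) answers with 303 See Other plus a Location header
        assert_eq!(response.status(), Status::SeeOther);
        assert!(response.headers().contains("Location"));
    }
}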

40  src/sites.rs

@@ -1,40 +0,0 @@

use rocket::tokio::sync::Mutex;
use shared::{
    directories,
    errors::{Error, ErrorStatus},
    names::{self, Site},
};
use std::sync::OnceLock;

pub async fn get_global_names() -> Vec<Site> {
    NAMES.get().unwrap().lock().await.clone()
}

pub fn set_names() {
    match get_names() {
        Ok(names) => *NAMES.get().unwrap().blocking_lock() = names,
        Err(err) => println!("{:?}", err),
    }
}

static NAMES: OnceLock<Mutex<Vec<Site>>> = OnceLock::new();

pub fn init_names() -> Result<(), Error> {
    println!(
        "names.json path: {}",
        directories::get_names_path().unwrap().display()
    );
    match NAMES.set(Mutex::new(get_names().unwrap())) {
        Ok(_) => Ok(()),
        Err(_) => Err(Error {
            status: ErrorStatus::GenericError,
            data: "an error has occured while trying to get the names.json file".into(),
        }),
    }
}

fn get_names() -> Result<Vec<Site>, Error> {
    let names_path = directories::get_names_path()?;
    let names_file = names::read_names_file(&names_path)?;
    names::load_names(&names_file)
}
@@ -1,32 +0,0 @@

use notify::{
    event::{DataChange, ModifyKind},
    EventKind, Result, Watcher,
};
use shared::directories;

use crate::sites;

pub(crate) fn hot_reloading() {
    let (tx, rx) = std::sync::mpsc::channel();
    let names_path = directories::get_names_project_path().unwrap();
    let mut watcher = notify::recommended_watcher(tx).unwrap();

    watcher
        .watch(&names_path, notify::RecursiveMode::NonRecursive)
        .unwrap();

    for res in rx {
        watch(res);
    }
}

fn watch(res: Result<notify::Event>) {
    match res {
        Ok(event) => {
            if event.kind == EventKind::Modify(ModifyKind::Data(DataChange::Any)) {
                sites::set_names();
            }
        }
        Err(err) => println!("Error: {}", err),
    }
}

15  templates/error.html  (new file)

@@ -0,0 +1,15 @@

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width">
    <title>Meowy Webring - {{ error }}</title>
    <link rel="stylesheet" href="/public/style.css" />
</head>
<body>
    <main>
        <h1>{{ error }}</h1>
        <p>{{ error_description }}</p>
    </main>
</body>
</html>