6 Commits
next ... master

Author SHA1 Message Date
1007b75069 downgrade some stuff 2022-05-13 14:38:36 +01:00
0e7a4d02de ? 2022-05-13 14:21:34 +01:00
37e5c50800 added message content intent 2022-05-13 14:11:39 +01:00
88255032de update for new database models 2022-05-13 10:03:45 +01:00
b88d046846 block mentions in todo command 2021-11-15 07:55:37 +00:00
f9c110ffb7 removed unreleased code. fixed offset command 2021-11-07 23:36:07 +00:00
198 changed files with 5637 additions and 81890 deletions

.gitignore

@@ -2,4 +2,6 @@
 .env
 /venv
 .cargo
+assets
+out.json
 /.idea

@@ -1,2 +0,0 @@
printWidth = 90
tabWidth = 4

Cargo.lock (generated, 2963 changed lines): file diff suppressed because it is too large.


@@ -1,58 +1,29 @@
 [package]
-name = "reminder-rs"
-version = "1.6.40"
-authors = ["Jude Southworth <judesouthworth@pm.me>"]
-edition = "2021"
-license = "AGPL-3.0 only"
-description = "Reminder Bot for Discord, now in Rust"
+name = "reminder_rs"
+version = "1.5.2"
+authors = ["jellywx <judesouthworth@pm.me>"]
+edition = "2018"
 [dependencies]
-poise = "0.5"
+dashmap = "4.0"
 dotenv = "0.15"
-humantime = "2.1"
 tokio = { version = "1", features = ["process", "full"] }
 reqwest = "0.11"
-lazy-regex = "3.0"
-regex = "1.9"
+regex = "1.4"
 log = "0.4"
-env_logger = "0.10"
+env_logger = "0.8"
 chrono = "0.4"
-chrono-tz = { version = "0.8", features = ["serde"] }
+chrono-tz = "0.5"
 lazy_static = "1.4"
 num-integer = "0.1"
 serde = "1.0"
 serde_json = "1.0"
-serde_repr = "0.1"
-rmp-serde = "1.1"
-rand = "0.8"
+rand = "0.7"
+Inflector = "0.11"
 levenshtein = "1.0"
-sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "macros", "mysql", "bigdecimal", "chrono", "migrate"]}
-base64 = "0.21.0"
-[dependencies.postman]
-path = "postman"
-[dependencies.reminder_web]
-path = "web"
-[package.metadata.deb]
-depends = "$auto, python3-dateparser (>= 1.0.0)"
-suggests = "mysql-server-8.0, nginx"
-maintainer-scripts = "debian"
-assets = [
-["target/release/reminder-rs", "usr/bin/reminder-rs", "755"],
-["conf/default.env", "etc/reminder-rs/config.env", "600"],
-["conf/Rocket.toml", "etc/reminder-rs/Rocket.toml", "600"],
-["web/static/**/*", "lib/reminder-rs/static", "644"],
-["web/templates/**/*", "lib/reminder-rs/templates", "644"],
-["healthcheck", "lib/reminder-rs/healthcheck", "755"],
-["cron.d/reminder_health", "etc/cron.d/reminder_health", "644"],
-# ["nginx/reminder-rs", "etc/nginx/sites-available/reminder-rs", "755"]
-]
-conf-files = [
-"/etc/reminder-rs/config.env",
-"/etc/reminder-rs/Rocket.toml",
-]
-[package.metadata.deb.systemd-units]
-unit-scripts = "systemd"
-start = false
+serenity = { git = "https://github.com/jellywx/serenity", branch = "jellywx-attachment_option", features = ["collector", "unstable_discord_api"] }
+sqlx = { version = "0.5", features = ["runtime-tokio-rustls", "macros", "mysql", "bigdecimal", "chrono"]}
+[dependencies.regex_command_attr]
+path = "./regex_command_attr"

@@ -1,9 +0,0 @@
FROM ubuntu:20.04
ENV RUSTUP_HOME=/usr/local/rustup \
CARGO_HOME=/usr/local/cargo \
PATH=/usr/local/cargo/bin:$PATH
RUN apt update && DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt install -y gcc gcc-multilib cmake pkg-config libssl-dev curl mysql-client-8.0
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --no-modify-path --profile minimal --default-toolchain nightly
RUN cargo install cargo-deb


@@ -1,42 +1,26 @@
 # reminder-rs
-Reminder Bot for Discord.
+Reminder Bot for Discord, now in Rust.
+Old Python version: https://github.com/reminder-bot/bot
 ## How do I use it?
-I offer a hosted version of the bot. You can invite it with: **https://invite.reminder-bot.com**. The catch is that repeating
+We offer a hosted version of the bot. You can invite it with: **https://invite.reminder-bot.com**. The catch is that repeating
 reminders are paid on the hosted version of the bot. Keep reading if you want to host it yourself.
 You'll need rustc and cargo for compilation. To run, you'll need Python 3 still (due to no suitable replacement for dateparser in Rust)
-### Build APT package
-Recommended method.
-By default, this builds targeting Ubuntu 20.04. Modify the Containerfile if you wish to target a different platform. These instructions are written using `podman`, but `docker` should work too.
-1. Install container software: `sudo apt install podman`.
-2. Install database server: `sudo apt install mysql-server-8.0`. Create a database called `reminders`
-3. Install SQLx CLI: `cargo install sqlx-cli`
-4. From the source code directory, execute `sqlx migrate run`
-5. Build container image: `podman build -t reminder-rs .`
-6. Build with podman: `podman run --rm --network=host -v "$PWD":/mnt -w /mnt -e "DATABASE_URL=mysql://user@localhost/reminders" reminder-rs cargo deb`
-### Compiling for other target
-1. Install requirements:
-`sudo apt install gcc gcc-multilib cmake libssl-dev build-essential python3-dateparser`
-2. Install rustup from https://rustup.rs
-3. Install the nightly toolchain: `rustup toolchain default nightly`
-4. Install database server: `sudo apt install mysql-server-8.0`. Create a database called `reminders`.
-5. Install `sqlx-cli`: `cargo install sqlx-cli`.
-6. Run migrations: `sqlx migrate run`.
-7. Set environment variables:
-* `DATABASE_URL` - the URL of your MySQL database (`mysql://user[:password]@domain/database`)
-8. Build: `cargo build --release`
-### Configuring
+### Compiling
+Reminder Bot can be built by running `cargo build --release` in the top level directory. It is necessary to create a folder called 'assets' containing an image file with its name specified in the environment as `WEBHOOK_AVATAR`, of dimensions 128x128px to be used as the webhook avatar.
+#### Compilation environment variables
+These environment variables must be provided when compiling the bot
+* `DATABASE_URL` - the URL of your MySQL database (`mysql://user[:password]@domain/database`)
+* `WEBHOOK_AVATAR` - accepts the name of an image file located in `$CARGO_MANIFEST_DIR/assets/` to be used as the avatar when creating webhooks. **IMPORTANT: image file must be 128x128 or smaller in size**
+* `STRINGS_FILE` - accepts the name of a compiled strings file located in `$CARGO_MANIFEST_DIR/assets/` to be used for creating messages. Compiled string files can be generated with `compile.py` at https://github.com/reminder-bot/languages
+### Setting up Python
+Reminder Bot by default looks for a venv within it's working directory to run Python out of. To set up a venv, install `python3-venv` and run `python3 -m venv venv`. Then, run `source venv/bin/activate` to activate the venv, and do `pip install dateparser` to install the required library
+### Environment Variables
 Reminder Bot reads a number of environment variables. Some are essential, and others have hardcoded fallbacks. Environment variables can be loaded from a .env file in the working directory.
 __Required Variables__
@@ -45,8 +29,16 @@ __Required Variables__
 __Other Variables__
 * `MIN_INTERVAL` - default `600`, defines the shortest interval the bot should accept
+* `MAX_TIME` - default `1576800000`, defines the maximum time ahead that reminders can be set for
 * `LOCAL_TIMEZONE` - default `UTC`, necessary for calculations in the natural language processor
+* `DEFAULT_PREFIX` - default `$`, used for the default prefix on new guilds
 * `SUBSCRIPTION_ROLES` - default `None`, accepts a list of Discord role IDs that are given to subscribed users
 * `CNC_GUILD` - default `None`, accepts a single Discord guild ID for the server that the subscription roles belong to
-* `PYTHON_LOCATION` - default `/usr/bin/python3`. Can be changed if your Python executable is located somewhere else
+* `IGNORE_BOTS` - default `1`, if `1`, Reminder Bot will ignore all other bots
+* `PYTHON_LOCATION` - default `venv/bin/python3`. Can be changed if your Python executable is located somewhere else
+* `LOCAL_LANGUAGE` - default `EN`. Specifies the string set to fall back to if a string cannot be found (and to be used with new users)
 * `THEME_COLOR` - default `8fb677`. Specifies the hex value of the color to use on info message embeds
+* `CASE_INSENSITIVE` - default `1`, if `1`, commands will be treated with case insensitivity (so both `$help` and `$HELP` will work)
+* `SHARD_COUNT` - default `None`, accepts the number of shards that are being ran
+* `SHARD_RANGE` - default `None`, if `SHARD_COUNT` is specified, specifies what range of shards to start on this process
+* `DM_ENABLED` - default `1`, if `1`, Reminder Bot will respond to direct messages
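As a rough illustration only (not code from this repository), the runtime variables documented above could be loaded with the `dotenv` crate that both versions of Cargo.toml list; the `Config` struct and `load_config` function are invented names for this sketch.

// Minimal sketch: read the documented variables, applying the hardcoded
// fallbacks the README describes (MIN_INTERVAL = 600, LOCAL_TIMEZONE = UTC,
// THEME_COLOR = 8fb677). Illustrative only; not the bot's actual code.
use std::env;

struct Config {
    database_url: String,   // required, e.g. mysql://user:password@localhost/reminders
    min_interval: u64,      // optional, falls back to 600
    local_timezone: String, // optional, falls back to "UTC"
    theme_color: u32,       // optional, falls back to 0x8fb677
}

fn load_config() -> Config {
    // Variables may come from the process environment or a .env file in the
    // working directory, as described above.
    dotenv::dotenv().ok();

    Config {
        database_url: env::var("DATABASE_URL").expect("DATABASE_URL must be set"),
        min_interval: env::var("MIN_INTERVAL")
            .ok()
            .and_then(|v| v.parse().ok())
            .unwrap_or(600),
        local_timezone: env::var("LOCAL_TIMEZONE").unwrap_or_else(|_| "UTC".to_string()),
        theme_color: env::var("THEME_COLOR")
            .ok()
            .and_then(|v| u32::from_str_radix(&v, 16).ok())
            .unwrap_or(0x8fb677),
    }
}

fn main() {
    let config = load_config();
    println!("minimum reminder interval: {}s", config.min_interval);
    println!("local timezone: {}", config.local_timezone);
}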

@@ -1,28 +0,0 @@
[default]
address = "0.0.0.0"
port = 18920
template_dir = "web/templates"
limits = { json = "10MiB" }
[debug]
secret_key = "tR8krio5FXTnnyIZNiJDXPondz0kI1v6X6BXZcBGIRY="
[debug.tls]
certs = "web/private/rsa_sha256_cert.pem"
key = "web/private/rsa_sha256_key.pem"
[debug.rsa_sha256.tls]
certs = "web/private/rsa_sha256_cert.pem"
key = "web/private/rsa_sha256_key.pem"
[debug.ecdsa_nistp256_sha256.tls]
certs = "web/private/ecdsa_nistp256_sha256_cert.pem"
key = "web/private/ecdsa_nistp256_sha256_key_pkcs8.pem"
[debug.ecdsa_nistp384_sha384.tls]
certs = "web/private/ecdsa_nistp384_sha384_cert.pem"
key = "web/private/ecdsa_nistp384_sha384_key_pkcs8.pem"
[debug.ed25519.tls]
certs = "web/private/ed25519_cert.pem"
key = "eb/private/ed25519_key.pem"

Binary file not shown (removed image, 21 KiB).

@@ -1,3 +0,0 @@
fn main() {
println!("cargo:rerun-if-changed=migrations");
}

@@ -1,8 +0,0 @@
[default]
address = "127.0.0.1"
port = 18920
template_dir = "/lib/reminder-rs/templates"
limits = { json = "10MiB" }
[release]
# secret_key = ""

@@ -1,19 +0,0 @@
DATABASE_URL=
DISCORD_TOKEN=
PATREON_GUILD_ID=
PATREON_ROLE_ID=
LOCAL_TIMEZONE=
MIN_INTERVAL=
PYTHON_LOCATION=/usr/bin/python3
DONTRUN=
SECRET_KEY=
REMIND_INTERVAL=
OAUTH2_DISCORD_CALLBACK=
OAUTH2_CLIENT_ID=
OAUTH2_CLIENT_SECRET=
REPORT_EMAIL=
LOG_TO_DATABASE=1


@@ -1,6 +1,10 @@
+CREATE DATABASE IF NOT EXISTS reminders;
 SET FOREIGN_KEY_CHECKS=0;
-CREATE TABLE guilds (
+USE reminders;
+CREATE TABLE reminders.guilds (
 id INT UNSIGNED UNIQUE NOT NULL AUTO_INCREMENT,
 guild BIGINT UNSIGNED UNIQUE NOT NULL,
@@ -14,10 +18,10 @@ CREATE TABLE guilds (
 default_avatar VARCHAR(512) DEFAULT 'https://raw.githubusercontent.com/reminder-bot/logos/master/Remind_Me_Bot_Logo_PPic.jpg' NOT NULL,
 PRIMARY KEY (id),
-FOREIGN KEY (default_channel_id) REFERENCES channels(id) ON DELETE SET NULL
+FOREIGN KEY (default_channel_id) REFERENCES reminders.channels(id) ON DELETE SET NULL
 );
-CREATE TABLE channels (
+CREATE TABLE reminders.channels (
 id INT UNSIGNED UNIQUE NOT NULL AUTO_INCREMENT,
 channel BIGINT UNSIGNED UNIQUE NOT NULL,
@@ -35,10 +39,10 @@ CREATE TABLE channels (
 guild_id INT UNSIGNED,
 PRIMARY KEY (id),
-FOREIGN KEY (guild_id) REFERENCES guilds(id) ON DELETE CASCADE
+FOREIGN KEY (guild_id) REFERENCES reminders.guilds(id) ON DELETE CASCADE
 );
-CREATE TABLE users (
+CREATE TABLE reminders.users (
 id INT UNSIGNED AUTO_INCREMENT UNIQUE NOT NULL,
 user BIGINT UNSIGNED UNIQUE NOT NULL,
@@ -55,10 +59,10 @@ CREATE TABLE users (
 patreon BOOLEAN NOT NULL DEFAULT 0,
 PRIMARY KEY (id),
-FOREIGN KEY (dm_channel) REFERENCES channels(id) ON DELETE RESTRICT
+FOREIGN KEY (dm_channel) REFERENCES reminders.channels(id) ON DELETE RESTRICT
 );
-CREATE TABLE roles (
+CREATE TABLE reminders.roles (
 id INT UNSIGNED UNIQUE NOT NULL AUTO_INCREMENT,
 role BIGINT UNSIGNED UNIQUE NOT NULL,
@@ -67,10 +71,10 @@ CREATE TABLE roles (
 guild_id INT UNSIGNED NOT NULL,
 PRIMARY KEY (id),
-FOREIGN KEY (guild_id) REFERENCES guilds(id) ON DELETE CASCADE
+FOREIGN KEY (guild_id) REFERENCES reminders.guilds(id) ON DELETE CASCADE
 );
-CREATE TABLE embeds (
+CREATE TABLE reminders.embeds (
 id INT UNSIGNED AUTO_INCREMENT UNIQUE NOT NULL,
 title VARCHAR(256) NOT NULL DEFAULT '',
@@ -87,7 +91,7 @@ CREATE TABLE embeds (
 PRIMARY KEY (id)
 );
-CREATE TABLE embed_fields (
+CREATE TABLE reminders.embed_fields (
 id INT UNSIGNED AUTO_INCREMENT UNIQUE NOT NULL,
 title VARCHAR(256) NOT NULL DEFAULT '',
@@ -96,10 +100,10 @@ CREATE TABLE embed_fields (
 embed_id INT UNSIGNED NOT NULL,
 PRIMARY KEY (id),
-FOREIGN KEY (embed_id) REFERENCES embeds(id) ON DELETE CASCADE
+FOREIGN KEY (embed_id) REFERENCES reminders.embeds(id) ON DELETE CASCADE
 );
-CREATE TABLE messages (
+CREATE TABLE reminders.messages (
 id INT UNSIGNED AUTO_INCREMENT UNIQUE NOT NULL,
 content VARCHAR(2048) NOT NULL DEFAULT '',
@@ -110,10 +114,10 @@ CREATE TABLE messages (
 attachment_name VARCHAR(260),
 PRIMARY KEY (id),
-FOREIGN KEY (embed_id) REFERENCES embeds(id) ON DELETE SET NULL
+FOREIGN KEY (embed_id) REFERENCES reminders.embeds(id) ON DELETE SET NULL
 );
-CREATE TABLE reminders (
+CREATE TABLE reminders.reminders (
 id INT UNSIGNED AUTO_INCREMENT UNIQUE NOT NULL,
 uid VARCHAR(64) UNIQUE NOT NULL,
@@ -136,20 +140,20 @@ CREATE TABLE reminders (
 set_by INT UNSIGNED,
 PRIMARY KEY (id),
-FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE RESTRICT,
-FOREIGN KEY (channel_id) REFERENCES channels(id) ON DELETE CASCADE,
-FOREIGN KEY (set_by) REFERENCES users(id) ON DELETE SET NULL
+FOREIGN KEY (message_id) REFERENCES reminders.messages(id) ON DELETE RESTRICT,
+FOREIGN KEY (channel_id) REFERENCES reminders.channels(id) ON DELETE CASCADE,
+FOREIGN KEY (set_by) REFERENCES reminders.users(id) ON DELETE SET NULL
 );
-CREATE TRIGGER message_cleanup AFTER DELETE ON reminders
+CREATE TRIGGER message_cleanup AFTER DELETE ON reminders.reminders
 FOR EACH ROW
-DELETE FROM messages WHERE id = OLD.message_id;
-CREATE TRIGGER embed_cleanup AFTER DELETE ON messages
+DELETE FROM reminders.messages WHERE id = OLD.message_id;
+CREATE TRIGGER embed_cleanup AFTER DELETE ON reminders.messages
 FOR EACH ROW
-DELETE FROM embeds WHERE id = OLD.embed_id;
-CREATE TABLE todos (
+DELETE FROM reminders.embeds WHERE id = OLD.embed_id;
+CREATE TABLE reminders.todos (
 id INT UNSIGNED AUTO_INCREMENT UNIQUE NOT NULL,
 user_id INT UNSIGNED,
 guild_id INT UNSIGNED,
@@ -157,23 +161,23 @@ CREATE TABLE todos (
 value VARCHAR(2000) NOT NULL,
 PRIMARY KEY (id),
-FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL,
-FOREIGN KEY (guild_id) REFERENCES guilds(id) ON DELETE CASCADE,
-FOREIGN KEY (channel_id) REFERENCES channels(id) ON DELETE SET NULL
+FOREIGN KEY (user_id) REFERENCES reminders.users(id) ON DELETE SET NULL,
+FOREIGN KEY (guild_id) REFERENCES reminders.guilds(id) ON DELETE CASCADE,
+FOREIGN KEY (channel_id) REFERENCES reminders.channels(id) ON DELETE SET NULL
 );
-CREATE TABLE command_restrictions (
+CREATE TABLE reminders.command_restrictions (
 id INT UNSIGNED AUTO_INCREMENT UNIQUE NOT NULL,
 role_id INT UNSIGNED NOT NULL,
 command ENUM('todos', 'natural', 'remind', 'interval', 'timer', 'del', 'look', 'alias', 'countdown') NOT NULL,
 PRIMARY KEY (id),
-FOREIGN KEY (role_id) REFERENCES roles(id) ON DELETE CASCADE,
+FOREIGN KEY (role_id) REFERENCES reminders.roles(id) ON DELETE CASCADE,
 UNIQUE KEY (`role_id`, `command`)
 );
-CREATE TABLE timers (
+CREATE TABLE reminders.timers (
 id INT UNSIGNED AUTO_INCREMENT UNIQUE NOT NULL,
 start_time TIMESTAMP NOT NULL DEFAULT NOW(),
 name VARCHAR(32) NOT NULL,
@@ -182,7 +186,7 @@ CREATE TABLE timers (
 PRIMARY KEY (id)
 );
-CREATE TABLE events (
+CREATE TABLE reminders.events (
 id INT UNSIGNED AUTO_INCREMENT UNIQUE NOT NULL,
 `time` TIMESTAMP NOT NULL DEFAULT NOW(),
@@ -194,12 +198,12 @@ CREATE TABLE events (
 reminder_id INT UNSIGNED,
 PRIMARY KEY (id),
-FOREIGN KEY (guild_id) REFERENCES guilds(id) ON DELETE CASCADE,
-FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE SET NULL,
-FOREIGN KEY (reminder_id) REFERENCES reminders(id) ON DELETE SET NULL
+FOREIGN KEY (guild_id) REFERENCES reminders.guilds(id) ON DELETE CASCADE,
+FOREIGN KEY (user_id) REFERENCES reminders.users(id) ON DELETE SET NULL,
+FOREIGN KEY (reminder_id) REFERENCES reminders.reminders(id) ON DELETE SET NULL
 );
-CREATE TABLE command_aliases (
+CREATE TABLE reminders.command_aliases (
 id INT UNSIGNED AUTO_INCREMENT UNIQUE NOT NULL,
 guild_id INT UNSIGNED NOT NULL,
@@ -208,22 +212,22 @@ CREATE TABLE command_aliases (
 command VARCHAR(2048) NOT NULL,
 PRIMARY KEY (id),
-FOREIGN KEY (guild_id) REFERENCES guilds(id) ON DELETE CASCADE,
+FOREIGN KEY (guild_id) REFERENCES reminders.guilds(id) ON DELETE CASCADE,
 UNIQUE KEY (`guild_id`, `name`)
 );
-CREATE TABLE guild_users (
+CREATE TABLE reminders.guild_users (
 guild INT UNSIGNED NOT NULL,
 user INT UNSIGNED NOT NULL,
 can_access BOOL NOT NULL DEFAULT 0,
-FOREIGN KEY (guild) REFERENCES guilds(id) ON DELETE CASCADE,
-FOREIGN KEY (user) REFERENCES users(id) ON DELETE CASCADE,
+FOREIGN KEY (guild) REFERENCES reminders.guilds(id) ON DELETE CASCADE,
+FOREIGN KEY (user) REFERENCES reminders.users(id) ON DELETE CASCADE,
 UNIQUE KEY (guild, user)
 );
-CREATE EVENT event_cleanup
+CREATE EVENT reminders.event_cleanup
 ON SCHEDULE AT CURRENT_TIMESTAMP + INTERVAL 1 DAY
 ON COMPLETION PRESERVE
-DO DELETE FROM events WHERE `time` < DATE_SUB(NOW(), INTERVAL 5 DAY);
+DO DELETE FROM reminders.events WHERE `time` < DATE_SUB(NOW(), INTERVAL 5 DAY);

@@ -1 +0,0 @@
*/10 * * * * reminder /lib/reminder-rs/healthcheck

debian/postinst

@@ -1,9 +0,0 @@
#!/bin/bash
set -e
id -u reminder &>/dev/null || useradd -r -M reminder
chown -R reminder /etc/reminder-rs
#DEBHELPER#

debian/postrm

@@ -1,7 +0,0 @@
#!/bin/bash
set -e
id -u reminder &>/dev/null || userdel reminder
#DEBHELPER#

@@ -1,13 +0,0 @@
#!/bin/bash
export $(grep -v '^#' /etc/reminder-rs/config.env | xargs -d '\n')
REGEX='mysql://([A-Za-z]+)@(.+)/(.+)'
[[ $DATABASE_URL =~ $REGEX ]]
VAR=$(mysql -u "${BASH_REMATCH[1]}" -h "${BASH_REMATCH[2]}" -N -D "${BASH_REMATCH[3]}" -e "SELECT COUNT(1) FROM reminders WHERE utc_time < NOW() - INTERVAL 10 MINUTE AND enabled = 1 AND status = 'pending'")
if [ "$VAR" -gt 0 ]
then
echo "This is to inform that there is a reminder backlog which must be resolved." | mail -s "Backlog: $VAR" "$REPORT_EMAIL"
fi


@@ -1,3 +1,5 @@
+USE reminders;
 SET FOREIGN_KEY_CHECKS = 0;
 DROP TABLE IF EXISTS reminders_new;
@@ -54,7 +56,8 @@ CREATE TABLE reminders_new (
 -- , CONSTRAINT interval_enabled_mutin CHECK (`enabled` = 1 OR `interval` IS NULL)
 # disallow an expiry time if interval is unspecified
 -- , CONSTRAINT interval_expires_mutin CHECK (`expires` IS NULL OR `interval` IS NOT NULL)
-);
+)
+COLLATE utf8mb4_unicode_ci;
 # import data from other tables
 INSERT INTO reminders_new (
@@ -155,9 +158,4 @@ CREATE TABLE events (
 FOREIGN KEY (reminder_id) REFERENCES reminders_new(id) ON DELETE SET NULL
 );
-DROP TABLE reminders;
-DROP TABLE embed_fields;
-RENAME TABLE reminders_new TO reminders;
-RENAME TABLE embed_fields_new TO embed_fields;
 SET FOREIGN_KEY_CHECKS = 1;

@@ -1,11 +0,0 @@
CREATE TABLE macro (
id INT UNSIGNED AUTO_INCREMENT,
guild_id INT UNSIGNED NOT NULL,
name VARCHAR(100) NOT NULL,
description VARCHAR(100),
commands TEXT NOT NULL,
FOREIGN KEY (guild_id) REFERENCES guilds(id) ON DELETE CASCADE,
PRIMARY KEY (id)
);

@@ -1,2 +0,0 @@
ALTER TABLE reminders RENAME COLUMN `interval` TO `interval_seconds`;
ALTER TABLE reminders ADD COLUMN `interval_months` INT UNSIGNED DEFAULT NULL;

@@ -1,49 +0,0 @@
CREATE TABLE reminder_template (
`id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
`name` VARCHAR(24) NOT NULL DEFAULT 'Reminder',
`guild_id` INT UNSIGNED NOT NULL,
`username` VARCHAR(32) DEFAULT NULL,
`avatar` VARCHAR(512) DEFAULT NULL,
`content` VARCHAR(2048) NOT NULL DEFAULT '',
`tts` BOOL NOT NULL DEFAULT 0,
`attachment` MEDIUMBLOB,
`attachment_name` VARCHAR(260),
`embed_title` VARCHAR(256) NOT NULL DEFAULT '',
`embed_description` VARCHAR(2048) NOT NULL DEFAULT '',
`embed_image_url` VARCHAR(512),
`embed_thumbnail_url` VARCHAR(512),
`embed_footer` VARCHAR(2048) NOT NULL DEFAULT '',
`embed_footer_url` VARCHAR(512),
`embed_author` VARCHAR(256) NOT NULL DEFAULT '',
`embed_author_url` VARCHAR(512),
`embed_color` INT UNSIGNED NOT NULL DEFAULT 0x0,
`embed_fields` JSON,
PRIMARY KEY (id),
FOREIGN KEY (`guild_id`) REFERENCES guilds (`id`) ON DELETE CASCADE
);
ALTER TABLE reminders ADD COLUMN embed_fields JSON;
update reminders
inner join embed_fields as E
on E.reminder_id = reminders.id
set embed_fields = (
select JSON_ARRAYAGG(
JSON_OBJECT(
'title', E.title,
'value', E.value,
'inline',
if(inline = 1, cast(TRUE as json), cast(FALSE as json))
)
)
from embed_fields
group by reminder_id
having reminder_id = reminders.id
);

@@ -1 +0,0 @@
ALTER TABLE reminders ADD COLUMN `interval_days` INT UNSIGNED DEFAULT NULL;

@@ -1 +0,0 @@
ALTER TABLE reminders ADD COLUMN `thread_id` BIGINT DEFAULT NULL;

@@ -1 +0,0 @@
ALTER TABLE guilds ADD COLUMN ephemeral_confirmations BOOL NOT NULL DEFAULT 0;

@@ -1,2 +0,0 @@
ALTER TABLE reminders MODIFY `name` VARCHAR(100) NOT NULL DEFAULT 'Reminder';
ALTER TABLE reminder_template MODIFY `name` VARCHAR(100) NOT NULL DEFAULT 'Reminder';

@@ -1,9 +0,0 @@
CREATE TABLE stat (
`id` BIGINT NOT NULL AUTO_INCREMENT,
`utc_time` DATETIME NOT NULL DEFAULT NOW(),
`type` ENUM('reminder_sent', 'reminder_failed'),
`reminder_id` INT UNSIGNED,
`message` TEXT,
PRIMARY KEY (`id`)
);

@@ -1,2 +0,0 @@
ALTER TABLE reminders ADD COLUMN `status` ENUM ('pending', 'sent', 'failed', 'deleted') NOT NULL DEFAULT 'pending';
ALTER TABLE reminders ADD COLUMN `status_message` TEXT;

@@ -1,19 +0,0 @@
-- Drop existing constraint
ALTER TABLE `reminders` DROP CONSTRAINT `reminders_ibfk_1`;
ALTER TABLE `reminders` MODIFY COLUMN `channel_id` INT UNSIGNED;
ALTER TABLE `reminders` ADD COLUMN `guild_id` INT UNSIGNED;
ALTER TABLE `reminders`
ADD CONSTRAINT `guild_id_fk`
FOREIGN KEY (`guild_id`)
REFERENCES `guilds`(`id`)
ON DELETE CASCADE;
ALTER TABLE `reminders`
ADD CONSTRAINT `channel_id_fk`
FOREIGN KEY (`channel_id`)
REFERENCES `channels`(`id`)
ON DELETE SET NULL;
UPDATE `reminders` SET `guild_id` = (SELECT guilds.`id` FROM `channels` INNER JOIN `guilds` ON channels.guild_id = guilds.id WHERE reminders.channel_id = channels.id);

@@ -1,4 +0,0 @@
ALTER TABLE reminders ADD COLUMN `status_change_time` DATETIME;
-- This is a best-guess as to the status change time.
UPDATE reminders SET `status_change_time` = `utc_time` WHERE `status` != 'pending';

@@ -1,41 +0,0 @@
server {
server_name www.reminder-bot.com;
return 301 $scheme://reminder-bot.com$request_uri;
}
server {
listen 80;
server_name reminder-bot.com;
return 301 https://reminder-bot.com$request_uri;
}
server {
listen 443 ssl;
server_name reminder-bot.com;
ssl_certificate /etc/letsencrypt/live/reminder-bot.com/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/reminder-bot.com/privkey.pem;
access_log /var/log/nginx/access.log;
error_log /var/log/nginx/error.log;
proxy_buffer_size 128k;
proxy_buffers 4 256k;
proxy_busy_buffers_size 256k;
location / {
proxy_pass http://localhost:18920;
proxy_redirect off;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
location /static {
alias /var/www/reminder-rs/static;
expires 30d;
}
}

@@ -1,16 +0,0 @@
[package]
name = "postman"
version = "0.1.0"
edition = "2021"
[dependencies]
tokio = { version = "1", features = ["process", "full"] }
regex = "1.9"
log = "0.4"
chrono = "0.4"
chrono-tz = { version = "0.8", features = ["serde"] }
lazy_static = "1.4"
num-integer = "0.1"
serde = "1.0"
sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "macros", "mysql", "bigdecimal", "chrono", "json"]}
serenity = { version = "0.11", default-features = false, features = ["builder", "cache", "client", "gateway", "http", "model", "utils", "rustls_backend"] }

@@ -1,50 +0,0 @@
mod sender;
use std::env;
use log::{info, warn};
use serenity::client::Context;
use sqlx::{Executor, MySql};
use tokio::{
sync::broadcast::Receiver,
time::{sleep_until, Duration, Instant},
};
type Database = MySql;
pub async fn initialize(
mut kill: Receiver<()>,
ctx: Context,
pool: impl Executor<'_, Database = Database> + Copy,
) -> Result<(), &'static str> {
tokio::select! {
output = _initialize(ctx, pool) => Ok(output),
_ = kill.recv() => {
warn!("Received terminate signal. Goodbye");
Err("Received terminate signal. Goodbye")
}
}
}
async fn _initialize(ctx: Context, pool: impl Executor<'_, Database = Database> + Copy) {
let remind_interval = env::var("REMIND_INTERVAL")
.map(|inner| inner.parse::<u64>().ok())
.ok()
.flatten()
.unwrap_or(10);
loop {
let sleep_to = Instant::now() + Duration::from_secs(remind_interval);
let reminders = sender::Reminder::fetch_reminders(pool).await;
if reminders.len() > 0 {
info!("Preparing to send {} reminders.", reminders.len());
for reminder in reminders {
reminder.send(pool, ctx.clone()).await;
}
}
sleep_until(sleep_to).await;
}
}

@@ -1,800 +0,0 @@
use std::env;
use chrono::{DateTime, Days, Duration, Months};
use chrono_tz::Tz;
use lazy_static::lazy_static;
use log::{error, info, warn};
use num_integer::Integer;
use regex::{Captures, Regex};
use serde::Deserialize;
use serenity::{
builder::CreateEmbed,
http::{CacheHttp, Http, HttpError},
model::{
channel::{Channel, Embed as SerenityEmbed},
id::ChannelId,
webhook::Webhook,
},
Error, Result,
};
use sqlx::{
types::{
chrono::{NaiveDateTime, Utc},
Json,
},
Executor,
};
use crate::Database;
lazy_static! {
pub static ref TIMEFROM_REGEX: Regex =
Regex::new(r#"<<timefrom:(?P<time>\d+):(?P<format>.+)?>>"#).unwrap();
pub static ref TIMENOW_REGEX: Regex =
Regex::new(r#"<<timenow:(?P<timezone>(?:\w|/|_)+):(?P<format>.+)?>>"#).unwrap();
pub static ref LOG_TO_DATABASE: bool = env::var("LOG_TO_DATABASE").map_or(true, |v| v == "1");
}
fn fmt_displacement(format: &str, seconds: u64) -> String {
let mut seconds = seconds;
let mut days: u64 = 0;
let mut hours: u64 = 0;
let mut minutes: u64 = 0;
for (rep, time_type, div) in
[("%d", &mut days, 86400), ("%h", &mut hours, 3600), ("%m", &mut minutes, 60)].iter_mut()
{
if format.contains(*rep) {
let (divided, new_seconds) = seconds.div_rem(&div);
**time_type = divided;
seconds = new_seconds;
}
}
format
.replace("%s", &seconds.to_string())
.replace("%m", &minutes.to_string())
.replace("%h", &hours.to_string())
.replace("%d", &days.to_string())
}
pub fn substitute(string: &str) -> String {
let new = TIMEFROM_REGEX.replace(string, |caps: &Captures| {
let final_time = caps.name("time").map(|m| m.as_str().parse::<i64>().ok()).flatten();
let format = caps.name("format").map(|m| m.as_str());
if let (Some(final_time), Some(format)) = (final_time, format) {
match NaiveDateTime::from_timestamp_opt(final_time, 0) {
Some(dt) => {
let now = Utc::now().naive_utc();
let difference = {
if now < dt {
dt - Utc::now().naive_utc()
} else {
Utc::now().naive_utc() - dt
}
};
fmt_displacement(format, difference.num_seconds() as u64)
}
None => String::new(),
}
} else {
String::new()
}
});
TIMENOW_REGEX
.replace(&new, |caps: &Captures| {
let timezone = caps.name("timezone").map(|m| m.as_str().parse::<Tz>().ok()).flatten();
let format = caps.name("format").map(|m| m.as_str());
if let (Some(timezone), Some(format)) = (timezone, format) {
let now = Utc::now().with_timezone(&timezone);
now.format(format).to_string()
} else {
String::new()
}
})
.to_string()
}
struct Embed {
title: String,
description: String,
image_url: Option<String>,
thumbnail_url: Option<String>,
footer: String,
footer_url: Option<String>,
author: String,
author_url: Option<String>,
color: u32,
fields: Json<Vec<EmbedField>>,
}
#[derive(Deserialize)]
struct EmbedField {
title: String,
value: String,
inline: bool,
}
impl Embed {
pub async fn from_id(
pool: impl Executor<'_, Database = Database> + Copy,
id: u32,
) -> Option<Self> {
match sqlx::query_as!(
Self,
r#"
SELECT
`embed_title` AS title,
`embed_description` AS description,
`embed_image_url` AS image_url,
`embed_thumbnail_url` AS thumbnail_url,
`embed_footer` AS footer,
`embed_footer_url` AS footer_url,
`embed_author` AS author,
`embed_author_url` AS author_url,
`embed_color` AS color,
IFNULL(`embed_fields`, '[]') AS "fields:_"
FROM reminders
WHERE `id` = ?"#,
id
)
.fetch_one(pool)
.await
{
Ok(mut embed) => {
embed.title = substitute(&embed.title);
embed.description = substitute(&embed.description);
embed.footer = substitute(&embed.footer);
embed.fields.iter_mut().for_each(|field| {
field.title = substitute(&field.title);
field.value = substitute(&field.value);
});
if embed.has_content() {
Some(embed)
} else {
None
}
}
Err(e) => {
warn!("Error loading embed from reminder: {:?}", e);
None
}
}
}
pub fn has_content(&self) -> bool {
if self.title.is_empty()
&& self.description.is_empty()
&& self.image_url.is_none()
&& self.thumbnail_url.is_none()
&& self.footer.is_empty()
&& self.footer_url.is_none()
&& self.author.is_empty()
&& self.author_url.is_none()
&& self.fields.0.is_empty()
{
false
} else {
true
}
}
}
impl Into<CreateEmbed> for Embed {
fn into(self) -> CreateEmbed {
let mut c = CreateEmbed::default();
c.title(&self.title)
.description(&self.description)
.color(self.color)
.author(|a| {
a.name(&self.author);
if let Some(author_icon) = &self.author_url {
a.icon_url(author_icon);
}
a
})
.footer(|f| {
f.text(&self.footer);
if let Some(footer_icon) = &self.footer_url {
f.icon_url(footer_icon);
}
f
});
for field in &self.fields.0 {
c.field(&field.title, &field.value, field.inline);
}
if let Some(image_url) = &self.image_url {
c.image(image_url);
}
if let Some(thumbnail_url) = &self.thumbnail_url {
c.thumbnail(thumbnail_url);
}
c
}
}
pub struct Reminder {
id: u32,
channel_id: Option<u64>,
webhook_id: Option<u64>,
webhook_token: Option<String>,
channel_paused: Option<bool>,
channel_paused_until: Option<NaiveDateTime>,
enabled: bool,
tts: bool,
pin: bool,
content: String,
attachment: Option<Vec<u8>>,
attachment_name: Option<String>,
utc_time: DateTime<Utc>,
timezone: String,
restartable: bool,
expires: Option<DateTime<Utc>>,
interval_seconds: Option<u32>,
interval_days: Option<u32>,
interval_months: Option<u32>,
avatar: Option<String>,
username: Option<String>,
}
impl Reminder {
pub async fn fetch_reminders(pool: impl Executor<'_, Database = Database> + Copy) -> Vec<Self> {
match sqlx::query_as_unchecked!(
Reminder,
r#"
SELECT
reminders.`id` AS id,
channels.`channel` AS channel_id,
channels.`webhook_id` AS webhook_id,
channels.`webhook_token` AS webhook_token,
channels.`paused` AS 'channel_paused',
channels.`paused_until` AS 'channel_paused_until',
reminders.`enabled` AS 'enabled',
reminders.`tts` AS tts,
reminders.`pin` AS pin,
reminders.`content` AS content,
reminders.`attachment` AS attachment,
reminders.`attachment_name` AS attachment_name,
reminders.`utc_time` AS 'utc_time',
reminders.`timezone` AS timezone,
reminders.`restartable` AS restartable,
reminders.`expires` AS 'expires',
reminders.`interval_seconds` AS 'interval_seconds',
reminders.`interval_days` AS 'interval_days',
reminders.`interval_months` AS 'interval_months',
reminders.`avatar` AS avatar,
reminders.`username` AS username
FROM
reminders
LEFT JOIN
channels
ON
reminders.channel_id = channels.id
WHERE
reminders.`status` = 'pending' AND
reminders.`id` IN (
SELECT
MIN(id)
FROM
reminders
WHERE
reminders.`utc_time` <= NOW() AND
`status` = 'pending' AND
(
reminders.`interval_seconds` IS NOT NULL
OR reminders.`interval_months` IS NOT NULL
OR reminders.`interval_days` IS NOT NULL
OR reminders.enabled
)
GROUP BY channel_id
)
"#,
)
.fetch_all(pool)
.await
{
Ok(reminders) => reminders
.into_iter()
.map(|mut rem| {
rem.content = substitute(&rem.content);
rem
})
.collect::<Vec<Self>>(),
Err(e) => {
warn!("Could not fetch reminders: {:?}", e);
vec![]
}
}
}
async fn reset_webhook(&self, pool: impl Executor<'_, Database = Database> + Copy) {
let _ = sqlx::query!(
"
UPDATE channels SET webhook_id = NULL, webhook_token = NULL
WHERE channel = ?
",
self.channel_id
)
.execute(pool)
.await;
}
async fn refresh(&self, pool: impl Executor<'_, Database = Database> + Copy) {
if self.interval_seconds.is_some()
|| self.interval_months.is_some()
|| self.interval_days.is_some()
{
// If all intervals are zero then dont care
if self.interval_seconds == Some(0)
&& self.interval_days == Some(0)
&& self.interval_months == Some(0)
{
self.set_sent(pool).await;
}
let now = Utc::now();
let mut updated_reminder_time =
self.utc_time.with_timezone(&self.timezone.parse().unwrap_or(Tz::UTC));
let mut fail_count = 0;
while updated_reminder_time < now && fail_count < 4 {
if let Some(interval) = self.interval_months {
if interval != 0 {
updated_reminder_time = updated_reminder_time
.checked_add_months(Months::new(interval))
.unwrap_or_else(|| {
warn!(
"{}: Could not add {} months to a reminder",
interval, self.id
);
fail_count += 1;
updated_reminder_time
});
}
}
if let Some(interval) = self.interval_days {
if interval != 0 {
updated_reminder_time = updated_reminder_time
.checked_add_days(Days::new(interval as u64))
.unwrap_or_else(|| {
warn!("{}: Could not add {} days to a reminder", self.id, interval);
fail_count += 1;
updated_reminder_time
})
}
}
if let Some(interval) = self.interval_seconds {
updated_reminder_time += Duration::seconds(interval as i64);
}
}
if fail_count >= 4 {
self.log_error(
pool,
"Failed to update 4 times and so is being deleted",
None::<&'static str>,
)
.await;
self.set_failed(pool, "Failed to update 4 times and so is being deleted").await;
} else if self.expires.map_or(false, |expires| updated_reminder_time > expires) {
self.set_sent(pool).await;
} else {
sqlx::query!(
"
UPDATE reminders SET `utc_time` = ? WHERE `id` = ?
",
updated_reminder_time.with_timezone(&Utc),
self.id
)
.execute(pool)
.await
.expect(&format!("Could not update time on Reminder {}", self.id));
}
} else {
self.set_sent(pool).await;
}
}
async fn log_error(
&self,
pool: impl Executor<'_, Database = Database> + Copy,
error: &'static str,
debug_info: Option<impl std::fmt::Debug>,
) {
let message = match debug_info {
Some(info) => format!(
"{}
{:?}",
error, info
),
None => error.to_string(),
};
error!("[Reminder {}] {}", self.id, message);
if *LOG_TO_DATABASE {
sqlx::query!(
"
INSERT INTO stat (type, reminder_id, message)
VALUES ('reminder_failed', ?, ?)
",
self.id,
message,
)
.execute(pool)
.await
.expect("Could not log error to database");
}
}
async fn log_success(&self, pool: impl Executor<'_, Database = Database> + Copy) {
if *LOG_TO_DATABASE {
sqlx::query!(
"
INSERT INTO stat (type, reminder_id)
VALUES ('reminder_sent', ?)
",
self.id,
)
.execute(pool)
.await
.expect("Could not log success to database");
}
}
async fn set_sent(&self, pool: impl Executor<'_, Database = Database> + Copy) {
sqlx::query!(
"
UPDATE reminders
SET `status` = 'sent', `status_change_time` = NOW()
WHERE `id` = ?
",
self.id
)
.execute(pool)
.await
.expect(&format!("Could not delete Reminder {}", self.id));
}
async fn set_failed(
&self,
pool: impl Executor<'_, Database = Database> + Copy,
message: &'static str,
) {
sqlx::query!(
"
UPDATE reminders
SET `status` = 'failed', `status_message` = ?, `status_change_time` = NOW()
WHERE `id` = ?
",
message,
self.id
)
.execute(pool)
.await
.expect(&format!("Could not delete Reminder {}", self.id));
}
async fn pin_message<M: Into<u64>>(&self, message_id: M, http: impl AsRef<Http>) {
if let Some(channel_id) = self.channel_id {
let _ = http.as_ref().pin_message(channel_id, message_id.into(), None).await;
}
}
pub async fn send(
&self,
pool: impl Executor<'_, Database = Database> + Copy,
cache_http: impl CacheHttp,
) {
async fn send_to_channel(
cache_http: impl CacheHttp,
channel_id: u64,
reminder: &Reminder,
embed: Option<CreateEmbed>,
) -> Result<()> {
let channel = ChannelId(channel_id).to_channel(&cache_http).await;
match channel {
Ok(Channel::Guild(channel)) => {
match channel
.send_message(&cache_http, |m| {
m.content(&reminder.content).tts(reminder.tts);
if let (Some(attachment), Some(name)) =
(&reminder.attachment, &reminder.attachment_name)
{
m.add_file((attachment as &[u8], name.as_str()));
}
if let Some(embed) = embed {
m.set_embed(embed);
}
m
})
.await
{
Ok(m) => {
if reminder.pin {
reminder.pin_message(m.id, cache_http.http()).await;
}
Ok(())
}
Err(e) => Err(e),
}
}
Ok(Channel::Private(channel)) => {
match channel
.send_message(&cache_http.http(), |m| {
m.content(&reminder.content).tts(reminder.tts);
if let (Some(attachment), Some(name)) =
(&reminder.attachment, &reminder.attachment_name)
{
m.add_file((attachment as &[u8], name.as_str()));
}
if let Some(embed) = embed {
m.set_embed(embed);
}
m
})
.await
{
Ok(m) => {
if reminder.pin {
reminder.pin_message(m.id, cache_http.http()).await;
}
Ok(())
}
Err(e) => Err(e),
}
}
Err(e) => Err(e),
_ => Err(Error::Other("Channel not of valid type")),
}
}
async fn send_to_webhook(
cache_http: impl CacheHttp,
reminder: &Reminder,
webhook: Webhook,
embed: Option<CreateEmbed>,
) -> Result<()> {
match webhook
.execute(&cache_http.http(), reminder.pin || reminder.restartable, |w| {
w.content(&reminder.content).tts(reminder.tts);
if let Some(username) = &reminder.username {
if !username.is_empty() {
w.username(username);
}
}
if let Some(avatar) = &reminder.avatar {
w.avatar_url(avatar);
}
if let (Some(attachment), Some(name)) =
(&reminder.attachment, &reminder.attachment_name)
{
w.add_file((attachment as &[u8], name.as_str()));
}
if let Some(embed) = embed {
w.embeds(vec![SerenityEmbed::fake(|c| {
*c = embed;
c
})]);
}
w
})
.await
{
Ok(m) => {
if reminder.pin {
if let Some(message) = m {
reminder.pin_message(message.id, cache_http.http()).await;
}
}
Ok(())
}
Err(e) => Err(e),
}
}
match self.channel_id {
Some(channel_id) => {
if self.enabled
&& !(self.channel_paused.unwrap_or(false)
&& self
.channel_paused_until
.map_or(true, |inner| inner >= Utc::now().naive_local()))
{
let _ = sqlx::query!(
"
UPDATE `channels`
SET paused = 0, paused_until = NULL
WHERE `channel` = ?
",
self.channel_id
)
.execute(pool)
.await;
let embed = Embed::from_id(pool, self.id).await.map(|e| e.into());
let result = if let (Some(webhook_id), Some(webhook_token)) =
(self.webhook_id, &self.webhook_token)
{
let webhook_res = cache_http
.http()
.get_webhook_with_token(webhook_id, webhook_token)
.await;
if let Ok(webhook) = webhook_res {
send_to_webhook(cache_http, &self, webhook, embed).await
} else {
warn!("Webhook vanished for reminder {}: {:?}", self.id, webhook_res);
self.reset_webhook(pool).await;
send_to_channel(cache_http, channel_id, &self, embed).await
}
} else {
send_to_channel(cache_http, channel_id, &self, embed).await
};
if let Err(e) = result {
if let Error::Http(error) = e {
if let HttpError::UnsuccessfulRequest(http_error) = *error {
match http_error.error.code {
10003 => {
self.log_error(
pool,
"Could not be sent as channel does not exist",
None::<&'static str>,
)
.await;
self.set_failed(
pool,
"Could not be sent as channel does not exist",
)
.await;
}
10004 => {
self.log_error(
pool,
"Could not be sent as guild does not exist",
None::<&'static str>,
)
.await;
self.set_failed(
pool,
"Could not be sent as guild does not exist",
)
.await;
}
50001 => {
self.log_error(
pool,
"Could not be sent as missing access",
None::<&'static str>,
)
.await;
self.set_failed(
pool,
"Could not be sent as missing access",
)
.await;
}
50007 => {
self.log_error(
pool,
"Could not be sent as user has DMs disabled",
None::<&'static str>,
)
.await;
self.set_failed(
pool,
"Could not be sent as user has DMs disabled",
)
.await;
}
50013 => {
self.log_error(
pool,
"Could not be sent as permissions are invalid",
None::<&'static str>,
)
.await;
self.set_failed(
pool,
"Could not be sent as permissions are invalid",
)
.await;
}
_ => {
self.log_error(
pool,
"HTTP error sending reminder",
Some(http_error),
)
.await;
self.refresh(pool).await;
}
}
} else {
self.log_error(pool, "(Likely) a parsing error", Some(error)).await;
self.refresh(pool).await;
}
} else {
self.log_error(pool, "Non-HTTP error", Some(e)).await;
self.refresh(pool).await;
}
} else {
self.log_success(pool).await;
self.refresh(pool).await;
}
} else {
info!("Reminder {} is paused", self.id);
self.refresh(pool).await;
}
}
None => {
info!("Reminder {} is orphaned", self.id);
self.log_error(pool, "Orphaned", Option::<u8>::None).await;
self.set_failed(pool, "Could not be sent as channel was deleted").await;
}
}
}
}

@@ -0,0 +1,14 @@
[package]
name = "regex_command_attr"
version = "0.2.0"
authors = ["acdenisSK <acdenissk69@gmail.com>", "jellywx <judesouthworth@pm.me>"]
edition = "2018"
description = "Procedural macros for command creation for the RegexFramework for serenity."
[lib]
proc-macro = true
[dependencies]
quote = "^1.0"
syn = { version = "^1.0", features = ["full", "derive", "extra-traits"] }
proc-macro2 = "1.0"

@@ -0,0 +1,293 @@
use proc_macro2::Span;
use syn::parse::{Error, Result};
use syn::spanned::Spanned;
use syn::{Attribute, Ident, Lit, LitStr, Meta, NestedMeta, Path};
use crate::structures::PermissionLevel;
use crate::util::{AsOption, LitExt};
use std::fmt::{self, Write};
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ValueKind {
// #[<name>]
Name,
// #[<name> = <value>]
Equals,
// #[<name>([<value>, <value>, <value>, ...])]
List,
// #[<name>(<value>)]
SingleList,
}
impl fmt::Display for ValueKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ValueKind::Name => f.pad("`#[<name>]`"),
ValueKind::Equals => f.pad("`#[<name> = <value>]`"),
ValueKind::List => f.pad("`#[<name>([<value>, <value>, <value>, ...])]`"),
ValueKind::SingleList => f.pad("`#[<name>(<value>)]`"),
}
}
}
fn to_ident(p: Path) -> Result<Ident> {
if p.segments.is_empty() {
return Err(Error::new(
p.span(),
"cannot convert an empty path to an identifier",
));
}
if p.segments.len() > 1 {
return Err(Error::new(
p.span(),
"the path must not have more than one segment",
));
}
if !p.segments[0].arguments.is_empty() {
return Err(Error::new(
p.span(),
"the singular path segment must not have any arguments",
));
}
Ok(p.segments[0].ident.clone())
}
#[derive(Debug)]
pub struct Values {
pub name: Ident,
pub literals: Vec<Lit>,
pub kind: ValueKind,
pub span: Span,
}
impl Values {
#[inline]
pub fn new(name: Ident, kind: ValueKind, literals: Vec<Lit>, span: Span) -> Self {
Values {
name,
literals,
kind,
span,
}
}
}
pub fn parse_values(attr: &Attribute) -> Result<Values> {
let meta = attr.parse_meta()?;
match meta {
Meta::Path(path) => {
let name = to_ident(path)?;
Ok(Values::new(name, ValueKind::Name, Vec::new(), attr.span()))
}
Meta::List(meta) => {
let name = to_ident(meta.path)?;
let nested = meta.nested;
if nested.is_empty() {
return Err(Error::new(attr.span(), "list cannot be empty"));
}
let mut lits = Vec::with_capacity(nested.len());
for meta in nested {
match meta {
NestedMeta::Lit(l) => lits.push(l),
NestedMeta::Meta(m) => match m {
Meta::Path(path) => {
let i = to_ident(path)?;
lits.push(Lit::Str(LitStr::new(&i.to_string(), i.span())))
}
Meta::List(_) | Meta::NameValue(_) => {
return Err(Error::new(attr.span(), "cannot nest a list; only accept literals and identifiers at this level"))
}
},
}
}
let kind = if lits.len() == 1 {
ValueKind::SingleList
} else {
ValueKind::List
};
Ok(Values::new(name, kind, lits, attr.span()))
}
Meta::NameValue(meta) => {
let name = to_ident(meta.path)?;
let lit = meta.lit;
Ok(Values::new(name, ValueKind::Equals, vec![lit], attr.span()))
}
}
}
#[derive(Debug, Clone)]
struct DisplaySlice<'a, T>(&'a [T]);
impl<'a, T: fmt::Display> fmt::Display for DisplaySlice<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut iter = self.0.iter().enumerate();
match iter.next() {
None => f.write_str("nothing")?,
Some((idx, elem)) => {
write!(f, "{}: {}", idx, elem)?;
for (idx, elem) in iter {
f.write_char('\n')?;
write!(f, "{}: {}", idx, elem)?;
}
}
}
Ok(())
}
}
#[inline]
fn is_form_acceptable(expect: &[ValueKind], kind: ValueKind) -> bool {
if expect.contains(&ValueKind::List) && kind == ValueKind::SingleList {
true
} else {
expect.contains(&kind)
}
}
#[inline]
fn validate(values: &Values, forms: &[ValueKind]) -> Result<()> {
if !is_form_acceptable(forms, values.kind) {
return Err(Error::new(
values.span,
// Using the `_args` version here to avoid an allocation.
format_args!(
"the attribute must be in of these forms:\n{}",
DisplaySlice(forms)
),
));
}
Ok(())
}
#[inline]
pub fn parse<T: AttributeOption>(values: Values) -> Result<T> {
T::parse(values)
}
pub trait AttributeOption: Sized {
fn parse(values: Values) -> Result<Self>;
}
impl AttributeOption for Vec<String> {
fn parse(values: Values) -> Result<Self> {
validate(&values, &[ValueKind::List])?;
Ok(values
.literals
.into_iter()
.map(|lit| lit.to_str())
.collect())
}
}
impl AttributeOption for String {
#[inline]
fn parse(values: Values) -> Result<Self> {
validate(&values, &[ValueKind::Equals, ValueKind::SingleList])?;
Ok(values.literals[0].to_str())
}
}
impl AttributeOption for bool {
#[inline]
fn parse(values: Values) -> Result<Self> {
validate(&values, &[ValueKind::Name, ValueKind::SingleList])?;
Ok(values.literals.get(0).map_or(true, |l| l.to_bool()))
}
}
impl AttributeOption for Ident {
#[inline]
fn parse(values: Values) -> Result<Self> {
validate(&values, &[ValueKind::SingleList])?;
Ok(values.literals[0].to_ident())
}
}
impl AttributeOption for Vec<Ident> {
#[inline]
fn parse(values: Values) -> Result<Self> {
validate(&values, &[ValueKind::List])?;
Ok(values.literals.into_iter().map(|l| l.to_ident()).collect())
}
}
impl AttributeOption for Option<String> {
fn parse(values: Values) -> Result<Self> {
validate(&values, &[ValueKind::Name, ValueKind::Equals, ValueKind::SingleList])?;
Ok(values.literals.get(0).map(|l| l.to_str()))
}
}
impl AttributeOption for PermissionLevel {
fn parse(values: Values) -> Result<Self> {
validate(&values, &[ValueKind::SingleList])?;
Ok(values.literals.get(0).map(|l| PermissionLevel::from_str(&*l.to_str()).unwrap()).unwrap())
}
}
impl<T: AttributeOption> AttributeOption for AsOption<T> {
#[inline]
fn parse(values: Values) -> Result<Self> {
Ok(AsOption(Some(T::parse(values)?)))
}
}
macro_rules! attr_option_num {
($($n:ty),*) => {
$(
impl AttributeOption for $n {
fn parse(values: Values) -> Result<Self> {
validate(&values, &[ValueKind::SingleList])?;
Ok(match &values.literals[0] {
Lit::Int(l) => l.base10_parse::<$n>()?,
l => {
let s = l.to_str();
// Use `as_str` to guide the compiler to use `&str`'s parse method.
// We don't want to use our `parse` method here (`impl AttributeOption for String`).
match s.as_str().parse::<$n>() {
Ok(n) => n,
Err(_) => return Err(Error::new(l.span(), "invalid integer")),
}
}
})
}
}
impl AttributeOption for Option<$n> {
#[inline]
fn parse(values: Values) -> Result<Self> {
<$n as AttributeOption>::parse(values).map(Some)
}
}
)*
}
}
attr_option_num!(u16, u32, usize);

@@ -0,0 +1,5 @@
pub mod suffixes {
pub const COMMAND: &str = "COMMAND";
}
pub use self::suffixes::*;

@@ -0,0 +1,102 @@
#![deny(rust_2018_idioms)]
// FIXME: Remove this in a foreseeable future.
// Currently exists for backwards compatibility to previous Rust versions.
#![recursion_limit = "128"]
#[allow(unused_extern_crates)]
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse::Error, parse_macro_input, spanned::Spanned, Lit};
pub(crate) mod attributes;
pub(crate) mod consts;
pub(crate) mod structures;
#[macro_use]
pub(crate) mod util;
use attributes::*;
use consts::*;
use structures::*;
use util::*;
macro_rules! match_options {
($v:expr, $values:ident, $options:ident, $span:expr => [$($name:ident);*]) => {
match $v {
$(
stringify!($name) => $options.$name = propagate_err!($crate::attributes::parse($values)),
)*
_ => {
return Error::new($span, format_args!("invalid attribute: {:?}", $v))
.to_compile_error()
.into();
},
}
};
}
#[proc_macro_attribute]
pub fn command(attr: TokenStream, input: TokenStream) -> TokenStream {
let mut fun = parse_macro_input!(input as CommandFun);
let lit_name = if !attr.is_empty() {
parse_macro_input!(attr as Lit).to_str()
} else {
fun.name.to_string()
};
let mut options = Options::new();
for attribute in &fun.attributes {
let span = attribute.span();
let values = propagate_err!(parse_values(attribute));
let name = values.name.to_string();
let name = &name[..];
match_options!(name, values, options, span => [
permission_level;
supports_dm;
can_blacklist
]);
}
let Options {
permission_level,
supports_dm,
can_blacklist,
} = options;
let visibility = fun.visibility;
let name = fun.name.clone();
let body = fun.body;
let n = name.with_suffix(COMMAND);
let cooked = fun.cooked.clone();
let command_path = quote!(crate::framework::Command);
populate_fut_lifetimes_on_refs(&mut fun.args);
let args = fun.args;
(quote! {
#(#cooked)*
pub static #n: #command_path = #command_path {
func: #name,
name: #lit_name,
required_perms: #permission_level,
supports_dm: #supports_dm,
can_blacklist: #can_blacklist,
};
#visibility fn #name<'fut> (#(#args),*) -> ::serenity::futures::future::BoxFuture<'fut, ()> {
use ::serenity::futures::future::FutureExt;
async move { #(#body)* }.boxed()
}
})
.into()
}

@@ -0,0 +1,231 @@
use crate::util::{Argument, Parenthesised};
use proc_macro2::Span;
use proc_macro2::TokenStream as TokenStream2;
use quote::{quote, ToTokens};
use syn::{
braced,
parse::{Error, Parse, ParseStream, Result},
spanned::Spanned,
Attribute, Block, FnArg, Ident, Pat, Path, PathSegment, Stmt, Token, Visibility,
};
fn parse_argument(arg: FnArg) -> Result<Argument> {
match arg {
FnArg::Typed(typed) => {
let pat = typed.pat;
let kind = typed.ty;
match *pat {
Pat::Ident(id) => {
let name = id.ident;
let mutable = id.mutability;
Ok(Argument {
mutable,
name,
kind: *kind,
})
}
Pat::Wild(wild) => {
let token = wild.underscore_token;
let name = Ident::new("_", token.spans[0]);
Ok(Argument {
mutable: None,
name,
kind: *kind,
})
}
_ => Err(Error::new(
pat.span(),
format_args!("unsupported pattern: {:?}", pat),
)),
}
}
FnArg::Receiver(_) => Err(Error::new(
arg.span(),
format_args!("`self` arguments are prohibited: {:?}", arg),
)),
}
}
/// Test if the attribute is cooked.
fn is_cooked(attr: &Attribute) -> bool {
const COOKED_ATTRIBUTE_NAMES: &[&str] = &[
"cfg", "cfg_attr", "doc", "derive", "inline", "allow", "warn", "deny", "forbid",
];
COOKED_ATTRIBUTE_NAMES.iter().any(|n| attr.path.is_ident(n))
}
/// Removes cooked attributes from a vector of attributes. Uncooked attributes are left in the vector.
///
/// # Return
///
/// Returns a vector of cooked attributes that have been removed from the input vector.
fn remove_cooked(attrs: &mut Vec<Attribute>) -> Vec<Attribute> {
let mut cooked = Vec::new();
// FIXME: Replace with `Vec::drain_filter` once it is stable.
let mut i = 0;
while i < attrs.len() {
if !is_cooked(&attrs[i]) {
i += 1;
continue;
}
cooked.push(attrs.remove(i));
}
cooked
}
#[derive(Debug)]
pub struct CommandFun {
/// `#[...]`-style attributes.
pub attributes: Vec<Attribute>,
/// Populated cooked attributes. These are attributes outside of the realm of this crate's procedural macros
/// and will appear in generated output.
pub cooked: Vec<Attribute>,
pub visibility: Visibility,
pub name: Ident,
pub args: Vec<Argument>,
pub body: Vec<Stmt>,
}
impl Parse for CommandFun {
fn parse(input: ParseStream<'_>) -> Result<Self> {
let mut attributes = input.call(Attribute::parse_outer)?;
// `#[doc = "..."]` is a cooked attribute but it is special-cased for commands.
for attr in &mut attributes {
// Rename documentation comment attributes (`#[doc = "..."]`) to `#[description = "..."]`.
if attr.path.is_ident("doc") {
attr.path = Path::from(PathSegment::from(Ident::new(
"description",
Span::call_site(),
)));
}
}
let cooked = remove_cooked(&mut attributes);
let visibility = input.parse::<Visibility>()?;
input.parse::<Token![async]>()?;
input.parse::<Token![fn]>()?;
let name = input.parse()?;
// (...)
let Parenthesised(args) = input.parse::<Parenthesised<FnArg>>()?;
// { ... }
let bcont;
braced!(bcont in input);
let body = bcont.call(Block::parse_within)?;
let args = args
.into_iter()
.map(parse_argument)
.collect::<Result<Vec<_>>>()?;
Ok(Self {
attributes,
cooked,
visibility,
name,
args,
body,
})
}
}
impl ToTokens for CommandFun {
fn to_tokens(&self, stream: &mut TokenStream2) {
let Self {
attributes: _,
cooked,
visibility,
name,
args,
body,
} = self;
stream.extend(quote! {
#(#cooked)*
#visibility async fn #name (#(#args),*) -> () {
#(#body)*
}
});
}
}
#[derive(Debug)]
pub enum PermissionLevel {
Unrestricted,
Managed,
Restricted,
}
impl Default for PermissionLevel {
fn default() -> Self {
Self::Unrestricted
}
}
impl PermissionLevel {
pub fn from_str(s: &str) -> Option<Self> {
Some(match s.to_uppercase().as_str() {
"UNRESTRICTED" => Self::Unrestricted,
"MANAGED" => Self::Managed,
"RESTRICTED" => Self::Restricted,
_ => return None,
})
}
}
impl ToTokens for PermissionLevel {
fn to_tokens(&self, stream: &mut TokenStream2) {
let path = quote!(crate::framework::PermissionLevel);
let variant;
match self {
Self::Unrestricted => {
variant = quote!(Unrestricted);
}
Self::Managed => {
variant = quote!(Managed);
}
Self::Restricted => {
variant = quote!(Restricted);
}
}
stream.extend(quote! {
#path::#variant
});
}
}
#[derive(Debug, Default)]
pub struct Options {
pub permission_level: PermissionLevel,
pub supports_dm: bool,
pub can_blacklist: bool,
}
impl Options {
#[inline]
pub fn new() -> Self {
let mut options = Self::default();
options.can_blacklist = true;
options.supports_dm = true;
options
}
}
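For orientation, the pieces above (`CommandFun`, `PermissionLevel`, `Options`) come together in the crate's `#[command]` attribute macro roughly as follows. This is a simplified sketch under stated assumptions, not the actual entry point from the repository:
// Sketch only: real attribute handling and generated command descriptors are omitted.
use proc_macro::TokenStream;
use quote::ToTokens;
use syn::parse_macro_input;
#[proc_macro_attribute]
pub fn command(_attr: TokenStream, input: TokenStream) -> TokenStream {
    // Parse the annotated `async fn` into the CommandFun model defined above.
    let fun = parse_macro_input!(input as CommandFun);
    // Defaults: Unrestricted permission level, DMs allowed, blacklistable.
    // The real macro walks the uncooked attributes (#[supports_dm(false)],
    // #[permission_level(Restricted)], ...) and updates this struct.
    let _options = Options::new();
    // Cooked attributes survive because CommandFun::to_tokens re-emits them;
    // a command descriptor built from `_options` would normally be emitted too.
    fun.to_token_stream().into()
}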

View File

@ -0,0 +1,160 @@
use proc_macro::TokenStream;
use proc_macro2::Span;
use proc_macro2::TokenStream as TokenStream2;
use quote::{format_ident, quote, ToTokens};
use syn::{
braced, bracketed, parenthesized,
parse::{Error, Parse, ParseStream, Result as SynResult},
punctuated::Punctuated,
token::{Comma, Mut},
Ident, Lifetime, Lit, Type,
};
pub trait LitExt {
fn to_str(&self) -> String;
fn to_bool(&self) -> bool;
fn to_ident(&self) -> Ident;
}
impl LitExt for Lit {
fn to_str(&self) -> String {
match self {
Lit::Str(s) => s.value(),
Lit::ByteStr(s) => unsafe { String::from_utf8_unchecked(s.value()) },
Lit::Char(c) => c.value().to_string(),
Lit::Byte(b) => (b.value() as char).to_string(),
_ => panic!("values must be a (byte)string or a char"),
}
}
fn to_bool(&self) -> bool {
if let Lit::Bool(b) = self {
b.value
} else {
self.to_str()
.parse()
.unwrap_or_else(|_| panic!("expected bool from {:?}", self))
}
}
#[inline]
fn to_ident(&self) -> Ident {
Ident::new(&self.to_str(), self.span())
}
}
pub trait IdentExt2: Sized {
fn to_uppercase(&self) -> Self;
fn with_suffix(&self, suf: &str) -> Ident;
}
impl IdentExt2 for Ident {
#[inline]
fn to_uppercase(&self) -> Self {
format_ident!("{}", self.to_string().to_uppercase())
}
#[inline]
fn with_suffix(&self, suffix: &str) -> Ident {
format_ident!("{}_{}", self.to_string().to_uppercase(), suffix)
}
}
#[inline]
pub fn into_stream(e: Error) -> TokenStream {
e.to_compile_error().into()
}
macro_rules! propagate_err {
($res:expr) => {{
match $res {
Ok(v) => v,
Err(e) => return $crate::util::into_stream(e),
}
}};
}
#[derive(Debug)]
pub struct Bracketed<T>(pub Punctuated<T, Comma>);
impl<T: Parse> Parse for Bracketed<T> {
fn parse(input: ParseStream<'_>) -> SynResult<Self> {
let content;
bracketed!(content in input);
Ok(Bracketed(content.parse_terminated(T::parse)?))
}
}
#[derive(Debug)]
pub struct Braced<T>(pub Punctuated<T, Comma>);
impl<T: Parse> Parse for Braced<T> {
fn parse(input: ParseStream<'_>) -> SynResult<Self> {
let content;
braced!(content in input);
Ok(Braced(content.parse_terminated(T::parse)?))
}
}
#[derive(Debug)]
pub struct Parenthesised<T>(pub Punctuated<T, Comma>);
impl<T: Parse> Parse for Parenthesised<T> {
fn parse(input: ParseStream<'_>) -> SynResult<Self> {
let content;
parenthesized!(content in input);
Ok(Parenthesised(content.parse_terminated(T::parse)?))
}
}
#[derive(Debug)]
pub struct AsOption<T>(pub Option<T>);
impl<T: ToTokens> ToTokens for AsOption<T> {
fn to_tokens(&self, stream: &mut TokenStream2) {
match &self.0 {
Some(o) => stream.extend(quote!(Some(#o))),
None => stream.extend(quote!(None)),
}
}
}
impl<T> Default for AsOption<T> {
#[inline]
fn default() -> Self {
AsOption(None)
}
}
#[derive(Debug)]
pub struct Argument {
pub mutable: Option<Mut>,
pub name: Ident,
pub kind: Type,
}
impl ToTokens for Argument {
fn to_tokens(&self, stream: &mut TokenStream2) {
let Argument {
mutable,
name,
kind,
} = self;
stream.extend(quote! {
#mutable #name: #kind
});
}
}
#[inline]
pub fn populate_fut_lifetimes_on_refs(args: &mut Vec<Argument>) {
for arg in args {
if let Type::Reference(reference) = &mut arg.kind {
reference.lifetime = Some(Lifetime::new("'fut", Span::call_site()));
}
}
}
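A minimal sketch of how these helpers combine inside the proc-macro crate (illustrative only; `example_attr` is a hypothetical entry point, and `propagate_err!` assumes the macro is in scope within this crate):
use proc_macro::TokenStream;
pub fn example_attr(input: TokenStream) -> TokenStream {
    // Parse `(TypeA, TypeB, ...)` into a comma-separated list of types,
    // bailing out with a compile error on failure.
    let Parenthesised(types) =
        propagate_err!(syn::parse::<Parenthesised<syn::Type>>(input));
    // AsOption quotes back out as `Some(..)` / `None`.
    let description = AsOption(Some(format!("{} argument types", types.len())));
    quote::quote!(#description).into()
}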

View File

@ -1,3 +0,0 @@
imports_granularity = "Crate"
group_imports = "StdExternalCrate"
use_small_heuristics = "Max"

View File

@ -1,117 +0,0 @@
use std::time::{SystemTime, UNIX_EPOCH};
use chrono_tz::TZ_VARIANTS;
use poise::AutocompleteChoice;
use crate::{models::CtxData, time_parser::natural_parser, Context};
pub async fn timezone_autocomplete(ctx: Context<'_>, partial: &str) -> Vec<String> {
if partial.is_empty() {
ctx.data().popular_timezones.iter().map(|t| t.to_string()).collect::<Vec<String>>()
} else {
TZ_VARIANTS
.iter()
.filter(|tz| tz.to_string().contains(&partial))
.take(25)
.map(|t| t.to_string())
.collect::<Vec<String>>()
}
}
pub async fn macro_name_autocomplete(ctx: Context<'_>, partial: &str) -> Vec<String> {
sqlx::query!(
"
SELECT name
FROM macro
WHERE
guild_id = (SELECT id FROM guilds WHERE guild = ?)
AND name LIKE CONCAT(?, '%')",
ctx.guild_id().unwrap().0,
partial,
)
.fetch_all(&ctx.data().database)
.await
.unwrap_or_default()
.iter()
.map(|s| s.name.clone())
.collect()
}
pub async fn time_hint_autocomplete(
ctx: Context<'_>,
partial: &str,
) -> Vec<AutocompleteChoice<String>> {
if partial.is_empty() {
vec![AutocompleteChoice {
name: "Start typing a time...".to_string(),
value: "now".to_string(),
}]
} else {
match natural_parser(partial, &ctx.timezone().await.to_string()).await {
Some(timestamp) => match SystemTime::now().duration_since(UNIX_EPOCH) {
Ok(now) => {
let diff = timestamp - now.as_secs() as i64;
if diff < 0 {
vec![AutocompleteChoice {
name: "Time is in the past".to_string(),
value: "1 year ago".to_string(),
}]
} else {
if diff > 86400 {
vec![
AutocompleteChoice {
name: partial.to_string(),
value: partial.to_string(),
},
AutocompleteChoice {
name: format!(
"In approximately {} days, {} hours",
diff / 86400,
(diff % 86400) / 3600
),
value: partial.to_string(),
},
]
} else if diff > 3600 {
vec![
AutocompleteChoice {
name: partial.to_string(),
value: partial.to_string(),
},
AutocompleteChoice {
name: format!("In approximately {} hours", diff / 3600),
value: partial.to_string(),
},
]
} else {
vec![
AutocompleteChoice {
name: partial.to_string(),
value: partial.to_string(),
},
AutocompleteChoice {
name: format!("In approximately {} minutes", diff / 60),
value: partial.to_string(),
},
]
}
}
}
Err(_) => {
vec![AutocompleteChoice {
name: partial.to_string(),
value: partial.to_string(),
}]
}
},
None => {
vec![AutocompleteChoice {
name: "Time not recognised".to_string(),
value: "now".to_string(),
}]
}
}
}
}
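These handlers attach to slash-command options through poise's `#[autocomplete = ...]` attribute; a trimmed-down sketch of the wiring, mirroring how `/timezone` and the macro commands use it elsewhere in this diff (the command itself is hypothetical):
use crate::{Context, Error};
/// Hypothetical command showing the autocomplete attribute in use.
#[poise::command(slash_command)]
pub async fn example(
    ctx: Context<'_>,
    #[description = "Timezone to use"]
    #[autocomplete = "timezone_autocomplete"]
    timezone: Option<String>,
) -> Result<(), Error> {
    ctx.say(format!("You picked: {:?}", timezone)).await?;
    Ok(())
}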

View File

@ -1,46 +0,0 @@
use super::super::autocomplete::macro_name_autocomplete;
use crate::{Context, Error};
/// Delete a recorded macro
#[poise::command(
slash_command,
rename = "delete",
guild_only = true,
default_member_permissions = "MANAGE_GUILD",
identifying_name = "delete_macro"
)]
pub async fn delete_macro(
ctx: Context<'_>,
#[description = "Name of macro to delete"]
#[autocomplete = "macro_name_autocomplete"]
name: String,
) -> Result<(), Error> {
match sqlx::query!(
"
SELECT id FROM macro WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?) AND name = ?",
ctx.guild_id().unwrap().0,
name
)
.fetch_one(&ctx.data().database)
.await
{
Ok(row) => {
sqlx::query!("DELETE FROM macro WHERE id = ?", row.id)
.execute(&ctx.data().database)
.await
.unwrap();
ctx.say(format!("Macro \"{}\" deleted", name)).await?;
}
Err(sqlx::Error::RowNotFound) => {
ctx.say(format!("Macro \"{}\" not found", name)).await?;
}
Err(e) => {
panic!("{}", e);
}
}
Ok(())
}

View File

@ -1,89 +0,0 @@
use poise::CreateReply;
use crate::{
component_models::pager::{MacroPager, Pager},
consts::THEME_COLOR,
models::{command_macro::CommandMacro, CtxData},
Context, Error,
};
/// List recorded macros
#[poise::command(
slash_command,
rename = "list",
guild_only = true,
default_member_permissions = "MANAGE_GUILD",
identifying_name = "list_macro"
)]
pub async fn list_macro(ctx: Context<'_>) -> Result<(), Error> {
let macros = ctx.command_macros().await?;
let resp = show_macro_page(&macros, 0);
ctx.send(|m| {
*m = resp;
m
})
.await?;
Ok(())
}
pub fn max_macro_page<U, E>(macros: &[CommandMacro<U, E>]) -> usize {
((macros.len() as f64) / 25.0).ceil() as usize
}
pub fn show_macro_page<U, E>(macros: &[CommandMacro<U, E>], page: usize) -> CreateReply {
let pager = MacroPager::new(page);
if macros.is_empty() {
let mut reply = CreateReply::default();
reply.embed(|e| {
e.title("Macros")
.description("No Macros Set Up. Use `/macro record` to get started.")
.color(*THEME_COLOR)
});
return reply;
}
let pages = max_macro_page(macros);
let mut page = page;
if page >= pages {
page = pages - 1;
}
let lower = (page * 25).min(macros.len());
let upper = ((page + 1) * 25).min(macros.len());
let fields = macros[lower..upper].iter().map(|m| {
if let Some(description) = &m.description {
(
m.name.clone(),
format!("*{}*\n- Has {} commands", description, m.commands.len()),
true,
)
} else {
(m.name.clone(), format!("- Has {} commands", m.commands.len()), true)
}
});
let mut reply = CreateReply::default();
reply
.embed(|e| {
e.title("Macros")
.fields(fields)
.footer(|f| f.text(format!("Page {} of {}", page + 1, pages)))
.color(*THEME_COLOR)
})
.components(|comp| {
pager.create_button_row(pages, comp);
comp
});
reply
}
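The paging arithmetic is plain ceil-division over 25 embed fields per page; a small sanity-check sketch (not from the diff) of the expected values:
#[test]
fn macro_page_arithmetic() {
    // ceil(len / 25): the empty case is short-circuited by show_macro_page,
    // 1..=25 macros fit on one page, 26 spill onto a second.
    assert_eq!((25f64 / 25.0).ceil() as usize, 1);
    assert_eq!((26f64 / 25.0).ceil() as usize, 2);
    // With 26 macros, page index 1 (clamped to pages - 1) shows items 25..26.
    let (page, len) = (1usize, 26usize);
    let lower = (page * 25).min(len);
    let upper = ((page + 1) * 25).min(len);
    assert_eq!((lower, upper), (25, 26));
}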

View File

@ -1,229 +0,0 @@
use lazy_regex::regex;
use poise::serenity_prelude::command::CommandOptionType;
use regex::Captures;
use serde_json::{json, Value};
use crate::{models::command_macro::RawCommandMacro, Context, Error, GuildId};
struct Alias {
name: String,
command: String,
}
/// Migrate old $alias reminder commands to macros. Only macro names that are not taken will be used.
#[poise::command(
slash_command,
rename = "migrate",
guild_only = true,
default_member_permissions = "MANAGE_GUILD",
identifying_name = "migrate_macro"
)]
pub async fn migrate_macro(ctx: Context<'_>) -> Result<(), Error> {
let guild_id = ctx.guild_id().unwrap();
let mut transaction = ctx.data().database.begin().await?;
let aliases = sqlx::query_as!(
Alias,
"SELECT name, command FROM command_aliases WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?)",
guild_id.0
)
.fetch_all(&mut *transaction)
.await?;
let mut added_aliases = 0;
for alias in aliases {
match parse_text_command(guild_id, alias.name, &alias.command) {
Some(cmd_macro) => {
sqlx::query!(
"INSERT INTO macro (guild_id, name, description, commands) VALUES ((SELECT id FROM guilds WHERE guild = ?), ?, ?, ?)",
cmd_macro.guild_id.0,
cmd_macro.name,
cmd_macro.description,
cmd_macro.commands
)
.execute(&mut *transaction)
.await?;
added_aliases += 1;
}
None => {}
}
}
transaction.commit().await?;
ctx.send(|b| b.content(format!("Added {} macros.", added_aliases))).await?;
Ok(())
}
fn parse_text_command(
guild_id: GuildId,
alias_name: String,
command: &str,
) -> Option<RawCommandMacro> {
match command.split_once(" ") {
Some((command_word, args)) => {
let command_word = command_word.to_lowercase();
if command_word == "r"
|| command_word == "i"
|| command_word == "remind"
|| command_word == "interval"
{
let matcher = regex!(
r#"(?P<mentions>(?:<@\d+>\s+|<@!\d+>\s+|<#\d+>\s+)*)(?P<time>(?:(?:\d+)(?:s|m|h|d|:|/|-|))+)(?:\s+(?P<interval>(?:(?:\d+)(?:s|m|h|d|))+))?(?:\s+(?P<expires>(?:(?:\d+)(?:s|m|h|d|:|/|-|))+))?\s+(?P<content>.*)"#s
);
match matcher.captures(&args) {
Some(captures) => {
let mut args: Vec<Value> = vec![];
if let Some(group) = captures.name("time") {
let content = group.as_str();
args.push(json!({
"name": "time",
"value": content,
"type": CommandOptionType::String,
}));
}
if let Some(group) = captures.name("content") {
let content = group.as_str();
args.push(json!({
"name": "content",
"value": content,
"type": CommandOptionType::String,
}));
}
if let Some(group) = captures.name("interval") {
let content = group.as_str();
args.push(json!({
"name": "interval",
"value": content,
"type": CommandOptionType::String,
}));
}
if let Some(group) = captures.name("expires") {
let content = group.as_str();
args.push(json!({
"name": "expires",
"value": content,
"type": CommandOptionType::String,
}));
}
if let Some(group) = captures.name("mentions") {
let content = group.as_str();
args.push(json!({
"name": "channels",
"value": content,
"type": CommandOptionType::String,
}));
}
Some(RawCommandMacro {
guild_id,
name: alias_name,
description: None,
commands: json!([
{
"command_name": "remind",
"options": args,
}
]),
})
}
None => None,
}
} else if command_word == "n" || command_word == "natural" {
let matcher_primary = regex!(
r#"(?P<time>.*?)(?:\s+)(?:send|say)(?:\s+)(?P<content>.*?)(?:(?:\s+)to(?:\s+)(?P<mentions>((?:<@\d+>)|(?:<@!\d+>)|(?:<#\d+>)|(?:\s+))+))?$"#s
);
let matcher_secondary = regex!(
r#"(?P<msg>.*)(?:\s+)every(?:\s+)(?P<interval>.*?)(?:(?:\s+)(?:until|for)(?:\s+)(?P<expires>.*?))?$"#s
);
match matcher_primary.captures(&args) {
Some(captures) => {
let captures_secondary = matcher_secondary.captures(&args);
let mut args: Vec<Value> = vec![];
if let Some(group) = captures.name("time") {
let content = group.as_str();
args.push(json!({
"name": "time",
"value": content,
"type": CommandOptionType::String,
}));
}
if let Some(group) = captures.name("content") {
let content = group.as_str();
args.push(json!({
"name": "content",
"value": content,
"type": CommandOptionType::String,
}));
}
if let Some(group) =
captures_secondary.as_ref().and_then(|c: &Captures| c.name("interval"))
{
let content = group.as_str();
args.push(json!({
"name": "interval",
"value": content,
"type": CommandOptionType::String,
}));
}
if let Some(group) =
captures_secondary.and_then(|c: Captures| c.name("expires"))
{
let content = group.as_str();
args.push(json!({
"name": "expires",
"value": content,
"type": CommandOptionType::String,
}));
}
if let Some(group) = captures.name("mentions") {
let content = group.as_str();
args.push(json!({
"name": "channels",
"value": content,
"type": CommandOptionType::String,
}));
}
Some(RawCommandMacro {
guild_id,
name: alias_name,
description: None,
commands: json!([
{
"command_name": "remind",
"options": args,
}
]),
})
}
None => None,
}
} else {
None
}
}
None => None,
}
}
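For a legacy alias whose stored command text is `r 10m water the plants`, the generated `commands` payload comes out roughly as below. This is a hedged illustration of the shape only; empty capture groups such as `mentions` may add further entries in the real output:
// Approximate shape of the JSON stored for the migrated macro.
fn example_migrated_payload() -> serde_json::Value {
    serde_json::json!([
        {
            "command_name": "remind",
            "options": [
                { "name": "time", "value": "10m", "type": CommandOptionType::String },
                { "name": "content", "value": "water the plants", "type": CommandOptionType::String }
            ]
        }
    ])
}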

View File

@ -1,19 +0,0 @@
use crate::{Context, Error};
pub mod delete;
pub mod list;
pub mod migrate;
pub mod record;
pub mod run;
/// Record and replay command sequences
#[poise::command(
slash_command,
rename = "macro",
guild_only = true,
default_member_permissions = "MANAGE_GUILD",
identifying_name = "macro_base"
)]
pub async fn macro_base(_ctx: Context<'_>) -> Result<(), Error> {
Ok(())
}

View File

@ -1,151 +0,0 @@
use std::collections::hash_map::Entry;
use crate::{consts::THEME_COLOR, models::command_macro::CommandMacro, Context, Error};
/// Start recording up to 5 commands to replay
#[poise::command(
slash_command,
rename = "record",
guild_only = true,
default_member_permissions = "MANAGE_GUILD",
identifying_name = "record_macro"
)]
pub async fn record_macro(
ctx: Context<'_>,
#[description = "Name for the new macro"] name: String,
#[description = "Description for the new macro"] description: Option<String>,
) -> Result<(), Error> {
if name.len() > 100 {
ctx.say("Name must be less than 100 characters").await?;
return Ok(());
}
if description.as_ref().map_or(0, |d| d.len()) > 100 {
ctx.say("Description must be less than 100 characters").await?;
return Ok(());
}
let guild_id = ctx.guild_id().unwrap();
let row = sqlx::query!(
"
SELECT 1 as _e FROM macro WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?) AND name = ?",
guild_id.0,
name
)
.fetch_one(&ctx.data().database)
.await;
if row.is_ok() {
ctx.send(|m| {
m.ephemeral(true).embed(|e| {
e.title("Unique Name Required")
.description(
"A macro already exists under this name.
Please select a unique name for your macro.",
)
.color(*THEME_COLOR)
})
})
.await?;
} else {
let okay = {
let mut lock = ctx.data().recording_macros.write().await;
if let Entry::Vacant(e) = lock.entry((guild_id, ctx.author().id)) {
e.insert(CommandMacro { guild_id, name, description, commands: vec![] });
true
} else {
false
}
};
if okay {
ctx.send(|m| {
m.ephemeral(true).embed(|e| {
e.title("Macro Recording Started")
.description(
"Run up to 5 commands, or type `/macro finish` to stop at any point.
Any commands run as part of recording will be inconsequential",
)
.color(*THEME_COLOR)
})
})
.await?;
} else {
ctx.send(|m| {
m.ephemeral(true).embed(|e| {
e.title("Macro Already Recording")
.description(
"You are already recording a macro in this server.
Please use `/macro finish` to end this recording before starting another.",
)
.color(*THEME_COLOR)
})
})
.await?;
}
}
Ok(())
}
/// Finish current macro recording
#[poise::command(
slash_command,
rename = "finish",
guild_only = true,
default_member_permissions = "MANAGE_GUILD",
identifying_name = "finish_macro"
)]
pub async fn finish_macro(ctx: Context<'_>) -> Result<(), Error> {
let key = (ctx.guild_id().unwrap(), ctx.author().id);
{
let lock = ctx.data().recording_macros.read().await;
let contained = lock.get(&key);
if contained.map_or(true, |cmacro| cmacro.commands.is_empty()) {
ctx.send(|m| {
m.embed(|e| {
e.title("No Macro Recorded")
.description("Use `/macro record` to start recording a macro")
.color(*THEME_COLOR)
})
})
.await?;
} else {
let command_macro = contained.unwrap();
let json = serde_json::to_string(&command_macro.commands).unwrap();
sqlx::query!(
"INSERT INTO macro (guild_id, name, description, commands) VALUES ((SELECT id FROM guilds WHERE guild = ?), ?, ?, ?)",
command_macro.guild_id.0,
command_macro.name,
command_macro.description,
json
)
.execute(&ctx.data().database)
.await
.unwrap();
ctx.send(|m| {
m.embed(|e| {
e.title("Macro Recorded")
.description("Use `/macro run` to execute the macro")
.color(*THEME_COLOR)
})
})
.await?;
}
}
{
let mut lock = ctx.data().recording_macros.write().await;
lock.remove(&key);
}
Ok(())
}
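`/macro record` and `/macro finish` coordinate through `ctx.data().recording_macros`, whose definition is not part of this excerpt; judging from the usage above it is presumably something along these lines (an assumption, sketched for orientation only):
use std::collections::HashMap;
use poise::serenity_prelude::{GuildId, UserId};
use tokio::sync::RwLock;
use crate::{models::command_macro::CommandMacro, Data, Error};
// Assumed shape: one in-progress recording per (guild, author) pair, behind an
// async RwLock so `record`, command dispatch and `finish` can share it.
pub type RecordingMacros = RwLock<HashMap<(GuildId, UserId), CommandMacro<Data, Error>>>;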

View File

@ -1,56 +0,0 @@
use super::super::autocomplete::macro_name_autocomplete;
use crate::{models::command_macro::guild_command_macro, Context, Data, Error, THEME_COLOR};
/// Run a recorded macro
#[poise::command(
slash_command,
rename = "run",
guild_only = true,
default_member_permissions = "MANAGE_GUILD",
identifying_name = "run_macro"
)]
pub async fn run_macro(
ctx: poise::ApplicationContext<'_, Data, Error>,
#[description = "Name of macro to run"]
#[autocomplete = "macro_name_autocomplete"]
name: String,
) -> Result<(), Error> {
match guild_command_macro(&Context::Application(ctx), &name).await {
Some(command_macro) => {
Context::Application(ctx)
.send(|b| {
b.embed(|e| {
e.title("Running Macro").color(*THEME_COLOR).description(format!(
"Running macro {} ({} commands)",
command_macro.name,
command_macro.commands.len()
))
})
})
.await?;
for command in command_macro.commands {
if let Some(action) = command.action {
match (action)(poise::ApplicationContext { args: &command.options, ..ctx })
.await
{
Ok(()) => {}
Err(e) => {
println!("{:?}", e);
}
}
} else {
Context::Application(ctx)
.say(format!("Command \"{}\" not found", command.command_name))
.await?;
}
}
}
None => {
Context::Application(ctx).say(format!("Macro \"{}\" not found", name)).await?;
}
}
Ok(())
}

View File

@ -1,13 +1,42 @@
use regex_command_attr::command;
use serenity::{builder::CreateEmbedFooter, client::Context, model::channel::Message};
use chrono::offset::Utc; use chrono::offset::Utc;
use poise::{serenity_prelude as serenity, serenity_prelude::Mentionable};
use crate::{models::CtxData, Context, Error, THEME_COLOR}; use crate::{
command_help,
consts::DEFAULT_PREFIX,
get_ctx_data,
language_manager::LanguageManager,
models::{user_data::UserData, CtxGuildData},
FrameworkCtx, THEME_COLOR,
};
fn footer( use std::{
ctx: Context<'_>, sync::Arc,
) -> impl FnOnce(&mut serenity::CreateEmbedFooter) -> &mut serenity::CreateEmbedFooter { time::{SystemTime, UNIX_EPOCH},
let shard_count = ctx.serenity_context().cache.shard_count(); };
let shard = ctx.serenity_context().shard_id;
#[command]
#[can_blacklist(false)]
async fn ping(ctx: &Context, msg: &Message, _args: String) {
let now = SystemTime::now();
let since_epoch = now
.duration_since(UNIX_EPOCH)
.expect("Time calculated as going backwards. Very bad");
let delta = since_epoch.as_millis() as i64 - msg.timestamp.timestamp_millis();
let _ = msg
.channel_id
.say(&ctx, format!("Time taken to receive message: {}ms", delta))
.await;
}
async fn footer(ctx: &Context) -> impl FnOnce(&mut CreateEmbedFooter) -> &mut CreateEmbedFooter {
let shard_count = ctx.cache.shard_count();
let shard = ctx.shard_id;
move |f| { move |f| {
f.text(format!( f.text(format!(
@ -19,163 +48,174 @@ fn footer(
} }
} }
/// Get an overview of bot commands #[command]
#[poise::command(slash_command)] #[can_blacklist(false)]
pub async fn help(ctx: Context<'_>) -> Result<(), Error> { async fn help(ctx: &Context, msg: &Message, args: String) {
let footer = footer(ctx); async fn default_help(
ctx: &Context,
msg: &Message,
lm: Arc<LanguageManager>,
prefix: &str,
language: &str,
) {
let desc = lm.get(language, "help/desc").replace("{prefix}", prefix);
let footer = footer(ctx).await;
ctx.send(|m| { let _ = msg
m.ephemeral(true).embed(|e| { .channel_id
e.title("Help") .send_message(ctx, |m| {
.color(*THEME_COLOR) m.embed(move |e| {
.description( e.title("Help Menu")
"__Info Commands__ .description(desc)
`/help` `/info` `/donate` `/dashboard` `/clock` .field(
*run these commands with no options* lm.get(language, "help/setup_title"),
"`lang` `timezone` `meridian`",
true,
)
.field(
lm.get(language, "help/mod_title"),
"`prefix` `blacklist` `restrict` `alias`",
true,
)
.field(
lm.get(language, "help/reminder_title"),
"`remind` `interval` `natural` `look` `countdown`",
true,
)
.field(
lm.get(language, "help/reminder_mod_title"),
"`del` `offset` `pause` `nudge`",
true,
)
.field(
lm.get(language, "help/info_title"),
"`help` `info` `donate` `clock`",
true,
)
.field(
lm.get(language, "help/todo_title"),
"`todo` `todos` `todoc`",
true,
)
.field(lm.get(language, "help/other_title"), "`timer`", true)
.footer(footer)
.color(*THEME_COLOR)
})
})
.await;
}
__Reminder Commands__ let (pool, lm) = get_ctx_data(&ctx).await;
`/remind` - Create a new reminder that will send a message at a certain time
`/timer` - Start a timer from now, that will count time passed. Also used to view and remove timers
__Reminder Management__ let language = UserData::language_of(&msg.author, &pool);
`/del` - Delete reminders let prefix = ctx.prefix(msg.guild_id);
`/look` - View reminders
`/pause` - Pause all reminders on the channel
`/offset` - Move all reminders by a certain time
`/nudge` - Move all new reminders on this channel by a certain time
__Todo Commands__ if !args.is_empty() {
`/todo` - Add, view and manage the server, channel or user todo lists let framework = ctx
.data
.read()
.await
.get::<FrameworkCtx>()
.cloned()
.expect("Could not get FrameworkCtx from data");
__Setup Commands__ let matched = framework
`/timezone` - Set your timezone (necessary for `/remind` to work properly) .commands
`/dm allow/block` - Change your DM settings for reminders. .get(args.as_str())
.map(|inner| inner.name);
__Advanced Commands__ if let Some(command_name) = matched {
`/macro` - Record and replay command sequences command_help(ctx, msg, lm, &prefix.await, &language.await, command_name).await
", } else {
) default_help(ctx, msg, lm, &prefix.await, &language.await).await;
.footer(footer) }
}) } else {
}) default_help(ctx, msg, lm, &prefix.await, &language.await).await;
.await?; }
Ok(())
} }
/// Get information about the bot #[command]
#[poise::command(slash_command)] async fn info(ctx: &Context, msg: &Message, _args: String) {
pub async fn info(ctx: Context<'_>) -> Result<(), Error> { let (pool, lm) = get_ctx_data(&ctx).await;
let footer = footer(ctx);
let _ = ctx let language = UserData::language_of(&msg.author, &pool);
.send(|m| { let prefix = ctx.prefix(msg.guild_id);
m.ephemeral(true).embed(|e| { let current_user = ctx.cache.current_user();
let footer = footer(ctx).await;
let desc = lm
.get(&language.await, "info")
.replacen("{user}", &current_user.name, 1)
.replace("{default_prefix}", &*DEFAULT_PREFIX)
.replace("{prefix}", &prefix.await);
let _ = msg
.channel_id
.send_message(ctx, |m| {
m.embed(move |e| {
e.title("Info") e.title("Info")
.description( .description(desc)
"Help: `/help`
**Welcome to Reminder Bot!**
Developer: <@203532103185465344>
Icon: <@253202252821430272>
Find me on https://discord.jellywx.com and on https://github.com/JellyWX :)
Invite the bot: https://invite.reminder-bot.com/
Use our dashboard: https://reminder-bot.com/",
)
.footer(footer) .footer(footer)
.color(*THEME_COLOR) .color(*THEME_COLOR)
}) })
}) })
.await; .await;
Ok(())
} }
/// Details on supporting the bot and Patreon benefits #[command]
#[poise::command(slash_command)] async fn donate(ctx: &Context, msg: &Message, _args: String) {
pub async fn donate(ctx: Context<'_>) -> Result<(), Error> { let (pool, lm) = get_ctx_data(&ctx).await;
let footer = footer(ctx);
ctx.send(|m| m.embed(|e| { let language = UserData::language_of(&msg.author, &pool).await;
e.title("Donate") let desc = lm.get(&language, "donate");
.description("Thinking of adding a monthly contribution? let footer = footer(ctx).await;
Click below for my Patreon and official bot server :)
**https://www.patreon.com/jellywx/** let _ = msg
**https://discord.jellywx.com/** .channel_id
.send_message(ctx, |m| {
When you subscribe, Patreon will automatically rank you up on our Discord server (make sure you link your Patreon and Discord accounts!) m.embed(move |e| {
With your new rank, you'll be able to: e.title("Donate")
• Set repeating reminders with `interval`, `natural` or the dashboard .description(desc)
• Use unlimited uploads on SoundFX .footer(footer)
.color(*THEME_COLOR)
(Also, members of servers you __own__ will be able to set repeating reminders via commands) })
Just $2 USD/month!
*Please note, you must be in the JellyWX Discord server to receive Patreon features*")
.footer(footer)
.color(*THEME_COLOR)
}),
)
.await?;
Ok(())
}
/// Get the link to the online dashboard
#[poise::command(slash_command)]
pub async fn dashboard(ctx: Context<'_>) -> Result<(), Error> {
let footer = footer(ctx);
ctx.send(|m| {
m.ephemeral(true).embed(|e| {
e.title("Dashboard")
.description("**https://reminder-bot.com/dashboard**")
.footer(footer)
.color(*THEME_COLOR)
}) })
}) .await;
.await?;
Ok(())
} }
/// View the current time in your selected timezone #[command]
#[poise::command(slash_command)] async fn dashboard(ctx: &Context, msg: &Message, _args: String) {
pub async fn clock(ctx: Context<'_>) -> Result<(), Error> { let footer = footer(ctx).await;
ctx.defer_ephemeral().await?;
let tz = ctx.timezone().await; let _ = msg
let now = Utc::now().with_timezone(&tz); .channel_id
.send_message(ctx, |m| {
ctx.send(|m| { m.embed(move |e| {
m.ephemeral(true).content(format!("Time in **{}**: `{}`", tz, now.format("%H:%M"))) e.title("Dashboard")
}) .description("https://reminder-bot.com/dashboard")
.await?; .footer(footer)
.color(*THEME_COLOR)
Ok(()) })
})
.await;
} }
/// View the current time in a user's selected timezone #[command]
#[poise::command(context_menu_command = "View Local Time")] async fn clock(ctx: &Context, msg: &Message, _args: String) {
pub async fn clock_context_menu(ctx: Context<'_>, user: serenity::User) -> Result<(), Error> { let (pool, lm) = get_ctx_data(&ctx).await;
ctx.defer_ephemeral().await?;
let user_data = ctx.user_data(user.id).await?; let language = UserData::language_of(&msg.author, &pool).await;
let tz = user_data.timezone(); let timezone = UserData::timezone_of(&msg.author, &pool).await;
let now = Utc::now().with_timezone(&tz); let now = Utc::now().with_timezone(&timezone);
ctx.send(|m| { let clock_display = lm.get(&language, "clock/time");
m.ephemeral(true).content(format!(
"Time in {}'s timezone: `{}`",
user.mention(),
now.format("%H:%M")
))
})
.await?;
Ok(()) let _ = msg
.channel_id
.say(
&ctx,
clock_display.replacen("{}", &now.format("%H:%M").to_string(), 1),
)
.await;
} }

View File

@ -1,5 +1,3 @@
mod autocomplete;
pub mod command_macro;
pub mod info_cmds; pub mod info_cmds;
pub mod moderation_cmds; pub mod moderation_cmds;
pub mod reminder_cmds; pub mod reminder_cmds;

View File

@ -1,52 +1,140 @@
use chrono::offset::Utc; use regex_command_attr::command;
use serenity::{
builder::CreateActionRow,
client::Context,
framework::Framework,
model::{
channel::Message,
id::{ChannelId, MessageId, RoleId},
interactions::message_component::ButtonStyle,
},
};
use chrono_tz::{Tz, TZ_VARIANTS}; use chrono_tz::{Tz, TZ_VARIANTS};
use chrono::offset::Utc;
use inflector::Inflector;
use levenshtein::levenshtein; use levenshtein::levenshtein;
use log::warn;
use super::autocomplete::timezone_autocomplete; use crate::{
use crate::{consts::THEME_COLOR, models::CtxData, Context, Error}; command_help,
consts::{REGEX_ALIAS, REGEX_CHANNEL, REGEX_COMMANDS, REGEX_ROLE, THEME_COLOR},
framework::SendIterator,
get_ctx_data,
models::{channel_data::ChannelData, guild_data::GuildData, user_data::UserData, CtxGuildData},
FrameworkCtx, PopularTimezones,
};
/// Select your timezone use std::{collections::HashMap, iter};
#[poise::command(slash_command, identifying_name = "timezone")]
pub async fn timezone(
ctx: Context<'_>,
#[description = "Timezone to use from this list: https://gist.github.com/JellyWX/913dfc8b63d45192ad6cb54c829324ee"]
#[autocomplete = "timezone_autocomplete"]
timezone: Option<String>,
) -> Result<(), Error> {
let mut user_data = ctx.author_data().await.unwrap();
let footer_text = format!("Current timezone: {}", user_data.timezone); #[command]
#[supports_dm(false)]
#[permission_level(Restricted)]
#[can_blacklist(false)]
async fn blacklist(ctx: &Context, msg: &Message, args: String) {
let (pool, lm) = get_ctx_data(&ctx).await;
if let Some(timezone) = timezone { let language = UserData::language_of(&msg.author, &pool).await;
match timezone.parse::<Tz>() {
Ok(tz) => {
user_data.timezone = timezone.clone();
user_data.commit_changes(&ctx.data().database).await;
let now = Utc::now().with_timezone(&tz); let capture_opt = REGEX_CHANNEL
.captures(&args)
.map(|cap| cap.get(1))
.flatten();
ctx.send(|m| { let (channel, local) = match capture_opt {
m.embed(|e| { Some(capture) => (
e.title("Timezone Set") ChannelId(capture.as_str().parse::<u64>().unwrap()).to_channel_cached(&ctx),
.description(format!( false,
"Timezone has been set to **{}**. Your current time should be `{}`", ),
timezone,
now.format("%H:%M") None => (msg.channel(&ctx).await.ok(), true),
)) };
.color(*THEME_COLOR)
}) let mut channel_data = ChannelData::from_channel(channel.unwrap(), &pool)
}) .await
.await?; .unwrap();
channel_data.blacklisted = !channel_data.blacklisted;
channel_data.commit_changes(&pool).await;
if channel_data.blacklisted {
if local {
let _ = msg
.channel_id
.say(&ctx, lm.get(&language, "blacklist/added"))
.await;
} else {
let _ = msg
.channel_id
.say(&ctx, lm.get(&language, "blacklist/added_from"))
.await;
}
} else if local {
let _ = msg
.channel_id
.say(&ctx, lm.get(&language, "blacklist/removed"))
.await;
} else {
let _ = msg
.channel_id
.say(&ctx, lm.get(&language, "blacklist/removed_from"))
.await;
}
}
#[command]
async fn timezone(ctx: &Context, msg: &Message, args: String) {
let (pool, lm) = get_ctx_data(&ctx).await;
let mut user_data = UserData::from_user(&msg.author, &ctx, &pool).await.unwrap();
let footer_text = lm.get(&user_data.language, "timezone/footer").replacen(
"{timezone}",
&user_data.timezone,
1,
);
if !args.is_empty() {
match args.parse::<Tz>() {
Ok(_) => {
user_data.timezone = args;
user_data.commit_changes(&pool).await;
let now = Utc::now().with_timezone(&user_data.timezone());
let content = lm
.get(&user_data.language, "timezone/set_p")
.replacen("{timezone}", &user_data.timezone, 1)
.replacen("{time}", &now.format("%H:%M").to_string(), 1);
let _ =
msg.channel_id
.send_message(&ctx, |m| {
m.embed(|e| {
e.title(lm.get(&user_data.language, "timezone/set_p_title"))
.description(content)
.color(*THEME_COLOR)
.footer(|f| {
f.text(
lm.get(&user_data.language, "timezone/footer")
.replacen("{timezone}", &user_data.timezone, 1),
)
})
})
})
.await;
} }
Err(_) => { Err(_) => {
let filtered_tz = TZ_VARIANTS let filtered_tz = TZ_VARIANTS
.iter() .iter()
.filter(|tz| { .filter(|tz| {
timezone.contains(&tz.to_string()) args.contains(&tz.to_string())
|| tz.to_string().contains(&timezone) || tz.to_string().contains(&args)
|| levenshtein(&tz.to_string(), &timezone) < 4 || levenshtein(&tz.to_string(), &args) < 4
}) })
.take(25) .take(25)
.map(|t| t.to_owned()) .map(|t| t.to_owned())
@ -55,201 +143,503 @@ pub async fn timezone(
let fields = filtered_tz.iter().map(|tz| { let fields = filtered_tz.iter().map(|tz| {
( (
tz.to_string(), tz.to_string(),
format!("🕗 `{}`", Utc::now().with_timezone(tz).format("%H:%M")), format!(
"🕗 `{}`",
Utc::now().with_timezone(tz).format("%H:%M").to_string()
),
true, true,
) )
}); });
ctx.send(|m| { let _ = msg
m.embed(|e| { .channel_id
e.title("Timezone Not Recognized") .send_message(&ctx, |m| {
.description("Possibly you meant one of the following timezones, otherwise click [here](https://gist.github.com/JellyWX/913dfc8b63d45192ad6cb54c829324ee):") m.embed(|e| {
.color(*THEME_COLOR) e.title(lm.get(&user_data.language, "timezone/no_timezone_title"))
.fields(fields) .description(lm.get(&user_data.language, "timezone/no_timezone"))
.footer(|f| f.text(footer_text)) .color(*THEME_COLOR)
.url("https://gist.github.com/JellyWX/913dfc8b63d45192ad6cb54c829324ee") .fields(fields)
.footer(|f| f.text(footer_text))
.url("https://gist.github.com/JellyWX/913dfc8b63d45192ad6cb54c829324ee")
}).components(|c| {
for row in filtered_tz.as_slice().chunks(5) {
let mut action_row = CreateActionRow::default();
for timezone in row {
action_row.create_button(|b| {
b.style(ButtonStyle::Secondary)
.label(timezone.to_string())
.custom_id(format!("timezone:{}", timezone.to_string()))
});
}
c.add_action_row(action_row);
}
c
})
}) })
}) .await;
.await?;
} }
} }
} else { } else {
let popular_timezones_iter = ctx.data().popular_timezones.iter().map(|t| { let content = lm
(t.to_string(), format!("🕗 `{}`", Utc::now().with_timezone(t).format("%H:%M")), true) .get(&user_data.language, "timezone/no_argument")
.replace("{prefix}", &ctx.prefix(msg.guild_id).await);
let popular_timezones = ctx
.data
.read()
.await
.get::<PopularTimezones>()
.cloned()
.unwrap();
let popular_timezones_iter = popular_timezones.iter().map(|t| {
(
t.to_string(),
format!(
"🕗 `{}`",
Utc::now().with_timezone(t).format("%H:%M").to_string()
),
true,
)
}); });
ctx.send(|m| { let _ = msg
m.embed(|e| { .channel_id
e.title("Timezone Usage") .send_message(&ctx, |m| {
.description( m.embed(|e| {
"**Usage:** e.title(lm.get(&user_data.language, "timezone/no_argument_title"))
`/timezone Name` .description(content)
.color(*THEME_COLOR)
**Example:** .fields(popular_timezones_iter)
`/timezone Europe/London` .footer(|f| f.text(footer_text))
.url("https://gist.github.com/JellyWX/913dfc8b63d45192ad6cb54c829324ee")
You may want to use one of the popular timezones below, otherwise click [here](https://gist.github.com/JellyWX/913dfc8b63d45192ad6cb54c829324ee):",
)
.color(*THEME_COLOR)
.fields(popular_timezones_iter)
.footer(|f| f.text(footer_text))
.url("https://gist.github.com/JellyWX/913dfc8b63d45192ad6cb54c829324ee")
})
})
.await?;
}
Ok(())
}
/// Configure server settings
#[poise::command(
slash_command,
rename = "settings",
identifying_name = "settings",
guild_only = true
)]
pub async fn settings(_ctx: Context<'_>) -> Result<(), Error> {
Ok(())
}
/// Configure ephemeral setup
#[poise::command(
slash_command,
rename = "ephemeral",
identifying_name = "ephemeral_confirmations",
guild_only = true
)]
pub async fn ephemeral_confirmations(_ctx: Context<'_>) -> Result<(), Error> {
Ok(())
}
/// Set reminder confirmations to be sent "ephemerally" (private and cleared automatically)
#[poise::command(
slash_command,
rename = "on",
identifying_name = "set_ephemeral_confirmations",
guild_only = true
)]
pub async fn set_ephemeral_confirmations(ctx: Context<'_>) -> Result<(), Error> {
let mut guild_data = ctx.guild_data().await.unwrap()?;
guild_data.ephemeral_confirmations = true;
guild_data.commit_changes(&ctx.data().database).await;
ctx.send(|r| {
r.ephemeral(true).embed(|e| {
e.title("Confirmations ephemeral")
.description("Reminder confirmations will be sent privately, and removed when your client restarts.")
.color(*THEME_COLOR)
})
})
.await?;
Ok(())
}
/// Set reminder confirmations to persist indefinitely
#[poise::command(
slash_command,
rename = "off",
identifying_name = "unset_ephemeral_confirmations",
guild_only = true
)]
pub async fn unset_ephemeral_confirmations(ctx: Context<'_>) -> Result<(), Error> {
let mut guild_data = ctx.guild_data().await.unwrap()?;
guild_data.ephemeral_confirmations = false;
guild_data.commit_changes(&ctx.data().database).await;
ctx.send(|r| {
r.ephemeral(true).embed(|e| {
e.title("Confirmations public")
.description(
"Reminder confirmations will be sent as regular messages, and won't be removed automatically.",
)
.color(*THEME_COLOR)
})
})
.await?;
Ok(())
}
/// Configure whether other users can set reminders to your direct messages
#[poise::command(slash_command, rename = "dm", identifying_name = "allowed_dm")]
pub async fn allowed_dm(_ctx: Context<'_>) -> Result<(), Error> {
Ok(())
}
/// Allow other users to set reminders in your direct messages
#[poise::command(slash_command, rename = "allow", identifying_name = "set_allowed_dm")]
pub async fn set_allowed_dm(ctx: Context<'_>) -> Result<(), Error> {
let mut user_data = ctx.author_data().await?;
user_data.allowed_dm = true;
user_data.commit_changes(&ctx.data().database).await;
ctx.send(|r| {
r.ephemeral(true).embed(|e| {
e.title("DMs permitted")
.description("You will receive a message if a user sets a DM reminder for you.")
.color(*THEME_COLOR)
})
})
.await?;
Ok(())
}
/// Block other users from setting reminders in your direct messages
#[poise::command(slash_command, rename = "block", identifying_name = "unset_allowed_dm")]
pub async fn unset_allowed_dm(ctx: Context<'_>) -> Result<(), Error> {
let mut user_data = ctx.author_data().await?;
user_data.allowed_dm = false;
user_data.commit_changes(&ctx.data().database).await;
ctx.send(|r| {
r.ephemeral(true).embed(|e| {
e.title("DMs blocked")
.description(
"You can still set DM reminders for yourself or for users with DMs enabled.",
)
.color(*THEME_COLOR)
})
})
.await?;
Ok(())
}
/// View the webhook being used to send reminders to this channel
#[poise::command(
slash_command,
identifying_name = "webhook_url",
required_permissions = "ADMINISTRATOR"
)]
pub async fn webhook(ctx: Context<'_>) -> Result<(), Error> {
match ctx.channel_data().await {
Ok(data) => {
if let (Some(id), Some(token)) = (data.webhook_id, data.webhook_token) {
ctx.send(|b| {
b.ephemeral(true).content(format!(
"**Warning!**
This link can be used by users to anonymously send messages, with or without permissions.
Do not share it!
|| https://discord.com/api/webhooks/{}/{} ||",
id, token,
))
}) })
.await?; .components(|c| {
} else { for row in popular_timezones.as_slice().chunks(5) {
ctx.say("No webhook configured on this channel.").await?; let mut action_row = CreateActionRow::default();
for timezone in row {
action_row.create_button(|b| {
b.style(ButtonStyle::Secondary)
.label(timezone.to_string())
.custom_id(format!("timezone:{}", timezone.to_string()))
});
}
c.add_action_row(action_row);
}
c
})
})
.await;
}
}
#[command("lang")]
async fn language(ctx: &Context, msg: &Message, args: String) {
let (pool, lm) = get_ctx_data(&ctx).await;
let mut user_data = UserData::from_user(&msg.author, &ctx, &pool).await.unwrap();
if !args.is_empty() {
match lm.get_language(&args) {
Some(lang) => {
user_data.language = lang.to_string();
user_data.commit_changes(&pool).await;
let _ = msg
.channel_id
.send_message(&ctx, |m| {
m.embed(|e| {
e.title(lm.get(&user_data.language, "lang/set_p_title"))
.color(*THEME_COLOR)
.description(lm.get(&user_data.language, "lang/set_p"))
})
})
.await;
}
None => {
let language_codes = lm.all_languages().map(|(k, v)| {
(
format!("{} {}", lm.get(k, "flag"), v.to_title_case()),
format!("`$lang {}`", k.to_uppercase()),
true,
)
});
let _ = msg
.channel_id
.send_message(&ctx, |m| {
m.embed(|e| {
e.title(lm.get(&user_data.language, "lang/invalid_title"))
.color(*THEME_COLOR)
.description(lm.get(&user_data.language, "lang/invalid"))
.fields(language_codes)
})
.components(|c| {
for row in lm
.all_languages()
.map(|(k, v)| (k.to_string(), v.to_string()))
.collect::<Vec<(String, String)>>()
.as_slice()
.chunks(5)
{
let mut action_row = CreateActionRow::default();
for (code, name) in row {
action_row.create_button(|b| {
b.style(ButtonStyle::Primary)
.label(name.to_title_case())
.custom_id(format!("lang:{}", code.to_uppercase()))
});
}
c.add_action_row(action_row);
}
c
})
})
.await;
} }
} }
Err(e) => { } else {
warn!("Error fetching channel data: {:?}", e); let language_codes = lm.all_languages().map(|(k, v)| {
(
format!("{} {}", lm.get(k, "flag"), v.to_title_case()),
format!("`$lang {}`", k.to_uppercase()),
true,
)
});
ctx.say("No webhook configured on this channel.").await?; let _ = msg
} .channel_id
.send_message(&ctx, |m| {
m.embed(|e| {
e.title(lm.get(&user_data.language, "lang/select_title"))
.color(*THEME_COLOR)
.description(lm.get(&user_data.language, "lang/select"))
.fields(language_codes)
})
.components(|c| {
for row in lm
.all_languages()
.map(|(k, v)| (k.to_string(), v.to_string()))
.collect::<Vec<(String, String)>>()
.as_slice()
.chunks(5)
{
let mut action_row = CreateActionRow::default();
for (code, name) in row {
action_row.create_button(|b| {
b.style(ButtonStyle::Primary)
.label(name.to_title_case())
.custom_id(format!("lang:{}", code.to_uppercase()))
});
}
c.add_action_row(action_row);
}
c
})
})
.await;
}
}
#[command]
#[supports_dm(false)]
#[permission_level(Restricted)]
async fn prefix(ctx: &Context, msg: &Message, args: String) {
let (pool, lm) = get_ctx_data(&ctx).await;
let guild_data = ctx.guild_data(msg.guild_id.unwrap()).await.unwrap();
let language = UserData::language_of(&msg.author, &pool).await;
if args.len() > 5 {
let _ = msg
.channel_id
.say(&ctx, lm.get(&language, "prefix/too_long"))
.await;
} else if args.is_empty() {
let _ = msg
.channel_id
.say(&ctx, lm.get(&language, "prefix/no_argument"))
.await;
} else {
guild_data.write().await.prefix = args;
guild_data.read().await.commit_changes(&pool).await;
let content = lm.get(&language, "prefix/success").replacen(
"{prefix}",
&guild_data.read().await.prefix,
1,
);
let _ = msg.channel_id.say(&ctx, content).await;
}
}
#[command]
#[supports_dm(false)]
#[permission_level(Restricted)]
async fn restrict(ctx: &Context, msg: &Message, args: String) {
let (pool, lm) = get_ctx_data(&ctx).await;
let language = UserData::language_of(&msg.author, &pool).await;
let guild_data = GuildData::from_guild(msg.guild(&ctx).unwrap(), &pool)
.await
.unwrap();
let role_tag_match = REGEX_ROLE.find(&args);
if let Some(role_tag) = role_tag_match {
let commands = REGEX_COMMANDS
.find_iter(&args.to_lowercase())
.map(|c| c.as_str().to_string())
.collect::<Vec<String>>();
let role_id = RoleId(
role_tag.as_str()[3..role_tag.as_str().len() - 1]
.parse::<u64>()
.unwrap(),
);
let role_opt = role_id.to_role_cached(&ctx);
if let Some(role) = role_opt {
let _ = sqlx::query!(
"
DELETE FROM command_restrictions WHERE role_id = (SELECT id FROM roles WHERE role = ?)
",
role.id.as_u64()
)
.execute(&pool)
.await;
if commands.is_empty() {
let _ = msg
.channel_id
.say(&ctx, lm.get(&language, "restrict/disabled"))
.await;
} else {
let _ = sqlx::query!(
"
INSERT IGNORE INTO roles (role, name, guild_id) VALUES (?, ?, ?)
",
role.id.as_u64(),
role.name,
guild_data.id
)
.execute(&pool)
.await;
for command in commands {
let res = sqlx::query!(
"
INSERT INTO command_restrictions (role_id, command) VALUES ((SELECT id FROM roles WHERE role = ?), ?)
", role.id.as_u64(), command)
.execute(&pool)
.await;
if res.is_err() {
println!("{:?}", res);
let content = lm.get(&language, "restrict/failure").replacen(
"{command}",
&command,
1,
);
let _ = msg.channel_id.say(&ctx, content).await;
}
}
let _ = msg
.channel_id
.say(&ctx, lm.get(&language, "restrict/enabled"))
.await;
}
}
} else if args.is_empty() {
let guild_id = msg.guild_id.unwrap().as_u64().to_owned();
let rows = sqlx::query!(
"
SELECT
roles.role, command_restrictions.command
FROM
command_restrictions
INNER JOIN
roles
ON
roles.id = command_restrictions.role_id
WHERE
roles.guild_id = (SELECT id FROM guilds WHERE guild = ?)
",
guild_id
)
.fetch_all(&pool)
.await
.unwrap();
let mut commands_roles: HashMap<&str, Vec<String>> = HashMap::new();
rows.iter().for_each(|row| {
if let Some(vec) = commands_roles.get_mut(&row.command.as_str()) {
vec.push(format!("<@&{}>", row.role));
} else {
commands_roles.insert(&row.command, vec![format!("<@&{}>", row.role)]);
}
});
let fields = commands_roles
.iter()
.map(|(key, value)| (key.to_title_case(), value.join("\n"), true));
let title = lm.get(&language, "restrict/title");
let _ = msg
.channel_id
.send_message(&ctx, |m| {
m.embed(|e| e.title(title).fields(fields).color(*THEME_COLOR))
})
.await;
} else {
let prefix = ctx.prefix(msg.guild_id).await;
command_help(ctx, msg, lm, &prefix, &language, "restrict").await;
}
}
#[command("alias")]
#[supports_dm(false)]
#[permission_level(Managed)]
async fn alias(ctx: &Context, msg: &Message, args: String) {
let (pool, lm) = get_ctx_data(&ctx).await;
let language = UserData::language_of(&msg.author, &pool).await;
let guild_id = msg.guild_id.unwrap().as_u64().to_owned();
let matches_opt = REGEX_ALIAS.captures(&args);
if let Some(matches) = matches_opt {
let name = matches.name("name").unwrap().as_str();
let command_opt = matches.name("cmd").map(|m| m.as_str());
match name {
"list" => {
let aliases = sqlx::query!(
"
SELECT name, command FROM command_aliases WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?)
",
guild_id
)
.fetch_all(&pool)
.await
.unwrap();
let content = iter::once("Aliases:".to_string()).chain(
aliases
.iter()
.map(|row| format!("**{}**: `{}`", row.name, row.command)),
);
let _ = msg.channel_id.say_lines(&ctx, content).await;
}
"remove" => {
if let Some(command) = command_opt {
let deleted_count = sqlx::query!(
"
SELECT COUNT(1) AS count FROM command_aliases WHERE name = ? AND guild_id = (SELECT id FROM guilds WHERE guild = ?)
", command, guild_id)
.fetch_one(&pool)
.await
.unwrap();
sqlx::query!(
"
DELETE FROM command_aliases WHERE name = ? AND guild_id = (SELECT id FROM guilds WHERE guild = ?)
",
command,
guild_id
)
.execute(&pool)
.await
.unwrap();
let content = lm
.get(&language, "alias/removed")
.replace("{count}", &deleted_count.count.to_string());
let _ = msg.channel_id.say(&ctx, content).await;
} else {
let _ = msg
.channel_id
.say(&ctx, lm.get(&language, "alias/help"))
.await;
}
}
name => {
if let Some(command) = command_opt {
let res = sqlx::query!(
"
INSERT INTO command_aliases (guild_id, name, command) VALUES ((SELECT id FROM guilds WHERE guild = ?), ?, ?)
", guild_id, name, command)
.execute(&pool)
.await;
if res.is_err() {
sqlx::query!(
"
UPDATE command_aliases SET command = ? WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?) AND name = ?
", command, guild_id, name)
.execute(&pool)
.await
.unwrap();
}
let content = lm.get(&language, "alias/created").replace("{name}", name);
let _ = msg.channel_id.say(&ctx, content).await;
} else {
match sqlx::query!(
"
SELECT command FROM command_aliases WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?) AND name = ?
", guild_id, name)
.fetch_one(&pool)
.await {
Ok(row) => {
let framework = ctx.data.read().await
.get::<FrameworkCtx>().cloned().expect("Could not get FrameworkCtx from data");
let mut new_msg = msg.clone();
new_msg.content = format!("<@{}> {}", &ctx.cache.current_user_id(), row.command);
new_msg.id = MessageId(0);
framework.dispatch(ctx.clone(), new_msg).await;
},
Err(_) => {
let content = lm.get(&language, "alias/not_found").replace("{name}", name);
let _ = msg.channel_id.say(&ctx, content).await;
},
}
}
}
}
} else {
let prefix = ctx.prefix(msg.guild_id).await;
command_help(ctx, msg, lm, &prefix, &language, "alias").await;
} }
Ok(())
} }

File diff suppressed because it is too large

View File

@ -1,366 +1,455 @@
use poise::CreateReply; use regex_command_attr::command;
use crate::{ use serenity::{
component_models::{ async_trait,
pager::{Pager, TodoPager}, client::Context,
ComponentDataModel, TodoSelector, constants::MESSAGE_CODE_LIMIT,
model::{
channel::Message,
id::{ChannelId, GuildId, UserId},
}, },
consts::{EMBED_DESCRIPTION_MAX_LENGTH, SELECT_MAX_ENTRIES, THEME_COLOR},
models::CtxData,
Context, Error,
}; };
/// Manage todo lists use std::fmt;
#[poise::command(
slash_command, use crate::{
rename = "todo", command_help, get_ctx_data,
identifying_name = "todo_base", models::{user_data::UserData, CtxGuildData},
default_member_permissions = "MANAGE_GUILD" };
)] use sqlx::MySqlPool;
pub async fn todo_base(_ctx: Context<'_>) -> Result<(), Error> { use std::convert::TryFrom;
Ok(())
#[derive(Debug)]
struct TodoNotFound;
impl std::error::Error for TodoNotFound {}
impl fmt::Display for TodoNotFound {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Todo not found")
}
} }
/// Manage the server todo list struct Todo {
#[poise::command( id: u32,
slash_command, value: String,
rename = "server",
guild_only = true,
identifying_name = "todo_guild_base",
default_member_permissions = "MANAGE_GUILD"
)]
pub async fn todo_guild_base(_ctx: Context<'_>) -> Result<(), Error> {
Ok(())
} }
/// Add an item to the server todo list struct TodoTarget {
#[poise::command( user: UserId,
slash_command, guild: Option<GuildId>,
rename = "add", channel: Option<ChannelId>,
guild_only = true,
identifying_name = "todo_guild_add",
default_member_permissions = "MANAGE_GUILD"
)]
pub async fn todo_guild_add(
ctx: Context<'_>,
#[description = "The task to add to the todo list"] task: String,
) -> Result<(), Error> {
sqlx::query!(
"INSERT INTO todos (guild_id, value)
VALUES ((SELECT id FROM guilds WHERE guild = ?), ?)",
ctx.guild_id().unwrap().0,
task
)
.execute(&ctx.data().database)
.await
.unwrap();
ctx.say("Item added to todo list").await?;
Ok(())
} }
/// View and remove from the server todo list impl TodoTarget {
#[poise::command( pub fn command(&self, subcommand_opt: Option<SubCommand>) -> String {
slash_command, let context = if self.channel.is_some() {
rename = "view", "channel"
guild_only = true, } else if self.guild.is_some() {
identifying_name = "todo_guild_view", "guild"
default_member_permissions = "MANAGE_GUILD" } else {
)] "user"
pub async fn todo_guild_view(ctx: Context<'_>) -> Result<(), Error> { };
let values = sqlx::query!(
"SELECT todos.id, value FROM todos
INNER JOIN guilds ON todos.guild_id = guilds.id
WHERE guilds.guild = ?",
ctx.guild_id().unwrap().0,
)
.fetch_all(&ctx.data().database)
.await
.unwrap()
.iter()
.map(|row| (row.id as usize, row.value.clone()))
.collect::<Vec<(usize, String)>>();
let resp = show_todo_page(&values, 0, None, None, ctx.guild_id().map(|g| g.0)); if let Some(subcommand) = subcommand_opt {
format!("todo {} {}", context, subcommand.to_string())
ctx.send(|r| { } else {
*r = resp; format!("todo {}", context)
r }
})
.await?;
Ok(())
}
/// Manage the channel todo list
#[poise::command(
slash_command,
rename = "channel",
guild_only = true,
identifying_name = "todo_channel_base",
default_member_permissions = "MANAGE_GUILD"
)]
pub async fn todo_channel_base(_ctx: Context<'_>) -> Result<(), Error> {
Ok(())
}
/// Add an item to the channel todo list
#[poise::command(
slash_command,
rename = "add",
guild_only = true,
identifying_name = "todo_channel_add",
default_member_permissions = "MANAGE_GUILD"
)]
pub async fn todo_channel_add(
ctx: Context<'_>,
#[description = "The task to add to the todo list"] task: String,
) -> Result<(), Error> {
// ensure channel is cached
let _ = ctx.channel_data().await;
sqlx::query!(
"INSERT INTO todos (guild_id, channel_id, value)
VALUES ((SELECT id FROM guilds WHERE guild = ?), (SELECT id FROM channels WHERE channel = ?), ?)",
ctx.guild_id().unwrap().0,
ctx.channel_id().0,
task
)
.execute(&ctx.data().database)
.await
.unwrap();
ctx.say("Item added to todo list").await?;
Ok(())
}
/// View and remove from the channel todo list
#[poise::command(
slash_command,
rename = "view",
guild_only = true,
identifying_name = "todo_channel_view",
default_member_permissions = "MANAGE_GUILD"
)]
pub async fn todo_channel_view(ctx: Context<'_>) -> Result<(), Error> {
let values = sqlx::query!(
"SELECT todos.id, value FROM todos
INNER JOIN channels ON todos.channel_id = channels.id
WHERE channels.channel = ?",
ctx.channel_id().0,
)
.fetch_all(&ctx.data().database)
.await
.unwrap()
.iter()
.map(|row| (row.id as usize, row.value.clone()))
.collect::<Vec<(usize, String)>>();
let resp =
show_todo_page(&values, 0, None, Some(ctx.channel_id().0), ctx.guild_id().map(|g| g.0));
ctx.send(|r| {
*r = resp;
r
})
.await?;
Ok(())
}
/// Manage your personal todo list
#[poise::command(slash_command, rename = "user", identifying_name = "todo_user_base")]
pub async fn todo_user_base(_ctx: Context<'_>) -> Result<(), Error> {
Ok(())
}
/// Add an item to your personal todo list
#[poise::command(slash_command, rename = "add", identifying_name = "todo_user_add")]
pub async fn todo_user_add(
ctx: Context<'_>,
#[description = "The task to add to the todo list"] task: String,
) -> Result<(), Error> {
sqlx::query!(
"INSERT INTO todos (user_id, value)
VALUES ((SELECT id FROM users WHERE user = ?), ?)",
ctx.author().id.0,
task
)
.execute(&ctx.data().database)
.await
.unwrap();
ctx.say("Item added to todo list").await?;
Ok(())
}
/// View and remove from your personal todo list
#[poise::command(slash_command, rename = "view", identifying_name = "todo_user_view")]
pub async fn todo_user_view(ctx: Context<'_>) -> Result<(), Error> {
let values = sqlx::query!(
"SELECT todos.id, value FROM todos
INNER JOIN users ON todos.user_id = users.id
WHERE users.user = ?",
ctx.author().id.0,
)
.fetch_all(&ctx.data().database)
.await
.unwrap()
.iter()
.map(|row| (row.id as usize, row.value.clone()))
.collect::<Vec<(usize, String)>>();
let resp = show_todo_page(&values, 0, Some(ctx.author().id.0), None, None);
ctx.send(|r| {
*r = resp;
r
})
.await?;
Ok(())
}
pub fn max_todo_page(todo_values: &[(usize, String)]) -> usize {
let mut rows = 0;
let mut char_count = 0;
todo_values.iter().enumerate().map(|(c, (_, v))| format!("{}: {}", c, v)).fold(
1,
|mut pages, text| {
rows += 1;
char_count += text.len();
if char_count > EMBED_DESCRIPTION_MAX_LENGTH || rows > SELECT_MAX_ENTRIES {
rows = 1;
char_count = text.len();
pages += 1;
}
pages
},
)
}
pub fn show_todo_page(
todo_values: &[(usize, String)],
page: usize,
user_id: Option<u64>,
channel_id: Option<u64>,
guild_id: Option<u64>,
) -> CreateReply {
let pager = TodoPager::new(page, user_id, channel_id, guild_id);
let pages = max_todo_page(todo_values);
let mut page = page;
if page >= pages {
page = pages - 1;
} }
let mut char_count = 0; pub fn name(&self) -> String {
let mut rows = 0; if self.channel.is_some() {
let mut skipped_rows = 0; "Channel"
let mut skipped_char_count = 0; } else if self.guild.is_some() {
let mut first_num = 0; "Guild"
} else {
"User"
}
.to_string()
}
let mut skipped_pages = 0; pub async fn view(
&self,
pool: MySqlPool,
) -> Result<Vec<Todo>, Box<dyn std::error::Error + Send + Sync>> {
Ok(if let Some(cid) = self.channel {
sqlx::query_as!(
Todo,
"
SELECT id, value FROM todos WHERE channel_id = (SELECT id FROM channels WHERE channel = ?)
",
cid.as_u64()
)
.fetch_all(&pool)
.await?
} else if let Some(gid) = self.guild {
sqlx::query_as!(
Todo,
"
SELECT id, value FROM todos WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?) AND channel_id IS NULL
",
gid.as_u64()
)
.fetch_all(&pool)
.await?
} else {
sqlx::query_as!(
Todo,
"
SELECT id, value FROM todos WHERE user_id = (SELECT id FROM users WHERE user = ?) AND guild_id IS NULL
",
self.user.as_u64()
)
.fetch_all(&pool)
.await?
})
}
let (todo_ids, display_vec): (Vec<usize>, Vec<String>) = todo_values pub async fn add(
.iter() &self,
.enumerate() value: String,
.map(|(c, (i, v))| (i, format!("`{}`: {}", c + 1, v))) pool: MySqlPool,
.skip_while(|(_, p)| { ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
first_num += 1; if let (Some(cid), Some(gid)) = (self.channel, self.guild) {
skipped_rows += 1; sqlx::query!(
skipped_char_count += p.len(); "
INSERT INTO todos (user_id, guild_id, channel_id, value) VALUES (
(SELECT id FROM users WHERE user = ?),
(SELECT id FROM guilds WHERE guild = ?),
(SELECT id FROM channels WHERE channel = ?),
?
)
",
self.user.as_u64(),
gid.as_u64(),
cid.as_u64(),
value
)
.execute(&pool)
.await?;
} else if let Some(gid) = self.guild {
sqlx::query!(
"
INSERT INTO todos (user_id, guild_id, value) VALUES (
(SELECT id FROM users WHERE user = ?),
(SELECT id FROM guilds WHERE guild = ?),
?
)
",
self.user.as_u64(),
gid.as_u64(),
value
)
.execute(&pool)
.await?;
} else {
sqlx::query!(
"
INSERT INTO todos (user_id, value) VALUES (
(SELECT id FROM users WHERE user = ?),
?
)
",
self.user.as_u64(),
value
)
.execute(&pool)
.await?;
}
if skipped_char_count > EMBED_DESCRIPTION_MAX_LENGTH Ok(())
|| skipped_rows > SELECT_MAX_ENTRIES }
{
skipped_rows = 1; pub async fn remove(
skipped_char_count = p.len(); &self,
skipped_pages += 1; num: usize,
pool: &MySqlPool,
) -> Result<Todo, Box<dyn std::error::Error + Sync + Send>> {
let todos = self.view(pool.clone()).await?;
if let Some(removal_todo) = todos.get(num) {
let deleting = sqlx::query_as!(
Todo,
"
SELECT id, value FROM todos WHERE id = ?
",
removal_todo.id
)
.fetch_one(&pool.clone())
.await?;
sqlx::query!(
"
DELETE FROM todos WHERE id = ?
",
removal_todo.id
)
.execute(pool)
.await?;
Ok(deleting)
} else {
Err(Box::new(TodoNotFound))
}
}
pub async fn clear(
&self,
pool: &MySqlPool,
) -> Result<(), Box<dyn std::error::Error + Sync + Send>> {
if let Some(cid) = self.channel {
sqlx::query!(
"
DELETE FROM todos WHERE channel_id = (SELECT id FROM channels WHERE channel = ?)
",
cid.as_u64()
)
.execute(pool)
.await?;
} else if let Some(gid) = self.guild {
sqlx::query!(
"
DELETE FROM todos WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?) AND channel_id IS NULL
",
gid.as_u64()
)
.execute(pool)
.await?;
} else {
sqlx::query!(
"
DELETE FROM todos WHERE user_id = (SELECT id FROM users WHERE user = ?) AND guild_id IS NULL
",
self.user.as_u64()
)
.execute(pool)
.await?;
}
Ok(())
}
async fn execute(&self, ctx: &Context, msg: &Message, subcommand: SubCommand, extra: String) {
let (pool, lm) = get_ctx_data(&ctx).await;
let user_data = UserData::from_user(&msg.author, &ctx, &pool).await.unwrap();
let prefix = ctx.prefix(msg.guild_id).await;
match subcommand {
SubCommand::View => {
let todo_items = self.view(pool).await.unwrap();
let mut todo_groups = vec!["".to_string()];
let mut char_count = 0;
todo_items.iter().enumerate().for_each(|(count, todo)| {
let display = format!("{}: {}\n", count + 1, todo.value);
if char_count + display.len() > MESSAGE_CODE_LIMIT as usize {
char_count = display.len();
todo_groups.push(display);
} else {
char_count += display.len();
let last_group = todo_groups.pop().unwrap();
todo_groups.push(format!("{}{}", last_group, display));
}
});
for group in todo_groups {
let _ = msg
.channel_id
.send_message(&ctx, |m| {
m.embed(|e| e.title(format!("{} Todo", self.name())).description(group))
})
.await;
}
}
SubCommand::Add => {
let content = lm
.get(&user_data.language, "todo/added")
.replacen("{name}", &extra, 1);
self.add(extra, pool).await.unwrap();
let _ = msg
.channel_id
.send_message(&ctx, |m| {
m.content(content).allowed_mentions(|m| m.empty_parse())
})
.await;
}
SubCommand::Remove => {
if let Ok(num) = extra.parse::<usize>() {
if let Ok(todo) = self.remove(num - 1, &pool).await {
let content = lm.get(&user_data.language, "todo/removed").replacen(
"{}",
&todo.value,
1,
);
let _ = msg
.channel_id
.send_message(&ctx, |m| {
m.content(content).allowed_mentions(|m| m.empty_parse())
})
.await;
} else {
let _ = msg
.channel_id
.say(&ctx, lm.get(&user_data.language, "todo/error_index"))
.await;
}
} else {
let content = lm
.get(&user_data.language, "todo/error_value")
.replacen("{prefix}", &prefix, 1)
.replacen("{command}", &self.command(Some(subcommand)), 1);
let _ = msg.channel_id.say(&ctx, content).await;
}
}
SubCommand::Clear => {
self.clear(&pool).await.unwrap();
let content = lm.get(&user_data.language, "todo/cleared");
let _ = msg.channel_id.say(&ctx, content).await;
}
}
}
}
enum SubCommand {
View,
Add,
Remove,
Clear,
}
impl TryFrom<Option<&str>> for SubCommand {
type Error = ();
fn try_from(value: Option<&str>) -> Result<Self, Self::Error> {
match value {
Some("add") => Ok(SubCommand::Add),
Some("remove") => Ok(SubCommand::Remove),
Some("clear") => Ok(SubCommand::Clear),
None | Some("") => Ok(SubCommand::View),
Some(_unrecognised) => Err(()),
}
}
}
impl ToString for SubCommand {
fn to_string(&self) -> String {
match self {
SubCommand::View => "",
SubCommand::Add => "add",
SubCommand::Remove => "remove",
SubCommand::Clear => "clear",
}
.to_string()
}
}
#[async_trait]
trait Execute {
async fn execute(self, ctx: &Context, msg: &Message, extra: String, target: TodoTarget);
}
#[async_trait]
impl Execute for Result<SubCommand, ()> {
async fn execute(self, ctx: &Context, msg: &Message, extra: String, target: TodoTarget) {
if let Ok(subcommand) = self {
target.execute(ctx, msg, subcommand, extra).await;
} else {
show_help(&ctx, msg, Some(target)).await;
}
}
}
#[command("todo")]
async fn todo_user(ctx: &Context, msg: &Message, args: String) {
let mut split = args.split(' ');
let target = TodoTarget {
user: msg.author.id,
guild: None,
channel: None,
};
let subcommand_opt = SubCommand::try_from(split.next());
subcommand_opt
.execute(ctx, msg, split.collect::<Vec<&str>>().join(" "), target)
.await;
}
#[command("todoc")]
#[supports_dm(false)]
#[permission_level(Managed)]
async fn todo_channel(ctx: &Context, msg: &Message, args: String) {
let mut split = args.split(' ');
let target = TodoTarget {
user: msg.author.id,
guild: msg.guild_id,
channel: Some(msg.channel_id),
};
let subcommand_opt = SubCommand::try_from(split.next());
subcommand_opt
.execute(ctx, msg, split.collect::<Vec<&str>>().join(" "), target)
.await;
}
#[command("todos")]
#[supports_dm(false)]
#[permission_level(Managed)]
async fn todo_guild(ctx: &Context, msg: &Message, args: String) {
let mut split = args.split(' ');
let target = TodoTarget {
user: msg.author.id,
guild: msg.guild_id,
channel: None,
};
let subcommand_opt = SubCommand::try_from(split.next());
subcommand_opt
.execute(ctx, msg, split.collect::<Vec<&str>>().join(" "), target)
.await;
}
async fn show_help(ctx: &Context, msg: &Message, target: Option<TodoTarget>) {
let (pool, lm) = get_ctx_data(&ctx).await;
let language = UserData::language_of(&msg.author, &pool);
let prefix = ctx.prefix(msg.guild_id);
let command = match target {
None => "todo",
Some(t) => {
if t.channel.is_some() {
"todoc"
} else if t.guild.is_some() {
"todos"
} else {
"todo"
}
}
};
command_help(ctx, msg, lm, &prefix.await, &language.await, command).await;
}

View File

@ -1,441 +0,0 @@
pub(crate) mod pager;
use std::io::Cursor;
use base64::{engine::general_purpose, Engine};
use chrono_tz::Tz;
use log::warn;
use poise::{
serenity_prelude as serenity,
serenity_prelude::{
builder::CreateEmbed,
model::{
application::interaction::{
message_component::MessageComponentInteraction, InteractionResponseType,
MessageFlags,
},
channel::Channel,
},
Context,
},
};
use rmp_serde::Serializer;
use serde::{Deserialize, Serialize};
use crate::{
commands::{
command_macro::list::{max_macro_page, show_macro_page},
reminder_cmds::{max_delete_page, show_delete_page},
todo_cmds::{max_todo_page, show_todo_page},
},
component_models::pager::{DelPager, LookPager, MacroPager, Pager, TodoPager},
consts::{EMBED_DESCRIPTION_MAX_LENGTH, THEME_COLOR},
models::reminder::Reminder,
utils::send_as_initial_response,
Data,
};
#[derive(Deserialize, Serialize)]
#[serde(tag = "type")]
#[repr(u8)]
pub enum ComponentDataModel {
LookPager(LookPager),
DelPager(DelPager),
TodoPager(TodoPager),
DelSelector(DelSelector),
TodoSelector(TodoSelector),
MacroPager(MacroPager),
UndoReminder(UndoReminder),
}
impl ComponentDataModel {
pub fn to_custom_id(&self) -> String {
let mut buf = Vec::new();
self.serialize(&mut Serializer::new(&mut buf)).unwrap();
general_purpose::STANDARD.encode(buf)
}
pub fn from_custom_id(data: &String) -> Self {
let buf = general_purpose::STANDARD
.decode(data)
.map_err(|e| format!("Could not decode `custom_id' {}: {:?}", data, e))
.unwrap();
let cur = Cursor::new(buf);
rmp_serde::from_read(cur).unwrap()
}
pub async fn act(&self, ctx: &Context, data: &Data, component: &MessageComponentInteraction) {
match self {
ComponentDataModel::LookPager(pager) => {
let flags = pager.flags;
let channel_opt = component.channel_id.to_channel_cached(&ctx);
let channel_id = if let Some(Channel::Guild(channel)) = channel_opt {
if Some(channel.guild_id) == component.guild_id {
flags.channel_id.unwrap_or(component.channel_id)
} else {
component.channel_id
}
} else {
component.channel_id
};
let reminders = Reminder::from_channel(&data.database, channel_id, &flags).await;
let pages = reminders
.iter()
.map(|reminder| reminder.display(&flags, &pager.timezone))
.fold(0, |t, r| t + r.len())
.div_ceil(EMBED_DESCRIPTION_MAX_LENGTH);
let channel_name =
if let Some(Channel::Guild(channel)) = channel_id.to_channel_cached(&ctx) {
Some(channel.name)
} else {
None
};
let next_page = pager.next_page(pages);
let mut char_count = 0;
let mut skip_char_count = 0;
let display = reminders
.iter()
.map(|reminder| reminder.display(&flags, &pager.timezone))
.skip_while(|p| {
skip_char_count += p.len();
skip_char_count < EMBED_DESCRIPTION_MAX_LENGTH * next_page as usize
})
.take_while(|p| {
char_count += p.len();
char_count < EMBED_DESCRIPTION_MAX_LENGTH
})
.collect::<Vec<String>>()
.join("");
let mut embed = CreateEmbed::default();
embed
.title(format!(
"Reminders{}",
channel_name.map_or(String::new(), |n| format!(" on #{}", n))
))
.description(display)
.footer(|f| f.text(format!("Page {} of {}", next_page + 1, pages)))
.color(*THEME_COLOR);
let _ = component
.create_interaction_response(&ctx, |r| {
r.kind(InteractionResponseType::UpdateMessage).interaction_response_data(
|response| {
response.set_embeds(vec![embed]).components(|comp| {
pager.create_button_row(pages, comp);
comp
})
},
)
})
.await;
}
ComponentDataModel::DelPager(pager) => {
let reminders = Reminder::from_guild(
&ctx,
&data.database,
component.guild_id,
component.user.id,
)
.await;
let max_pages = max_delete_page(&reminders, &pager.timezone);
let resp = show_delete_page(&reminders, pager.next_page(max_pages), pager.timezone);
let _ = component
.create_interaction_response(&ctx, |f| {
f.kind(InteractionResponseType::UpdateMessage).interaction_response_data(
|d| {
send_as_initial_response(resp, d);
d
},
)
})
.await;
}
ComponentDataModel::DelSelector(selector) => {
for id in &component.data.values {
match id.parse::<u32>() {
Ok(id) => {
if let Some(reminder) = Reminder::from_id(&data.database, id).await {
reminder.delete(&data.database).await.unwrap();
} else {
warn!("Attempt to delete non-existent reminder");
}
}
Err(e) => {
warn!("Error casting ID to integer: {:?}.", e);
}
}
}
let reminders = Reminder::from_guild(
&ctx,
&data.database,
component.guild_id,
component.user.id,
)
.await;
let resp = show_delete_page(&reminders, selector.page, selector.timezone);
let _ = component
.create_interaction_response(&ctx, |f| {
f.kind(InteractionResponseType::UpdateMessage).interaction_response_data(
|d| {
send_as_initial_response(resp, d);
d
},
)
})
.await;
}
ComponentDataModel::TodoPager(pager) => {
if Some(component.user.id.0) == pager.user_id || pager.user_id.is_none() {
let values = if let Some(uid) = pager.user_id {
sqlx::query!(
"SELECT todos.id, value FROM todos
INNER JOIN users ON todos.user_id = users.id
WHERE users.user = ?",
uid,
)
.fetch_all(&data.database)
.await
.unwrap()
.iter()
.map(|row| (row.id as usize, row.value.clone()))
.collect::<Vec<(usize, String)>>()
} else if let Some(cid) = pager.channel_id {
sqlx::query!(
"SELECT todos.id, value FROM todos
INNER JOIN channels ON todos.channel_id = channels.id
WHERE channels.channel = ?",
cid,
)
.fetch_all(&data.database)
.await
.unwrap()
.iter()
.map(|row| (row.id as usize, row.value.clone()))
.collect::<Vec<(usize, String)>>()
} else {
sqlx::query!(
"SELECT todos.id, value FROM todos
INNER JOIN guilds ON todos.guild_id = guilds.id
WHERE guilds.guild = ?",
pager.guild_id,
)
.fetch_all(&data.database)
.await
.unwrap()
.iter()
.map(|row| (row.id as usize, row.value.clone()))
.collect::<Vec<(usize, String)>>()
};
let max_pages = max_todo_page(&values);
let resp = show_todo_page(
&values,
pager.next_page(max_pages),
pager.user_id,
pager.channel_id,
pager.guild_id,
);
let _ = component
.create_interaction_response(&ctx, |f| {
f.kind(InteractionResponseType::UpdateMessage)
.interaction_response_data(|d| {
send_as_initial_response(resp, d);
d
})
})
.await;
} else {
let _ = component
.create_interaction_response(&ctx, |r| {
r.kind(InteractionResponseType::ChannelMessageWithSource)
.interaction_response_data(|d| {
d.flags(
MessageFlags::EPHEMERAL,
)
.content("Only the user who performed the command can use these components")
})
})
.await;
}
}
ComponentDataModel::TodoSelector(selector) => {
if Some(component.user.id.0) == selector.user_id || selector.user_id.is_none() {
let selected_id = component.data.values.join(",");
sqlx::query!("DELETE FROM todos WHERE FIND_IN_SET(id, ?)", selected_id)
.execute(&data.database)
.await
.unwrap();
let values = sqlx::query!(
// MySQL needs the NULL-safe comparison operator <=> rather than = when comparing against NULL
"SELECT id, value FROM todos WHERE user_id <=> ? AND channel_id <=> ? AND guild_id <=> ?",
selector.user_id,
selector.channel_id,
selector.guild_id,
)
.fetch_all(&data.database)
.await
.unwrap()
.iter()
.map(|row| (row.id as usize, row.value.clone()))
.collect::<Vec<(usize, String)>>();
let resp = show_todo_page(
&values,
selector.page,
selector.user_id,
selector.channel_id,
selector.guild_id,
);
let _ = component
.create_interaction_response(&ctx, |f| {
f.kind(InteractionResponseType::UpdateMessage)
.interaction_response_data(|d| {
send_as_initial_response(resp, d);
d
})
})
.await;
} else {
let _ = component
.create_interaction_response(&ctx, |r| {
r.kind(InteractionResponseType::ChannelMessageWithSource)
.interaction_response_data(|d| {
d.flags(
MessageFlags::EPHEMERAL,
)
.content("Only the user who performed the command can use these components")
})
})
.await;
}
}
ComponentDataModel::MacroPager(pager) => {
let macros = data.command_macros(component.guild_id.unwrap()).await.unwrap();
let max_page = max_macro_page(&macros);
let page = pager.next_page(max_page);
let resp = show_macro_page(&macros, page);
let _ = component
.create_interaction_response(&ctx, |f| {
f.kind(InteractionResponseType::UpdateMessage).interaction_response_data(
|d| {
send_as_initial_response(resp, d);
d
},
)
})
.await;
}
ComponentDataModel::UndoReminder(undo_reminder) => {
if component.user.id == undo_reminder.user_id {
let reminder =
Reminder::from_id(&data.database, undo_reminder.reminder_id).await;
if let Some(reminder) = reminder {
match reminder.delete(&data.database).await {
Ok(()) => {
let _ = component
.create_interaction_response(&ctx, |f| {
f.kind(InteractionResponseType::UpdateMessage)
.interaction_response_data(|d| {
d.embed(|e| {
e.title("Reminder Canceled")
.description(
"This reminder has been canceled.",
)
.color(*THEME_COLOR)
})
.components(|c| c)
})
})
.await;
}
Err(e) => {
warn!("Error canceling reminder: {:?}", e);
let _ = component
.create_interaction_response(&ctx, |f| {
f.kind(InteractionResponseType::ChannelMessageWithSource)
.interaction_response_data(|d| {
d.content(
"The reminder could not be canceled: it may have already been deleted. Check `/del`!")
.ephemeral(true)
})
})
.await;
}
}
} else {
let _ = component
.create_interaction_response(&ctx, |f| {
f.kind(InteractionResponseType::ChannelMessageWithSource)
.interaction_response_data(|d| {
d.content(
"The reminder could not be canceled: it may have already been deleted. Check `/del`!")
.ephemeral(true)
})
})
.await;
}
} else {
let _ = component
.create_interaction_response(&ctx, |f| {
f.kind(InteractionResponseType::ChannelMessageWithSource)
.interaction_response_data(|d| {
d.content(
"Only the user who performed the command can use this button.")
.ephemeral(true)
})
})
.await;
}
}
}
}
}
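// A minimal round-trip sketch (added for illustration; it assumes the rmp-serde and
// base64 dependencies already used above): a ComponentDataModel is MessagePack-encoded
// and then base64-encoded so that it fits inside Discord's 100-character custom_id.
#[cfg(test)]
mod custom_id_round_trip_sketch {
    use super::pager::MacroPager;
    use super::ComponentDataModel;

    #[test]
    fn macro_pager_survives_the_custom_id_encoding() {
        let model = ComponentDataModel::MacroPager(MacroPager::new(3));
        let id = model.to_custom_id();
        // Discord rejects component custom IDs longer than 100 characters.
        assert!(id.len() <= 100);
        // Decoding recovers the same variant and page number.
        match ComponentDataModel::from_custom_id(&id) {
            ComponentDataModel::MacroPager(pager) => assert_eq!(pager.page, 3),
            _ => panic!("expected a MacroPager"),
        }
    }
}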
#[derive(Serialize, Deserialize)]
pub struct DelSelector {
pub page: usize,
pub timezone: Tz,
}
#[derive(Serialize, Deserialize)]
pub struct TodoSelector {
pub page: usize,
pub user_id: Option<u64>,
pub channel_id: Option<u64>,
pub guild_id: Option<u64>,
}
#[derive(Serialize, Deserialize)]
pub struct UndoReminder {
pub user_id: serenity::UserId,
pub reminder_id: u32,
}

View File

@ -1,413 +0,0 @@
// todo split pager out into a single struct
use chrono_tz::Tz;
use poise::serenity_prelude::{
builder::CreateComponents, model::application::component::ButtonStyle,
};
use serde::{Deserialize, Serialize};
use serde_repr::*;
use crate::{component_models::ComponentDataModel, models::reminder::look_flags::LookFlags};
pub trait Pager {
fn next_page(&self, max_pages: usize) -> usize;
fn create_button_row(&self, max_pages: usize, comp: &mut CreateComponents);
}
#[derive(Serialize_repr, Deserialize_repr)]
#[repr(u8)]
enum PageAction {
First = 0,
Previous = 1,
Refresh = 2,
Next = 3,
Last = 4,
}
#[derive(Serialize, Deserialize)]
pub struct LookPager {
pub flags: LookFlags,
pub page: usize,
action: PageAction,
pub timezone: Tz,
}
impl Pager for LookPager {
fn next_page(&self, max_pages: usize) -> usize {
match self.action {
PageAction::First => 0,
PageAction::Previous => 0.max(self.page - 1),
PageAction::Refresh => self.page,
PageAction::Next => (max_pages - 1).min(self.page + 1),
PageAction::Last => max_pages - 1,
}
}
fn create_button_row(&self, max_pages: usize, comp: &mut CreateComponents) {
let next_page = self.next_page(max_pages);
let (page_first, page_prev, page_refresh, page_next, page_last) =
LookPager::buttons(self.flags, next_page, self.timezone);
comp.create_action_row(|row| {
row.create_button(|b| {
b.label("⏮️")
.style(ButtonStyle::Primary)
.custom_id(page_first.to_custom_id())
.disabled(next_page == 0)
})
.create_button(|b| {
b.label("◀️")
.style(ButtonStyle::Secondary)
.custom_id(page_prev.to_custom_id())
.disabled(next_page == 0)
})
.create_button(|b| {
b.label("🔁").style(ButtonStyle::Secondary).custom_id(page_refresh.to_custom_id())
})
.create_button(|b| {
b.label("▶️")
.style(ButtonStyle::Secondary)
.custom_id(page_next.to_custom_id())
.disabled(next_page + 1 == max_pages)
})
.create_button(|b| {
b.label("⏭️")
.style(ButtonStyle::Primary)
.custom_id(page_last.to_custom_id())
.disabled(next_page + 1 == max_pages)
})
});
}
}
impl LookPager {
pub fn new(flags: LookFlags, timezone: Tz) -> Self {
Self { flags, page: 0, action: PageAction::First, timezone }
}
pub fn buttons(
flags: LookFlags,
page: usize,
timezone: Tz,
) -> (
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
) {
(
ComponentDataModel::LookPager(LookPager {
flags,
page,
action: PageAction::First,
timezone,
}),
ComponentDataModel::LookPager(LookPager {
flags,
page,
action: PageAction::Previous,
timezone,
}),
ComponentDataModel::LookPager(LookPager {
flags,
page,
action: PageAction::Refresh,
timezone,
}),
ComponentDataModel::LookPager(LookPager {
flags,
page,
action: PageAction::Next,
timezone,
}),
ComponentDataModel::LookPager(LookPager {
flags,
page,
action: PageAction::Last,
timezone,
}),
)
}
}
#[derive(Serialize, Deserialize)]
pub struct DelPager {
pub page: usize,
action: PageAction,
pub timezone: Tz,
}
impl Pager for DelPager {
fn next_page(&self, max_pages: usize) -> usize {
match self.action {
PageAction::First => 0,
PageAction::Previous => 0.max(self.page - 1),
PageAction::Refresh => self.page,
PageAction::Next => (max_pages - 1).min(self.page + 1),
PageAction::Last => max_pages - 1,
}
}
fn create_button_row(&self, max_pages: usize, comp: &mut CreateComponents) {
let next_page = self.next_page(max_pages);
let (page_first, page_prev, page_refresh, page_next, page_last) =
DelPager::buttons(next_page, self.timezone);
comp.create_action_row(|row| {
row.create_button(|b| {
b.label("⏮️")
.style(ButtonStyle::Primary)
.custom_id(page_first.to_custom_id())
.disabled(next_page == 0)
})
.create_button(|b| {
b.label("◀️")
.style(ButtonStyle::Secondary)
.custom_id(page_prev.to_custom_id())
.disabled(next_page == 0)
})
.create_button(|b| {
b.label("🔁").style(ButtonStyle::Secondary).custom_id(page_refresh.to_custom_id())
})
.create_button(|b| {
b.label("▶️")
.style(ButtonStyle::Secondary)
.custom_id(page_next.to_custom_id())
.disabled(next_page + 1 == max_pages)
})
.create_button(|b| {
b.label("⏭️")
.style(ButtonStyle::Primary)
.custom_id(page_last.to_custom_id())
.disabled(next_page + 1 == max_pages)
})
});
}
}
impl DelPager {
pub fn new(page: usize, timezone: Tz) -> Self {
Self { page, action: PageAction::Refresh, timezone }
}
pub fn buttons(
page: usize,
timezone: Tz,
) -> (
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
) {
(
ComponentDataModel::DelPager(DelPager { page, action: PageAction::First, timezone }),
ComponentDataModel::DelPager(DelPager { page, action: PageAction::Previous, timezone }),
ComponentDataModel::DelPager(DelPager { page, action: PageAction::Refresh, timezone }),
ComponentDataModel::DelPager(DelPager { page, action: PageAction::Next, timezone }),
ComponentDataModel::DelPager(DelPager { page, action: PageAction::Last, timezone }),
)
}
}
#[derive(Deserialize, Serialize)]
pub struct TodoPager {
pub page: usize,
action: PageAction,
pub user_id: Option<u64>,
pub channel_id: Option<u64>,
pub guild_id: Option<u64>,
}
impl Pager for TodoPager {
fn next_page(&self, max_pages: usize) -> usize {
match self.action {
PageAction::First => 0,
PageAction::Previous => 0.max(self.page - 1),
PageAction::Refresh => self.page,
PageAction::Next => (max_pages - 1).min(self.page + 1),
PageAction::Last => max_pages - 1,
}
}
fn create_button_row(&self, max_pages: usize, comp: &mut CreateComponents) {
let next_page = self.next_page(max_pages);
let (page_first, page_prev, page_refresh, page_next, page_last) =
TodoPager::buttons(next_page, self.user_id, self.channel_id, self.guild_id);
comp.create_action_row(|row| {
row.create_button(|b| {
b.label("⏮️")
.style(ButtonStyle::Primary)
.custom_id(page_first.to_custom_id())
.disabled(next_page == 0)
})
.create_button(|b| {
b.label("◀️")
.style(ButtonStyle::Secondary)
.custom_id(page_prev.to_custom_id())
.disabled(next_page == 0)
})
.create_button(|b| {
b.label("🔁").style(ButtonStyle::Secondary).custom_id(page_refresh.to_custom_id())
})
.create_button(|b| {
b.label("▶️")
.style(ButtonStyle::Secondary)
.custom_id(page_next.to_custom_id())
.disabled(next_page + 1 == max_pages)
})
.create_button(|b| {
b.label("⏭️")
.style(ButtonStyle::Primary)
.custom_id(page_last.to_custom_id())
.disabled(next_page + 1 == max_pages)
})
});
}
}
impl TodoPager {
pub fn new(
page: usize,
user_id: Option<u64>,
channel_id: Option<u64>,
guild_id: Option<u64>,
) -> Self {
Self { page, action: PageAction::Refresh, user_id, channel_id, guild_id }
}
pub fn buttons(
page: usize,
user_id: Option<u64>,
channel_id: Option<u64>,
guild_id: Option<u64>,
) -> (
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
) {
(
ComponentDataModel::TodoPager(TodoPager {
page,
action: PageAction::First,
user_id,
channel_id,
guild_id,
}),
ComponentDataModel::TodoPager(TodoPager {
page,
action: PageAction::Previous,
user_id,
channel_id,
guild_id,
}),
ComponentDataModel::TodoPager(TodoPager {
page,
action: PageAction::Refresh,
user_id,
channel_id,
guild_id,
}),
ComponentDataModel::TodoPager(TodoPager {
page,
action: PageAction::Next,
user_id,
channel_id,
guild_id,
}),
ComponentDataModel::TodoPager(TodoPager {
page,
action: PageAction::Last,
user_id,
channel_id,
guild_id,
}),
)
}
}
#[derive(Serialize, Deserialize)]
pub struct MacroPager {
pub page: usize,
action: PageAction,
}
impl Pager for MacroPager {
fn next_page(&self, max_pages: usize) -> usize {
match self.action {
PageAction::First => 0,
PageAction::Previous => 0.max(self.page - 1),
PageAction::Refresh => self.page,
PageAction::Next => (max_pages - 1).min(self.page + 1),
PageAction::Last => max_pages - 1,
}
}
fn create_button_row(&self, max_pages: usize, comp: &mut CreateComponents) {
let next_page = self.next_page(max_pages);
let (page_first, page_prev, page_refresh, page_next, page_last) =
MacroPager::buttons(next_page);
comp.create_action_row(|row| {
row.create_button(|b| {
b.label("⏮️")
.style(ButtonStyle::Primary)
.custom_id(page_first.to_custom_id())
.disabled(next_page == 0)
})
.create_button(|b| {
b.label("◀️")
.style(ButtonStyle::Secondary)
.custom_id(page_prev.to_custom_id())
.disabled(next_page == 0)
})
.create_button(|b| {
b.label("🔁").style(ButtonStyle::Secondary).custom_id(page_refresh.to_custom_id())
})
.create_button(|b| {
b.label("▶️")
.style(ButtonStyle::Secondary)
.custom_id(page_next.to_custom_id())
.disabled(next_page + 1 == max_pages)
})
.create_button(|b| {
b.label("⏭️")
.style(ButtonStyle::Primary)
.custom_id(page_last.to_custom_id())
.disabled(next_page + 1 == max_pages)
})
});
}
}
impl MacroPager {
pub fn new(page: usize) -> Self {
Self { page, action: PageAction::Refresh }
}
pub fn buttons(
page: usize,
) -> (
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
ComponentDataModel,
) {
(
ComponentDataModel::MacroPager(MacroPager { page, action: PageAction::First }),
ComponentDataModel::MacroPager(MacroPager { page, action: PageAction::Previous }),
ComponentDataModel::MacroPager(MacroPager { page, action: PageAction::Refresh }),
ComponentDataModel::MacroPager(MacroPager { page, action: PageAction::Next }),
ComponentDataModel::MacroPager(MacroPager { page, action: PageAction::Last }),
)
}
}

View File

@ -2,47 +2,89 @@ pub const DAY: u64 = 86_400;
pub const HOUR: u64 = 3_600;
pub const MINUTE: u64 = 60;
pub const EMBED_DESCRIPTION_MAX_LENGTH: usize = 4096;
pub const SELECT_MAX_ENTRIES: usize = 25;
pub const CHARACTERS: &str = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_";
const THEME_COLOR_FALLBACK: u32 = 0x8fb677;
pub const MACRO_MAX_COMMANDS: usize = 5;
use std::{collections::HashSet, env, iter::FromIterator};
use poise::serenity_prelude::model::prelude::AttachmentType;
use regex::Regex;
lazy_static! {
pub static ref DEFAULT_AVATAR: AttachmentType<'static> = (
include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/webhook.jpg")) as &[u8],
"webhook.jpg",
)
.into();
pub static ref REGEX_CHANNEL_USER: Regex = Regex::new(r#"\s*<(#|@)(?:!)?(\d+)>\s*"#).unwrap();
pub static ref SUBSCRIPTION_ROLES: HashSet<u64> = HashSet::from_iter(
env::var("PATREON_ROLE_ID")
.map(|var| var
.split(',')
.filter_map(|item| { item.parse::<u64>().ok() })
.collect::<Vec<u64>>())
.unwrap_or_else(|_| Vec::new())
);
pub static ref CNC_GUILD: Option<u64> =
env::var("PATREON_GUILD_ID").map(|var| var.parse::<u64>().ok()).ok().flatten();
pub static ref MIN_INTERVAL: i64 =
env::var("MIN_INTERVAL").ok().and_then(|inner| inner.parse::<i64>().ok()).unwrap_or(600);
pub static ref MAX_TIME: i64 = env::var("MAX_TIME")
.ok()
.and_then(|inner| inner.parse::<i64>().ok())
.unwrap_or(60 * 60 * 24 * 365 * 50);
pub static ref LOCAL_TIMEZONE: String =
env::var("LOCAL_TIMEZONE").unwrap_or_else(|_| "UTC".to_string());
pub static ref THEME_COLOR: u32 = env::var("THEME_COLOR")
.map_or(THEME_COLOR_FALLBACK, |inner| u32::from_str_radix(&inner, 16)
.unwrap_or(THEME_COLOR_FALLBACK));
pub static ref PYTHON_LOCATION: String =
env::var("PYTHON_LOCATION").unwrap_or_else(|_| "/usr/bin/python3".to_string());
}
pub const HOUR: u64 = 3_600;
pub const MINUTE: u64 = 60;
pub const CHARACTERS: &str = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_";
const THEME_COLOR_FALLBACK: u32 = 0x8fb677;
use std::{collections::HashSet, env, iter::FromIterator};
use regex::{Regex, RegexBuilder};
lazy_static! {
pub static ref REGEX_CHANNEL: Regex = Regex::new(r#"^\s*<#(\d+)>\s*$"#).unwrap();
pub static ref REGEX_ROLE: Regex = Regex::new(r#"<@&(\d+)>"#).unwrap();
pub static ref REGEX_COMMANDS: Regex = Regex::new(r#"([a-z]+)"#).unwrap();
pub static ref REGEX_ALIAS: Regex =
Regex::new(r#"(?P<name>[\S]{1,12})(?:(?: (?P<cmd>.*)$)|$)"#).unwrap();
pub static ref REGEX_CONTENT_SUBSTITUTION: Regex = Regex::new(r#"<<((?P<user>\d+)|(?P<role>.{1,100}))>>"#).unwrap();
pub static ref REGEX_CHANNEL_USER: Regex = Regex::new(r#"\s*<(#|@)(?:!)?(\d+)>\s*"#).unwrap();
pub static ref REGEX_REMIND_COMMAND: Regex = RegexBuilder::new(
r#"(?P<mentions>(?:<@\d+>\s+|<@!\d+>\s+|<#\d+>\s+)*)(?P<time>(?:(?:\d+)(?:s|m|h|d|:|/|-|))+)(?:\s+(?P<interval>(?:(?:\d+)(?:s|m|h|d|))+))?(?:\s+(?P<expires>(?:(?:\d+)(?:s|m|h|d|:|/|-|))+))?\s+(?P<content>.*)"#
)
.dot_matches_new_line(true)
.build()
.unwrap();
pub static ref REGEX_NATURAL_COMMAND_1: Regex = RegexBuilder::new(
r#"(?P<time>.*?)(?:\s+)(?:send|say)(?:\s+)(?P<msg>.*?)(?:(?:\s+)to(?:\s+)(?P<mentions>((?:<@\d+>)|(?:<@!\d+>)|(?:<#\d+>)|(?:\s+))+))?$"#
)
.dot_matches_new_line(true)
.build()
.unwrap();
pub static ref REGEX_NATURAL_COMMAND_2: Regex = RegexBuilder::new(
r#"(?P<msg>.*)(?:\s+)every(?:\s+)(?P<interval>.*?)(?:(?:\s+)(?:until|for)(?:\s+)(?P<expires>.*?))?$"#
)
.dot_matches_new_line(true)
.build()
.unwrap();
pub static ref SUBSCRIPTION_ROLES: HashSet<u64> = HashSet::from_iter(
env::var("SUBSCRIPTION_ROLES")
.map(|var| var
.split(',')
.filter_map(|item| { item.parse::<u64>().ok() })
.collect::<Vec<u64>>())
.unwrap_or_else(|_| Vec::new())
);
pub static ref CNC_GUILD: Option<u64> = env::var("CNC_GUILD")
.map(|var| var.parse::<u64>().ok())
.ok()
.flatten();
pub static ref MIN_INTERVAL: i64 = env::var("MIN_INTERVAL")
.ok()
.map(|inner| inner.parse::<i64>().ok())
.flatten()
.unwrap_or(600);
pub static ref MAX_TIME: i64 = env::var("MAX_TIME")
.ok()
.map(|inner| inner.parse::<i64>().ok())
.flatten()
.unwrap_or(60 * 60 * 24 * 365 * 50);
pub static ref LOCAL_TIMEZONE: String =
env::var("LOCAL_TIMEZONE").unwrap_or_else(|_| "UTC".to_string());
pub static ref LOCAL_LANGUAGE: String =
env::var("LOCAL_LANGUAGE").unwrap_or_else(|_| "EN".to_string());
pub static ref DEFAULT_PREFIX: String =
env::var("DEFAULT_PREFIX").unwrap_or_else(|_| "$".to_string());
pub static ref THEME_COLOR: u32 = env::var("THEME_COLOR").map_or(
THEME_COLOR_FALLBACK,
|inner| u32::from_str_radix(&inner, 16).unwrap_or(THEME_COLOR_FALLBACK)
);
pub static ref PYTHON_LOCATION: String =
env::var("PYTHON_LOCATION").unwrap_or_else(|_| "venv/bin/python3".to_string());
}

View File

@ -1,114 +0,0 @@
use std::{collections::HashMap, env};
use log::error;
use poise::{
serenity_prelude as serenity,
serenity_prelude::{model::application::interaction::Interaction, utils::shard_id},
};
use crate::{component_models::ComponentDataModel, Data, Error, THEME_COLOR};
pub async fn listener(
ctx: &serenity::Context,
event: &poise::Event<'_>,
data: &Data,
) -> Result<(), Error> {
match event {
poise::Event::Ready { .. } => {
ctx.set_activity(serenity::Activity::watching("for /remind")).await;
}
poise::Event::ChannelDelete { channel } => {
sqlx::query!("DELETE FROM channels WHERE channel = ?", channel.id.as_u64())
.execute(&data.database)
.await
.unwrap();
}
poise::Event::GuildCreate { guild, is_new } => {
if *is_new {
let guild_id = guild.id.as_u64().to_owned();
sqlx::query!("INSERT IGNORE INTO guilds (guild) VALUES (?)", guild_id)
.execute(&data.database)
.await?;
if let Err(e) = post_guild_count(ctx, &data.http, guild_id).await {
error!("DiscordBotList: {:?}", e);
}
let default_channel = guild.default_channel_guaranteed();
if let Some(default_channel) = default_channel {
default_channel
.send_message(&ctx, |m| {
m.embed(|e| {
e.title("Thank you for adding Reminder Bot!").description(
"To get started:
• Set your timezone with `/timezone`
• Set up permissions in Server Settings 🠚 Integrations 🠚 Reminder Bot (desktop only)
• Create your first reminder with `/remind`
__Support__
If you need any support, please come and ask us! Join our [Discord](https://discord.jellywx.com).
__Updates__
To stay up to date on the latest features and fixes, join our [Discord](https://discord.jellywx.com).
",
).color(*THEME_COLOR)
})
})
.await?;
}
}
}
poise::Event::GuildDelete { incomplete, .. } => {
let _ = sqlx::query!("DELETE FROM guilds WHERE guild = ?", incomplete.id.0)
.execute(&data.database)
.await;
}
poise::Event::InteractionCreate { interaction } => {
if let Interaction::MessageComponent(component) = interaction {
let component_model = ComponentDataModel::from_custom_id(&component.data.custom_id);
component_model.act(ctx, data, component).await;
}
}
_ => {}
}
Ok(())
}
async fn post_guild_count(
ctx: &serenity::Context,
http: &reqwest::Client,
guild_id: u64,
) -> Result<(), reqwest::Error> {
if let Ok(token) = env::var("DISCORDBOTS_TOKEN") {
let shard_count = ctx.cache.shard_count();
let current_shard_id = shard_id(guild_id, shard_count);
let guild_count = ctx
.cache
.guilds()
.iter()
.filter(|g| shard_id(g.as_u64().to_owned(), shard_count) == current_shard_id)
.count() as u64;
let mut hm = HashMap::new();
hm.insert("server_count", guild_count);
hm.insert("shard_id", current_shard_id);
hm.insert("shard_count", shard_count);
http.post(
format!("https://top.gg/api/bots/{}/stats", ctx.cache.current_user_id().as_u64())
.as_str(),
)
.header("Authorization", token)
.json(&hm)
.send()
.await
.map(|_| ())
} else {
Ok(())
}
}

500
src/framework.rs Normal file
View File

@ -0,0 +1,500 @@
use serenity::{
async_trait,
client::Context,
constants::MESSAGE_CODE_LIMIT,
framework::Framework,
futures::prelude::future::BoxFuture,
http::Http,
model::{
channel::{Channel, GuildChannel, Message},
guild::{Guild, Member},
id::{ChannelId, MessageId},
},
Result as SerenityResult,
};
use log::{error, info, warn};
use regex::{Match, Regex, RegexBuilder};
use std::{collections::HashMap, fmt};
use crate::{
language_manager::LanguageManager,
models::{channel_data::ChannelData, guild_data::GuildData, user_data::UserData, CtxGuildData},
LimitExecutors, SQLPool,
};
type CommandFn = for<'fut> fn(&'fut Context, &'fut Message, String) -> BoxFuture<'fut, ()>;
#[derive(Debug, PartialEq)]
pub enum PermissionLevel {
Unrestricted,
Managed,
Restricted,
}
pub struct Command {
pub name: &'static str,
pub required_perms: PermissionLevel,
pub supports_dm: bool,
pub can_blacklist: bool,
pub func: CommandFn,
}
impl Command {
async fn check_permissions(&self, ctx: &Context, guild: &Guild, member: &Member) -> bool {
if self.required_perms == PermissionLevel::Unrestricted {
true
} else {
let permissions = guild.member_permissions(&ctx, &member.user).await.unwrap();
if permissions.manage_guild()
|| (permissions.manage_messages()
&& self.required_perms == PermissionLevel::Managed)
{
return true;
}
if self.required_perms == PermissionLevel::Managed {
let pool = ctx
.data
.read()
.await
.get::<SQLPool>()
.cloned()
.expect("Could not get SQLPool from data");
match sqlx::query!(
"
SELECT
role
FROM
roles
INNER JOIN
command_restrictions ON roles.id = command_restrictions.role_id
WHERE
command_restrictions.command = ? AND
roles.guild_id = (
SELECT
id
FROM
guilds
WHERE
guild = ?)
",
self.name,
guild.id.as_u64()
)
.fetch_all(&pool)
.await
{
Ok(rows) => {
let role_ids = member
.roles
.iter()
.map(|r| *r.as_u64())
.collect::<Vec<u64>>();
for row in rows {
if role_ids.contains(&row.role) {
return true;
}
}
false
}
Err(sqlx::Error::RowNotFound) => false,
Err(e) => {
warn!(
"Unexpected error occurred querying command_restrictions: {:?}",
e
);
false
}
}
} else {
false
}
}
}
}
impl fmt::Debug for Command {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Command")
.field("name", &self.name)
.field("required_perms", &self.required_perms)
.field("supports_dm", &self.supports_dm)
.field("can_blacklist", &self.can_blacklist)
.finish()
}
}
#[async_trait]
pub trait SendIterator {
async fn say_lines(
self,
http: impl AsRef<Http> + Send + Sync + 'async_trait,
content: impl Iterator<Item = String> + Send + 'async_trait,
) -> SerenityResult<()>;
}
#[async_trait]
impl SendIterator for ChannelId {
async fn say_lines(
self,
http: impl AsRef<Http> + Send + Sync + 'async_trait,
content: impl Iterator<Item = String> + Send + 'async_trait,
) -> SerenityResult<()> {
let mut current_content = String::new();
for line in content {
if current_content.len() + line.len() > MESSAGE_CODE_LIMIT as usize {
self.send_message(&http, |m| {
m.allowed_mentions(|am| am.empty_parse())
.content(&current_content)
})
.await?;
current_content = line;
} else {
current_content = format!("{}\n{}", current_content, line);
}
}
if !current_content.is_empty() {
self.send_message(&http, |m| {
m.allowed_mentions(|am| am.empty_parse())
.content(&current_content)
})
.await?;
}
Ok(())
}
}
pub struct RegexFramework {
pub commands: HashMap<String, &'static Command>,
command_matcher: Regex,
dm_regex_matcher: Regex,
default_prefix: String,
client_id: u64,
ignore_bots: bool,
case_insensitive: bool,
dm_enabled: bool,
}
impl RegexFramework {
pub fn new<T: Into<u64>>(client_id: T) -> Self {
Self {
commands: HashMap::new(),
command_matcher: Regex::new(r#"^$"#).unwrap(),
dm_regex_matcher: Regex::new(r#"^$"#).unwrap(),
default_prefix: "".to_string(),
client_id: client_id.into(),
ignore_bots: true,
case_insensitive: true,
dm_enabled: true,
}
}
pub fn case_insensitive(mut self, case_insensitive: bool) -> Self {
self.case_insensitive = case_insensitive;
self
}
pub fn default_prefix<T: ToString>(mut self, new_prefix: T) -> Self {
self.default_prefix = new_prefix.to_string();
self
}
pub fn ignore_bots(mut self, ignore_bots: bool) -> Self {
self.ignore_bots = ignore_bots;
self
}
pub fn dm_enabled(mut self, dm_enabled: bool) -> Self {
self.dm_enabled = dm_enabled;
self
}
pub fn add_command<S: ToString>(mut self, name: S, command: &'static Command) -> Self {
self.commands.insert(name.to_string(), command);
self
}
pub fn build(mut self) -> Self {
{
let command_names;
{
let mut command_names_vec =
self.commands.keys().map(|k| &k[..]).collect::<Vec<&str>>();
command_names_vec.sort_unstable_by_key(|a| a.len());
command_names = command_names_vec.join("|");
}
info!("Command names: {}", command_names);
{
let match_string = r#"^(?:(?:<@ID>\s*)|(?:<@!ID>\s*)|(?P<prefix>\S{1,5}?))(?P<cmd>COMMANDS)(?:$|\s+(?P<args>.*))$"#
.replace("COMMANDS", command_names.as_str())
.replace("ID", self.client_id.to_string().as_str());
self.command_matcher = RegexBuilder::new(match_string.as_str())
.case_insensitive(self.case_insensitive)
.dot_matches_new_line(true)
.build()
.unwrap();
}
}
{
let dm_command_names;
{
let mut command_names_vec = self
.commands
.iter()
.filter_map(|(key, command)| {
if command.supports_dm {
Some(&key[..])
} else {
None
}
})
.collect::<Vec<&str>>();
command_names_vec.sort_unstable_by_key(|a| a.len());
dm_command_names = command_names_vec.join("|");
}
{
let match_string = r#"^(?:(?:<@ID>\s+)|(?:<@!ID>\s+)|(\$)|())(?P<cmd>COMMANDS)(?:$|\s+(?P<args>.*))$"#
.replace("COMMANDS", dm_command_names.as_str())
.replace("ID", self.client_id.to_string().as_str());
self.dm_regex_matcher = RegexBuilder::new(match_string.as_str())
.case_insensitive(self.case_insensitive)
.dot_matches_new_line(true)
.build()
.unwrap();
}
}
self
}
}
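// A minimal illustrative sketch (an assumed example, not taken from this file): with a
// hypothetical client ID of 123 and only the commands "todo" and "help", build() above
// produces a guild-command pattern of the shape shown here, and its named capture
// groups split a message into prefix, command and arguments.
#[cfg(test)]
mod command_matcher_sketch {
    use regex::RegexBuilder;

    #[test]
    fn sample_pattern_matches_a_prefixed_command() {
        let pattern = r#"^(?:(?:<@123>\s*)|(?:<@!123>\s*)|(?P<prefix>\S{1,5}?))(?P<cmd>todo|help)(?:$|\s+(?P<args>.*))$"#;
        let matcher = RegexBuilder::new(pattern)
            .case_insensitive(true)
            .dot_matches_new_line(true)
            .build()
            .unwrap();

        let captures = matcher.captures("$todo add buy milk").unwrap();
        assert_eq!(&captures["prefix"], "$");
        assert_eq!(&captures["cmd"], "todo");
        assert_eq!(&captures["args"], "add buy milk");
    }
}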
enum PermissionCheck {
None, // No permissions
Basic(bool, bool), // Send + Embed permissions (sufficient to reply)
All, // Above + Manage Webhooks (sufficient to operate)
}
#[async_trait]
impl Framework for RegexFramework {
async fn dispatch(&self, ctx: Context, msg: Message) {
async fn check_self_permissions(
ctx: &Context,
guild: &Guild,
channel: &GuildChannel,
) -> SerenityResult<PermissionCheck> {
let user_id = ctx.cache.current_user_id();
let guild_perms = guild.member_permissions(&ctx, user_id).await?;
let channel_perms = channel.permissions_for_user(ctx, user_id)?;
let basic_perms = channel_perms.send_messages();
Ok(
if basic_perms && guild_perms.manage_webhooks() && channel_perms.embed_links() {
PermissionCheck::All
} else if basic_perms {
PermissionCheck::Basic(
guild_perms.manage_webhooks(),
channel_perms.embed_links(),
)
} else {
PermissionCheck::None
},
)
}
async fn check_prefix(ctx: &Context, guild: &Guild, prefix_opt: Option<Match<'_>>) -> bool {
if let Some(prefix) = prefix_opt {
let guild_prefix = ctx.prefix(Some(guild.id)).await;
guild_prefix.as_str() == prefix.as_str()
} else {
true
}
}
// gate to prevent analysing messages unnecessarily
if (msg.author.bot && self.ignore_bots) || msg.content.is_empty() {
} else {
// Guild Command
if let (Some(guild), Ok(Channel::Guild(channel))) =
(msg.guild(&ctx), msg.channel(&ctx).await)
{
let data = ctx.data.read().await;
let pool = data
.get::<SQLPool>()
.cloned()
.expect("Could not get SQLPool from data");
if let Some(full_match) = self.command_matcher.captures(&msg.content) {
if check_prefix(&ctx, &guild, full_match.name("prefix")).await {
let lm = data.get::<LanguageManager>().unwrap();
let language = UserData::language_of(&msg.author, &pool);
match check_self_permissions(&ctx, &guild, &channel).await {
Ok(perms) => match perms {
PermissionCheck::All => {
let command = self
.commands
.get(
&full_match
.name("cmd")
.unwrap()
.as_str()
.to_lowercase(),
)
.unwrap();
let channel_data = ChannelData::from_channel(
msg.channel(&ctx).await.unwrap(),
&pool,
)
.await
.unwrap();
if !command.can_blacklist || !channel_data.blacklisted {
let args = full_match
.name("args")
.map(|m| m.as_str())
.unwrap_or("")
.to_string();
let member = guild.member(&ctx, &msg.author).await.unwrap();
if command.check_permissions(&ctx, &guild, &member).await {
dbg!(command.name);
{
let guild_id = guild.id.as_u64().to_owned();
GuildData::from_guild(guild, &pool)
.await
.unwrap_or_else(|_| {
panic!(
"Failed to create new guild object for {}",
guild_id
)
});
}
if msg.id == MessageId(0)
|| !ctx.check_executing(msg.author.id).await
{
ctx.set_executing(msg.author.id).await;
(command.func)(&ctx, &msg, args).await;
ctx.drop_executing(msg.author.id).await;
}
} else if command.required_perms
== PermissionLevel::Restricted
{
let _ = msg
.channel_id
.say(
&ctx,
lm.get(&language.await, "no_perms_restricted"),
)
.await;
} else if command.required_perms == PermissionLevel::Managed
{
let _ = msg
.channel_id
.say(
&ctx,
lm.get(&language.await, "no_perms_managed")
.replace(
"{prefix}",
&ctx.prefix(msg.guild_id).await,
),
)
.await;
}
}
}
PermissionCheck::Basic(manage_webhooks, embed_links) => {
let response = lm
.get(&language.await, "no_perms_general")
.replace(
"{manage_webhooks}",
if manage_webhooks { "" } else { "" },
)
.replace(
"{embed_links}",
if embed_links { "" } else { "" },
);
let _ = msg.channel_id.say(&ctx, response).await;
}
PermissionCheck::None => {
warn!("Missing enough permissions for guild {}", guild.id);
}
},
Err(e) => {
error!(
"Error occurred getting permissions in guild {}: {:?}",
guild.id, e
);
}
}
}
}
}
// DM Command
else if self.dm_enabled {
if let Some(full_match) = self.dm_regex_matcher.captures(&msg.content[..]) {
let command = self
.commands
.get(&full_match.name("cmd").unwrap().as_str().to_lowercase())
.unwrap();
let args = full_match
.name("args")
.map(|m| m.as_str())
.unwrap_or("")
.to_string();
dbg!(command.name);
if msg.id == MessageId(0) || !ctx.check_executing(msg.author.id).await {
ctx.set_executing(msg.author.id).await;
(command.func)(&ctx, &msg, args).await;
ctx.drop_executing(msg.author.id).await;
}
}
}
}
}
}

View File

@ -1,100 +0,0 @@
use poise::{
serenity_prelude::model::channel::Channel, ApplicationCommandOrAutocompleteInteraction,
};
use crate::{consts::MACRO_MAX_COMMANDS, models::command_macro::RecordedCommand, Context, Error};
async fn macro_check(ctx: Context<'_>) -> bool {
if let Context::Application(app_ctx) = ctx {
if let ApplicationCommandOrAutocompleteInteraction::ApplicationCommand(_) =
app_ctx.interaction
{
if let Some(guild_id) = ctx.guild_id() {
if ctx.command().identifying_name != "finish_macro" {
let mut lock = ctx.data().recording_macros.write().await;
if let Some(command_macro) = lock.get_mut(&(guild_id, ctx.author().id)) {
if command_macro.commands.len() >= MACRO_MAX_COMMANDS {
let _ = ctx.send(|m| {
m.ephemeral(true).content(
format!("{} commands already recorded. Please use `/macro finish` to end recording.", MACRO_MAX_COMMANDS),
)
})
.await;
} else {
let recorded = RecordedCommand {
action: None,
command_name: ctx.command().identifying_name.clone(),
options: Vec::from(app_ctx.args),
};
command_macro.commands.push(recorded);
let _ = ctx
.send(|m| m.ephemeral(true).content("Command recorded to macro"))
.await;
}
return false;
}
}
}
}
}
true
}
async fn check_self_permissions(ctx: Context<'_>) -> bool {
if let Some(guild) = ctx.guild() {
let user_id = ctx.serenity_context().cache.current_user_id();
let manage_webhooks =
guild.member_permissions(&ctx, user_id).await.map_or(false, |p| p.manage_webhooks());
let (view_channel, send_messages, embed_links) = ctx
.channel_id()
.to_channel(&ctx)
.await
.ok()
.and_then(|c| {
if let Channel::Guild(channel) = c {
let perms = channel.permissions_for_user(&ctx, user_id).ok()?;
Some((perms.view_channel(), perms.send_messages(), perms.embed_links()))
} else {
None
}
})
.unwrap_or((false, false, false));
if manage_webhooks && send_messages && embed_links {
true
} else {
let _ = ctx
.send(|m| {
m.content(format!(
"Please ensure the bot has the correct permissions:
{} **View Channel**
{} **Send Message**
{} **Embed Links**
{} **Manage Webhooks**",
if view_channel { "" } else { "" },
if send_messages { "" } else { "" },
if embed_links { "" } else { "" },
if manage_webhooks { "" } else { "" },
))
})
.await;
false
}
} else {
true
}
}
pub async fn all_checks(ctx: Context<'_>) -> Result<bool, Error> {
Ok(macro_check(ctx).await && check_self_permissions(ctx).await)
}

View File

@ -1,327 +0,0 @@
/*
With modifications, 2022 Jude Southworth
Original copyright notice:
Copyright 2021 Paul Colomiets
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
use std::{error::Error as StdError, fmt, str::Chars};
/// Error parsing human-friendly duration
#[derive(Debug, PartialEq, Clone)]
pub enum Error {
/// Invalid character during parsing
///
/// More specifically anything that is not alphanumeric is prohibited
///
/// The field is a byte offset of the character in the string.
InvalidCharacter(usize),
/// Non-numeric value where number is expected
///
/// This usually means that either the time unit is broken into words,
/// e.g. `m sec` instead of `msec`, or the number is omitted,
/// for example `2 hours min` instead of `2 hours 1 min`
///
/// The field is a byte offset of the erroneous character
/// in the string.
NumberExpected(usize),
/// Unit in the number is not one of allowed units
///
/// See documentation of `parse_duration` for the list of supported
/// time units.
///
/// The two fields are start and end (exclusive) of the slice from
/// the original string, containing the erroneous value
UnknownUnit {
/// Start of the invalid unit inside the original string
start: usize,
/// End of the invalid unit inside the original string
end: usize,
/// The unit verbatim
unit: String,
/// A number associated with the unit
value: u64,
},
/// The numeric value is too large
///
/// Usually this means value is too large to be useful. If user writes
/// data in subsecond units, then the maximum is about 3k years. When
/// using seconds, or larger units, the limit is even larger.
NumberOverflow,
/// The value was an empty string (or consisted only of whitespace)
Empty,
}
impl StdError for Error {}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Error::InvalidCharacter(offset) => write!(f, "invalid character at {}", offset),
Error::NumberExpected(offset) => write!(f, "expected number at {}", offset),
Error::UnknownUnit { unit, value, .. } if unit.is_empty() => {
write!(f, "time unit needed, for example {0}sec or {0}ms", value,)
}
Error::UnknownUnit { unit, .. } => {
write!(
f,
"unknown time unit {:?}, \
supported units: ns, us, ms, sec, min, hours, days, \
weeks, months, years (and few variations)",
unit
)
}
Error::NumberOverflow => write!(f, "number is too large"),
Error::Empty => write!(f, "value was empty"),
}
}
}
trait OverflowOp: Sized {
fn mul(self, other: Self) -> Result<Self, Error>;
fn add(self, other: Self) -> Result<Self, Error>;
}
impl OverflowOp for u64 {
fn mul(self, other: Self) -> Result<Self, Error> {
self.checked_mul(other).ok_or(Error::NumberOverflow)
}
fn add(self, other: Self) -> Result<Self, Error> {
self.checked_add(other).ok_or(Error::NumberOverflow)
}
}
#[derive(Copy, Clone)]
pub struct Interval {
pub month: u64,
pub day: u64,
pub sec: u64,
}
struct Parser<'a> {
iter: Chars<'a>,
src: &'a str,
current: (u64, u64, u64, u64),
}
impl<'a> Parser<'a> {
fn off(&self) -> usize {
self.src.len() - self.iter.as_str().len()
}
fn parse_first_char(&mut self) -> Result<Option<u64>, Error> {
let off = self.off();
for c in self.iter.by_ref() {
match c {
'0'..='9' => {
return Ok(Some(c as u64 - '0' as u64));
}
c if c.is_whitespace() => continue,
_ => {
return Err(Error::NumberExpected(off));
}
}
}
Ok(None)
}
fn parse_unit(&mut self, n: u64, start: usize, end: usize) -> Result<(), Error> {
let (mut month, mut day, mut sec, nsec) = match &self.src[start..end] {
"nanos" | "nsec" | "ns" => (0, 0u64, 0u64, n),
"usec" | "us" => (0, 0, 0u64, n.mul(1000)?),
"millis" | "msec" | "ms" => (0, 0, 0u64, n.mul(1_000_000)?),
"seconds" | "second" | "secs" | "sec" | "s" => (0, 0, n, 0),
"minutes" | "minute" | "min" | "mins" | "m" => (0, 0, n.mul(60)?, 0),
"hours" | "hour" | "hr" | "hrs" | "h" => (0, 0, n.mul(3600)?, 0),
"days" | "day" | "d" => (0, n, 0, 0),
"weeks" | "week" | "w" => (0, n.mul(7)?, 0, 0),
"months" | "month" | "M" => (n, 0, 0, 0),
"years" | "year" | "y" => (n.mul(12)?, 0, 0, 0),
_ => {
return Err(Error::UnknownUnit {
start,
end,
unit: self.src[start..end].to_string(),
value: n,
});
}
};
let mut nsec = self.current.3 + nsec;
if nsec > 1_000_000_000 {
sec += nsec / 1_000_000_000;
nsec %= 1_000_000_000;
}
sec += self.current.2;
day += self.current.1;
month += self.current.0;
self.current = (month, day, sec, nsec);
Ok(())
}
fn parse(mut self) -> Result<Interval, Error> {
let mut n = self.parse_first_char()?.ok_or(Error::Empty)?;
'outer: loop {
let mut off = self.off();
while let Some(c) = self.iter.next() {
match c {
'0'..='9' => {
n = n
.checked_mul(10)
.and_then(|x| x.checked_add(c as u64 - '0' as u64))
.ok_or(Error::NumberOverflow)?;
}
c if c.is_whitespace() => {}
'a'..='z' | 'A'..='Z' => {
break;
}
_ => {
return Err(Error::InvalidCharacter(off));
}
}
off = self.off();
}
let start = off;
let mut off = self.off();
while let Some(c) = self.iter.next() {
match c {
'0'..='9' => {
self.parse_unit(n, start, off)?;
n = c as u64 - '0' as u64;
continue 'outer;
}
c if c.is_whitespace() => break,
'a'..='z' | 'A'..='Z' => {}
_ => {
return Err(Error::InvalidCharacter(off));
}
}
off = self.off();
}
self.parse_unit(n, start, off)?;
n = match self.parse_first_char()? {
Some(n) => n,
None => {
return Ok(Interval {
month: self.current.0,
day: self.current.1,
sec: self.current.2,
})
}
};
}
}
}
/// Parse duration object `1hour 12min 5s`
///
/// The duration object is a concatenation of time spans. Where each time
/// span is an integer number and a suffix. Supported suffixes:
///
/// * `nsec`, `ns` -- nanoseconds
/// * `usec`, `us` -- microseconds
/// * `msec`, `ms` -- milliseconds
/// * `seconds`, `second`, `sec`, `s`
/// * `minutes`, `minute`, `min`, `m`
/// * `hours`, `hour`, `hr`, `h`
/// * `days`, `day`, `d`
/// * `weeks`, `week`, `w`
/// * `months`, `month`, `M` -- counted as whole calendar months
/// * `years`, `year`, `y` -- counted as 12 calendar months
///
/// # Examples
///
/// ```
/// let interval = parse_duration("2h 37min").unwrap();
///
/// assert_eq!(interval.sec, 9420);
/// assert_eq!(interval.day, 0);
/// assert_eq!(interval.month, 0);
/// ```
pub fn parse_duration(s: &str) -> Result<Interval, Error> {
Parser { iter: s.chars(), src: s, current: (0, 0, 0, 0) }.parse()
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_seconds() {
let interval = parse_duration("10 seconds").unwrap();
assert_eq!(interval.sec, 10);
assert_eq!(interval.day, 0);
assert_eq!(interval.month, 0);
}
#[test]
fn parse_minutes() {
let interval = parse_duration("10 minutes").unwrap();
assert_eq!(interval.sec, 600);
assert_eq!(interval.day, 0);
assert_eq!(interval.month, 0);
}
#[test]
fn parse_hours() {
let interval = parse_duration("10 hours").unwrap();
assert_eq!(interval.sec, 36_000);
assert_eq!(interval.day, 0);
assert_eq!(interval.month, 0);
}
#[test]
fn parse_days() {
let interval = parse_duration("10 days").unwrap();
assert_eq!(interval.sec, 0);
assert_eq!(interval.day, 10);
assert_eq!(interval.month, 0);
}
#[test]
fn parse_weeks() {
let interval = parse_duration("10 weeks").unwrap();
assert_eq!(interval.sec, 0);
assert_eq!(interval.day, 70);
assert_eq!(interval.month, 0);
}
#[test]
fn parse_months() {
let interval = parse_duration("10 months").unwrap();
assert_eq!(interval.sec, 0);
assert_eq!(interval.day, 0);
assert_eq!(interval.month, 10);
}
#[test]
fn parse_years() {
let interval = parse_duration("10 years").unwrap();
assert_eq!(interval.sec, 0);
assert_eq!(interval.day, 0);
assert_eq!(interval.month, 120);
}
}

65
src/language_manager.rs Normal file
View File

@ -0,0 +1,65 @@
use serde::Deserialize;
use serde_json::from_str;
use serenity::prelude::TypeMapKey;
use std::{collections::HashMap, error::Error, sync::Arc};
use crate::consts::LOCAL_LANGUAGE;
#[derive(Deserialize)]
pub struct LanguageManager {
languages: HashMap<String, String>,
strings: HashMap<String, HashMap<String, String>>,
}
impl LanguageManager {
pub fn from_compiled(content: &'static str) -> Result<Self, Box<dyn Error + Send + Sync>> {
let new: Self = from_str(content)?;
Ok(new)
}
pub fn get(&self, language: &str, name: &str) -> &str {
self.strings
.get(language)
.map(|sm| sm.get(name))
.unwrap_or_else(|| panic!(r#"Language does not exist: "{}""#, language))
.unwrap_or_else(|| {
self.strings
.get(&*LOCAL_LANGUAGE)
.map(|sm| {
sm.get(name)
.unwrap_or_else(|| panic!(r#"String does not exist: "{}""#, name))
})
.expect("LOCAL_LANGUAGE is not available")
})
}
pub fn get_language(&self, language: &str) -> Option<&str> {
let language_normal = language.to_lowercase();
self.languages
.iter()
.filter(|(k, v)| {
k.to_lowercase() == language_normal || v.to_lowercase() == language_normal
})
.map(|(k, _)| k.as_str())
.next()
}
pub fn get_language_by_flag(&self, flag: &str) -> Option<&str> {
self.languages
.iter()
.filter(|(k, _)| self.get(k, "flag") == flag)
.map(|(k, _)| k.as_str())
.next()
}
pub fn all_languages(&self) -> impl Iterator<Item = (&str, &str)> {
self.languages.iter().map(|(k, v)| (k.as_str(), v.as_str()))
}
}
impl TypeMapKey for LanguageManager {
type Value = Arc<Self>;
}

View File

@ -1,269 +1,598 @@
#![feature(int_roundings)]
#[macro_use]
extern crate lazy_static;
mod commands;
mod component_models;
mod consts;
mod event_handlers;
mod hooks;
mod interval_parser;
mod models;
mod time_parser;
mod utils;
use std::{
collections::HashMap,
env,
error::Error as StdError,
fmt::{Debug, Display, Formatter},
path::Path,
};
use chrono_tz::Tz;
use log::{error, warn};
use poise::serenity_prelude::model::{
gateway::GatewayIntents,
id::{GuildId, UserId},
};
use sqlx::{MySql, Pool};
use tokio::sync::{broadcast, broadcast::Sender, RwLock};
use crate::{
commands::{command_macro, info_cmds, moderation_cmds, reminder_cmds, todo_cmds},
consts::THEME_COLOR,
event_handlers::listener,
hooks::all_checks,
models::command_macro::CommandMacro,
utils::register_application_commands,
};
type Database = MySql;
type Error = Box<dyn std::error::Error + Send + Sync>;
type Context<'a> = poise::Context<'a, Data, Error>;
type ApplicationContext<'a> = poise::ApplicationContext<'a, Data, Error>;
pub struct Data {
database: Pool<Database>,
http: reqwest::Client,
recording_macros: RwLock<HashMap<(GuildId, UserId), CommandMacro<Data, Error>>>,
popular_timezones: Vec<Tz>,
_broadcast: Sender<()>,
}
impl Debug for Data {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "Data {{ .. }}")
}
}
struct Ended;
impl Debug for Ended {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str("Process ended.")
}
}
impl Display for Ended {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.write_str("Process ended.")
}
}
impl StdError for Ended {}
#[macro_use]
extern crate lazy_static;
mod commands;
mod consts;
mod framework;
mod language_manager;
mod models;
mod time_parser;
use serenity::{
async_trait,
cache::Cache,
client::Client,
futures::TryFutureExt,
http::{client::Http, CacheHttp},
model::{
channel::GuildChannel,
channel::Message,
guild::Guild,
id::{GuildId, UserId},
interactions::Interaction,
},
prelude::{Context, EventHandler, TypeMapKey},
utils::shard_id,
};
use sqlx::mysql::MySqlPool;
use dotenv::dotenv;
use std::{collections::HashMap, env, sync::Arc, time::Instant};
use crate::{
commands::{info_cmds, moderation_cmds, reminder_cmds, todo_cmds},
consts::{CNC_GUILD, DEFAULT_PREFIX, SUBSCRIPTION_ROLES, THEME_COLOR},
framework::RegexFramework,
language_manager::LanguageManager,
models::{guild_data::GuildData, user_data::UserData},
};
use inflector::Inflector;
use log::info;
use dashmap::DashMap;
use tokio::sync::RwLock;
use chrono::Utc;
use chrono_tz::Tz;
use serenity::model::gateway::GatewayIntents;
use serenity::model::guild::UnavailableGuild;
use serenity::model::prelude::{
InteractionApplicationCommandCallbackDataFlags, InteractionResponseType,
};
struct GuildDataCache;
impl TypeMapKey for GuildDataCache {
type Value = Arc<DashMap<GuildId, Arc<RwLock<GuildData>>>>;
} }
impl Debug for Data { struct SQLPool;
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "Data {{ .. }}") impl TypeMapKey for SQLPool {
type Value = MySqlPool;
}
struct ReqwestClient;
impl TypeMapKey for ReqwestClient {
type Value = Arc<reqwest::Client>;
}
struct FrameworkCtx;
impl TypeMapKey for FrameworkCtx {
type Value = Arc<RegexFramework>;
}
struct PopularTimezones;
impl TypeMapKey for PopularTimezones {
type Value = Arc<Vec<Tz>>;
}
struct CurrentlyExecuting;
impl TypeMapKey for CurrentlyExecuting {
type Value = Arc<RwLock<HashMap<UserId, Instant>>>;
}
#[async_trait]
trait LimitExecutors {
async fn check_executing(&self, user: UserId) -> bool;
async fn set_executing(&self, user: UserId);
async fn drop_executing(&self, user: UserId);
}
#[async_trait]
impl LimitExecutors for Context {
async fn check_executing(&self, user: UserId) -> bool {
let currently_executing = self
.data
.read()
.await
.get::<CurrentlyExecuting>()
.cloned()
.unwrap();
let lock = currently_executing.read().await;
lock.get(&user)
.map_or(false, |now| now.elapsed().as_secs() < 4)
}
async fn set_executing(&self, user: UserId) {
let currently_executing = self
.data
.read()
.await
.get::<CurrentlyExecuting>()
.cloned()
.unwrap();
let mut lock = currently_executing.write().await;
lock.insert(user, Instant::now());
}
async fn drop_executing(&self, user: UserId) {
let currently_executing = self
.data
.read()
.await
.get::<CurrentlyExecuting>()
.cloned()
.unwrap();
let mut lock = currently_executing.write().await;
lock.remove(&user);
} }
} }
struct Ended; struct Handler;
impl Debug for Ended { #[async_trait]
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { impl EventHandler for Handler {
f.write_str("Process ended.") async fn channel_delete(&self, ctx: Context, channel: &GuildChannel) {
} let pool = ctx
} .data
.read()
.await
.get::<SQLPool>()
.cloned()
.expect("Could not get SQLPool from data");
impl Display for Ended { sqlx::query!(
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { "
f.write_str("Process ended.") DELETE FROM channels WHERE channel = ?
} ",
} channel.id.as_u64()
)
impl StdError for Ended {} .execute(&pool)
.await
#[tokio::main(flavor = "multi_thread")] .unwrap();
async fn main() -> Result<(), Box<dyn StdError + Send + Sync>> {
let (tx, mut rx) = broadcast::channel(16);
tokio::select! {
output = _main(tx) => output,
_ = rx.recv() => Err(Box::new(Ended) as Box<dyn StdError + Send + Sync>)
}
}
async fn _main(tx: Sender<()>) -> Result<(), Box<dyn StdError + Send + Sync>> {
env_logger::init();
if Path::new("/etc/reminder-rs/config.env").exists() {
dotenv::from_path("/etc/reminder-rs/config.env")?;
} else {
let _ = dotenv::dotenv();
} }
let discord_token = env::var("DISCORD_TOKEN").expect("Missing DISCORD_TOKEN from environment"); async fn guild_create(&self, ctx: Context, guild: Guild, is_new: bool) {
if is_new {
let guild_id = guild.id.as_u64().to_owned();
let options = poise::FrameworkOptions { {
commands: vec![ let pool = ctx
info_cmds::help(), .data
info_cmds::info(), .read()
info_cmds::donate(), .await
info_cmds::clock(), .get::<SQLPool>()
info_cmds::clock_context_menu(), .cloned()
info_cmds::dashboard(), .expect("Could not get SQLPool from data");
moderation_cmds::timezone(),
poise::Command { GuildData::from_guild(guild, &pool)
subcommands: vec![ .await
moderation_cmds::set_allowed_dm(), .unwrap_or_else(|_| {
moderation_cmds::unset_allowed_dm(), panic!("Failed to create new guild object for {}", guild_id)
], });
..moderation_cmds::allowed_dm() }
},
poise::Command { if let Ok(token) = env::var("DISCORDBOTS_TOKEN") {
subcommands: vec![poise::Command { let shard_count = ctx.cache.shard_count();
subcommands: vec![ let current_shard_id = shard_id(guild_id, shard_count);
moderation_cmds::set_ephemeral_confirmations(),
moderation_cmds::unset_ephemeral_confirmations(), let guild_count = ctx
], .cache
..moderation_cmds::ephemeral_confirmations() .guilds()
}], .iter()
..moderation_cmds::settings() .filter(|g| shard_id(g.as_u64().to_owned(), shard_count) == current_shard_id)
}, .count() as u64;
moderation_cmds::webhook(),
poise::Command { let mut hm = HashMap::new();
subcommands: vec![ hm.insert("server_count", guild_count);
command_macro::delete::delete_macro(), hm.insert("shard_id", current_shard_id);
command_macro::record::finish_macro(), hm.insert("shard_count", shard_count);
command_macro::list::list_macro(),
command_macro::record::record_macro(), let client = ctx
command_macro::run::run_macro(), .data
command_macro::migrate::migrate_macro(), .read()
], .await
..command_macro::macro_base() .get::<ReqwestClient>()
}, .cloned()
reminder_cmds::pause(), .expect("Could not get ReqwestClient from data");
reminder_cmds::offset(),
reminder_cmds::nudge(), let response = client
reminder_cmds::look(), .post(
reminder_cmds::delete(), format!(
poise::Command { "https://top.gg/api/bots/{}/stats",
subcommands: vec![ ctx.cache.current_user_id().as_u64()
reminder_cmds::list_timer(), )
reminder_cmds::start_timer(), .as_str(),
reminder_cmds::delete_timer(), )
], .header("Authorization", token)
..reminder_cmds::timer_base() .json(&hm)
}, .send()
reminder_cmds::multiline(), .await;
reminder_cmds::remind(),
poise::Command { if let Err(res) = response {
subcommands: vec![ println!("DiscordBots Response: {:?}", res);
poise::Command { }
subcommands: vec![ }
todo_cmds::todo_guild_add(), }
todo_cmds::todo_guild_view(), }
],
..todo_cmds::todo_guild_base() async fn guild_delete(
}, &self,
poise::Command { ctx: Context,
subcommands: vec![ deleted_guild: UnavailableGuild,
todo_cmds::todo_channel_add(), _guild: Option<Guild>,
todo_cmds::todo_channel_view(), ) {
], let pool = ctx
..todo_cmds::todo_channel_base() .data
}, .read()
poise::Command { .await
subcommands: vec![todo_cmds::todo_user_add(), todo_cmds::todo_user_view()], .get::<SQLPool>()
..todo_cmds::todo_user_base() .cloned()
}, .expect("Could not get SQLPool from data");
],
..todo_cmds::todo_base() let guild_data_cache = ctx
}, .data
], .read()
allowed_mentions: None, .await
command_check: Some(|ctx| Box::pin(all_checks(ctx))), .get::<GuildDataCache>()
event_handler: |ctx, event, _framework, data| Box::pin(listener(ctx, event, data)), .cloned()
on_error: |error| { .unwrap();
Box::pin(async move { guild_data_cache.remove(&deleted_guild.id);
match error {
poise::FrameworkError::CommandCheckFailed { .. } => { sqlx::query!(
// suppress error "
} DELETE FROM guilds WHERE guild = ?
error => { ",
if let Err(e) = poise::builtins::on_error(error).await { deleted_guild.id.as_u64()
log::error!("Error while handling error: {}", e); )
.execute(&pool)
.await
.unwrap();
}
async fn interaction_create(&self, ctx: Context, interaction: Interaction) {
let (pool, lm) = get_ctx_data(&&ctx).await;
match interaction {
Interaction::MessageComponent(interaction) => {
if let Some(member) = interaction.clone().member {
let data = interaction.data.clone();
if data.custom_id.starts_with("timezone:") {
let mut user_data = UserData::from_user(&member.user, &ctx, &pool)
.await
.unwrap();
let new_timezone = data.custom_id.replace("timezone:", "").parse::<Tz>();
if let Ok(timezone) = new_timezone {
user_data.timezone = timezone.to_string();
user_data.commit_changes(&pool).await;
let _ = interaction.create_interaction_response(&ctx, |r| {
r.kind(InteractionResponseType::ChannelMessageWithSource)
.interaction_response_data(|d| {
let footer_text = lm.get(&user_data.language, "timezone/footer").replacen(
"{timezone}",
&user_data.timezone,
1,
);
let now = Utc::now().with_timezone(&user_data.timezone());
let content = lm
.get(&user_data.language, "timezone/set_p")
.replacen("{timezone}", &user_data.timezone, 1)
.replacen(
"{time}",
&now.format("%H:%M").to_string(),
1,
);
d.create_embed(|e| e.title(lm.get(&user_data.language, "timezone/set_p_title"))
.color(*THEME_COLOR)
.description(content)
.footer(|f| f.text(footer_text)))
.flags(InteractionApplicationCommandCallbackDataFlags::EPHEMERAL);
d
})
}).await;
}
} else if data.custom_id.starts_with("lang:") {
let mut user_data = UserData::from_user(&member.user, &ctx, &pool)
.await
.unwrap();
let lang_code = data.custom_id.replace("lang:", "");
if let Some(lang) = lm.get_language(&lang_code) {
user_data.language = lang.to_string();
user_data.commit_changes(&pool).await;
let _ = interaction
.create_interaction_response(&ctx, |r| {
r.kind(InteractionResponseType::ChannelMessageWithSource)
.interaction_response_data(|d| {
d.create_embed(|e| {
e.title(
lm.get(&user_data.language, "lang/set_p_title"),
)
.color(*THEME_COLOR)
.description(
lm.get(&user_data.language, "lang/set_p"),
)
})
.flags(InteractionApplicationCommandCallbackDataFlags::EPHEMERAL)
})
})
.await;
} }
} }
} }
}) }
}, _ => {}
..Default::default() }
}; }
}
let database = #[tokio::main]
Pool::connect(&env::var("DATABASE_URL").expect("No database URL provided")).await.unwrap(); async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
env_logger::init();
sqlx::migrate!().run(&database).await?; dotenv()?;
let popular_timezones = sqlx::query!( let token = env::var("DISCORD_TOKEN").expect("Missing DISCORD_TOKEN from environment");
"SELECT IFNULL(timezone, 'UTC') AS timezone
FROM users
WHERE timezone IS NOT NULL
GROUP BY timezone
ORDER BY COUNT(timezone) DESC
LIMIT 21"
)
.fetch_all(&database)
.await
.unwrap()
.iter()
.map(|t| t.timezone.parse::<Tz>().unwrap())
.collect::<Vec<Tz>>();
poise::Framework::builder() let http = Http::new_with_token(&token);
.token(discord_token)
.setup(move |ctx, _bot, framework| {
Box::pin(async move {
register_application_commands(ctx, framework, None).await.unwrap();
let kill_tx = tx.clone(); let logged_in_id = http
let kill_recv = tx.subscribe(); .get_current_user()
.map_ok(|user| user.id.as_u64().to_owned())
let ctx1 = ctx.clone();
let ctx2 = ctx.clone();
let pool1 = database.clone();
let pool2 = database.clone();
let run_settings = env::var("DONTRUN").unwrap_or_else(|_| "".to_string());
if !run_settings.contains("postman") {
tokio::spawn(async move {
match postman::initialize(kill_recv, ctx1, &pool1).await {
Ok(_) => {}
Err(e) => {
error!("postman exiting: {}", e);
}
};
});
} else {
warn!("Not running postman");
}
if !run_settings.contains("web") {
tokio::spawn(async move {
reminder_web::initialize(kill_tx, ctx2, pool2).await.unwrap();
});
} else {
warn!("Not running web");
}
Ok(Data {
http: reqwest::Client::new(),
database,
popular_timezones,
recording_macros: Default::default(),
_broadcast: tx,
})
})
})
.options(options)
.intents(GatewayIntents::GUILDS)
.run_autosharded()
.await?; .await?;
let application_id = http.get_current_application_info().await?.id;
let dm_enabled = env::var("DM_ENABLED").map_or(true, |var| var == "1");
let framework = RegexFramework::new(logged_in_id)
.default_prefix(DEFAULT_PREFIX.clone())
.case_insensitive(env::var("CASE_INSENSITIVE").map_or(true, |var| var == "1"))
.ignore_bots(env::var("IGNORE_BOTS").map_or(true, |var| var == "1"))
.dm_enabled(dm_enabled)
// info commands
.add_command("ping", &info_cmds::PING_COMMAND)
.add_command("help", &info_cmds::HELP_COMMAND)
.add_command("info", &info_cmds::INFO_COMMAND)
.add_command("invite", &info_cmds::INFO_COMMAND)
.add_command("donate", &info_cmds::DONATE_COMMAND)
.add_command("dashboard", &info_cmds::DASHBOARD_COMMAND)
.add_command("clock", &info_cmds::CLOCK_COMMAND)
// reminder commands
.add_command("timer", &reminder_cmds::TIMER_COMMAND)
.add_command("remind", &reminder_cmds::REMIND_COMMAND)
.add_command("r", &reminder_cmds::REMIND_COMMAND)
.add_command("interval", &reminder_cmds::INTERVAL_COMMAND)
.add_command("i", &reminder_cmds::INTERVAL_COMMAND)
.add_command("natural", &reminder_cmds::NATURAL_COMMAND)
.add_command("n", &reminder_cmds::NATURAL_COMMAND)
.add_command("", &reminder_cmds::NATURAL_COMMAND)
.add_command("countdown", &reminder_cmds::COUNTDOWN_COMMAND)
// management commands
.add_command("look", &reminder_cmds::LOOK_COMMAND)
.add_command("del", &reminder_cmds::DELETE_COMMAND)
// to-do commands
.add_command("todo", &todo_cmds::TODO_USER_COMMAND)
.add_command("todo user", &todo_cmds::TODO_USER_COMMAND)
.add_command("todoc", &todo_cmds::TODO_CHANNEL_COMMAND)
.add_command("todo channel", &todo_cmds::TODO_CHANNEL_COMMAND)
.add_command("todos", &todo_cmds::TODO_GUILD_COMMAND)
.add_command("todo server", &todo_cmds::TODO_GUILD_COMMAND)
.add_command("todo guild", &todo_cmds::TODO_GUILD_COMMAND)
// moderation commands
.add_command("blacklist", &moderation_cmds::BLACKLIST_COMMAND)
.add_command("restrict", &moderation_cmds::RESTRICT_COMMAND)
.add_command("timezone", &moderation_cmds::TIMEZONE_COMMAND)
.add_command("prefix", &moderation_cmds::PREFIX_COMMAND)
.add_command("lang", &moderation_cmds::LANGUAGE_COMMAND)
.add_command("pause", &reminder_cmds::PAUSE_COMMAND)
.add_command("offset", &reminder_cmds::OFFSET_COMMAND)
.add_command("nudge", &reminder_cmds::NUDGE_COMMAND)
.add_command("alias", &moderation_cmds::ALIAS_COMMAND)
.add_command("a", &moderation_cmds::ALIAS_COMMAND)
.build();
let framework_arc = Arc::new(framework);
let mut client = Client::builder(&token)
.intents(if dm_enabled {
GatewayIntents::GUILD_MESSAGES
| GatewayIntents::GUILDS
| GatewayIntents::GUILD_MESSAGE_REACTIONS
| GatewayIntents::DIRECT_MESSAGES
| GatewayIntents::DIRECT_MESSAGE_REACTIONS
} else {
GatewayIntents::GUILD_MESSAGES
| GatewayIntents::GUILDS
| GatewayIntents::GUILD_MESSAGE_REACTIONS
})
.application_id(application_id.0)
.event_handler(Handler)
.framework_arc(framework_arc.clone())
.await
.expect("Error occurred creating client");
{
let guild_data_cache = dashmap::DashMap::new();
let pool = MySqlPool::connect(
&env::var("DATABASE_URL").expect("Missing DATABASE_URL from environment"),
)
.await
.unwrap();
let language_manager = LanguageManager::from_compiled(include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/assets/",
env!("STRINGS_FILE")
)))
.unwrap();
let popular_timezones = sqlx::query!(
"SELECT timezone FROM users GROUP BY timezone ORDER BY COUNT(timezone) DESC LIMIT 21"
)
.fetch_all(&pool)
.await
.unwrap()
.iter()
.map(|t| t.timezone.parse::<Tz>().unwrap())
.collect::<Vec<Tz>>();
let mut data = client.data.write().await;
data.insert::<GuildDataCache>(Arc::new(guild_data_cache));
data.insert::<CurrentlyExecuting>(Arc::new(RwLock::new(HashMap::new())));
data.insert::<SQLPool>(pool);
data.insert::<PopularTimezones>(Arc::new(popular_timezones));
data.insert::<ReqwestClient>(Arc::new(reqwest::Client::new()));
data.insert::<FrameworkCtx>(framework_arc.clone());
data.insert::<LanguageManager>(Arc::new(language_manager))
}
if let Ok((Some(lower), Some(upper))) = env::var("SHARD_RANGE").map(|sr| {
let mut split = sr
.split(',')
.map(|val| val.parse::<u64>().expect("SHARD_RANGE not an integer"));
(split.next(), split.next())
}) {
let total_shards = env::var("SHARD_COUNT")
.map(|shard_count| shard_count.parse::<u64>().ok())
.ok()
.flatten()
.expect("No SHARD_COUNT provided, but SHARD_RANGE was provided");
assert!(
lower < upper,
"SHARD_RANGE lower limit is not less than the upper limit"
);
info!(
"Starting client fragment with shards {}-{}/{}",
lower, upper, total_shards
);
client
.start_shard_range([lower, upper], total_shards)
.await?;
} else if let Ok(total_shards) = env::var("SHARD_COUNT").map(|shard_count| {
shard_count
.parse::<u64>()
.expect("SHARD_COUNT not an integer")
}) {
info!("Starting client with {} shards", total_shards);
client.start_shards(total_shards).await?;
} else {
info!("Starting client as autosharded");
client.start_autosharded().await?;
}
Ok(()) Ok(())
} }
pub async fn check_subscription(cache_http: impl CacheHttp, user_id: impl Into<UserId>) -> bool {
if let Some(subscription_guild) = *CNC_GUILD {
let guild_member = GuildId(subscription_guild)
.member(cache_http, user_id)
.await;
if let Ok(member) = guild_member {
for role in member.roles {
if SUBSCRIPTION_ROLES.contains(role.as_u64()) {
return true;
}
}
}
false
} else {
true
}
}
pub async fn check_subscription_on_message(
cache_http: impl CacheHttp + AsRef<Cache>,
msg: &Message,
) -> bool {
check_subscription(&cache_http, &msg.author).await
|| if let Some(guild) = msg.guild(&cache_http) {
check_subscription(&cache_http, guild.owner_id).await
} else {
false
}
}
pub async fn get_ctx_data(ctx: &&Context) -> (MySqlPool, Arc<LanguageManager>) {
let pool;
let lm;
{
let data = ctx.data.read().await;
pool = data
.get::<SQLPool>()
.cloned()
.expect("Could not get SQLPool");
lm = data
.get::<LanguageManager>()
.cloned()
.expect("Could not get LanguageManager");
}
(pool, lm)
}
async fn command_help(
ctx: &Context,
msg: &Message,
lm: Arc<LanguageManager>,
prefix: &str,
language: &str,
command_name: &str,
) {
let _ = msg
.channel_id
.send_message(ctx, |m| {
m.embed(move |e| {
e.title(format!("{} Help", command_name.to_title_case()))
.description(
lm.get(&language, &format!("help/{}", command_name))
.replace("{prefix}", &prefix),
)
.footer(|f| {
f.text(concat!(
env!("CARGO_PKG_NAME"),
" ver ",
env!("CARGO_PKG_VERSION")
))
})
.color(*THEME_COLOR)
})
})
.await;
}
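The sharding branch above expects SHARD_RANGE to hold two comma-separated integers with the lower bound strictly below the upper, alongside a SHARD_COUNT total. A standalone sketch of just that parsing rule (the real code panics on bad input rather than returning an Option):

// Illustrative sketch of the "lower,upper" format accepted by SHARD_RANGE.
fn parse_shard_range(raw: &str) -> Option<(u64, u64)> {
    let mut parts = raw.split(',').map(|val| val.parse::<u64>().ok());
    match (parts.next().flatten(), parts.next().flatten()) {
        (Some(lower), Some(upper)) if lower < upper => Some((lower, upper)),
        _ => None,
    }
}

fn main() {
    assert_eq!(parse_shard_range("0,3"), Some((0, 3)));
    assert_eq!(parse_shard_range("3,3"), None);
}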

src/models/channel_data.rs
View File

@ -1,7 +1,9 @@
use chrono::NaiveDateTime; use serenity::model::channel::Channel;
use poise::serenity_prelude::model::channel::Channel;
use sqlx::MySqlPool; use sqlx::MySqlPool;
use chrono::NaiveDateTime;
pub struct ChannelData { pub struct ChannelData {
pub id: u32, pub id: u32,
pub name: Option<String>, pub name: Option<String>,
@ -10,84 +12,56 @@ pub struct ChannelData {
pub webhook_id: Option<u64>, pub webhook_id: Option<u64>,
pub webhook_token: Option<String>, pub webhook_token: Option<String>,
pub paused: bool, pub paused: bool,
pub db_guild_id: Option<u32>,
pub paused_until: Option<NaiveDateTime>, pub paused_until: Option<NaiveDateTime>,
} }
impl ChannelData { impl ChannelData {
pub async fn from_channel( pub async fn from_channel(
channel: &Channel, channel: Channel,
pool: &MySqlPool, pool: &MySqlPool,
) -> Result<Self, Box<dyn std::error::Error + Sync + Send>> { ) -> Result<Self, Box<dyn std::error::Error + Sync + Send>> {
let channel_id = channel.id().as_u64().to_owned(); let channel_id = channel.id().as_u64().to_owned();
if let Ok(c) = sqlx::query_as_unchecked!( if let Ok(c) = sqlx::query_as_unchecked!(Self,
Self,
" "
SELECT id, name, nudge, blacklisted, webhook_id, webhook_token, paused, paused_until, SELECT id, name, nudge, blacklisted, webhook_id, webhook_token, paused, paused_until FROM channels WHERE channel = ?
guild_id AS db_guild_id ", channel_id)
FROM channels WHERE channel = ? .fetch_one(pool)
", .await {
channel_id
)
.fetch_one(pool)
.await
{
Ok(c)
} else {
let props =
channel.to_owned().guild().map(|g| (g.guild_id.as_u64().to_owned(), g.name));
let (guild_id, channel_name) = Ok(c)
if let Some((a, b)) = props { (Some(a), Some(b)) } else { (None, None) }; }
else {
let props = channel.guild().map(|g| (g.guild_id.as_u64().to_owned(), g.name));
let (guild_id, channel_name) = if let Some((a, b)) = props {
(Some(a), Some(b))
} else {
(None, None)
};
sqlx::query!( sqlx::query!(
" "
INSERT IGNORE INTO channels INSERT IGNORE INTO channels (channel, name, guild_id) VALUES (?, ?, (SELECT id FROM guilds WHERE guild = ?))
(channel, name, guild_id) ", channel_id, channel_name, guild_id)
VALUES (?, ?, (SELECT id FROM guilds WHERE guild = ?)) .execute(&pool.clone())
", .await?;
channel_id,
channel_name,
guild_id
)
.execute(&pool.clone())
.await?;
Ok(sqlx::query_as_unchecked!( Ok(sqlx::query_as_unchecked!(Self,
Self,
" "
SELECT id, name, nudge, blacklisted, webhook_id, webhook_token, paused, SELECT id, name, nudge, blacklisted, webhook_id, webhook_token, paused, paused_until FROM channels WHERE channel = ?
paused_until, guild_id AS db_guild_id ", channel_id)
FROM channels .fetch_one(pool)
WHERE channel = ? .await?)
",
channel_id
)
.fetch_one(pool)
.await?)
} }
} }
pub async fn commit_changes(&self, pool: &MySqlPool) { pub async fn commit_changes(&self, pool: &MySqlPool) {
sqlx::query!( sqlx::query!(
" "
UPDATE channels UPDATE channels SET name = ?, nudge = ?, blacklisted = ?, webhook_id = ?, webhook_token = ?, paused = ?, paused_until = ? WHERE id = ?
SET name = ?, nudge = ?, blacklisted = ?, webhook_id = ?, webhook_token = ?, ", self.name, self.nudge, self.blacklisted, self.webhook_id, self.webhook_token, self.paused, self.paused_until, self.id)
paused = ?, paused_until = ? .execute(pool)
WHERE id = ? .await.unwrap();
",
self.name,
self.nudge,
self.blacklisted,
self.webhook_id,
self.webhook_token,
self.paused,
self.paused_until,
self.id
)
.execute(pool)
.await
.unwrap();
} }
} }
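Both versions of ChannelData follow the same fetch-mutate-commit pattern. A hedged sketch of how a command might flip the pause flag once a row has been loaded (the function and variable names are assumptions):

// Sketch only: toggle pausing for a channel row that has already been fetched.
async fn toggle_pause(mut channel: ChannelData, pool: &sqlx::MySqlPool) -> bool {
    channel.paused = !channel.paused;
    channel.paused_until = None;
    channel.commit_changes(pool).await;
    channel.paused
}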

src/models/command_macro.rs
View File

@ -1,77 +0,0 @@
use poise::serenity_prelude::model::{
application::interaction::application_command::CommandDataOption, id::GuildId,
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::{Context, Data, Error};
type Func<U, E> = for<'a> fn(
poise::ApplicationContext<'a, U, E>,
) -> poise::BoxFuture<'a, Result<(), poise::FrameworkError<'a, U, E>>>;
fn default_none<U, E>() -> Option<Func<U, E>> {
None
}
#[derive(Serialize, Deserialize)]
pub struct RecordedCommand<U, E> {
#[serde(skip)]
#[serde(default = "default_none::<U, E>")]
pub action: Option<Func<U, E>>,
pub command_name: String,
pub options: Vec<CommandDataOption>,
}
pub struct CommandMacro<U, E> {
pub guild_id: GuildId,
pub name: String,
pub description: Option<String>,
pub commands: Vec<RecordedCommand<U, E>>,
}
pub struct RawCommandMacro {
pub guild_id: GuildId,
pub name: String,
pub description: Option<String>,
pub commands: Value,
}
pub async fn guild_command_macro(
ctx: &Context<'_>,
name: &str,
) -> Option<CommandMacro<Data, Error>> {
let row = sqlx::query!(
"
SELECT * FROM macro WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?) AND name = ?
",
ctx.guild_id().unwrap().0,
name
)
.fetch_one(&ctx.data().database)
.await
.ok()?;
let mut commands: Vec<RecordedCommand<Data, Error>> =
serde_json::from_str(&row.commands).unwrap();
for recorded_command in &mut commands {
let command = &ctx
.framework()
.options()
.commands
.iter()
.find(|c| c.identifying_name == recorded_command.command_name);
recorded_command.action = command.map(|c| c.slash_action).flatten();
}
let command_macro = CommandMacro {
guild_id: ctx.guild_id().unwrap(),
name: row.name,
description: row.description,
commands,
};
Some(command_macro)
}
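Because `action` carries #[serde(skip)], only command_name and options survive the trip through the `commands` JSON column; guild_command_macro then re-attaches the slash action by matching command_name against the framework's command list. A minimal sketch of the decode step it performs:

// Sketch: decode the stored JSON column into recorded commands; `action` comes back as None.
fn decode_macro(raw: &str) -> Vec<RecordedCommand<Data, Error>> {
    serde_json::from_str(raw).unwrap()
}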

src/models/guild_data.rs
View File

@ -1,44 +1,75 @@
use poise::serenity_prelude::GuildId; use serenity::model::guild::Guild;
use sqlx::MySqlPool; use sqlx::MySqlPool;
use log::error;
use crate::consts::DEFAULT_PREFIX;
pub struct GuildData { pub struct GuildData {
pub ephemeral_confirmations: bool,
pub id: u32, pub id: u32,
pub name: Option<String>,
pub prefix: String,
} }
impl GuildData { impl GuildData {
pub async fn from_guild( pub async fn from_guild(guild: Guild, pool: &MySqlPool) -> Result<Self, sqlx::Error> {
guild_id: GuildId, let guild_id = guild.id.as_u64().to_owned();
pool: &MySqlPool,
) -> Result<Self, Box<dyn std::error::Error + Sync + Send>> { match sqlx::query_as!(
if let Ok(c) = sqlx::query_as_unchecked!(
Self, Self,
"SELECT id, ephemeral_confirmations FROM guilds WHERE guild = ?", "
guild_id.0 SELECT id, name, prefix FROM guilds WHERE guild = ?
",
guild_id
) )
.fetch_one(pool) .fetch_one(pool)
.await .await
{ {
Ok(c) Ok(mut g) => {
} else { g.name = Some(guild.name);
sqlx::query!("INSERT IGNORE INTO guilds (guild) VALUES (?)", guild_id.0)
Ok(g)
}
Err(sqlx::Error::RowNotFound) => {
sqlx::query!(
"
INSERT INTO guilds (guild, name, prefix) VALUES (?, ?, ?)
",
guild_id,
guild.name,
*DEFAULT_PREFIX
)
.execute(&pool.clone()) .execute(&pool.clone())
.await?; .await?;
Ok(sqlx::query_as_unchecked!( Ok(sqlx::query_as!(
Self, Self,
"SELECT id, ephemeral_confirmations FROM guilds WHERE guild = ?", "
guild_id.0 SELECT id, name, prefix FROM guilds WHERE guild = ?
) ",
.fetch_one(pool) guild_id
.await?) )
.fetch_one(pool)
.await?)
}
Err(e) => {
error!("Unexpected error in guild query: {:?}", e);
Err(e)
}
} }
} }
pub async fn commit_changes(&self, pool: &MySqlPool) { pub async fn commit_changes(&self, pool: &MySqlPool) {
sqlx::query!( sqlx::query!(
"UPDATE guilds SET ephemeral_confirmations = ? WHERE id = ?", "
self.ephemeral_confirmations, UPDATE guilds SET name = ?, prefix = ? WHERE id = ?
",
self.name,
self.prefix,
self.id self.id
) )
.execute(pool) .execute(pool)

src/models/mod.rs
View File

@ -1,97 +1,78 @@
pub mod channel_data; pub mod channel_data;
pub mod command_macro;
pub mod guild_data; pub mod guild_data;
pub mod reminder; pub mod reminder;
pub mod timer; pub mod timer;
pub mod user_data; pub mod user_data;
use chrono_tz::Tz; use serenity::{async_trait, model::id::GuildId, prelude::Context};
use poise::serenity_prelude::{async_trait, model::id::UserId, ChannelType};
use crate::{ use crate::{consts::DEFAULT_PREFIX, GuildDataCache, SQLPool};
models::{channel_data::ChannelData, guild_data::GuildData, user_data::UserData},
CommandMacro, Context, Data, Error, GuildId, use guild_data::GuildData;
};
use std::sync::Arc;
use tokio::sync::RwLock;
#[async_trait] #[async_trait]
pub trait CtxData { pub trait CtxGuildData {
async fn user_data<U: Into<UserId> + Send>(&self, user_id: U) -> Result<UserData, Error>; async fn guild_data<G: Into<GuildId> + Send + Sync>(
&self,
guild_id: G,
) -> Result<Arc<RwLock<GuildData>>, sqlx::Error>;
async fn author_data(&self) -> Result<UserData, Error>; async fn prefix<G: Into<GuildId> + Send + Sync>(&self, guild_id: Option<G>) -> String;
async fn guild_data(&self) -> Option<Result<GuildData, Error>>;
async fn timezone(&self) -> Tz;
async fn channel_data(&self) -> Result<ChannelData, Error>;
async fn command_macros(&self) -> Result<Vec<CommandMacro<Data, Error>>, Error>;
} }
#[async_trait] #[async_trait]
impl CtxData for Context<'_> { impl CtxGuildData for Context {
async fn user_data<U: Into<UserId> + Send>(&self, user_id: U) -> Result<UserData, Error> { async fn guild_data<G: Into<GuildId> + Send + Sync>(
UserData::from_user(user_id, &self.serenity_context(), &self.data().database).await &self,
} guild_id: G,
) -> Result<Arc<RwLock<GuildData>>, sqlx::Error> {
let guild_id = guild_id.into();
async fn author_data(&self) -> Result<UserData, Error> { let guild = guild_id.to_guild_cached(&self.cache).unwrap();
UserData::from_user(&self.author().id, &self.serenity_context(), &self.data().database)
let guild_cache = self
.data
.read()
.await .await
} .get::<GuildDataCache>()
.cloned()
.unwrap();
async fn guild_data(&self) -> Option<Result<GuildData, Error>> { let x = if let Some(guild_data) = guild_cache.get(&guild_id) {
if let Some(guild_id) = self.guild_id() { Ok(guild_data.clone())
Some(GuildData::from_guild(guild_id, &self.data().database).await)
} else { } else {
None let pool = self.data.read().await.get::<SQLPool>().cloned().unwrap();
}
}
async fn timezone(&self) -> Tz { match GuildData::from_guild(guild, &pool).await {
UserData::timezone_of(self.author().id, &self.data().database).await Ok(d) => {
} let lock = Arc::new(RwLock::new(d));
async fn channel_data(&self) -> Result<ChannelData, Box<dyn std::error::Error + Sync + Send>> { guild_cache.insert(guild_id, lock.clone());
// If we're in a thread, get the parent channel.
let recv_channel = self.channel_id().to_channel(&self).await?;
let channel = match recv_channel.guild() { Ok(lock)
Some(guild_channel) => {
if guild_channel.kind == ChannelType::PublicThread {
guild_channel.parent_id.unwrap().to_channel_cached(&self).unwrap()
} else {
self.channel_id().to_channel_cached(&self).unwrap()
} }
}
None => self.channel_id().to_channel_cached(&self).unwrap(), Err(e) => Err(e),
}
}; };
ChannelData::from_channel(&channel, &self.data().database).await x
} }
async fn command_macros(&self) -> Result<Vec<CommandMacro<Data, Error>>, Error> { async fn prefix<G: Into<GuildId> + Send + Sync>(&self, guild_id: Option<G>) -> String {
self.data().command_macros(self.guild_id().unwrap()).await if let Some(guild_id) = guild_id {
} self.guild_data(guild_id)
} .await
.unwrap()
impl Data { .read()
pub(crate) async fn command_macros( .await
&self, .prefix
guild_id: GuildId, .clone()
) -> Result<Vec<CommandMacro<Data, Error>>, Error> { } else {
let rows = sqlx::query!( DEFAULT_PREFIX.clone()
"SELECT name, description, commands FROM macro WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?)", }
guild_id.0
)
.fetch_all(&self.database)
.await?.iter().map(|row| CommandMacro {
guild_id,
name: row.name.clone(),
description: row.description.clone(),
commands: serde_json::from_str(&row.commands).unwrap(),
}).collect();
Ok(rows)
} }
} }
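A sketch of how a message handler would use the serenity-side trait above to pick the right prefix (imports for Context, Message and the CtxGuildData trait assumed to be in scope):

// Sketch only: resolve the prefix for an incoming message; DMs fall back to DEFAULT_PREFIX.
async fn prefix_for(ctx: &Context, msg: &Message) -> String {
    ctx.prefix(msg.guild_id).await
}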

src/models/reminder/mod.rs
View File

@ -1,33 +1,43 @@
pub mod builder; use serenity::{
pub mod content; client::Context,
pub mod errors;
mod helper;
pub mod look_flags;
use std::hash::{Hash, Hasher};
use chrono::{DateTime, NaiveDateTime, Utc};
use chrono_tz::Tz;
use poise::serenity_prelude::{
model::id::{ChannelId, GuildId, UserId}, model::id::{ChannelId, GuildId, UserId},
Cache,
}; };
use sqlx::Executor;
use chrono::NaiveDateTime;
use crate::{ use crate::{
models::reminder::look_flags::{LookFlags, TimeDisplayType}, consts::{DAY, HOUR, MINUTE, REGEX_CHANNEL},
Database, SQLPool,
}; };
#[derive(Debug, Clone)] use num_integer::Integer;
fn longhand_displacement(seconds: u64) -> String {
let (days, seconds) = seconds.div_rem(&DAY);
let (hours, seconds) = seconds.div_rem(&HOUR);
let (minutes, seconds) = seconds.div_rem(&MINUTE);
let mut sections = vec![];
for (var, name) in [days, hours, minutes, seconds]
.iter()
.zip(["days", "hours", "minutes", "seconds"].iter())
{
if *var > 0 {
sections.push(format!("{} {}", var, name));
}
}
sections.join(", ")
}
#[derive(Debug)]
pub struct Reminder { pub struct Reminder {
pub id: u32, pub id: u32,
pub uid: String, pub uid: String,
pub channel: u64, pub channel: u64,
pub utc_time: DateTime<Utc>, pub utc_time: NaiveDateTime,
pub interval_seconds: Option<u32>, pub interval_seconds: Option<u32>,
pub interval_days: Option<u32>,
pub interval_months: Option<u32>,
pub expires: Option<NaiveDateTime>, pub expires: Option<NaiveDateTime>,
pub enabled: bool, pub enabled: bool,
pub content: String, pub content: String,
@ -35,22 +45,10 @@ pub struct Reminder {
pub set_by: Option<u64>, pub set_by: Option<u64>,
} }
impl Hash for Reminder {
fn hash<H: Hasher>(&self, state: &mut H) {
self.uid.hash(state);
}
}
impl PartialEq<Self> for Reminder {
fn eq(&self, other: &Self) -> bool {
self.uid == other.uid
}
}
impl Eq for Reminder {}
impl Reminder { impl Reminder {
pub async fn from_uid(pool: impl Executor<'_, Database = Database>, uid: &str) -> Option<Self> { pub async fn from_uid(ctx: &Context, uid: String) -> Option<Self> {
let pool = ctx.data.read().await.get::<SQLPool>().cloned().unwrap();
sqlx::query_as_unchecked!( sqlx::query_as_unchecked!(
Self, Self,
" "
@ -60,8 +58,6 @@ SELECT
channels.channel, channels.channel,
reminders.utc_time, reminders.utc_time,
reminders.interval_seconds, reminders.interval_seconds,
reminders.interval_days,
reminders.interval_months,
reminders.expires, reminders.expires,
reminders.enabled, reminders.enabled,
reminders.content, reminders.content,
@ -82,53 +78,18 @@ WHERE
", ",
uid uid
) )
.fetch_one(pool) .fetch_one(&pool)
.await
.ok()
}
pub async fn from_id(pool: impl Executor<'_, Database = Database>, id: u32) -> Option<Self> {
sqlx::query_as_unchecked!(
Self,
"
SELECT
reminders.id,
reminders.uid,
channels.channel,
reminders.utc_time,
reminders.interval_seconds,
reminders.interval_days,
reminders.interval_months,
reminders.expires,
reminders.enabled,
reminders.content,
reminders.embed_description,
users.user AS set_by
FROM
reminders
INNER JOIN
channels
ON
reminders.channel_id = channels.id
LEFT JOIN
users
ON
reminders.set_by = users.id
WHERE
reminders.id = ?
",
id
)
.fetch_one(pool)
.await .await
.ok() .ok()
} }
pub async fn from_channel<C: Into<ChannelId>>( pub async fn from_channel<C: Into<ChannelId>>(
pool: impl Executor<'_, Database = Database>, ctx: &Context,
channel_id: C, channel_id: C,
flags: &LookFlags, flags: &LookFlags,
) -> Vec<Self> { ) -> Vec<Self> {
let pool = ctx.data.read().await.get::<SQLPool>().cloned().unwrap();
let enabled = if flags.show_disabled { "0,1" } else { "1" }; let enabled = if flags.show_disabled { "0,1" } else { "1" };
let channel_id = channel_id.into(); let channel_id = channel_id.into();
@ -141,8 +102,6 @@ SELECT
channels.channel, channels.channel,
reminders.utc_time, reminders.utc_time,
reminders.interval_seconds, reminders.interval_seconds,
reminders.interval_days,
reminders.interval_months,
reminders.expires, reminders.expires,
reminders.enabled, reminders.enabled,
reminders.content, reminders.content,
@ -159,28 +118,27 @@ LEFT JOIN
ON ON
reminders.set_by = users.id reminders.set_by = users.id
WHERE WHERE
`status` = 'pending' AND
channels.channel = ? AND channels.channel = ? AND
FIND_IN_SET(reminders.enabled, ?) FIND_IN_SET(reminders.enabled, ?)
ORDER BY ORDER BY
reminders.utc_time reminders.utc_time
LIMIT
?
", ",
channel_id.as_u64(), channel_id.as_u64(),
enabled, enabled,
flags.limit
) )
.fetch_all(pool) .fetch_all(&pool)
.await .await
.unwrap() .unwrap()
} }
pub async fn from_guild( pub async fn from_guild(ctx: &Context, guild_id: Option<GuildId>, user: UserId) -> Vec<Self> {
cache: impl AsRef<Cache>, let pool = ctx.data.read().await.get::<SQLPool>().cloned().unwrap();
pool: impl Executor<'_, Database = Database>,
guild_id: Option<GuildId>,
user: UserId,
) -> Vec<Self> {
if let Some(guild_id) = guild_id { if let Some(guild_id) = guild_id {
let guild_opt = guild_id.to_guild_cached(cache); let guild_opt = guild_id.to_guild_cached(&ctx);
if let Some(guild) = guild_opt { if let Some(guild) = guild_opt {
let channels = guild let channels = guild
@ -200,8 +158,6 @@ SELECT
channels.channel, channels.channel,
reminders.utc_time, reminders.utc_time,
reminders.interval_seconds, reminders.interval_seconds,
reminders.interval_days,
reminders.interval_months,
reminders.expires, reminders.expires,
reminders.enabled, reminders.enabled,
reminders.content, reminders.content,
@ -218,12 +174,11 @@ LEFT JOIN
ON ON
reminders.set_by = users.id reminders.set_by = users.id
WHERE WHERE
`status` = 'pending' AND
FIND_IN_SET(channels.channel, ?) FIND_IN_SET(channels.channel, ?)
", ",
channels channels
) )
.fetch_all(pool) .fetch_all(&pool)
.await .await
} else { } else {
sqlx::query_as_unchecked!( sqlx::query_as_unchecked!(
@ -235,8 +190,6 @@ SELECT
channels.channel, channels.channel,
reminders.utc_time, reminders.utc_time,
reminders.interval_seconds, reminders.interval_seconds,
reminders.interval_days,
reminders.interval_months,
reminders.expires, reminders.expires,
reminders.enabled, reminders.enabled,
reminders.content, reminders.content,
@ -253,12 +206,11 @@ LEFT JOIN
ON ON
reminders.set_by = users.id reminders.set_by = users.id
WHERE WHERE
`status` = 'pending' AND
channels.guild_id = (SELECT id FROM guilds WHERE guild = ?) channels.guild_id = (SELECT id FROM guilds WHERE guild = ?)
", ",
guild_id.as_u64() guild_id.as_u64()
) )
.fetch_all(pool) .fetch_all(&pool)
.await .await
} }
} else { } else {
@ -271,8 +223,6 @@ SELECT
channels.channel, channels.channel,
reminders.utc_time, reminders.utc_time,
reminders.interval_seconds, reminders.interval_seconds,
reminders.interval_days,
reminders.interval_months,
reminders.expires, reminders.expires,
reminders.enabled, reminders.enabled,
reminders.content, reminders.content,
@ -289,30 +239,16 @@ LEFT JOIN
ON ON
reminders.set_by = users.id reminders.set_by = users.id
WHERE WHERE
`status` = 'pending' AND
channels.id = (SELECT dm_channel FROM users WHERE user = ?) channels.id = (SELECT dm_channel FROM users WHERE user = ?)
", ",
user.as_u64() user.as_u64()
) )
.fetch_all(pool) .fetch_all(&pool)
.await .await
} }
.unwrap() .unwrap()
} }
pub async fn delete(
&self,
db: impl Executor<'_, Database = Database>,
) -> Result<(), sqlx::Error> {
sqlx::query!(
"UPDATE reminders SET `status` = 'deleted', `status_change_time` = NOW() WHERE uid = ?",
self.uid
)
.execute(db)
.await
.map(|_| ())
}
pub fn display_content(&self) -> &str { pub fn display_content(&self) -> &str {
if self.content.is_empty() { if self.content.is_empty() {
&self.embed_description &self.embed_description
@ -321,42 +257,90 @@ WHERE
} }
} }
pub fn display_del(&self, count: usize, timezone: &Tz) -> String { pub fn display(&self, flags: &LookFlags, inter: &str) -> String {
format!(
"**{}**: '{}' *<#{}>* at **{}**",
count + 1,
self.display_content(),
self.channel,
self.utc_time.with_timezone(timezone).format("%Y-%m-%d %H:%M:%S")
)
}
pub fn display(&self, flags: &LookFlags, timezone: &Tz) -> String {
let time_display = match flags.time_display { let time_display = match flags.time_display {
TimeDisplayType::Absolute => { TimeDisplayType::Absolute => format!("<t:{}>", self.utc_time.timestamp()),
self.utc_time.with_timezone(timezone).format("%Y-%m-%d %H:%M:%S").to_string()
}
TimeDisplayType::Relative => format!("<t:{}:R>", self.utc_time.timestamp()), TimeDisplayType::Relative => format!("<t:{}:R>", self.utc_time.timestamp()),
}; };
if self.interval_seconds.is_some() if let Some(interval) = self.interval_seconds {
|| self.interval_days.is_some()
|| self.interval_months.is_some()
{
format!( format!(
"'{}' *occurs next at* **{}**, repeating (set by {})\n", "'{}' *{}* **{}**, repeating every **{}** (set by {})",
self.display_content(), self.display_content(),
&inter,
time_display, time_display,
self.set_by.map(|i| format!("<@{}>", i)).unwrap_or_else(|| "unknown".to_string()) longhand_displacement(interval as u64),
self.set_by
.map(|i| format!("<@{}>", i))
.unwrap_or_else(|| "unknown".to_string())
) )
} else { } else {
format!( format!(
"'{}' *occurs next at* **{}** (set by {})\n", "'{}' *{}* **{}** (set by {})",
self.display_content(), self.display_content(),
&inter,
time_display, time_display,
self.set_by.map(|i| format!("<@{}>", i)).unwrap_or_else(|| "unknown".to_string()) self.set_by
.map(|i| format!("<@{}>", i))
.unwrap_or_else(|| "unknown".to_string())
) )
} }
} }
} }
enum TimeDisplayType {
Absolute,
Relative,
}
pub struct LookFlags {
pub limit: u16,
pub show_disabled: bool,
pub channel_id: Option<ChannelId>,
time_display: TimeDisplayType,
}
impl Default for LookFlags {
fn default() -> Self {
Self {
limit: u16::MAX,
show_disabled: true,
channel_id: None,
time_display: TimeDisplayType::Relative,
}
}
}
impl LookFlags {
pub fn from_string(args: &str) -> Self {
let mut new_flags: Self = Default::default();
for arg in args.split(' ') {
match arg {
"enabled" => {
new_flags.show_disabled = false;
}
"time" => {
new_flags.time_display = TimeDisplayType::Absolute;
}
param => {
if let Ok(val) = param.parse::<u16>() {
new_flags.limit = val;
} else if let Some(channel) = REGEX_CHANNEL
.captures(&arg)
.map(|cap| cap.get(1))
.flatten()
.map(|c| c.as_str().parse::<u64>().unwrap())
{
new_flags.channel_id = Some(ChannelId(channel));
}
}
}
}
new_flags
}
}
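For the text-command-era LookFlags above, the accepted grammar is: "enabled" to hide disabled reminders, "time" for absolute timestamps, a bare number for the limit, and a channel mention to retarget the listing. A hedged example, assuming REGEX_CHANNEL matches the usual <#id> mention form:

// Sketch: parsing a full flag string.
fn example_flags() -> LookFlags {
    let flags = LookFlags::from_string("enabled time 25 <#123456789012345678>");
    // flags.show_disabled == false, flags.limit == 25,
    // flags.time_display is Absolute, flags.channel_id is Some(ChannelId(123456789012345678))
    flags
}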

src/models/reminder/builder.rs
View File

@ -1,358 +0,0 @@
use std::{collections::HashSet, fmt::Display};
use chrono::{Duration, NaiveDateTime, Utc};
use chrono_tz::Tz;
use poise::serenity_prelude::{
http::CacheHttp,
model::{
channel::GuildChannel,
id::{ChannelId, GuildId, UserId},
webhook::Webhook,
},
ChannelType, Result as SerenityResult,
};
use sqlx::MySqlPool;
use crate::{
consts::{DAY, DEFAULT_AVATAR, MAX_TIME, MIN_INTERVAL},
interval_parser::Interval,
models::{
channel_data::ChannelData,
reminder::{content::Content, errors::ReminderError, helper::generate_uid, Reminder},
user_data::UserData,
},
Context,
};
async fn create_webhook(
ctx: impl CacheHttp,
channel: GuildChannel,
name: impl Display,
) -> SerenityResult<Webhook> {
channel.create_webhook_with_avatar(ctx.http(), name, DEFAULT_AVATAR.clone()).await
}
#[derive(Hash, PartialEq, Eq)]
pub enum ReminderScope {
User(u64),
Channel(u64),
}
impl ReminderScope {
pub fn mention(&self) -> String {
match self {
Self::User(id) => format!("<@{}>", id),
Self::Channel(id) => format!("<#{}>", id),
}
}
}
pub struct ReminderBuilder {
pool: MySqlPool,
uid: String,
channel: u32,
guild: Option<u32>,
thread_id: Option<u64>,
utc_time: NaiveDateTime,
timezone: String,
interval_seconds: Option<i64>,
interval_days: Option<i64>,
interval_months: Option<i64>,
expires: Option<NaiveDateTime>,
content: String,
tts: bool,
attachment_name: Option<String>,
attachment: Option<Vec<u8>>,
set_by: Option<u32>,
}
impl ReminderBuilder {
pub async fn build(self) -> Result<Reminder, ReminderError> {
let queried_time = sqlx::query!(
"SELECT DATE_ADD(?, INTERVAL (SELECT nudge FROM channels WHERE id = ?) SECOND) AS `utc_time`",
self.utc_time,
self.channel,
)
.fetch_one(&self.pool)
.await
.unwrap();
match queried_time.utc_time {
Some(utc_time) => {
if utc_time < (Utc::now() - Duration::seconds(60)).naive_local() {
Err(ReminderError::PastTime)
} else {
sqlx::query!(
"
INSERT INTO reminders (
`uid`,
`channel_id`,
`guild_id`,
`utc_time`,
`timezone`,
`interval_seconds`,
`interval_days`,
`interval_months`,
`expires`,
`content`,
`tts`,
`attachment_name`,
`attachment`,
`set_by`
) VALUES (
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?
)
",
self.uid,
self.channel,
self.guild,
utc_time,
self.timezone,
self.interval_seconds,
self.interval_days,
self.interval_months,
self.expires,
self.content,
self.tts,
self.attachment_name,
self.attachment,
self.set_by
)
.execute(&self.pool)
.await
.unwrap();
Ok(Reminder::from_uid(&self.pool, &self.uid).await.unwrap())
}
}
None => Err(ReminderError::LongTime),
}
}
}
pub struct MultiReminderBuilder<'a> {
scopes: Vec<ReminderScope>,
utc_time: NaiveDateTime,
timezone: Tz,
interval: Option<Interval>,
expires: Option<NaiveDateTime>,
content: Content,
set_by: Option<u32>,
ctx: &'a Context<'a>,
guild_id: Option<GuildId>,
}
impl<'a> MultiReminderBuilder<'a> {
pub fn new(ctx: &'a Context, guild_id: Option<GuildId>) -> Self {
MultiReminderBuilder {
scopes: vec![],
utc_time: Utc::now().naive_utc(),
timezone: Tz::UTC,
interval: None,
expires: None,
content: Content::new(),
set_by: None,
ctx,
guild_id,
}
}
pub fn timezone(mut self, timezone: Tz) -> Self {
self.timezone = timezone;
self
}
pub fn content(mut self, content: Content) -> Self {
self.content = content;
self
}
pub fn time<T: Into<i64>>(mut self, time: T) -> Self {
if let Some(utc_time) = NaiveDateTime::from_timestamp_opt(time.into(), 0) {
self.utc_time = utc_time;
}
self
}
pub fn expires<T: Into<i64>>(mut self, time: Option<T>) -> Self {
self.expires = time.map(|t| NaiveDateTime::from_timestamp_opt(t.into(), 0)).flatten();
self
}
pub fn author(mut self, user: UserData) -> Self {
self.set_by = Some(user.id);
self.timezone = user.timezone();
self
}
pub fn interval(mut self, interval: Option<Interval>) -> Self {
self.interval = interval;
self
}
pub fn set_scopes(&mut self, scopes: Vec<ReminderScope>) {
self.scopes = scopes;
}
pub async fn build(self) -> (HashSet<ReminderError>, HashSet<(Reminder, ReminderScope)>) {
let mut errors = HashSet::new();
let mut ok_locs = HashSet::new();
if self
.interval
.map_or(false, |i| ((i.sec + i.day * DAY + i.month * 30 * DAY) as i64) < *MIN_INTERVAL)
{
errors.insert(ReminderError::ShortInterval);
} else if self
.interval
.map_or(false, |i| ((i.sec + i.day * DAY + i.month * 30 * DAY) as i64) > *MAX_TIME)
{
errors.insert(ReminderError::LongInterval);
} else {
for scope in self.scopes {
let thread_id = None;
let db_channel_id = match scope {
ReminderScope::User(user_id) => {
if let Ok(user) = UserId(user_id).to_user(&self.ctx).await {
let user_data = UserData::from_user(
&user,
&self.ctx.serenity_context(),
&self.ctx.data().database,
)
.await
.unwrap();
if let Some(guild_id) = self.guild_id {
if guild_id.member(&self.ctx, user).await.is_err() {
Err(ReminderError::InvalidTag)
} else if self.set_by.map_or(true, |i| i != user_data.id)
&& !user_data.allowed_dm
{
Err(ReminderError::UserBlockedDm)
} else {
Ok((user_data.dm_channel, None))
}
} else {
Ok((user_data.dm_channel, None))
}
} else {
Err(ReminderError::InvalidTag)
}
}
ReminderScope::Channel(channel_id) => {
let channel = ChannelId(channel_id).to_channel(&self.ctx).await.unwrap();
if let Some(mut guild_channel) = channel.clone().guild() {
if Some(guild_channel.guild_id) != self.guild_id {
Err(ReminderError::InvalidTag)
} else {
let mut channel_data = if guild_channel.kind
== ChannelType::PublicThread
{
// fixme jesus christ
let parent = guild_channel
.parent_id
.unwrap()
.to_channel(&self.ctx)
.await
.unwrap();
guild_channel = parent.clone().guild().unwrap();
ChannelData::from_channel(&parent, &self.ctx.data().database)
.await
.unwrap()
} else {
ChannelData::from_channel(&channel, &self.ctx.data().database)
.await
.unwrap()
};
if channel_data.webhook_id.is_none()
|| channel_data.webhook_token.is_none()
{
match create_webhook(&self.ctx, guild_channel, "Reminder").await
{
Ok(webhook) => {
channel_data.webhook_id =
Some(webhook.id.as_u64().to_owned());
channel_data.webhook_token = webhook.token;
channel_data
.commit_changes(&self.ctx.data().database)
.await;
Ok((channel_data.id, channel_data.db_guild_id))
}
Err(e) => Err(ReminderError::DiscordError(e.to_string())),
}
} else {
Ok((channel_data.id, channel_data.db_guild_id))
}
}
} else {
Err(ReminderError::InvalidTag)
}
}
};
match db_channel_id {
Ok(c) => {
let builder = ReminderBuilder {
pool: self.ctx.data().database.clone(),
uid: generate_uid(),
channel: c.0,
guild: c.1,
thread_id,
utc_time: self.utc_time,
timezone: self.timezone.to_string(),
interval_seconds: self.interval.map(|i| i.sec as i64),
interval_days: self.interval.map(|i| i.day as i64),
interval_months: self.interval.map(|i| i.month as i64),
expires: self.expires,
content: self.content.content.clone(),
tts: self.content.tts,
attachment_name: self.content.attachment_name.clone(),
attachment: self.content.attachment.clone(),
set_by: self.set_by,
};
match builder.build().await {
Ok(r) => {
ok_locs.insert((r, scope));
}
Err(e) => {
errors.insert(e);
}
}
}
Err(e) => {
errors.insert(e);
}
}
}
}
(errors, ok_locs)
}
}
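The builder above is consumed by the reminder commands roughly as follows. This is a shape sketch only: the surrounding variable names are assumptions and error handling is elided.

// Sketch: create reminders for a set of scopes at a given unix timestamp.
async fn schedule(
    ctx: &Context<'_>,
    scopes: Vec<ReminderScope>,
    content: Content,
    author: UserData,
    timestamp: i64,
) {
    let mut builder = MultiReminderBuilder::new(ctx, ctx.guild_id())
        .author(author) // also picks up the author's timezone
        .content(content)
        .time(timestamp)
        .expires::<i64>(None)
        .interval(None);
    builder.set_scopes(scopes);
    let (errors, successful) = builder.build().await;
    // errors: HashSet<ReminderError>; successful: HashSet<(Reminder, ReminderScope)>
    let _ = (errors, successful);
}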

src/models/reminder/content.rs
View File

@ -1,12 +0,0 @@
pub struct Content {
pub content: String,
pub tts: bool,
pub attachment: Option<Vec<u8>>,
pub attachment_name: Option<String>,
}
impl Content {
pub fn new() -> Self {
Self { content: "".to_string(), tts: false, attachment: None, attachment_name: None }
}
}

src/models/reminder/errors.rs
View File

@ -1,40 +0,0 @@
use crate::consts::{MAX_TIME, MIN_INTERVAL};
#[derive(PartialEq, Eq, Hash, Debug)]
pub enum ReminderError {
LongTime,
LongInterval,
PastTime,
ShortInterval,
InvalidTag,
UserBlockedDm,
DiscordError(String),
}
impl ToString for ReminderError {
fn to_string(&self) -> String {
match self {
ReminderError::LongTime => {
"That time is too far in the future. Please specify a shorter time.".to_string()
}
ReminderError::LongInterval => format!(
"Please ensure the interval specified is less than {max_time} days",
max_time = *MAX_TIME / 86_400
),
ReminderError::PastTime => {
"Please ensure the time provided is in the future. If the time should be in the future, please be more specific with the definition.".to_string()
}
ReminderError::ShortInterval => format!(
"Please ensure the interval provided is longer than {min_interval} seconds",
min_interval = *MIN_INTERVAL
),
ReminderError::InvalidTag => {
"Couldn't find a location by your tag. Your tag must be either a channel or a user (not a role)".to_string()
}
ReminderError::UserBlockedDm => {
"User has DM reminders disabled".to_string()
}
ReminderError::DiscordError(s) => format!("A Discord error occurred: **{}**", s),
}
}
}

src/models/reminder/helper.rs
View File

@ -1,12 +0,0 @@
use rand::{rngs::OsRng, seq::IteratorRandom};
use crate::consts::CHARACTERS;
pub fn generate_uid() -> String {
let mut generator: OsRng = Default::default();
(0..64)
.map(|_| CHARACTERS.chars().choose(&mut generator).unwrap().to_owned().to_string())
.collect::<Vec<String>>()
.join("")
}
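The UIDs produced above are 64 characters sampled from the CHARACTERS alphabet using the OS RNG; for instance:

// Sketch: every reminder uid has a fixed length of 64 characters.
fn example() {
    let uid = generate_uid();
    assert_eq!(uid.chars().count(), 64);
}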

src/models/reminder/look_flags.rs
View File

@ -1,23 +0,0 @@
use poise::serenity_prelude::model::id::ChannelId;
use serde::{Deserialize, Serialize};
use serde_repr::*;
#[derive(Serialize_repr, Deserialize_repr, Copy, Clone, Debug)]
#[repr(u8)]
pub enum TimeDisplayType {
Absolute = 0,
Relative = 1,
}
#[derive(Serialize, Deserialize, Copy, Clone, Debug)]
pub struct LookFlags {
pub show_disabled: bool,
pub channel_id: Option<ChannelId>,
pub time_display: TimeDisplayType,
}
impl Default for LookFlags {
fn default() -> Self {
Self { show_disabled: true, channel_id: None, time_display: TimeDisplayType::Relative }
}
}
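Since TimeDisplayType uses serde_repr, these flags round-trip through JSON with the display mode stored as 0 (absolute) or 1 (relative). A small sketch using the defaults above:

// Sketch: serialise the default flags and read them back.
fn roundtrip() {
    let flags = LookFlags::default();
    let encoded = serde_json::to_string(&flags).unwrap();
    let decoded: LookFlags = serde_json::from_str(&encoded).unwrap();
    let _ = decoded;
}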

src/models/timer.rs
View File

@ -1,9 +1,10 @@
use chrono::{DateTime, Utc};
use sqlx::MySqlPool; use sqlx::MySqlPool;
use chrono::NaiveDateTime;
pub struct Timer { pub struct Timer {
pub name: String, pub name: String,
pub start_time: DateTime<Utc>, pub start_time: NaiveDateTime,
pub owner: u64, pub owner: u64,
} }

src/models/user_data.rs
View File

@ -1,19 +1,47 @@
use chrono_tz::Tz; use serenity::{
use log::error; http::CacheHttp,
use poise::serenity_prelude::{http::CacheHttp, model::id::UserId}; model::{id::UserId, user::User},
};
use sqlx::MySqlPool; use sqlx::MySqlPool;
use crate::consts::LOCAL_TIMEZONE; use chrono_tz::Tz;
use log::error;
use crate::consts::{LOCAL_LANGUAGE, LOCAL_TIMEZONE};
pub struct UserData { pub struct UserData {
pub id: u32, pub id: u32,
pub user: u64, pub user: u64,
pub name: String,
pub dm_channel: u32, pub dm_channel: u32,
pub language: String,
pub timezone: String, pub timezone: String,
pub allowed_dm: bool,
} }
impl UserData { impl UserData {
pub async fn language_of<U>(user: U, pool: &MySqlPool) -> String
where
U: Into<UserId>,
{
let user_id = user.into().as_u64().to_owned();
match sqlx::query!(
"
SELECT language FROM users WHERE user = ?
",
user_id
)
.fetch_one(pool)
.await
{
Ok(r) => r.language,
Err(_) => LOCAL_LANGUAGE.clone(),
}
}
pub async fn timezone_of<U>(user: U, pool: &MySqlPool) -> Tz pub async fn timezone_of<U>(user: U, pool: &MySqlPool) -> Tz
where where
U: Into<UserId>, U: Into<UserId>,
@ -22,7 +50,7 @@ impl UserData {
match sqlx::query!( match sqlx::query!(
" "
SELECT IFNULL(timezone, 'UTC') AS timezone FROM users WHERE user = ? SELECT timezone FROM users WHERE user = ?
", ",
user_id user_id
) )
@ -37,20 +65,19 @@ SELECT IFNULL(timezone, 'UTC') AS timezone FROM users WHERE user = ?
.unwrap() .unwrap()
} }
pub async fn from_user<U: Into<UserId>>( pub async fn from_user(
user: U, user: &User,
ctx: impl CacheHttp, ctx: impl CacheHttp,
pool: &MySqlPool, pool: &MySqlPool,
) -> Result<Self, Box<dyn std::error::Error + Sync + Send>> { ) -> Result<Self, Box<dyn std::error::Error + Sync + Send>> {
let user_id = user.into(); let user_id = user.id.as_u64().to_owned();
match sqlx::query_as_unchecked!( match sqlx::query_as_unchecked!(
Self, Self,
" "
SELECT id, user, dm_channel, IF(timezone IS NULL, ?, timezone) AS timezone, allowed_dm FROM users WHERE user = ? SELECT id, user, name, dm_channel, IF(language IS NULL, ?, language) AS language, IF(timezone IS NULL, ?, timezone) AS timezone FROM users WHERE user = ?
", ",
*LOCAL_TIMEZONE, *LOCAL_LANGUAGE, *LOCAL_TIMEZONE, user_id
user_id.0
) )
.fetch_one(pool) .fetch_one(pool)
.await .await
@ -58,35 +85,33 @@ SELECT id, user, dm_channel, IF(timezone IS NULL, ?, timezone) AS timezone, allo
Ok(c) => Ok(c), Ok(c) => Ok(c),
Err(sqlx::Error::RowNotFound) => { Err(sqlx::Error::RowNotFound) => {
let dm_channel = user_id.create_dm_channel(ctx).await?; let dm_channel = user.create_dm_channel(ctx).await?;
let dm_id = dm_channel.id.as_u64().to_owned();
let pool_c = pool.clone(); let pool_c = pool.clone();
sqlx::query!( sqlx::query!(
" "
INSERT IGNORE INTO channels (channel) VALUES (?) INSERT IGNORE INTO channels (channel) VALUES (?)
", ",
dm_channel.id.0 dm_id
) )
.execute(&pool_c) .execute(&pool_c)
.await?; .await?;
sqlx::query!( sqlx::query!(
" "
INSERT INTO users (name, user, dm_channel, timezone) VALUES ('', ?, (SELECT id FROM channels WHERE channel = ?), ?) INSERT INTO users (user, name, dm_channel, language, timezone) VALUES (?, ?, (SELECT id FROM channels WHERE channel = ?), ?, ?)
", ", user_id, user.name, dm_id, *LOCAL_LANGUAGE, *LOCAL_TIMEZONE)
user_id.0, .execute(&pool_c)
dm_channel.id.0, .await?;
*LOCAL_TIMEZONE
)
.execute(&pool_c)
.await?;
Ok(sqlx::query_as_unchecked!( Ok(sqlx::query_as_unchecked!(
Self, Self,
" "
SELECT id, user, dm_channel, timezone, allowed_dm FROM users WHERE user = ? SELECT id, user, name, dm_channel, language, timezone FROM users WHERE user = ?
", ",
user_id.0 user_id
) )
.fetch_one(pool) .fetch_one(pool)
.await?) .await?)
@ -96,17 +121,18 @@ SELECT id, user, dm_channel, timezone, allowed_dm FROM users WHERE user = ?
error!("Error querying for user: {:?}", e); error!("Error querying for user: {:?}", e);
Err(Box::new(e)) Err(Box::new(e))
} },
} }
} }
pub async fn commit_changes(&self, pool: &MySqlPool) { pub async fn commit_changes(&self, pool: &MySqlPool) {
sqlx::query!( sqlx::query!(
" "
UPDATE users SET timezone = ?, allowed_dm = ? WHERE id = ? UPDATE users SET name = ?, language = ?, timezone = ? WHERE id = ?
", ",
self.name,
self.language,
self.timezone, self.timezone,
self.allowed_dm,
self.id self.id
) )
.execute(pool) .execute(pool)
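A sketch using timezone_of, the lighter read path above that resolves a user's timezone straight from the pool (imports for Tz, UserId and MySqlPool assumed):

// Sketch: format the current wall-clock time in a user's configured timezone.
async fn local_time_for(user_id: UserId, pool: &MySqlPool) -> String {
    let tz: Tz = UserData::timezone_of(user_id, pool).await;
    chrono::Utc::now().with_timezone(&tz).format("%H:%M").to_string()
}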

src/time_parser.rs
View File

@ -1,16 +1,15 @@
use std::{ use std::time::{SystemTime, UNIX_EPOCH};
convert::TryFrom,
fmt::{Display, Formatter, Result as FmtResult}, use std::fmt::{Display, Formatter, Result as FmtResult};
str::from_utf8,
time::{SystemTime, UNIX_EPOCH}, use crate::consts::{LOCAL_TIMEZONE, PYTHON_LOCATION};
};
use chrono::{DateTime, Datelike, Timelike, Utc}; use chrono::{DateTime, Datelike, Timelike, Utc};
use chrono_tz::Tz; use chrono_tz::Tz;
use std::convert::TryFrom;
use std::str::from_utf8;
use tokio::process::Command; use tokio::process::Command;
use crate::consts::{LOCAL_TIMEZONE, PYTHON_LOCATION};
#[derive(Debug)] #[derive(Debug)]
pub enum InvalidTime { pub enum InvalidTime {
ParseErrorDMY, ParseErrorDMY,
@ -27,13 +26,11 @@ impl Display for InvalidTime {
impl std::error::Error for InvalidTime {} impl std::error::Error for InvalidTime {}
#[derive(Copy, Clone)]
enum ParseType { enum ParseType {
Explicit, Explicit,
Displacement, Displacement,
} }
#[derive(Clone)]
pub struct TimeParser { pub struct TimeParser {
timezone: Tz, timezone: Tz,
inverted: bool, inverted: bool,
@ -98,7 +95,10 @@ impl TimeParser {
} }
fn process_explicit(&self) -> Result<i64, InvalidTime> { fn process_explicit(&self) -> Result<i64, InvalidTime> {
let mut time = Utc::now().with_timezone(&self.timezone).with_second(0).unwrap(); let mut time = Utc::now()
.with_timezone(&self.timezone)
.with_second(0)
.unwrap();
let mut segments = self.time_string.rsplit('-'); let mut segments = self.time_string.rsplit('-');
// this segment will always exist even if split fails // this segment will always exist even if split fails
@ -106,9 +106,11 @@ impl TimeParser {
let h_m_s = hms.split(':'); let h_m_s = hms.split(':');
for (t, setter) in for (t, setter) in h_m_s.take(3).zip(&[
h_m_s.take(3).zip(&[DateTime::with_hour, DateTime::with_minute, DateTime::with_second]) DateTime::with_hour,
{ DateTime::with_minute,
DateTime::with_second,
]) {
time = setter(&time, t.parse().map_err(|_| InvalidTime::ParseErrorHMS)?) time = setter(&time, t.parse().map_err(|_| InvalidTime::ParseErrorHMS)?)
.map_or_else(|| Err(InvalidTime::ParseErrorHMS), Ok)?; .map_or_else(|| Err(InvalidTime::ParseErrorHMS), Ok)?;
} }
@ -120,7 +122,9 @@ impl TimeParser {
let month = d_m_y.next(); let month = d_m_y.next();
let year = d_m_y.next(); let year = d_m_y.next();
for (t, setter) in [day, month].iter().zip(&[DateTime::with_day, DateTime::with_month]) for (t, setter) in [day, month]
.iter()
.zip(&[DateTime::with_day, DateTime::with_month])
{ {
if let Some(t) = t { if let Some(t) = t {
time = setter(&time, t.parse().map_err(|_| InvalidTime::ParseErrorDMY)?) time = setter(&time, t.parse().map_err(|_| InvalidTime::ParseErrorDMY)?)
@ -211,12 +215,14 @@ pub async fn natural_parser(time: &str, timezone: &str) -> Option<i64> {
.output() .output()
.await .await
.ok() .ok()
.and_then(|inner| { .map(|inner| {
if inner.status.success() { if inner.status.success() {
Some(from_utf8(&*inner.stdout).unwrap().parse::<i64>().unwrap()) Some(from_utf8(&*inner.stdout).unwrap().parse::<i64>().unwrap())
} else { } else {
None None
} }
}) })
.and_then(|inner| if inner < 0 { None } else { Some(inner) }) .flatten()
.map(|inner| if inner < 0 { None } else { Some(inner) })
.flatten()
} }
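The diff above only reflows `process_explicit`: on both sides it zips parsed "HH:MM:SS" segments against chrono setter functions and applies them in turn, failing with `ParseErrorHMS` on any unparseable segment. A self-contained sketch of that setter-zipping pattern, with illustrative names not taken from the repository:

use chrono::{DateTime, Timelike, Utc};

// Zip "HH:MM:SS" segments with chrono's setters and apply them in order,
// as `process_explicit` does above (here simplified to return Option).
fn apply_hms(mut time: DateTime<Utc>, hms: &str) -> Option<DateTime<Utc>> {
    let setters: [fn(&DateTime<Utc>, u32) -> Option<DateTime<Utc>>; 3] =
        [DateTime::with_hour, DateTime::with_minute, DateTime::with_second];

    // "16:05" sets hour and minute; a missing seconds segment is simply skipped.
    for (segment, setter) in hms.split(':').take(3).zip(&setters) {
        time = setter(&time, segment.parse().ok()?)?;
    }

    Some(time)
}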


@ -1,108 +0,0 @@
use poise::{
serenity_prelude as serenity,
serenity_prelude::{
builder::CreateApplicationCommands,
http::CacheHttp,
interaction::MessageFlags,
model::id::{GuildId, UserId},
},
};
use crate::{
consts::{CNC_GUILD, SUBSCRIPTION_ROLES},
Data, Error,
};
pub async fn register_application_commands(
ctx: &serenity::Context,
framework: &poise::Framework<Data, Error>,
guild_id: Option<GuildId>,
) -> Result<(), serenity::Error> {
let mut commands_builder = CreateApplicationCommands::default();
let commands = &framework.options().commands;
for command in commands {
if let Some(slash_command) = command.create_as_slash_command() {
commands_builder.add_application_command(slash_command);
}
if let Some(context_menu_command) = command.create_as_context_menu_command() {
commands_builder.add_application_command(context_menu_command);
}
}
let commands_builder = poise::serenity_prelude::json::Value::Array(commands_builder.0);
if let Some(guild_id) = guild_id {
ctx.http.create_guild_application_commands(guild_id.0, &commands_builder).await?;
} else {
ctx.http.create_global_application_commands(&commands_builder).await?;
}
Ok(())
}
pub async fn check_subscription(cache_http: impl CacheHttp, user_id: impl Into<UserId>) -> bool {
if let Some(subscription_guild) = *CNC_GUILD {
let guild_member = GuildId(subscription_guild).member(cache_http, user_id).await;
if let Ok(member) = guild_member {
for role in member.roles {
if SUBSCRIPTION_ROLES.contains(role.as_u64()) {
return true;
}
}
}
false
} else {
true
}
}
pub async fn check_guild_subscription(
cache_http: impl CacheHttp,
guild_id: impl Into<GuildId>,
) -> bool {
if let Some(guild) = cache_http.cache().unwrap().guild(guild_id) {
let owner = guild.owner_id;
check_subscription(&cache_http, owner).await
} else {
false
}
}
/// Sends the message, specified via [`crate::CreateReply`], to the interaction initial response
/// endpoint
pub fn send_as_initial_response(
data: poise::CreateReply<'_>,
f: &mut serenity::CreateInteractionResponseData,
) {
let poise::CreateReply {
content,
embeds,
attachments: _, // serenity doesn't support attachments in initial response yet
components,
ephemeral,
allowed_mentions,
reply: _,
} = data;
if let Some(content) = content {
f.content(content);
}
f.set_embeds(embeds);
if let Some(allowed_mentions) = allowed_mentions {
f.allowed_mentions(|f| {
*f = allowed_mentions.clone();
f
});
}
if let Some(components) = components {
f.components(|f| {
f.0 = components.0;
f
});
}
if ephemeral {
f.flags(MessageFlags::EPHEMERAL);
}
}


@ -1,14 +0,0 @@
[Unit]
Description=Reminder Bot
[Service]
User=reminder
Type=simple
ExecStart=/usr/bin/reminder-rs
WorkingDirectory=/etc/reminder-rs
Restart=always
RestartSec=4
Environment="reminder_rs=warn,postman=warn"
[Install]
WantedBy=multi-user.target


@ -1,21 +0,0 @@
[package]
name = "reminder_web"
version = "0.1.0"
authors = ["jellywx <judesouthworth@pm.me>"]
edition = "2018"
[dependencies]
rocket = { git = "https://github.com/SergioBenitez/Rocket", branch = "master", features = ["tls", "secrets", "json"] }
rocket_dyn_templates = { git = "https://github.com/SergioBenitez/Rocket", branch = "master", features = ["tera"] }
serenity = { version = "0.11", default-features = false, features = ["builder", "cache", "client", "gateway", "http", "model", "utils", "rustls_backend"] }
oauth2 = "4"
log = "0.4"
reqwest = "0.11"
serde = { version = "1.0", features = ["derive"] }
sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "macros", "mysql", "chrono", "json"] }
chrono = "0.4"
chrono-tz = "0.8"
lazy_static = "1.4.0"
rand = "0.7"
base64 = "0.13"
csv = "1.1"


@ -1,32 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFbzCCA1egAwIBAgIUUY2fYP5h41dtqcuNLVUS99N7+jAwDQYJKoZIhvcNAQEL
BQAwRzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQKDAlSb2NrZXQg
Q0ExFzAVBgNVBAMMDlJvY2tldCBSb290IENBMB4XDTIxMDUxNzE5MDIyNFoXDTMx
MDUxNTE5MDIyNFowRzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQK
DAlSb2NrZXQgQ0ExFzAVBgNVBAMMDlJvY2tldCBSb290IENBMIICIjANBgkqhkiG
9w0BAQEFAAOCAg8AMIICCgKCAgEA3MmVOUxbZ0rtBBbXjWkkM2n2IoPuqfiENdrM
NH9IcIbEgFb7AQ/c2vGBLWWw2RFtXxe2ubbgcHyAySQ+ENGEf+W2yox5JpcOOBP+
/KrUQ4ROqxLBWOD6+fA/q5m9/CXXJosWXly3DFE9nUEUlNXcaSxJm/bxIqnxwEMQ
NESWdG1qlsGVhHLi5sflgwAcYH+nZzdtINLyvQ5AGIs8s6LpmvUVbC3I+5oNNSwW
rRsIfCFaYLuUx7jZMaOk8Dfz8MbFL68+7OCGwmhduSHoGW8jVzDXwXv/crAmFtau
zmu43/vPDwKa4w/kQJLNr1QmiGz9FBt8DXVJ0s5bokOIlodEgAvjsL3xwnrKak0F
8ak74IBetfDdVvuC9WWmJ9/dj3z91KvCWLdZGm8ijlYZ2OuaiiJ/oC6iebaPFHEY
IQmF5wzT/nZLIbQsti9wFnOpptgkhDAcKIAk3+pncuMwZ9WRvWO2IFt+ZCqKCCDU
JliDZmm3ow/zob8DD6Txex5OkJGUF6iu8o7fjI9BxsjSc76bLrxIhvR43UXjzoHl
t3lvDH8SzSN/kxzQMXVl48h3xydJ3+ZyDOLle1bqMga8Kcin9GcEUqmL1JcyFafe
CUE/Nk1pHyrlnhpMCYIDVI3pmNLB0/m/IEWLckSr9+hXyUPJQyWiyrixwC86SPxZ
AonU2aUCAwEAAaNTMFEwHQYDVR0OBBYEFJ5uQa9kD5fioNeS/Ff/mxDcMAzhMB8G
A1UdIwQYMBaAFJ5uQa9kD5fioNeS/Ff/mxDcMAzhMA8GA1UdEwEB/wQFMAMBAf8w
DQYJKoZIhvcNAQELBQADggIBABf7adF1J40/8EjfSJ5XdG7OBUrZTas3+nuULh8B
6h1igAq0BgX9v3awmPCaxqDLqwJCsFZCk0s9Vgd62McKVKasyW1TQchWbdvlqeAB
QQfZpJtAZK12dcMl6iznC/JBTPvYl9q1FKS8kYtg19in/xlhxiiEJaL5z+slpTgT
cN12650W2FqjT9sD9jshZI/a8o53d2yUBXBBecCZ49djceBGLbxbteEi/sb9qM4f
IUxeSrvK6owxKMZ5okUqZWaFseFCkdMKpMg9eecyfSTweac8yLlZmeqX5D/H85Hr
hnWHjI5NeVZAoDkdmNeaDbAIv4f3prqC8uFP3ub8D0rG/WiPdOAd79KNgqbUUFyp
NbjSiAesx4Rm1WIgjHljFQKXJdtnxt+v1VkKV9entXJX2tye7+Z1+zurNYyUyM1J
COdXeV4jpq2mP8cLpAqX7ip1tNx9YMy5ctbAE1WsUw7lPyO91+VN2Q6MN5OyemY3
4nBzRJU8X1nsDgeNQWjzb/ZC7eoS916Gywk8Hu6GoIwvYMm3zea25n6mAAOX17vE
1YdvSzUlnVbwnbgd4BgFRGUfBVjO7qvFC7ZWLngWhBzIIYIGUJfO2rippgkxAoHH
dgWYk1MNnk2yM8WSo0VKoe4yuF9NwPlfPqeCE683JHdwGEJKZWMocszzHrGZ2KX2
I4/u
-----END CERTIFICATE-----


@ -1,51 +0,0 @@
-----BEGIN RSA PRIVATE KEY-----
MIIJKAIBAAKCAgEA3MmVOUxbZ0rtBBbXjWkkM2n2IoPuqfiENdrMNH9IcIbEgFb7
AQ/c2vGBLWWw2RFtXxe2ubbgcHyAySQ+ENGEf+W2yox5JpcOOBP+/KrUQ4ROqxLB
WOD6+fA/q5m9/CXXJosWXly3DFE9nUEUlNXcaSxJm/bxIqnxwEMQNESWdG1qlsGV
hHLi5sflgwAcYH+nZzdtINLyvQ5AGIs8s6LpmvUVbC3I+5oNNSwWrRsIfCFaYLuU
x7jZMaOk8Dfz8MbFL68+7OCGwmhduSHoGW8jVzDXwXv/crAmFtauzmu43/vPDwKa
4w/kQJLNr1QmiGz9FBt8DXVJ0s5bokOIlodEgAvjsL3xwnrKak0F8ak74IBetfDd
VvuC9WWmJ9/dj3z91KvCWLdZGm8ijlYZ2OuaiiJ/oC6iebaPFHEYIQmF5wzT/nZL
IbQsti9wFnOpptgkhDAcKIAk3+pncuMwZ9WRvWO2IFt+ZCqKCCDUJliDZmm3ow/z
ob8DD6Txex5OkJGUF6iu8o7fjI9BxsjSc76bLrxIhvR43UXjzoHlt3lvDH8SzSN/
kxzQMXVl48h3xydJ3+ZyDOLle1bqMga8Kcin9GcEUqmL1JcyFafeCUE/Nk1pHyrl
nhpMCYIDVI3pmNLB0/m/IEWLckSr9+hXyUPJQyWiyrixwC86SPxZAonU2aUCAwEA
AQKCAgBVSXlfXOOiDwtnnPs/IPJe+fuecaBsABfyRcbEMLbm4OhfOzpSurHx0YC4
7KNX9qdtKFfpfX9NdIq7KEjhbk3kqfPmYkUaZxeTCgZhzAua2S0aYHBXyPCqQ+gU
fZsqH+Pwe6H0aZQ8KdXHPTCaHdK6veThXo7feQ5t2noT9rq31txpx/Xd6BNGWsmJ
xS0xCZ68/GgnWdVyumKAGKkmKzRaK3pPA5CzwFqBw7ouvFaWvLuQymU6kWk1B6Xb
NYIB7IaXWPbRwhnMV0x9C2ABEzFvqOpvT1rqDqloAR4dlvcfbsIZZkQ2mhjt6MeT
hsorwQ4yCjvtZvVRfW1gTP4iR7ZpmHgHIYQDJy7raZqRVNe9WgMxKTS/IYsHvEvH
MUBOfQEclAQ8FTUOgTg9+Hf9VXJOtjZRxY08gb+OgKxoAQKxRZmLDUZli9SNWzGe
R3UECh0gImm/CzWnV3fercLX+qHLTcyJFcb2gclAfG8v8cOT2qu8RtsJAQM2ZSn7
L8FaWXewjAwkZwCl8Zl5ky1RbBXUkcuLIkAeLAl0+ffM9xiwLhiIj8gRJoq0/jSr
K1pA8CQ/rZC+9RsI8hP4x2Fn1CU8bOyEBPeNFEIDJHBiCrs/Rl6EAzDMJHlhL/HT
f7bqssuRjnSbRCKaAdtfGTxQUEjefvqJ11xE3BfHGnKPqoasMQKCAQEA8nY+3MAB
eBPlE/H4JeVpj7/9/q+JWLFFH9E3Re25LbObzRzIQOd5bTCJkOPQXYRbRIZ1pMt9
+nZzeLKvWn5nuUFgG2S4n+BEJkBDV78LOkUuGIAPdztF2mwVumygEGJFFfZHbYrh
XtaGUKdNxn1R3Z4B8W5jWpNNkU+dvoq4Zt0LrL28HB4Sa2yrtHeXbhqSb0BA15+N
vO9mlfX2I6Yr8F+L/OBfxB0gaNLISJJsj5NSm302je/QrfoMAwbePoNPjEX1/qd2
rgj9JfM+NE2FsVnFhrV+q4DywRUdX4VBFP4+x7fPm8LxvtVUmLVA3/NtGwPdnC6U
mQh/+kKVhU2AQwKCAQEA6R2FrxZIUdmk45b/tSjlq7r1ycBYeSztjzXw6hx+w/K3
Lf+5OGyoGoKr5bZHMhHFxqoWzP6kiZ2Uyvu+pLAGLfenJPec89ZncN4FvW9yU8yL
nE2Sc8Vf2GnOw2KGJcJR11Ew4dDspHfnM3rof2G4gPC/EMZeGojXoc5GHmT4iasD
Xwje4qqx5WKvRu1Rw2y+XM5h4gDn3QLLojDOvBpt22yaqSTAupY7OliGFO9h2WPL
r9TL6va87nXNepCdtzuSlxqTVtgMu2wVu3CnQyw9RiD2M31YnUtBDLNy/UNFj/5Z
6uXYuakh8vSFFvjgCUyQwR1xCTJur/6LdpAnt9Bz9wKCAQAUqoN9KVh2tatm4c72
2/D9ca3ikW+xgZqUta5yZWrNPGvhNbzT22b8KZDwKprN/cQRuSw52aZpPMNm3EQa
AIAyyCG69ADQj7r/T6btybjZRKBDMlcfIIw5q9DGTQ/vlZCx6IX6DkZbYQmdwkTc
0D20GA2uWGxbggawhgq5/PTuv5SJKrrn4qBLS73u6eqcVeN5XA6q0kywd+9UhNxv
+W/xUxOJgE5pVto2VREBLonWSwZVfnyx6GjvC0sOzv0Ocv7KxAPNqtRwzQ9Wtr7s
klb84Nv3OW0MjTcjwfr481Cyy2DqgP5PFnSogWJuibR34jXAgbnX4BiGWrUdzaMU
86AlAoIBABnw9Bh41VFuc9/zxL7nLy++HW33HqFVc5Y1PXr/8sdhcisHQxhZVxek
JPbqIuAahDTIZsMnLy41QAKaoyt2fymMXqhJecjUuiwgOOlMxp82qu6Y30xM0Y6m
r6CkjSMUjcD1QwhOFJd01GCxM8BBIqQOpmR6fqxbQAu8hacKO3IuerCPryXwMt3A
7ppo/GlP55sySEg7K5I3pmuFHOxn0IPTgR6DfYMGBs9GXJ1lyjDD3z3Q42RhUsMC
jvwtra9fTL/N8EmAv2H39C8oqSRbfvIX5u3x6/ONFU8RhSFT5CDTADSYoVZ/0MxV
k53r0hqWz6D94r9QQmsJW4G1JwZYhx8CggEBANUybXsJRgDC5ZOlwbaq0FeTOpZ4
pSKx1RkVV+G93hK5XdXIVu365pSt3T68MgF+4wWlbC4MuKuzJltFnJBapzz5ygcU
jw+Q+l/jq+mJj8uvUfo5TAy9thuggj1a7SCs/dm5DBwYA7bfmamLbbBpA0qywMsF
/vL1bpePIFhbGMhBK7uiEzOAOZVuceZWqcUByjUYy925K/an0ANsQAch5PVeAsEv
wXC3soaCzfzUyv+eAYBy7LOcz+hpdRj1l4c9Zx6hZeBxMc5LSRoTs+HfE6GgTuI2
cCfCZNFw7DhDy1TBd3XjQ0AFykeaesImZVR6gp0NYH1kionsMtR5rl4C2tw=
-----END RSA PRIVATE KEY-----


@ -1,21 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDYTCCAUmgAwIBAgIUA/EaCcXKgOxBiuaMNTackeF9DgcwDQYJKoZIhvcNAQEL
BQAwRzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQKDAlSb2NrZXQg
Q0ExFzAVBgNVBAMMDlJvY2tldCBSb290IENBMB4XDTIxMDUxNzE5MDIyNFoXDTMx
MDUxNTE5MDIyNFowPzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQ8wDQYDVQQK
DAZSb2NrZXQxEjAQBgNVBAMMCWxvY2FsaG9zdDBZMBMGByqGSM49AgEGCCqGSM49
AwEHA0IABOEVdmVd218y3Yy+ocjtt5siaFsNR7tknvS21pzKzxsFc05UrF74YqRx
Ic6/AQq56C48x6gDhUCdf+nMzD0NWTijGDAWMBQGA1UdEQQNMAuCCWxvY2FsaG9z
dDANBgkqhkiG9w0BAQsFAAOCAgEAW32kadHWEIsN5WXacjtgE9jbFii/rWJjrAO/
GWuARwLvjWeEFM5xSTny1cTDEz/7Bmfd9GRRpfgxKCFBGaU7zTmOV/AokrjUay+s
KPj0EV9ZE/84KVfr+MOuhwXhSSv+NvxduFw1sdhTUHs+Wopwjte+bnOUBXmnQH97
ITSQuUvEamPUS4tJD/kQ0qSGJKNv3CShhbe3XkjUd0UKK7lZzn6hY2fy3CrkkJDT
GyzFCRf3ZDfjW5/fGXN/TNoEK65pPQVr5taK/bovDesEO7rR4Y4YgtnGlJ7YToWh
E6I77CbsJCJdmgpjPNwRlwQ8upoWAfxOHqwg32s6uWq20v4TXZTOnEKOPEJG74bh
JHtwqsy2fIdGz4kZksKGerzJffyQPhX4f3qxxrijT9Hj2EoFIQ4u/jTRpK31IW3R
gEeNB8O4iyg3crx0oHRwc6zX63O6wBJCZ+0uxLoyjwtjKCxVYbJpccjoQh6zO3UO
pqAO+NKxQ469QDBV3uRyyQ7DRPu6p67/8gn1E/o8kyU1P7eLFIhBp4T4wCaMQBG6
IpL5W7eCyuOcqfdm471N07Z4KAqiV8eBJcKaAE+WzNIvrFvCZ/zq7Gj8p07nkjK8
+ZGDUieiY0mQEpiXLQZt1xNtT/MmlAzFYrK7t6gSygykQKjO1o4GG4g+eXu3h1YK
avsOwtc=
-----END CERTIFICATE-----


@ -1,5 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIGHAgEAMBMGByqGSM49AgEGCCqGSM49AwEHBG0wawIBAQQgeo/7wBIYVax9bj4m
1UEe2H+H4YLsbApDCZMslZbubRuhRANCAAThFXZlXdtfMt2MvqHI7bebImhbDUe7
ZJ70ttacys8bBXNOVKxe+GKkcSHOvwEKueguPMeoA4VAnX/pzMw9DVk4
-----END PRIVATE KEY-----


@ -1,21 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDfjCCAWagAwIBAgIUfmkM99JpQUsQsh8TVILPQDBGOhowDQYJKoZIhvcNAQEM
BQAwRzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQKDAlSb2NrZXQg
Q0ExFzAVBgNVBAMMDlJvY2tldCBSb290IENBMB4XDTIxMDUxNzE5MDIyNFoXDTMx
MDUxNTE5MDIyNFowPzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQ8wDQYDVQQK
DAZSb2NrZXQxEjAQBgNVBAMMCWxvY2FsaG9zdDB2MBAGByqGSM49AgEGBSuBBAAi
A2IABM5rQQNZEGWeDyrg2dVQS15c+JsX/ginN9/ArHpTgGO6xaLfkbekIj7gewUR
VQcQrYulwu02HTFDzGVvf1DdCZzIJLJUpl+jagdI05yMPFg2s5lThzGB6HENyw1I
hU1dMaMYMBYwFAYDVR0RBA0wC4IJbG9jYWxob3N0MA0GCSqGSIb3DQEBDAUAA4IC
AQAzpXFEeCMvTBG+kzmHN/+esjCK8MO/Grrw3Qoa1stX0yjNWzfhlGHfWk9Mgwnp
DnIEFT9lB+nT9uTkKL81Opu2bkWXuL0MyjyYmcCfEgJeDblUbD4HYzPO/s2P7QZu
Oa1pNKBiSWe1xq+F/gkn0+nDfICq3QQOOQMzaAmlFmszN3v7pryz+x21MqTFsfuW
ymycRcwZ3VyYeceZuBxjOhR2l8I5yZbR+KPj8VS7A3vgqvkS8ixTK0LrxcLwrTIz
W9Z1skzVpux4K7iwn3qlWIJ7EbERi9Ydz0MzpjD+CHxGlgHTzT552DGZhPO8D7BE
+4IoS0YeXEGLeC2a9Hmiy3FXbM4nt3aIWMiWyhjslXIbvWOJL1Vi5GNpdQCG+rB7
lvA0IISp/tAi9duufdONjgRceRDsqf7o6TOBQdB3QxHRt9gCB2QquRh5llMUY8YH
PxFJAEzF4X5b0q0k4b50HRVNfDY0SC/XIR7S2xnLDGuoUqrOpUligBzfXV4JHrPv
YKHsWSOiVxU9eY1SYKuKqnOsGvXSSQlYqSK1ol94eOKDjNy8wekaVYAQNsdNL7o5
QSWZUcbZLkaNMFdD9twWGduAs0eTichVgyvRgPdevjTGDPBHKNSUURZRTYeW4aIJ
QzSQUQXwOXsQOmfCkaaTISsDwZAJ0wFqqST1AOhlCWD1OQ==
-----END CERTIFICATE-----


@ -1,6 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIG2AgEAMBAGByqGSM49AgEGBSuBBAAiBIGeMIGbAgEBBDCRBt7MeJXyJRq7grGQ
jEdBHE31hG5LuKQ5iL8yTVGk75Kc8ISll2LewbvQ3NhR6E+hZANiAATOa0EDWRBl
ng8q4NnVUEteXPibF/4IpzffwKx6U4BjusWi35G3pCI+4HsFEVUHEK2LpcLtNh0x
Q8xlb39Q3QmcyCSyVKZfo2oHSNOcjDxYNrOZU4cxgehxDcsNSIVNXTE=
-----END PRIVATE KEY-----


@ -1,20 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIDMjCCARqgAwIBAgIUSu1CGfaWlNPjjLG7SaEEgxI+AsAwDQYJKoZIhvcNAQEL
BQAwRzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQKDAlSb2NrZXQg
Q0ExFzAVBgNVBAMMDlJvY2tldCBSb290IENBMB4XDTIxMDUxNzE5MDIyNFoXDTMx
MDUxNTE5MDIyNFowPzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQ8wDQYDVQQK
DAZSb2NrZXQxEjAQBgNVBAMMCWxvY2FsaG9zdDAqMAUGAytlcAMhAKNv1fkv5rxY
xGT84C98g2xPpain+jsliHvYrqNkS1zhoxgwFjAUBgNVHREEDTALgglsb2NhbGhv
c3QwDQYJKoZIhvcNAQELBQADggIBAMgWZhr3whYbFPNYm5RZxjgEonMY7cH/Rza1
UrBkyoMRL9v00YKJTEvjl5Xl4ku7jqxY2qjValacDroD8hYTLhhkkbHxH6u+kJSC
cKRQ8QVBZMmoor8qD2Y2BuXZYGWEtgeoXh+DLHWOim7gXDD6GyFPnYFGHnRhNmkE
6fPcfw1FZmBrMM9rk31EeK9nLVRabL+vuLDEddX1LH4LwK4OuV51wQMZGYiC3M2b
JpmuPAUAUyiyN53Utuw/ubeih3cEglOwCyDPFt6XISYHO0UaQdw25uhpjxs8KTZB
qOPJAI4cvGaN3GARXvz2P/QHUiTzsf2XOXXtrO0g+F9P7t6x2xL35c0A8yxKkfsa
RKdCveRuVlsLXVLVBikqLE/OgPC6SIhIFvTYzXTaZyNfge6dRy2Wg6qWPcGwseeA
QpFKgWiY3cuy7nJm6uybR6zOEMj5GgNWIbICGguQ5161MLnv0bEmKh2d9/jQ/XN5
M+nixtGla/G4hL3FQIy9rhHVZzPWwSxl+/eE4qgnInEhmlQ2KrcpgneCt38t0vjJ
dwHZSzVv8yX7fE0OSq/DWQXJraWUcT7Ds0g7T7jWkWfS0qStVd9VJ+rYQ8dBbE9Y
gcmkm1DMUd+y9xDRDjxby7gMDWFiN2Au9FQgaIpIctTNCj0+agFBPnfXPVSIH6gX
10kA2ZVX
-----END CERTIFICATE-----

View File

@ -1,3 +0,0 @@
-----BEGIN PRIVATE KEY-----
MC4CAQAwBQYDK2VwBCIEIGtLkaYE4D+P5HLkoc3a/tNhPP+gnhPLF3jEtdYcAtpd
-----END PRIVATE KEY-----


@ -1,114 +0,0 @@
#! /bin/bash
# Usage:
# ./gen_certs.sh [cert-kind]
#
# [cert-kind]:
# ed25519
# rsa_sha256
# ecdsa_nistp256_sha256
# ecdsa_nistp384_sha384
#
# Generate a certificate of the [cert-kind] key type, or if no cert-kind is
# specified, all of the certificates.
#
# Examples:
# ./gen_certs.sh ed25519
# ./gen_certs.sh rsa_sha256
# TODO: `rustls` (really, `webpki`) doesn't currently use the CN in the subject
# to check if a certificate is valid for a server name sent via SNI. It's not
# clear if this is intended, since certificates _should_ have a `subjectAltName`
# with a DNS name, or if it simply hasn't been implemented yet. See
# https://bugzilla.mozilla.org/show_bug.cgi?id=552346 for a bit more info.
CA_SUBJECT="/C=US/ST=CA/O=Rocket CA/CN=Rocket Root CA"
SUBJECT="/C=US/ST=CA/O=Rocket/CN=localhost"
ALT="DNS:localhost"
function gen_ca() {
openssl genrsa -out ca_key.pem 4096
openssl req -new -x509 -days 3650 -key ca_key.pem \
-subj "${CA_SUBJECT}" -out ca_cert.pem
}
function gen_ca_if_non_existent() {
if ! [ -f ./ca_cert.pem ]; then gen_ca; fi
}
function gen_rsa_sha256() {
gen_ca_if_non_existent
openssl req -newkey rsa:4096 -nodes -sha256 -keyout rsa_sha256_key.pem \
-subj "${SUBJECT}" -out server.csr
openssl x509 -req -sha256 -extfile <(printf "subjectAltName=${ALT}") -days 3650 \
-CA ca_cert.pem -CAkey ca_key.pem -CAcreateserial \
-in server.csr -out rsa_sha256_cert.pem
rm ca_cert.srl server.csr
}
function gen_ed25519() {
gen_ca_if_non_existent
openssl genpkey -algorithm ED25519 > ed25519_key.pem
openssl req -new -key ed25519_key.pem -subj "${SUBJECT}" -out server.csr
openssl x509 -req -extfile <(printf "subjectAltName=${ALT}") -days 3650 \
-CA ca_cert.pem -CAkey ca_key.pem -CAcreateserial \
-in server.csr -out ed25519_cert.pem
rm ca_cert.srl server.csr
}
function gen_ecdsa_nistp256_sha256() {
gen_ca_if_non_existent
openssl ecparam -out ecdsa_nistp256_sha256_key.pem -name prime256v1 -genkey
# Convert to pkcs8 format supported by rustls
openssl pkcs8 -topk8 -nocrypt -in ecdsa_nistp256_sha256_key.pem \
-out ecdsa_nistp256_sha256_key_pkcs8.pem
openssl req -new -nodes -sha256 -key ecdsa_nistp256_sha256_key_pkcs8.pem \
-subj "${SUBJECT}" -out server.csr
openssl x509 -req -sha256 -extfile <(printf "subjectAltName=${ALT}") -days 3650 \
-CA ca_cert.pem -CAkey ca_key.pem -CAcreateserial \
-in server.csr -out ecdsa_nistp256_sha256_cert.pem
rm ca_cert.srl server.csr ecdsa_nistp256_sha256_key.pem
}
function gen_ecdsa_nistp384_sha384() {
gen_ca_if_non_existent
openssl ecparam -out ecdsa_nistp384_sha384_key.pem -name secp384r1 -genkey
# Convert to pkcs8 format supported by rustls
openssl pkcs8 -topk8 -nocrypt -in ecdsa_nistp384_sha384_key.pem \
-out ecdsa_nistp384_sha384_key_pkcs8.pem
openssl req -new -nodes -sha384 -key ecdsa_nistp384_sha384_key_pkcs8.pem \
-subj "${SUBJECT}" -out server.csr
openssl x509 -req -sha384 -extfile <(printf "subjectAltName=${ALT}") -days 3650 \
-CA ca_cert.pem -CAkey ca_key.pem -CAcreateserial \
-in server.csr -out ecdsa_nistp384_sha384_cert.pem
rm ca_cert.srl server.csr ecdsa_nistp384_sha384_key.pem
}
case $1 in
ed25519) gen_ed25519 ;;
rsa_sha256) gen_rsa_sha256 ;;
ecdsa_nistp256_sha256) gen_ecdsa_nistp256_sha256 ;;
ecdsa_nistp384_sha384) gen_ecdsa_nistp384_sha384 ;;
*)
gen_ed25519
gen_rsa_sha256
gen_ecdsa_nistp256_sha256
gen_ecdsa_nistp384_sha384
;;
esac


@ -1,30 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFLDCCAxSgAwIBAgIUZ461KXDgTT25LP1S6PK6i9ZKtOYwDQYJKoZIhvcNAQEL
BQAwRzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRIwEAYDVQQKDAlSb2NrZXQg
Q0ExFzAVBgNVBAMMDlJvY2tldCBSb290IENBMB4XDTIxMDUxNzE5MDIyNFoXDTMx
MDUxNTE5MDIyNFowPzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQ8wDQYDVQQK
DAZSb2NrZXQxEjAQBgNVBAMMCWxvY2FsaG9zdDCCAiIwDQYJKoZIhvcNAQEBBQAD
ggIPADCCAgoCggIBAKl8Re1dneFB2obYPOuZf3d6BR2xGcMhr2vmHpnVgkeg/xGI
cwHhhB04mEg4TqbZ0Q1wdKN4ruNigdrQAXDqnm4y2IB1q/uTdoXVWNsvAZmDq4N4
rPUDWagpkilV4HOHDQOI27Lo/+30wJX6H8i3J6Nag1y9spuySkL1F1SgQng9uzvP
3SwbsO9R/jS7qTZNEtirnJv3qJlxmT4BCvBuWNALShFlSZYnZk/2csYXEaVkWMUE
rnWGmTmzMZEzDOdQdPC3sJDCPQbbgD4SnQANqhOVjPSdJ6Y6joN6XYtB2EP+6LJ8
UBTICETnUROWeKqMm215Gsen32cyWkaCwEWtTFn7rJHbPvUY4vPYQZAB2/h07lYq
v67W3r5lTq1KuoStD8M/7nCIYIuNhmJLMzzWrSfJrQpYexoMII6kjOxv2kiUgb1y
bYKnFlVf4up3pTpi2/0We4SHRgc7xTcEPNvU5z5hc5JwHZIFjmi5dx50ZoAULrXl
OUhfdUPut7H38UQg3riJiNAzedUiTubek26po8qH1iS/EMgAi5RboaBovjWhgjwq
P/RP0vzpln6OdQIRaRlY9vZiik9HbFybafuA2zEkyc+zc4HqJk3vqX/0hgd9qWeL
zgsHr6A86tNUXsUaoInQbzznjpbFE85klxd6HeqasOyVDCeDUs4lWoDNp0DbAgMB
AAGjGDAWMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDANBgkqhkiG9w0BAQsFAAOCAgEA
sS2TUKKYtNYC68TEQb5KAa8/zi6Lhz9lyn9rpBXAcAVeBdxCqrBU0nRN8EjXEff1
oBeau9Wi9PwdK6J+4xFuer9YrZZWuWLKUBXJL9ZXEfI+USo5WVz7v/puoKZSUSu2
+Ee4+v1eoAsCtaA8IR0Sh1KTc9kg1QZW1nIdBV0zgAERWTzm1iy2Ff+euowM/lUR
FczMAJsNq83O2kaH541fRx3gC2iMN/QLwRalBR2y8hTI5wQa0Yr8moC4IsTfRrKQ
/SZDjFT1XoA9TnrByIvGQNlxK1Rm2hvK1OQn4OL6sdYK6F+MxfUn/atQNyBQ6CF+
oKuvOnE2jces8GGZIU1jYJ2271SQkzp0CW3N1ZKjClSQFAYED+5ERTGyeEL4o3Vr
V/BUd0KC7HhbWBlFc2EDmPOoOTp/ID901Kf499M68pe2Fr/63/nCAON6WFk1NPYA
+sWMfS25hikU5rOHGQgXxXAz90pwpvpoLQvaq0/azsbHvq9B4ubdcLE0xcoK+DGq
+/aOeWlYkalWp93akPYpWN3UJXzDXzzagRID/1LEGS+Ssbh1WVhAhLKVoxzBvkgm
ozVAhR3zHXI2QpQ2FEbOmkcRtpxv2CiaTsgHg2MK8cdmPx3G+ufCZjRbQx/VXVdN
vaFRdmzr/5EQ7DT5Uy8w32h1to4Fm2sAtbAFYaFLXaM=
-----END CERTIFICATE-----


@ -1,52 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCpfEXtXZ3hQdqG
2DzrmX93egUdsRnDIa9r5h6Z1YJHoP8RiHMB4YQdOJhIOE6m2dENcHSjeK7jYoHa
0AFw6p5uMtiAdav7k3aF1VjbLwGZg6uDeKz1A1moKZIpVeBzhw0DiNuy6P/t9MCV
+h/ItyejWoNcvbKbskpC9RdUoEJ4Pbs7z90sG7DvUf40u6k2TRLYq5yb96iZcZk+
AQrwbljQC0oRZUmWJ2ZP9nLGFxGlZFjFBK51hpk5szGRMwznUHTwt7CQwj0G24A+
Ep0ADaoTlYz0nSemOo6Del2LQdhD/uiyfFAUyAhE51ETlniqjJtteRrHp99nMlpG
gsBFrUxZ+6yR2z71GOLz2EGQAdv4dO5WKr+u1t6+ZU6tSrqErQ/DP+5wiGCLjYZi
SzM81q0nya0KWHsaDCCOpIzsb9pIlIG9cm2CpxZVX+Lqd6U6Ytv9FnuEh0YHO8U3
BDzb1Oc+YXOScB2SBY5ouXcedGaAFC615TlIX3VD7rex9/FEIN64iYjQM3nVIk7m
3pNuqaPKh9YkvxDIAIuUW6GgaL41oYI8Kj/0T9L86ZZ+jnUCEWkZWPb2YopPR2xc
m2n7gNsxJMnPs3OB6iZN76l/9IYHfalni84LB6+gPOrTVF7FGqCJ0G88546WxRPO
ZJcXeh3qmrDslQwng1LOJVqAzadA2wIDAQABAoICABT4gHp/Q+K0UEKxDNCl/ISe
/3UODb78MwVpws2MAoO0YvsbZAeOjNdEwmrlNK4mc1xzVqtHanROIv0dEaCUFyhR
eEJkzPPi6h5jKIxuQ4doKFerHdNvJ6/L/P7KVmxVAII4c96uP8SErTOhcD9Ykjn/
IBPgkPH83H1ucAWTksXn9XvQG3CyuHDUN1z0/1ntrXBLw6P0v9LEoI5weJcJQEn1
q6N9Yd6HX3xzZP4nqpJJWUZ/bsqx7dGa3340z9rrNJz4TYuLzRtFG5gSm4R/LFUi
Av/dViOWST3xbROnAQhgyRAUm6AGpCdKa9i9nI6VuUGRY4PivJy7OTpSQVIdwD2K
VFbFauCmsTY7B+Z+DXe9JJV+Tlazvlwop1DApxCgLMNr2pVB/1yPzMrNYDB4Sg1c
T3stu4A8PtljTykla2C2LPFrdIijvYv9n6PSPWV4vf1ix9uYs99FhZPQJMgm+CDr
n3cFfuofIWIRJpCuu3hn4woGgW2bXor4pyMNg1yCp1T2c8g6eJUYAAIRpse0HDbT
ZUifxlNBx819bf9aqv+lhwMU4UFlmWMv3NETcCBqgUn+z4scNJ3kZwO9ZodLRblK
SpalZ6SNejTnVukg3zbwJG8w5vIrJvfjBkikAL1O5X6Kn3YqfrXAl38bIvA50ZBe
eOFcgDPClLPGlNKxQXiZAoIBAQDh0aArTjWi8Kxt2Ga3Hu4jQ7IXklL9osGAlFCB
wZs831/ktLY0c7vY+mbwrY4AtqK21A00MrNSTsp/McHwAUi410DdcTqzkGnm9BBZ
FKVO1H9KGg2t344tOXcPsFTl6SR5a0aPqagyvgdH+YWC4ccfYZWMcLELn3sOGoEp
a7VBxjLZZ+/b+/oIzhwxEaBi8N0U3IZ3SsC9Lmojnb9+LMwGs14TFfahD3uM1hnU
vww1elwPwXafWc+7Ej1bXijWBXbyzkCITzHafmDsAatEZnemHL8zKKtTn2aSTUSj
Fl/y94WR7/V4nVEQ0R3fjGC0rKh08BtKzxPjYBqjT5aI7+yfAoIBAQDAIzROr00o
65auD5TB2k/pSVKQpB/U6ESGafDxg4a+R9Ng2ESmDN5hUUZDefm0Uxf9AZqS6eno
GSAqxNDixWPy2WFzbZ0mUCoyQn+Eh09WBvt0IqDZ2+ngBEXiKA/8dsvXinBHLuHV
u+rJdLMzPfhWVo1/iXq7SOjBvDuthKROku5BEt6Q3Cs0nk6uneRIqKtaqa7KIInF
BPtUifZtCP09h6iFFGNv2g2OYRYDg8TXvjt3scKy0DUC3kTeTCPvNvHaOObGbMRU
Q85HkXburxWfsMto5m/lRsbY3COxddB47WIQ6QNewESfCab/R2lOdlXQeaH8fuxT
wWC5DMz4F0ZFAoIBAFm+EjZDnaNEnHIHB0MNIryXAabGev7beKUdzCTVCVmWuChO
/P45ZFTlppVNk9qKun2IJjsxTvyN3YHRB27XQ8xZlyiqABcudDfZlMmiH9QFNRUA
56DK8Fjetodgn0zDa8BpNqCPXw3TYVdkPX/3NEgvYtxuSJ4C4keHlv8cE+uw1bJ6
0OMO754iMyf5BlFrwaCxxyqPZauJT5sZ7Ok66lZbYC6bkukNGx+sUpWu2y5Bk2ab
jwXjDmAc7o9qCzaK82upNhI1zu0zPldsjmDfi/tS/1VYe0X/WicYWAesM7N+VPHb
eCVX98iEIqgdxKzo1QWsClyfkRrSraNrVLrVBqcCggEAaLIGK6YMNnMBPUGSPnt2
NdlVWymDiuExjcimmQOhZYf/33KZHZ4/gunljpklfqQUmzHHh6xcX7NpOsTaSedj
Sg43ss0U566g/5gKoi2VBnxxglvoKC5T51SMu+o2o8wb0QxHmBIszulBy5qClzZ6
Xpl1Kvy/2tOkuQSXxDpVydb4ao8cpfTCuj5VA4NXxFvcW1/AtbU7PRc02GEA3XMb
gu6r3jA46tb3shCnDS09Eo4/Gz7Kp+MaL8Dr5/G3Vv8qlE2TOqZD6OK1wXu7Qd43
uzd772I5sMZ7TenOrUFUYsB/QlWmF3hPLBX3YH0KHc4PfrT4lnyWzCDAUrVt7vXH
vQKCAQEAz1Q+jW+NG7CAqkOJ19n+KmGn+zddvy8x4txKx8kV0+3XIVNkseS6IT65
uemD85Gn7MYwHxcKUHghHSKyJsvCb1vSmB3JtCnrsodmQNMb6Lsq89VlM8Ylc/s3
F4AraiOlylqcEwLkanD3XSfPdQZhExZHEtpAW/Rr6zYT9J94VO1nK3+RkFI4a8kl
pEbY+tqJj3LGTuaQkshB9rDtcBT5/JsaxlMk783qkKziCPZF47BWqrJb+t7tm3qg
5gf+QUInEjdW3k3uZBL4316RP/TJlLvo29PkEwoC8X4R2EgDeHQhhRC2Fi2Wpy4O
ce4G+zZOOYXwvWGJLwNhgsve8C3oqg==
-----END PRIVATE KEY-----


@ -1,49 +0,0 @@
pub const DISCORD_OAUTH_TOKEN: &'static str = "https://discord.com/api/oauth2/token";
pub const DISCORD_OAUTH_AUTHORIZE: &'static str = "https://discord.com/api/oauth2/authorize";
pub const DISCORD_API: &'static str = "https://discord.com/api";
pub const MAX_NAME_LENGTH: usize = 100;
pub const MAX_CONTENT_LENGTH: usize = 2000;
pub const MAX_EMBED_DESCRIPTION_LENGTH: usize = 4096;
pub const MAX_EMBED_TITLE_LENGTH: usize = 256;
pub const MAX_EMBED_AUTHOR_LENGTH: usize = 256;
pub const MAX_EMBED_FOOTER_LENGTH: usize = 2048;
pub const MAX_URL_LENGTH: usize = 512;
pub const MAX_USERNAME_LENGTH: usize = 100;
pub const MAX_EMBED_FIELDS: usize = 25;
pub const MAX_EMBED_FIELD_TITLE_LENGTH: usize = 256;
pub const MAX_EMBED_FIELD_VALUE_LENGTH: usize = 1024;
pub const MINUTE: usize = 60;
pub const HOUR: usize = 60 * MINUTE;
pub const DAY: usize = 24 * HOUR;
pub const CHARACTERS: &str = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_";
use std::{collections::HashSet, env, iter::FromIterator};
use lazy_static::lazy_static;
use serenity::model::prelude::AttachmentType;
lazy_static! {
pub static ref DEFAULT_AVATAR: AttachmentType<'static> = (
include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/../assets/webhook.jpg")) as &[u8],
"webhook.jpg",
)
.into();
pub static ref SUBSCRIPTION_ROLES: HashSet<u64> = HashSet::from_iter(
env::var("PATREON_ROLE_ID")
.map(|var| var
.split(',')
.filter_map(|item| { item.parse::<u64>().ok() })
.collect::<Vec<u64>>())
.unwrap_or_else(|_| Vec::new())
);
pub static ref CNC_GUILD: Option<u64> =
env::var("PATREON_GUILD_ID").map(|var| var.parse::<u64>().ok()).ok().flatten();
pub static ref MIN_INTERVAL: u32 = env::var("MIN_INTERVAL")
.ok()
.map(|inner| inner.parse::<u32>().ok())
.flatten()
.unwrap_or(600);
}
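The lazy_static block above derives everything from the environment: `PATREON_ROLE_ID` is a comma-separated list in which unparseable entries are dropped, `PATREON_GUILD_ID` is optional, and `MIN_INTERVAL` falls back to 600 seconds. A standalone sketch of the role-list parsing (the function name is illustrative, not from the repository):

use std::collections::HashSet;

// Mirrors the SUBSCRIPTION_ROLES initialiser above: split on ',' and keep
// only the entries that parse as u64.
fn parse_role_ids(var: &str) -> HashSet<u64> {
    var.split(',').filter_map(|item| item.parse::<u64>().ok()).collect()
}

fn main() {
    // "abc" is silently dropped, so a typo shrinks the set rather than erroring.
    assert_eq!(parse_role_ids("111,222,abc"), HashSet::from([111, 222]));
}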


@ -1,225 +0,0 @@
#[macro_use]
extern crate rocket;
mod consts;
#[macro_use]
mod macros;
mod routes;
use std::{collections::HashMap, env, path::Path};
use oauth2::{basic::BasicClient, AuthUrl, ClientId, ClientSecret, RedirectUrl, TokenUrl};
use rocket::{
fs::FileServer,
serde::json::{json, Value as JsonValue},
tokio::sync::broadcast::Sender,
};
use rocket_dyn_templates::Template;
use serenity::{
client::Context,
http::CacheHttp,
model::id::{GuildId, UserId},
};
use sqlx::{MySql, Pool};
use crate::consts::{CNC_GUILD, DISCORD_OAUTH_AUTHORIZE, DISCORD_OAUTH_TOKEN, SUBSCRIPTION_ROLES};
type Database = MySql;
#[derive(Debug)]
enum Error {
SQLx(sqlx::Error),
Serenity(serenity::Error),
}
#[catch(401)]
async fn not_authorized() -> Template {
let map: HashMap<String, String> = HashMap::new();
Template::render("errors/401", &map)
}
#[catch(403)]
async fn forbidden() -> Template {
let map: HashMap<String, String> = HashMap::new();
Template::render("errors/403", &map)
}
#[catch(404)]
async fn not_found() -> Template {
let map: HashMap<String, String> = HashMap::new();
Template::render("errors/404", &map)
}
#[catch(413)]
async fn payload_too_large() -> JsonValue {
json!({"error": "Data too large.", "errors": ["Data too large."]})
}
#[catch(422)]
async fn unprocessable_entity() -> JsonValue {
json!({"error": "Invalid request.", "errors": ["Invalid request."]})
}
#[catch(500)]
async fn internal_server_error() -> Template {
let map: HashMap<String, String> = HashMap::new();
Template::render("errors/500", &map)
}
pub async fn initialize(
kill_channel: Sender<()>,
serenity_context: Context,
db_pool: Pool<Database>,
) -> Result<(), Box<dyn std::error::Error>> {
info!("Checking environment variables...");
if env::var("OFFLINE").map_or(true, |v| v != "1") {
env::var("OAUTH2_CLIENT_ID").expect("`OAUTH2_CLIENT_ID' not supplied");
env::var("OAUTH2_CLIENT_SECRET").expect("`OAUTH2_CLIENT_SECRET' not supplied");
env::var("OAUTH2_DISCORD_CALLBACK").expect("`OAUTH2_DISCORD_CALLBACK' not supplied");
env::var("PATREON_GUILD_ID").expect("`PATREON_GUILD_ID' not supplied");
}
info!("Done!");
let oauth2_client = BasicClient::new(
ClientId::new(env::var("OAUTH2_CLIENT_ID")?),
Some(ClientSecret::new(env::var("OAUTH2_CLIENT_SECRET")?)),
AuthUrl::new(DISCORD_OAUTH_AUTHORIZE.to_string())?,
Some(TokenUrl::new(DISCORD_OAUTH_TOKEN.to_string())?),
)
.set_redirect_uri(RedirectUrl::new(env::var("OAUTH2_DISCORD_CALLBACK")?)?);
let reqwest_client = reqwest::Client::new();
let static_path =
if Path::new("web/static").exists() { "web/static" } else { "/lib/reminder-rs/static" };
rocket::build()
.attach(Template::fairing())
.register(
"/",
catchers![
not_authorized,
forbidden,
not_found,
internal_server_error,
unprocessable_entity,
payload_too_large,
],
)
.manage(oauth2_client)
.manage(reqwest_client)
.manage(serenity_context)
.manage(db_pool)
.mount("/static", FileServer::from(static_path))
.mount(
"/",
routes![
routes::index,
routes::cookies,
routes::privacy,
routes::terms,
routes::return_to_same_site,
routes::report::report_error,
],
)
.mount(
"/help",
routes![
routes::help,
routes::help_timezone,
routes::help_create_reminder,
routes::help_delete_reminder,
routes::help_timers,
routes::help_todo_lists,
routes::help_macros,
routes::help_intervals,
routes::help_dashboard,
routes::help_iemanager,
],
)
.mount(
"/login",
routes![
routes::login::discord_login,
routes::login::discord_logout,
routes::login::discord_callback
],
)
.mount(
"/dashboard",
routes![
routes::dashboard::dashboard_1,
routes::dashboard::dashboard_2,
routes::dashboard::dashboard_home,
routes::dashboard::user::get_user_info,
routes::dashboard::user::update_user_info,
routes::dashboard::user::get_user_guilds,
routes::dashboard::guild::get_guild_patreon,
routes::dashboard::guild::get_guild_channels,
routes::dashboard::guild::get_guild_roles,
routes::dashboard::guild::get_reminder_templates,
routes::dashboard::guild::create_reminder_template,
routes::dashboard::guild::delete_reminder_template,
routes::dashboard::guild::create_guild_reminder,
routes::dashboard::guild::get_reminders,
routes::dashboard::guild::edit_reminder,
routes::dashboard::guild::delete_reminder,
routes::dashboard::export::export_reminders,
routes::dashboard::export::export_reminder_templates,
routes::dashboard::export::export_todos,
routes::dashboard::export::import_reminders,
routes::dashboard::export::import_todos,
],
)
.mount("/admin", routes![routes::admin::admin_dashboard_home, routes::admin::bot_data])
.launch()
.await?;
warn!("Exiting rocket runtime");
// distribute kill signal
match kill_channel.send(()) {
Ok(_) => {}
Err(e) => {
error!("Failed to issue kill signal: {:?}", e);
}
}
Ok(())
}
pub async fn check_subscription(cache_http: impl CacheHttp, user_id: impl Into<UserId>) -> bool {
offline!(true);
if let Some(subscription_guild) = *CNC_GUILD {
let guild_member = GuildId(subscription_guild).member(cache_http, user_id).await;
if let Ok(member) = guild_member {
for role in member.roles {
if SUBSCRIPTION_ROLES.contains(role.as_u64()) {
return true;
}
}
}
false
} else {
true
}
}
pub async fn check_guild_subscription(
cache_http: impl CacheHttp,
guild_id: impl Into<GuildId>,
) -> bool {
offline!(true);
if let Some(guild) = cache_http.cache().unwrap().guild(guild_id) {
let owner = guild.owner_id;
check_subscription(&cache_http, owner).await
} else {
false
}
}


@ -1,149 +0,0 @@
macro_rules! offline {
($field:expr) => {
if std::env::var("OFFLINE").map_or(false, |v| v == "1") {
return $field;
}
};
}
macro_rules! check_length {
($max:ident, $field:expr) => {
if $field.len() > $max {
return Err(json!({ "error": format!("{} exceeded", stringify!($max)) }));
}
};
($max:ident, $field:expr, $($fields:expr),+) => {
check_length!($max, $field);
check_length!($max, $($fields),+);
};
}
macro_rules! check_length_opt {
($max:ident, $field:expr) => {
if let Some(field) = &$field {
check_length!($max, field);
}
};
($max:ident, $field:expr, $($fields:expr),+) => {
check_length_opt!($max, $field);
check_length_opt!($max, $($fields),+);
};
}
macro_rules! check_url {
($field:expr) => {
if !($field.starts_with("http://") || $field.starts_with("https://")) {
return Err(json!({ "error": "URL invalid" }));
}
};
($field:expr, $($fields:expr),+) => {
check_url!($max, $field);
check_url!($max, $($fields),+);
};
}
macro_rules! check_url_opt {
($field:expr) => {
if let Some(field) = &$field {
check_url!(field);
}
};
($field:expr, $($fields:expr),+) => {
check_url_opt!($field);
check_url_opt!($($fields),+);
};
}
macro_rules! check_authorization {
($cookies:expr, $ctx:expr, $guild:expr) => {
use serenity::model::id::UserId;
let user_id = $cookies.get_private("userid").map(|c| c.value().parse::<u64>().ok()).flatten();
if std::env::var("OFFLINE").map_or(true, |v| v != "1") {
match user_id {
Some(user_id) => {
match GuildId($guild).to_guild_cached($ctx) {
Some(guild) => {
let member_res = guild.member($ctx, UserId(user_id)).await;
match member_res {
Err(_) => {
return Err(json!({"error": "User not in guild"}));
}
Ok(member) => {
let permissions_res = member.permissions($ctx);
match permissions_res {
Err(_) => {
return Err(json!({"error": "Couldn't fetch permissions"}));
}
Ok(permissions) => {
if !(permissions.manage_messages() || permissions.manage_guild() || permissions.administrator()) {
return Err(json!({"error": "Incorrect permissions"}));
}
}
}
}
}
}
None => {
return Err(json!({"error": "Bot not in guild"}));
}
}
}
None => {
return Err(json!({"error": "User not authorized"}));
}
}
}
}
}
macro_rules! update_field {
($pool:expr, $error:ident, $reminder:ident.[$field:ident]) => {
if let Some(value) = &$reminder.$field {
match sqlx::query(concat!(
"UPDATE reminders SET `",
stringify!($field),
"` = ? WHERE uid = ?"
))
.bind(value)
.bind(&$reminder.uid)
.execute($pool)
.await
{
Ok(_) => {}
Err(e) => {
warn!(
concat!(
"Error in `update_field!(",
stringify!($pool),
stringify!($reminder),
stringify!($field),
")': {:?}"
),
e
);
$error.push(format!("Error setting field {}", stringify!($field)));
}
}
}
};
($pool:expr, $error:ident, $reminder:ident.[$field:ident, $($fields:ident),+]) => {
update_field!($pool, $error, $reminder.[$field]);
update_field!($pool, $error, $reminder.[$($fields),+]);
};
}
macro_rules! json_err {
($message:expr) => {
Err(json!({ "error": $message }))
};
}
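The deleted macros above are thin guards used by the dashboard routes that follow: `check_length!`/`check_length_opt!` early-return a JSON error when a field is too long, `check_url!` insists on an http(s) prefix, `check_authorization!` verifies the cookie-identified user's permissions in the guild, and `update_field!` issues one UPDATE per supplied field. Roughly what a single `check_length!` call expands to, written as a self-contained sketch (serde_json's `json!` stands in for Rocket's re-export; the constant value is taken from the web consts above):

use serde_json::{json, Value};

const MAX_CONTENT_LENGTH: usize = 2000;

// Approximate expansion of `check_length!(MAX_CONTENT_LENGTH, content)` at a
// call site returning Result<_, Value>, per the macro definition above.
fn validate_content(content: &str) -> Result<(), Value> {
    if content.len() > MAX_CONTENT_LENGTH {
        return Err(json!({ "error": format!("{} exceeded", stringify!(MAX_CONTENT_LENGTH)) }));
    }
    Ok(())
}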


@ -1,218 +0,0 @@
use std::{collections::HashMap, env};
use chrono::{DateTime, Utc};
use rocket::{
http::{CookieJar, Status},
serde::json::json,
State,
};
use rocket_dyn_templates::Template;
use serde::Serialize;
use sqlx::{MySql, Pool};
use crate::routes::JsonResult;
fn is_admin(cookies: &CookieJar<'_>) -> bool {
cookies
.get_private("userid")
.map_or(false, |cookie| Some(cookie.value().to_string()) == env::var("ADMIN_ID").ok())
}
#[get("/")]
pub async fn admin_dashboard_home(cookies: &CookieJar<'_>) -> Result<Template, Status> {
if let Some(cookie) = cookies.get_private("userid") {
let map: HashMap<&str, String> = HashMap::new();
if Some(cookie.value().to_string()) == env::var("ADMIN_ID").ok() {
Ok(Template::render("admin_dashboard", &map))
} else {
Err(Status::Forbidden)
}
} else {
Err(Status::Unauthorized)
}
}
#[derive(Serialize)]
struct TimeFrame {
time_key: DateTime<Utc>,
count: i64,
}
#[get("/data")]
pub async fn bot_data(cookies: &CookieJar<'_>, pool: &State<Pool<MySql>>) -> JsonResult {
if !is_admin(cookies) {
return json_err!("Not authorized");
}
let backlog = sqlx::query!(
"SELECT COUNT(1) AS backlog FROM reminders WHERE `utc_time` < NOW() AND enabled = 1 AND `status` = 'pending'"
)
.fetch_one(pool.inner())
.await
.unwrap();
let schedule_once = sqlx::query_as_unchecked!(
TimeFrame,
"SELECT
FROM_UNIXTIME(FLOOR(UNIX_TIMESTAMP(`utc_time`) / 300) * 300) AS `time_key`,
COUNT(1) AS `count`
FROM reminders
WHERE
`utc_time` < DATE_ADD(NOW(), INTERVAL 1 DAY) AND
`utc_time` >= NOW() AND
`enabled` = 1 AND
`status` = 'pending' AND
`interval_seconds` IS NULL AND
`interval_months` IS NULL AND
`interval_days` IS NULL
GROUP BY `time_key`
ORDER BY `time_key`"
)
.fetch_all(pool.inner())
.await
.unwrap();
let schedule_interval = sqlx::query_as_unchecked!(
TimeFrame,
"SELECT
FROM_UNIXTIME(FLOOR(UNIX_TIMESTAMP(`utc_time`) / 300) * 300) AS `time_key`,
COUNT(1) AS `count`
FROM reminders
WHERE
`utc_time` < DATE_ADD(NOW(), INTERVAL 1 DAY) AND
`utc_time` >= NOW() AND
`status` = 'pending' AND
`enabled` = 1 AND (
`interval_seconds` IS NOT NULL OR
`interval_months` IS NOT NULL OR
`interval_days` IS NOT NULL
)
GROUP BY `time_key`
ORDER BY `time_key`"
)
.fetch_all(pool.inner())
.await
.unwrap();
let schedule_once_long = sqlx::query_as_unchecked!(
TimeFrame,
"SELECT
FROM_UNIXTIME(FLOOR(UNIX_TIMESTAMP(`utc_time`) / 86400) * 86400) AS `time_key`,
COUNT(1) AS `count`
FROM reminders
WHERE
`utc_time` < DATE_ADD(NOW(), INTERVAL 31 DAY) AND
`utc_time` >= NOW() AND
`enabled` = 1 AND
`status` = 'pending' AND
`interval_seconds` IS NULL AND
`interval_months` IS NULL AND
`interval_days` IS NULL
GROUP BY `time_key`
ORDER BY `time_key`"
)
.fetch_all(pool.inner())
.await
.unwrap();
let schedule_interval_long = sqlx::query_as_unchecked!(
TimeFrame,
"SELECT
FROM_UNIXTIME(FLOOR(UNIX_TIMESTAMP(`utc_time`) / 86400) * 86400) AS `time_key`,
COUNT(1) AS `count`
FROM reminders
WHERE
`utc_time` < DATE_ADD(NOW(), INTERVAL 31 DAY) AND
`utc_time` >= NOW() AND
`status` = 'pending' AND
`enabled` = 1 AND (
`interval_seconds` IS NOT NULL OR
`interval_months` IS NOT NULL OR
`interval_days` IS NOT NULL
)
GROUP BY `time_key`
ORDER BY `time_key`"
)
.fetch_all(pool.inner())
.await
.unwrap();
let history = sqlx::query_as_unchecked!(
TimeFrame,
"SELECT
FROM_UNIXTIME(FLOOR(UNIX_TIMESTAMP(`utc_time`) / 86400) * 86400) AS `time_key`,
COUNT(1) AS `count`
FROM stat
WHERE
`utc_time` > DATE_SUB(NOW(), INTERVAL 31 DAY) AND
`type` = 'reminder_sent'
GROUP BY `time_key`
ORDER BY `time_key`"
)
.fetch_all(pool.inner())
.await
.unwrap();
let history_failed = sqlx::query_as_unchecked!(
TimeFrame,
"SELECT
FROM_UNIXTIME(FLOOR(UNIX_TIMESTAMP(`utc_time`) / 86400) * 86400) AS `time_key`,
COUNT(1) AS `count`
FROM stat
WHERE
`utc_time` > DATE_SUB(NOW(), INTERVAL 31 DAY) AND
`type` = 'reminder_failed'
GROUP BY `time_key`
ORDER BY `time_key`"
)
.fetch_all(pool.inner())
.await
.unwrap();
let interval_count = sqlx::query!(
"SELECT COUNT(1) AS count
FROM reminders
WHERE
`status` = 'pending' AND (
`interval_seconds` IS NOT NULL OR
`interval_months` IS NOT NULL OR
`interval_days` IS NOT NULL
)"
)
.fetch_one(pool.inner())
.await
.unwrap();
let reminder_count = sqlx::query!(
"SELECT COUNT(1) AS count
FROM reminders
WHERE
`status` = 'pending' AND
`interval_seconds` IS NULL AND
`interval_months` IS NULL AND
`interval_days` IS NULL"
)
.fetch_one(pool.inner())
.await
.unwrap();
Ok(json!({
"backlog": backlog.backlog,
"scheduleShort": {
"once": schedule_once,
"interval": schedule_interval
},
"scheduleLong": {
"once": schedule_once_long,
"interval": schedule_interval_long,
},
"historyLong": {
"sent": history,
"failed": history_failed,
},
"count": {
"reminders": reminder_count.count,
"intervals": interval_count.count,
}
}))
}
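The `/data` queries above bucket reminder times with `FROM_UNIXTIME(FLOOR(UNIX_TIMESTAMP(utc_time) / N) * N)`, using N = 300 for the one-day views and N = 86400 for the 31-day views. The same arithmetic as a standalone sketch (the timestamps are examples, not bot data):

// Round a unix timestamp down to the start of its bucket, as the SQL above
// does with FLOOR(UNIX_TIMESTAMP(t) / width) * width. Integer division
// truncates, which matches FLOOR for the positive timestamps involved here.
fn bucket(unix_time: i64, width: i64) -> i64 {
    (unix_time / width) * width
}

fn main() {
    // 2022-05-13 14:38:36 UTC falls in the 14:35:00 five-minute bucket...
    assert_eq!(bucket(1_652_452_716, 300), 1_652_452_500);
    // ...and in the 2022-05-13 00:00:00 UTC daily bucket.
    assert_eq!(bucket(1_652_452_716, 86_400), 1_652_400_000);
}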


@ -1,426 +0,0 @@
use csv::{QuoteStyle, WriterBuilder};
use rocket::{
http::CookieJar,
serde::json::{json, serde_json, Json},
State,
};
use serenity::{
client::Context,
model::id::{ChannelId, GuildId},
};
use sqlx::{MySql, Pool};
use crate::routes::{
dashboard::{
create_reminder, ImportBody, ReminderCreate, ReminderCsv, ReminderTemplateCsv, TodoCsv,
},
JsonResult,
};
#[get("/api/guild/<id>/export/reminders")]
pub async fn export_reminders(
id: u64,
cookies: &CookieJar<'_>,
ctx: &State<Context>,
pool: &State<Pool<MySql>>,
) -> JsonResult {
check_authorization!(cookies, ctx.inner(), id);
let mut csv_writer = WriterBuilder::new().quote_style(QuoteStyle::Always).from_writer(vec![]);
let channels_res = GuildId(id).channels(&ctx.inner()).await;
match channels_res {
Ok(channels) => {
let channels = channels
.keys()
.into_iter()
.map(|k| k.as_u64().to_string())
.collect::<Vec<String>>()
.join(",");
let result = sqlx::query_as_unchecked!(
ReminderCsv,
"SELECT
reminders.attachment,
reminders.attachment_name,
reminders.avatar,
CONCAT('#', channels.channel) AS channel,
reminders.content,
reminders.embed_author,
reminders.embed_author_url,
reminders.embed_color,
reminders.embed_description,
reminders.embed_footer,
reminders.embed_footer_url,
reminders.embed_image_url,
reminders.embed_thumbnail_url,
reminders.embed_title,
reminders.embed_fields,
reminders.enabled,
reminders.expires,
reminders.interval_seconds,
reminders.interval_days,
reminders.interval_months,
reminders.name,
reminders.restartable,
reminders.tts,
reminders.username,
reminders.utc_time
FROM reminders
LEFT JOIN channels ON channels.id = reminders.channel_id
WHERE FIND_IN_SET(channels.channel, ?)",
channels
)
.fetch_all(pool.inner())
.await;
match result {
Ok(reminders) => {
reminders.iter().for_each(|reminder| {
csv_writer.serialize(reminder).unwrap();
});
match csv_writer.into_inner() {
Ok(inner) => match String::from_utf8(inner) {
Ok(encoded) => Ok(json!({ "body": encoded })),
Err(e) => {
warn!("Failed to write UTF-8: {:?}", e);
Err(json!({"error": "Failed to write UTF-8"}))
}
},
Err(e) => {
warn!("Failed to extract CSV: {:?}", e);
Err(json!({"error": "Failed to extract CSV"}))
}
}
}
Err(e) => {
warn!("Failed to complete SQL query: {:?}", e);
Err(json!({"error": "Failed to query reminders"}))
}
}
}
Err(e) => {
warn!("Could not fetch channels from {}: {:?}", id, e);
Err(json!({"error": "Failed to get guild channels"}))
}
}
}
#[put("/api/guild/<id>/export/reminders", data = "<body>")]
pub async fn import_reminders(
id: u64,
cookies: &CookieJar<'_>,
body: Json<ImportBody>,
ctx: &State<Context>,
pool: &State<Pool<MySql>>,
) -> JsonResult {
check_authorization!(cookies, ctx.inner(), id);
let user_id =
cookies.get_private("userid").map(|c| c.value().parse::<u64>().ok()).flatten().unwrap();
match base64::decode(&body.body) {
Ok(body) => {
let mut reader = csv::Reader::from_reader(body.as_slice());
for result in reader.deserialize::<ReminderCsv>() {
match result {
Ok(record) => {
let channel_id = record.channel.split_at(1).1;
match channel_id.parse::<u64>() {
Ok(channel_id) => {
let reminder = ReminderCreate {
attachment: record.attachment,
attachment_name: record.attachment_name,
avatar: record.avatar,
channel: channel_id,
content: record.content,
embed_author: record.embed_author,
embed_author_url: record.embed_author_url,
embed_color: record.embed_color,
embed_description: record.embed_description,
embed_footer: record.embed_footer,
embed_footer_url: record.embed_footer_url,
embed_image_url: record.embed_image_url,
embed_thumbnail_url: record.embed_thumbnail_url,
embed_title: record.embed_title,
embed_fields: record
.embed_fields
.map(|s| serde_json::from_str(&s).ok())
.flatten(),
enabled: record.enabled,
expires: record.expires,
interval_seconds: record.interval_seconds,
interval_days: record.interval_days,
interval_months: record.interval_months,
name: record.name,
restartable: record.restartable,
tts: record.tts,
username: record.username,
utc_time: record.utc_time,
};
create_reminder(
ctx.inner(),
pool.inner(),
GuildId(id),
UserId(user_id),
reminder,
)
.await?;
}
Err(_) => {
return json_err!(format!(
"Failed to parse channel {}",
channel_id
));
}
}
}
Err(e) => {
warn!("Couldn't deserialize CSV row: {:?}", e);
return json_err!("Deserialize error. Aborted");
}
}
}
Ok(json!({}))
}
Err(_) => {
json_err!("Malformed base64")
}
}
}
#[get("/api/guild/<id>/export/todos")]
pub async fn export_todos(
id: u64,
cookies: &CookieJar<'_>,
ctx: &State<Context>,
pool: &State<Pool<MySql>>,
) -> JsonResult {
check_authorization!(cookies, ctx.inner(), id);
let mut csv_writer = WriterBuilder::new().quote_style(QuoteStyle::Always).from_writer(vec![]);
match sqlx::query_as_unchecked!(
TodoCsv,
"SELECT value, CONCAT('#', channels.channel) AS channel_id FROM todos
LEFT JOIN channels ON todos.channel_id = channels.id
INNER JOIN guilds ON todos.guild_id = guilds.id
WHERE guilds.guild = ?",
id
)
.fetch_all(pool.inner())
.await
{
Ok(todos) => {
todos.iter().for_each(|todo| {
csv_writer.serialize(todo).unwrap();
});
match csv_writer.into_inner() {
Ok(inner) => match String::from_utf8(inner) {
Ok(encoded) => Ok(json!({ "body": encoded })),
Err(e) => {
warn!("Failed to write UTF-8: {:?}", e);
json_err!("Failed to write UTF-8")
}
},
Err(e) => {
warn!("Failed to extract CSV: {:?}", e);
json_err!("Failed to extract CSV")
}
}
}
Err(e) => {
warn!("Could not fetch templates from {}: {:?}", id, e);
json_err!("Failed to query templates")
}
}
}
#[put("/api/guild/<id>/export/todos", data = "<body>")]
pub async fn import_todos(
id: u64,
cookies: &CookieJar<'_>,
body: Json<ImportBody>,
ctx: &State<Context>,
pool: &State<Pool<MySql>>,
) -> JsonResult {
check_authorization!(cookies, ctx.inner(), id);
let channels_res = GuildId(id).channels(&ctx.inner()).await;
match channels_res {
Ok(channels) => match base64::decode(&body.body) {
Ok(body) => {
let mut reader = csv::Reader::from_reader(body.as_slice());
let query_placeholder = "(?, (SELECT id FROM channels WHERE channel = ?), (SELECT id FROM guilds WHERE guild = ?))";
let mut query_params = vec![];
for result in reader.deserialize::<TodoCsv>() {
match result {
Ok(record) => match record.channel_id {
Some(channel_id) => {
let channel_id = channel_id.split_at(1).1;
match channel_id.parse::<u64>() {
Ok(channel_id) => {
if channels.contains_key(&ChannelId(channel_id)) {
query_params.push((record.value, Some(channel_id), id));
} else {
return json_err!(format!(
"Invalid channel ID {}",
channel_id
));
}
}
Err(_) => {
return json_err!(format!(
"Invalid channel ID {}",
channel_id
));
}
}
}
None => {
query_params.push((record.value, None, id));
}
},
Err(e) => {
warn!("Couldn't deserialize CSV row: {:?}", e);
return json_err!("Deserialize error. Aborted");
}
}
}
let query_str = format!(
"INSERT INTO todos (value, channel_id, guild_id) VALUES {}",
vec![query_placeholder].repeat(query_params.len()).join(",")
);
let mut query = sqlx::query(&query_str);
for param in query_params {
query = query.bind(param.0).bind(param.1).bind(param.2);
}
let res = query.execute(pool.inner()).await;
match res {
Ok(_) => Ok(json!({})),
Err(e) => {
warn!("Couldn't execute todo query: {:?}", e);
json_err!("An unexpected error occured.")
}
}
}
Err(_) => {
json_err!("Malformed base64")
}
},
Err(e) => {
warn!("Couldn't fetch channels for guild {}: {:?}", id, e);
json_err!("Couldn't fetch channels.")
}
}
}
#[get("/api/guild/<id>/export/reminder_templates")]
pub async fn export_reminder_templates(
id: u64,
cookies: &CookieJar<'_>,
ctx: &State<Context>,
pool: &State<Pool<MySql>>,
) -> JsonResult {
check_authorization!(cookies, ctx.inner(), id);
let mut csv_writer = WriterBuilder::new().quote_style(QuoteStyle::Always).from_writer(vec![]);
match sqlx::query_as_unchecked!(
ReminderTemplateCsv,
"SELECT
name,
attachment,
attachment_name,
avatar,
content,
embed_author,
embed_author_url,
embed_color,
embed_description,
embed_footer,
embed_footer_url,
embed_image_url,
embed_thumbnail_url,
embed_title,
embed_fields,
tts,
username
FROM reminder_template WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?)",
id
)
.fetch_all(pool.inner())
.await
{
Ok(templates) => {
templates.iter().for_each(|template| {
csv_writer.serialize(template).unwrap();
});
match csv_writer.into_inner() {
Ok(inner) => match String::from_utf8(inner) {
Ok(encoded) => Ok(json!({ "body": encoded })),
Err(e) => {
warn!("Failed to write UTF-8: {:?}", e);
json_err!("Failed to write UTF-8")
}
},
Err(e) => {
warn!("Failed to extract CSV: {:?}", e);
json_err!("Failed to extract CSV")
}
}
}
Err(e) => {
warn!("Could not fetch templates from {}: {:?}", id, e);
json_err!("Failed to query templates")
}
}
}
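The import endpoints above expect a JSON body whose `body` field is base64-encoded CSV, with channels referenced as `#<channel id>` (the `#` is stripped with `split_at(1)` before parsing). A hypothetical todo-import payload built with the base64 0.13 API listed in the web crate's manifest; the CSV header names are assumed to match the `TodoCsv` field names:

// Sketch of a request body for PUT /api/guild/<id>/export/todos.
fn main() {
    let csv = "value,channel_id\n\"Write the changelog\",\"#123456789012345678\"\n";
    let payload = serde_json::json!({ "body": base64::encode(csv) });
    println!("{}", payload);
}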


@ -1,621 +0,0 @@
use std::env;
use rocket::{
http::CookieJar,
serde::json::{json, Json},
State,
};
use serde::Serialize;
use serenity::{
client::Context,
model::{
channel::GuildChannel,
id::{ChannelId, GuildId, RoleId},
},
};
use sqlx::{MySql, Pool};
use crate::{
check_guild_subscription, check_subscription,
consts::{
MAX_CONTENT_LENGTH, MAX_EMBED_AUTHOR_LENGTH, MAX_EMBED_DESCRIPTION_LENGTH,
MAX_EMBED_FIELDS, MAX_EMBED_FIELD_TITLE_LENGTH, MAX_EMBED_FIELD_VALUE_LENGTH,
MAX_EMBED_FOOTER_LENGTH, MAX_EMBED_TITLE_LENGTH, MAX_URL_LENGTH, MAX_USERNAME_LENGTH,
MIN_INTERVAL,
},
routes::{
dashboard::{
create_database_channel, create_reminder, template_name_default, DeleteReminder,
DeleteReminderTemplate, PatchReminder, Reminder, ReminderCreate, ReminderTemplate,
},
JsonResult,
},
};
#[derive(Serialize)]
struct ChannelInfo {
id: String,
name: String,
webhook_avatar: Option<String>,
webhook_name: Option<String>,
}
#[get("/api/guild/<id>/patreon")]
pub async fn get_guild_patreon(
id: u64,
cookies: &CookieJar<'_>,
ctx: &State<Context>,
) -> JsonResult {
offline!(Ok(json!({ "patreon": true })));
check_authorization!(cookies, ctx.inner(), id);
match GuildId(id).to_guild_cached(ctx.inner()) {
Some(guild) => {
let member_res = GuildId(env::var("PATREON_GUILD_ID").unwrap().parse().unwrap())
.member(&ctx.inner(), guild.owner_id)
.await;
let patreon = member_res.map_or(false, |member| {
member
.roles
.contains(&RoleId(env::var("PATREON_ROLE_ID").unwrap().parse().unwrap()))
});
Ok(json!({ "patreon": patreon }))
}
None => json_err!("Bot not in guild"),
}
}
#[get("/api/guild/<id>/channels")]
pub async fn get_guild_channels(
id: u64,
cookies: &CookieJar<'_>,
ctx: &State<Context>,
) -> JsonResult {
offline!(Ok(json!(vec![ChannelInfo {
name: "general".to_string(),
id: "1".to_string(),
webhook_avatar: None,
webhook_name: None,
}])));
check_authorization!(cookies, ctx.inner(), id);
match GuildId(id).to_guild_cached(ctx.inner()) {
Some(guild) => {
let mut channels = guild
.channels
.iter()
.filter_map(|(id, channel)| channel.to_owned().guild().map(|c| (id.to_owned(), c)))
.filter(|(_, channel)| channel.is_text_based())
.collect::<Vec<(ChannelId, GuildChannel)>>();
channels.sort_by(|(_, c1), (_, c2)| c1.position.cmp(&c2.position));
let channel_info = channels
.iter()
.map(|(channel_id, channel)| ChannelInfo {
name: channel.name.to_string(),
id: channel_id.to_string(),
webhook_avatar: None,
webhook_name: None,
})
.collect::<Vec<ChannelInfo>>();
Ok(json!(channel_info))
}
None => json_err!("Bot not in guild"),
}
}
#[derive(Serialize)]
struct RoleInfo {
id: String,
name: String,
}
#[get("/api/guild/<id>/roles")]
pub async fn get_guild_roles(id: u64, cookies: &CookieJar<'_>, ctx: &State<Context>) -> JsonResult {
offline!(Ok(json!(vec![RoleInfo { name: "@everyone".to_string(), id: "1".to_string() }])));
check_authorization!(cookies, ctx.inner(), id);
let roles_res = ctx.cache.guild_roles(id);
match roles_res {
Some(roles) => {
let roles = roles
.iter()
.map(|(_, r)| RoleInfo { id: r.id.to_string(), name: r.name.to_string() })
.collect::<Vec<RoleInfo>>();
Ok(json!(roles))
}
None => {
warn!("Could not fetch roles from {}", id);
json_err!("Could not get roles")
}
}
}
#[get("/api/guild/<id>/templates")]
pub async fn get_reminder_templates(
id: u64,
cookies: &CookieJar<'_>,
ctx: &State<Context>,
pool: &State<Pool<MySql>>,
) -> JsonResult {
check_authorization!(cookies, ctx.inner(), id);
match sqlx::query_as_unchecked!(
ReminderTemplate,
"SELECT * FROM reminder_template WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?)",
id
)
.fetch_all(pool.inner())
.await
{
Ok(templates) => Ok(json!(templates)),
Err(e) => {
warn!("Could not fetch templates from {}: {:?}", id, e);
json_err!("Could not get templates")
}
}
}
#[post("/api/guild/<id>/templates", data = "<reminder_template>")]
pub async fn create_reminder_template(
id: u64,
reminder_template: Json<ReminderTemplate>,
cookies: &CookieJar<'_>,
ctx: &State<Context>,
pool: &State<Pool<MySql>>,
) -> JsonResult {
check_authorization!(cookies, ctx.inner(), id);
// validate lengths
check_length!(MAX_CONTENT_LENGTH, reminder_template.content);
check_length!(MAX_EMBED_DESCRIPTION_LENGTH, reminder_template.embed_description);
check_length!(MAX_EMBED_TITLE_LENGTH, reminder_template.embed_title);
check_length!(MAX_EMBED_AUTHOR_LENGTH, reminder_template.embed_author);
check_length!(MAX_EMBED_FOOTER_LENGTH, reminder_template.embed_footer);
check_length_opt!(MAX_EMBED_FIELDS, reminder_template.embed_fields);
if let Some(fields) = &reminder_template.embed_fields {
for field in &fields.0 {
check_length!(MAX_EMBED_FIELD_VALUE_LENGTH, field.value);
check_length!(MAX_EMBED_FIELD_TITLE_LENGTH, field.title);
}
}
check_length_opt!(MAX_USERNAME_LENGTH, reminder_template.username);
check_length_opt!(
MAX_URL_LENGTH,
reminder_template.embed_footer_url,
reminder_template.embed_thumbnail_url,
reminder_template.embed_author_url,
reminder_template.embed_image_url,
reminder_template.avatar
);
// validate urls
check_url_opt!(
reminder_template.embed_footer_url,
reminder_template.embed_thumbnail_url,
reminder_template.embed_author_url,
reminder_template.embed_image_url,
reminder_template.avatar
);
let name = if reminder_template.name.is_empty() {
template_name_default()
} else {
reminder_template.name.clone()
};
match sqlx::query!(
"INSERT INTO reminder_template
(guild_id,
name,
attachment,
attachment_name,
avatar,
content,
embed_author,
embed_author_url,
embed_color,
embed_description,
embed_footer,
embed_footer_url,
embed_image_url,
embed_thumbnail_url,
embed_title,
embed_fields,
tts,
username
) VALUES ((SELECT id FROM guilds WHERE guild = ?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
id, name,
reminder_template.attachment,
reminder_template.attachment_name,
reminder_template.avatar,
reminder_template.content,
reminder_template.embed_author,
reminder_template.embed_author_url,
reminder_template.embed_color,
reminder_template.embed_description,
reminder_template.embed_footer,
reminder_template.embed_footer_url,
reminder_template.embed_image_url,
reminder_template.embed_thumbnail_url,
reminder_template.embed_title,
reminder_template.embed_fields,
reminder_template.tts,
reminder_template.username,
)
.fetch_all(pool.inner())
.await
{
Ok(_) => {
Ok(json!({}))
}
Err(e) => {
warn!("Could not create template for {}: {:?}", id, e);
json_err!("Could not create template")
}
}
}
#[delete("/api/guild/<id>/templates", data = "<delete_reminder_template>")]
pub async fn delete_reminder_template(
id: u64,
delete_reminder_template: Json<DeleteReminderTemplate>,
cookies: &CookieJar<'_>,
ctx: &State<Context>,
pool: &State<Pool<MySql>>,
) -> JsonResult {
check_authorization!(cookies, ctx.inner(), id);
match sqlx::query!(
"DELETE FROM reminder_template WHERE guild_id = (SELECT id FROM guilds WHERE guild = ?) AND id = ?",
id, delete_reminder_template.id
)
.fetch_all(pool.inner())
.await
{
Ok(_) => {
Ok(json!({}))
}
Err(e) => {
warn!("Could not delete template from {}: {:?}", id, e);
json_err!("Could not delete template")
}
}
}
#[post("/api/guild/<id>/reminders", data = "<reminder>")]
pub async fn create_guild_reminder(
id: u64,
reminder: Json<ReminderCreate>,
cookies: &CookieJar<'_>,
serenity_context: &State<Context>,
pool: &State<Pool<MySql>>,
) -> JsonResult {
check_authorization!(cookies, serenity_context.inner(), id);
let user_id =
cookies.get_private("userid").map(|c| c.value().parse::<u64>().ok()).flatten().unwrap();
create_reminder(
serenity_context.inner(),
pool.inner(),
GuildId(id),
UserId(user_id),
reminder.into_inner(),
)
.await
}
#[get("/api/guild/<id>/reminders?<status>")]
pub async fn get_reminders(
id: u64,
cookies: &CookieJar<'_>,
serenity_context: &State<Context>,
pool: &State<Pool<MySql>>,
status: Option<String>,
) -> JsonResult {
check_authorization!(cookies, serenity_context.inner(), id);
let status = status.unwrap_or("pending".to_string());
sqlx::query_as_unchecked!(
Reminder,
"
SELECT
reminders.attachment,
reminders.attachment_name,
reminders.avatar,
channels.channel,
reminders.content,
reminders.embed_author,
reminders.embed_author_url,
reminders.embed_color,
reminders.embed_description,
reminders.embed_footer,
reminders.embed_footer_url,
reminders.embed_image_url,
reminders.embed_thumbnail_url,
reminders.embed_title,
IFNULL(reminders.embed_fields, '[]') AS embed_fields,
reminders.enabled,
reminders.expires,
reminders.interval_seconds,
reminders.interval_days,
reminders.interval_months,
reminders.name,
reminders.restartable,
reminders.tts,
reminders.uid,
reminders.username,
reminders.utc_time,
reminders.status,
reminders.status_change_time,
reminders.status_message
FROM reminders
LEFT JOIN channels ON channels.id = reminders.channel_id
WHERE FIND_IN_SET(`status`, ?) AND reminders.guild_id = (SELECT id FROM guilds WHERE guild = ?)",
status,
id
)
.fetch_all(pool.inner())
.await
.map(|r| Ok(json!(r)))
.unwrap_or_else(|e| {
warn!("Failed to complete SQL query: {:?}", e);
json_err!("Could not load reminders")
})
}
#[patch("/api/guild/<id>/reminders", data = "<reminder>")]
pub async fn edit_reminder(
id: u64,
reminder: Json<PatchReminder>,
serenity_context: &State<Context>,
pool: &State<Pool<MySql>>,
cookies: &CookieJar<'_>,
) -> JsonResult {
check_authorization!(cookies, serenity_context.inner(), id);
let mut error = vec![];
let user_id =
cookies.get_private("userid").map(|c| c.value().parse::<u64>().ok()).flatten().unwrap();
if reminder.message_ok() {
update_field!(pool.inner(), error, reminder.[
content,
embed_author,
embed_description,
embed_footer,
embed_title,
embed_fields,
username
]);
} else {
error.push("Message exceeds limits.".to_string());
}
update_field!(pool.inner(), error, reminder.[
attachment,
attachment_name,
avatar,
embed_author_url,
embed_color,
embed_footer_url,
embed_image_url,
embed_thumbnail_url,
enabled,
expires,
name,
restartable,
tts,
utc_time
]);
if reminder.interval_days.flatten().is_some()
|| reminder.interval_months.flatten().is_some()
|| reminder.interval_seconds.flatten().is_some()
{
if check_guild_subscription(&serenity_context.inner(), id).await
|| check_subscription(&serenity_context.inner(), user_id).await
{
let new_interval_length = match reminder.interval_days {
Some(interval) => interval.unwrap_or(0),
None => sqlx::query!(
"SELECT interval_days AS days FROM reminders WHERE uid = ?",
reminder.uid
)
.fetch_one(pool.inner())
.await
.map_err(|e| {
warn!("Error updating reminder interval: {:?}", e);
json!({ "reminder": Option::<Reminder>::None, "errors": vec!["Unknown error"] })
})?
.days
.unwrap_or(0),
} * 86400 + match reminder.interval_months {
Some(interval) => interval.unwrap_or(0),
None => sqlx::query!(
"SELECT interval_months AS months FROM reminders WHERE uid = ?",
reminder.uid
)
.fetch_one(pool.inner())
.await
.map_err(|e| {
warn!("Error updating reminder interval: {:?}", e);
json!({ "reminder": Option::<Reminder>::None, "errors": vec!["Unknown error"] })
})?
.months
.unwrap_or(0),
} * 2592000 + match reminder.interval_seconds {
Some(interval) => interval.unwrap_or(0),
None => sqlx::query!(
"SELECT interval_seconds AS seconds FROM reminders WHERE uid = ?",
reminder.uid
)
.fetch_one(pool.inner())
.await
.map_err(|e| {
warn!("Error updating reminder interval: {:?}", e);
json!({ "reminder": Option::<Reminder>::None, "errors": vec!["Unknown error"] })
})?
.seconds
.unwrap_or(0),
};
if new_interval_length < *MIN_INTERVAL {
error.push(String::from("New interval is too short."));
} else {
update_field!(pool.inner(), error, reminder.[
interval_days,
interval_months,
interval_seconds
]);
}
}
}
if reminder.channel > 0 {
let channel = ChannelId(reminder.channel).to_channel_cached(&serenity_context.inner());
match channel {
Some(channel) => {
let channel_matches_guild = channel.guild().map_or(false, |c| c.guild_id.0 == id);
if !channel_matches_guild {
warn!(
"Error in `edit_reminder`: channel {:?} not found for guild {}",
reminder.channel, id
);
return Err(json!({"error": "Channel not found"}));
}
let channel = create_database_channel(
serenity_context.inner(),
ChannelId(reminder.channel),
pool.inner(),
)
.await;
if let Err(e) = channel {
warn!("`create_database_channel` returned an error code: {:?}", e);
return Err(
json!({"error": "Failed to configure channel for reminders. Please check the bot permissions"}),
);
}
let channel = channel.unwrap();
match sqlx::query!(
"UPDATE reminders SET channel_id = ? WHERE uid = ?",
channel,
reminder.uid
)
.execute(pool.inner())
.await
{
Ok(_) => {}
Err(e) => {
warn!("Error setting channel: {:?}", e);
error.push("Couldn't set channel".to_string())
}
}
}
None => {
warn!(
"Error in `edit_reminder`: channel {:?} not found for guild {}",
reminder.channel, id
);
return Err(json!({"error": "Channel not found"}));
}
}
}
match sqlx::query_as_unchecked!(
Reminder,
"
SELECT reminders.attachment,
reminders.attachment_name,
reminders.avatar,
channels.channel,
reminders.content,
reminders.embed_author,
reminders.embed_author_url,
reminders.embed_color,
reminders.embed_description,
reminders.embed_footer,
reminders.embed_footer_url,
reminders.embed_image_url,
reminders.embed_thumbnail_url,
reminders.embed_title,
reminders.embed_fields,
reminders.enabled,
reminders.expires,
reminders.interval_seconds,
reminders.interval_days,
reminders.interval_months,
reminders.name,
reminders.restartable,
reminders.tts,
reminders.uid,
reminders.username,
reminders.utc_time,
reminders.status,
reminders.status_change_time,
reminders.status_message
FROM reminders
LEFT JOIN channels ON channels.id = reminders.channel_id
WHERE uid = ?",
reminder.uid
)
.fetch_one(pool.inner())
.await
{
Ok(reminder) => Ok(json!({"reminder": reminder, "errors": error})),
Err(e) => {
warn!("Error exiting `edit_reminder': {:?}", e);
Err(json!({"reminder": Option::<Reminder>::None, "errors": vec!["Unknown error"]}))
}
}
}
#[delete("/api/guild/<_>/reminders", data = "<reminder>")]
pub async fn delete_reminder(
reminder: Json<DeleteReminder>,
pool: &State<Pool<MySql>>,
) -> JsonResult {
match sqlx::query!(
"UPDATE reminders SET `status` = 'deleted', `status_change_time` = NOW() WHERE uid = ?",
reminder.uid
)
.execute(pool.inner())
.await
{
Ok(_) => Ok(json!({})),
Err(e) => {
warn!("Error in `delete_reminder`: {:?}", e);
Err(json!({"error": "Could not delete reminder"}))
}
}
}

View File

@@ -1,745 +0,0 @@
use std::collections::HashMap;
use chrono::{naive::NaiveDateTime, Utc};
use rand::{rngs::OsRng, seq::IteratorRandom};
use rocket::{http::CookieJar, response::Redirect, serde::json::json};
use rocket_dyn_templates::Template;
use serde::{Deserialize, Deserializer, Serialize};
use serenity::{
client::Context,
http::Http,
model::id::{ChannelId, GuildId, UserId},
};
use sqlx::{types::Json, Executor};
use crate::{
check_guild_subscription, check_subscription,
consts::{
CHARACTERS, DAY, DEFAULT_AVATAR, MAX_CONTENT_LENGTH, MAX_EMBED_AUTHOR_LENGTH,
MAX_EMBED_DESCRIPTION_LENGTH, MAX_EMBED_FIELDS, MAX_EMBED_FIELD_TITLE_LENGTH,
MAX_EMBED_FIELD_VALUE_LENGTH, MAX_EMBED_FOOTER_LENGTH, MAX_EMBED_TITLE_LENGTH,
MAX_NAME_LENGTH, MAX_URL_LENGTH, MAX_USERNAME_LENGTH, MIN_INTERVAL,
},
routes::JsonResult,
Database, Error,
};
pub mod export;
pub mod guild;
pub mod user;
type Unset<T> = Option<T>;
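// `Unset` marks fields that may be omitted from a PATCH body entirely. Paired
// with `deserialize_optional_field`, `Unset<Option<T>>` distinguishes "field not
// provided" (outer None) from "field explicitly set to null" (inner None).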
fn name_default() -> String {
"Reminder".to_string()
}
fn template_name_default() -> String {
"Template".to_string()
}
fn channel_default() -> u64 {
0
}
fn id_default() -> u32 {
0
}
fn interval_default() -> Unset<Option<u32>> {
None
}
fn deserialize_optional_field<'de, T, D>(deserializer: D) -> Result<Option<Option<T>>, D::Error>
where
D: Deserializer<'de>,
T: Deserialize<'de>,
{
Ok(Some(Option::deserialize(deserializer)?))
}
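// Illustrative behaviour: an absent field deserializes to None, `"avatar": null`
// to Some(None), and `"avatar": "https://..."` to Some(Some(...)).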
#[derive(Serialize, Deserialize)]
pub struct ReminderTemplate {
#[serde(default = "id_default")]
id: u32,
#[serde(default = "id_default")]
guild_id: u32,
#[serde(default = "template_name_default")]
name: String,
attachment: Option<Vec<u8>>,
attachment_name: Option<String>,
avatar: Option<String>,
content: String,
embed_author: String,
embed_author_url: Option<String>,
embed_color: u32,
embed_description: String,
embed_footer: String,
embed_footer_url: Option<String>,
embed_image_url: Option<String>,
embed_thumbnail_url: Option<String>,
embed_title: String,
embed_fields: Option<Json<Vec<EmbedField>>>,
tts: bool,
username: Option<String>,
}
#[derive(Serialize, Deserialize)]
pub struct ReminderTemplateCsv {
#[serde(default = "template_name_default")]
name: String,
attachment: Option<Vec<u8>>,
attachment_name: Option<String>,
avatar: Option<String>,
content: String,
embed_author: String,
embed_author_url: Option<String>,
embed_color: u32,
embed_description: String,
embed_footer: String,
embed_footer_url: Option<String>,
embed_image_url: Option<String>,
embed_thumbnail_url: Option<String>,
embed_title: String,
embed_fields: Option<String>,
tts: bool,
username: Option<String>,
}
#[derive(Deserialize)]
pub struct DeleteReminderTemplate {
id: u32,
}
#[derive(Serialize, Deserialize)]
pub struct EmbedField {
title: String,
value: String,
inline: bool,
}
#[derive(Deserialize)]
pub struct ReminderCreate {
#[serde(with = "base64s")]
attachment: Option<Vec<u8>>,
attachment_name: Option<String>,
avatar: Option<String>,
#[serde(with = "string")]
channel: u64,
content: String,
embed_author: String,
embed_author_url: Option<String>,
embed_color: u32,
embed_description: String,
embed_footer: String,
embed_footer_url: Option<String>,
embed_image_url: Option<String>,
embed_thumbnail_url: Option<String>,
embed_title: String,
embed_fields: Option<Json<Vec<EmbedField>>>,
enabled: bool,
expires: Option<NaiveDateTime>,
interval_seconds: Option<u32>,
interval_days: Option<u32>,
interval_months: Option<u32>,
#[serde(default = "name_default")]
name: String,
restartable: bool,
tts: bool,
username: Option<String>,
utc_time: NaiveDateTime,
}
#[derive(Serialize, Deserialize)]
pub struct Reminder {
#[serde(with = "base64s")]
attachment: Option<Vec<u8>>,
attachment_name: Option<String>,
avatar: Option<String>,
#[serde(with = "string_opt")]
channel: Option<u64>,
content: String,
embed_author: String,
embed_author_url: Option<String>,
embed_color: u32,
embed_description: String,
embed_footer: String,
embed_footer_url: Option<String>,
embed_image_url: Option<String>,
embed_thumbnail_url: Option<String>,
embed_title: String,
embed_fields: Option<Json<Vec<EmbedField>>>,
enabled: bool,
expires: Option<NaiveDateTime>,
interval_seconds: Option<u32>,
interval_days: Option<u32>,
interval_months: Option<u32>,
#[serde(default = "name_default")]
name: String,
restartable: bool,
tts: bool,
#[serde(default)]
uid: String,
username: Option<String>,
utc_time: NaiveDateTime,
status: String,
status_message: Option<String>,
status_change_time: Option<NaiveDateTime>,
}
#[derive(Serialize, Deserialize)]
pub struct ReminderCsv {
#[serde(with = "base64s")]
attachment: Option<Vec<u8>>,
attachment_name: Option<String>,
avatar: Option<String>,
channel: String,
content: String,
embed_author: String,
embed_author_url: Option<String>,
embed_color: u32,
embed_description: String,
embed_footer: String,
embed_footer_url: Option<String>,
embed_image_url: Option<String>,
embed_thumbnail_url: Option<String>,
embed_title: String,
embed_fields: Option<String>,
enabled: bool,
expires: Option<NaiveDateTime>,
interval_seconds: Option<u32>,
interval_days: Option<u32>,
interval_months: Option<u32>,
#[serde(default = "name_default")]
name: String,
restartable: bool,
tts: bool,
username: Option<String>,
utc_time: NaiveDateTime,
}
#[derive(Deserialize)]
pub struct PatchReminder {
uid: String,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_field")]
attachment: Unset<Option<String>>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_field")]
attachment_name: Unset<Option<String>>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_field")]
avatar: Unset<Option<String>>,
#[serde(default = "channel_default")]
#[serde(with = "string")]
channel: u64,
#[serde(default)]
content: Unset<String>,
#[serde(default)]
embed_author: Unset<String>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_field")]
embed_author_url: Unset<Option<String>>,
#[serde(default)]
embed_color: Unset<u32>,
#[serde(default)]
embed_description: Unset<String>,
#[serde(default)]
embed_footer: Unset<String>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_field")]
embed_footer_url: Unset<Option<String>>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_field")]
embed_image_url: Unset<Option<String>>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_field")]
embed_thumbnail_url: Unset<Option<String>>,
#[serde(default)]
embed_title: Unset<String>,
#[serde(default)]
embed_fields: Unset<Json<Vec<EmbedField>>>,
#[serde(default)]
enabled: Unset<bool>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_field")]
expires: Unset<Option<NaiveDateTime>>,
#[serde(default = "interval_default")]
#[serde(deserialize_with = "deserialize_optional_field")]
interval_seconds: Unset<Option<u32>>,
#[serde(default = "interval_default")]
#[serde(deserialize_with = "deserialize_optional_field")]
interval_days: Unset<Option<u32>>,
#[serde(default = "interval_default")]
#[serde(deserialize_with = "deserialize_optional_field")]
interval_months: Unset<Option<u32>>,
#[serde(default)]
name: Unset<String>,
#[serde(default)]
restartable: Unset<bool>,
#[serde(default)]
tts: Unset<bool>,
#[serde(default)]
#[serde(deserialize_with = "deserialize_optional_field")]
username: Unset<Option<String>>,
#[serde(default)]
utc_time: Unset<NaiveDateTime>,
}
impl PatchReminder {
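// Checks that every patched message field fits within the Discord limits defined
// in consts (content, embed text, embed fields and username lengths).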
fn message_ok(&self) -> bool {
self.content.as_ref().map_or(true, |c| c.len() <= MAX_CONTENT_LENGTH)
&& self.embed_author.as_ref().map_or(true, |c| c.len() <= MAX_EMBED_AUTHOR_LENGTH)
&& self
.embed_description
.as_ref()
.map_or(true, |c| c.len() <= MAX_EMBED_DESCRIPTION_LENGTH)
&& self.embed_footer.as_ref().map_or(true, |c| c.len() <= MAX_EMBED_FOOTER_LENGTH)
&& self.embed_title.as_ref().map_or(true, |c| c.len() <= MAX_EMBED_TITLE_LENGTH)
&& self.embed_fields.as_ref().map_or(true, |c| {
c.0.len() <= MAX_EMBED_FIELDS
&& c.0.iter().all(|f| {
f.title.len() <= MAX_EMBED_FIELD_TITLE_LENGTH
&& f.value.len() <= MAX_EMBED_FIELD_VALUE_LENGTH
})
})
&& self
.username
.as_ref()
.map_or(true, |c| c.as_ref().map_or(true, |v| v.len() <= MAX_USERNAME_LENGTH))
}
}
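// Generates a 64-character reminder UID by sampling `CHARACTERS` with the OS RNG.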
pub fn generate_uid() -> String {
let mut generator: OsRng = Default::default();
(0..64)
.map(|_| CHARACTERS.chars().choose(&mut generator).unwrap().to_owned().to_string())
.collect::<Vec<String>>()
.join("")
}
// https://github.com/serde-rs/json/issues/329#issuecomment-305608405
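// `string` and `string_opt` (de)serialize u64 snowflake IDs as JSON strings,
// since 64-bit values exceed JavaScript's safe integer range.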
mod string {
use std::{fmt::Display, str::FromStr};
use serde::{de, Deserialize, Deserializer};
pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
T: FromStr,
T::Err: Display,
D: Deserializer<'de>,
{
String::deserialize(deserializer)?.parse().map_err(de::Error::custom)
}
}
mod string_opt {
use std::{fmt::Display, str::FromStr};
use serde::{de, Deserialize, Deserializer, Serializer};
pub fn serialize<T, S>(value: &Option<T>, serializer: S) -> Result<S::Ok, S::Error>
where
T: Display,
S: Serializer,
{
match value {
Some(value) => serializer.collect_str(value),
None => serializer.serialize_none(),
}
}
pub fn deserialize<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
where
T: FromStr,
T::Err: Display,
D: Deserializer<'de>,
{
Option::deserialize(deserializer)?
.map(|d: String| d.parse().map_err(de::Error::custom))
.transpose()
}
}
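// `base64s` carries binary attachments as base64 strings on the wire while
// keeping them as raw bytes in the structs above.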
mod base64s {
use serde::{de, Deserialize, Deserializer, Serializer};
pub fn serialize<S>(value: &Option<Vec<u8>>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
if let Some(opt) = value {
serializer.collect_str(&base64::encode(opt))
} else {
serializer.serialize_none()
}
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<Vec<u8>>, D::Error>
where
D: Deserializer<'de>,
{
let string = Option::<String>::deserialize(deserializer)?;
string.map(|b| base64::decode(b).map_err(de::Error::custom)).transpose()
}
}
#[derive(Deserialize)]
pub struct DeleteReminder {
uid: String,
}
#[derive(Deserialize)]
pub struct ImportBody {
body: String,
}
#[derive(Serialize, Deserialize)]
pub struct TodoCsv {
value: String,
channel_id: Option<String>,
}
pub async fn create_reminder(
ctx: &Context,
pool: impl sqlx::Executor<'_, Database = Database> + Copy,
guild_id: GuildId,
user_id: UserId,
reminder: ReminderCreate,
) -> JsonResult {
// check guild in db
match sqlx::query!("SELECT 1 as A FROM guilds WHERE guild = ?", guild_id.0)
.fetch_one(pool)
.await
{
Err(sqlx::Error::RowNotFound) => {
if sqlx::query!("INSERT INTO guilds (guild) VALUES (?)", guild_id.0)
.execute(pool)
.await
.is_err()
{
return Err(json!({"error": "Guild could not be created"}));
}
}
_ => {}
}
// validate channel
let channel = ChannelId(reminder.channel).to_channel_cached(&ctx);
let channel_exists = channel.is_some();
let channel_matches_guild =
channel.map_or(false, |c| c.guild().map_or(false, |c| c.guild_id == guild_id));
if !channel_matches_guild || !channel_exists {
warn!(
"Error in `create_reminder`: channel {:?} not found for guild {} (channel exists: {})",
reminder.channel, guild_id, channel_exists
);
return Err(json!({"error": "Channel not found"}));
}
let channel = create_database_channel(&ctx, ChannelId(reminder.channel), pool).await;
if let Err(e) = channel {
warn!("`create_database_channel` returned an error code: {:?}", e);
return Err(
json!({"error": "Failed to configure channel for reminders. Please check the bot permissions"}),
);
}
let channel = channel.unwrap();
// validate lengths
check_length!(MAX_NAME_LENGTH, reminder.name);
check_length!(MAX_CONTENT_LENGTH, reminder.content);
check_length!(MAX_EMBED_DESCRIPTION_LENGTH, reminder.embed_description);
check_length!(MAX_EMBED_TITLE_LENGTH, reminder.embed_title);
check_length!(MAX_EMBED_AUTHOR_LENGTH, reminder.embed_author);
check_length!(MAX_EMBED_FOOTER_LENGTH, reminder.embed_footer);
check_length_opt!(MAX_EMBED_FIELDS, reminder.embed_fields);
if let Some(fields) = &reminder.embed_fields {
for field in &fields.0 {
check_length!(MAX_EMBED_FIELD_VALUE_LENGTH, field.value);
check_length!(MAX_EMBED_FIELD_TITLE_LENGTH, field.title);
}
}
check_length_opt!(MAX_USERNAME_LENGTH, reminder.username);
check_length_opt!(
MAX_URL_LENGTH,
reminder.embed_footer_url,
reminder.embed_thumbnail_url,
reminder.embed_author_url,
reminder.embed_image_url,
reminder.avatar
);
// validate urls
check_url_opt!(
reminder.embed_footer_url,
reminder.embed_thumbnail_url,
reminder.embed_author_url,
reminder.embed_image_url,
reminder.avatar
);
// validate time and interval
if reminder.utc_time < Utc::now().naive_utc() {
return Err(json!({"error": "Time must be in the future"}));
}
if reminder.interval_seconds.is_some()
|| reminder.interval_days.is_some()
|| reminder.interval_months.is_some()
{
if reminder.interval_months.unwrap_or(0) * 30 * DAY as u32
+ reminder.interval_days.unwrap_or(0) * DAY as u32
+ reminder.interval_seconds.unwrap_or(0)
< *MIN_INTERVAL
{
return Err(json!({"error": "Interval too short"}));
}
}
// check patreon if necessary
if reminder.interval_seconds.is_some()
|| reminder.interval_days.is_some()
|| reminder.interval_months.is_some()
{
if !check_guild_subscription(&ctx, guild_id).await
&& !check_subscription(&ctx, user_id).await
{
return Err(json!({"error": "Patreon is required to set intervals"}));
}
}
// `attachment` has already been decoded from base64 by the `base64s` serde helper
let attachment_data = reminder.attachment.clone();
let name = if reminder.name.is_empty() { name_default() } else { reminder.name.clone() };
let username = if reminder.username.as_ref().map(|s| s.is_empty()).unwrap_or(true) {
None
} else {
reminder.username
};
let new_uid = generate_uid();
// write to db
match sqlx::query!(
"
INSERT INTO reminders (
uid,
attachment,
attachment_name,
channel_id,
guild_id,
avatar,
content,
embed_author,
embed_author_url,
embed_color,
embed_description,
embed_footer,
embed_footer_url,
embed_image_url,
embed_thumbnail_url,
embed_title,
embed_fields,
enabled,
expires,
interval_seconds,
interval_days,
interval_months,
name,
restartable,
tts,
username,
`utc_time`
) VALUES (?, ?, ?, ?,
(SELECT id FROM guilds WHERE guild = ?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
?, ?, ?, ?, ?)",
new_uid,
attachment_data,
reminder.attachment_name,
channel,
guild_id.0,
reminder.avatar,
reminder.content,
reminder.embed_author,
reminder.embed_author_url,
reminder.embed_color,
reminder.embed_description,
reminder.embed_footer,
reminder.embed_footer_url,
reminder.embed_image_url,
reminder.embed_thumbnail_url,
reminder.embed_title,
reminder.embed_fields,
reminder.enabled,
reminder.expires,
reminder.interval_seconds,
reminder.interval_days,
reminder.interval_months,
name,
reminder.restartable,
reminder.tts,
username,
reminder.utc_time,
)
.execute(pool)
.await
{
Ok(_) => sqlx::query_as_unchecked!(
Reminder,
"SELECT
reminders.attachment,
reminders.attachment_name,
reminders.avatar,
channels.channel,
reminders.content,
reminders.embed_author,
reminders.embed_author_url,
reminders.embed_color,
reminders.embed_description,
reminders.embed_footer,
reminders.embed_footer_url,
reminders.embed_image_url,
reminders.embed_thumbnail_url,
reminders.embed_title,
reminders.embed_fields,
reminders.enabled,
reminders.expires,
reminders.interval_seconds,
reminders.interval_days,
reminders.interval_months,
reminders.name,
reminders.restartable,
reminders.tts,
reminders.uid,
reminders.username,
reminders.utc_time,
reminders.status,
reminders.status_change_time,
reminders.status_message
FROM reminders
LEFT JOIN channels ON channels.id = reminders.channel_id
WHERE uid = ?",
new_uid
)
.fetch_one(pool)
.await
.map(|r| Ok(json!(r)))
.unwrap_or_else(|e| {
warn!("Failed to complete SQL query: {:?}", e);
Err(json!({"error": "Could not load reminder"}))
}),
Err(e) => {
warn!("Error in `create_reminder`: Could not execute query: {:?}", e);
Err(json!({"error": "Unknown error"}))
}
}
}
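// Ensures the Discord channel has a row in `channels` with a working webhook,
// creating the webhook and/or the row as needed, and returns the internal
// channel id used as `reminders.channel_id`.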
async fn create_database_channel(
ctx: impl AsRef<Http>,
channel: ChannelId,
pool: impl Executor<'_, Database = Database> + Copy,
) -> Result<u32, crate::Error> {
let row =
sqlx::query!("SELECT webhook_token, webhook_id FROM channels WHERE channel = ?", channel.0)
.fetch_one(pool)
.await;
match row {
Ok(row) => {
if row.webhook_token.is_none() || row.webhook_id.is_none() {
let webhook = channel
.create_webhook_with_avatar(&ctx, "Reminder", DEFAULT_AVATAR.clone())
.await
.map_err(|e| Error::Serenity(e))?;
sqlx::query!(
"UPDATE channels SET webhook_id = ?, webhook_token = ? WHERE channel = ?",
webhook.id.0,
webhook.token,
channel.0
)
.execute(pool)
.await
.map_err(|e| Error::SQLx(e))?;
}
Ok(())
}
Err(sqlx::Error::RowNotFound) => {
// create webhook
let webhook = channel
.create_webhook_with_avatar(&ctx, "Reminder", DEFAULT_AVATAR.clone())
.await
.map_err(|e| Error::Serenity(e))?;
// create database entry
sqlx::query!(
"INSERT INTO channels (
webhook_id,
webhook_token,
channel
) VALUES (?, ?, ?)",
webhook.id.0,
webhook.token,
channel.0
)
.execute(pool)
.await
.map_err(|e| Error::SQLx(e))?;
Ok(())
}
Err(e) => Err(Error::SQLx(e)),
}?;
let row = sqlx::query!("SELECT id FROM channels WHERE channel = ?", channel.0)
.fetch_one(pool)
.await
.map_err(|e| Error::SQLx(e))?;
Ok(row.id)
}
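// The dashboard routes below all render the same template; sub-paths (`/<_>`,
// `/<_>/<_>`) are presumably resolved client-side, so they simply serve the
// dashboard shell when a session cookie is present.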
#[get("/")]
pub async fn dashboard_home(cookies: &CookieJar<'_>) -> Result<Template, Redirect> {
if cookies.get_private("userid").is_some() {
let map: HashMap<&str, String> = HashMap::new();
Ok(Template::render("dashboard", &map))
} else {
Err(Redirect::to("/login/discord"))
}
}
#[get("/<_>")]
pub async fn dashboard_1(cookies: &CookieJar<'_>) -> Result<Template, Redirect> {
if cookies.get_private("userid").is_some() {
let map: HashMap<&str, String> = HashMap::new();
Ok(Template::render("dashboard", &map))
} else {
Err(Redirect::to("/login/discord"))
}
}
#[get("/<_>/<_>")]
pub async fn dashboard_2(cookies: &CookieJar<'_>) -> Result<Template, Redirect> {
if cookies.get_private("userid").is_some() {
let map: HashMap<&str, String> = HashMap::new();
Ok(Template::render("dashboard", &map))
} else {
Err(Redirect::to("/login/discord"))
}
}

View File

@@ -1,172 +0,0 @@
use std::env;
use chrono_tz::Tz;
use reqwest::Client;
use rocket::{
http::CookieJar,
serde::json::{json, Json, Value as JsonValue},
State,
};
use serde::{Deserialize, Serialize};
use serenity::{
client::Context,
model::{
id::{GuildId, RoleId},
permissions::Permissions,
},
};
use sqlx::{MySql, Pool};
use crate::consts::DISCORD_API;
#[derive(Serialize)]
struct UserInfo {
name: String,
patreon: bool,
timezone: Option<String>,
}
#[derive(Deserialize)]
pub struct UpdateUser {
timezone: String,
}
#[derive(Serialize)]
struct GuildInfo {
id: String,
name: String,
}
#[derive(Deserialize)]
pub struct PartialGuild {
pub id: GuildId,
pub icon: Option<String>,
pub name: String,
#[serde(default)]
pub owner: bool,
#[serde(rename = "permissions_new")]
pub permissions: Option<String>,
}
#[get("/api/user")]
pub async fn get_user_info(
cookies: &CookieJar<'_>,
ctx: &State<Context>,
pool: &State<Pool<MySql>>,
) -> JsonValue {
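// `offline!` is assumed to short-circuit with stub data when the server runs
// without a live Discord connection (e.g. local development).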
offline!(json!(UserInfo { name: "Discord".to_string(), patreon: true, timezone: None }));
if let Some(user_id) =
cookies.get_private("userid").map(|u| u.value().parse::<u64>().ok()).flatten()
{
let member_res = GuildId(env::var("PATREON_GUILD_ID").unwrap().parse().unwrap())
.member(&ctx.inner(), user_id)
.await;
let timezone = sqlx::query!(
"SELECT IFNULL(timezone, 'UTC') AS timezone FROM users WHERE user = ?",
user_id
)
.fetch_one(pool.inner())
.await
.map_or(None, |q| Some(q.timezone));
let user_info = UserInfo {
name: cookies
.get_private("username")
.map_or("DiscordUser#0000".to_string(), |c| c.value().to_string()),
patreon: member_res.map_or(false, |member| {
member
.roles
.contains(&RoleId(env::var("PATREON_ROLE_ID").unwrap().parse().unwrap()))
}),
timezone,
};
json!(user_info)
} else {
json!({"error": "Not authorized"})
}
}
#[patch("/api/user", data = "<user>")]
pub async fn update_user_info(
cookies: &CookieJar<'_>,
user: Json<UpdateUser>,
pool: &State<Pool<MySql>>,
) -> JsonValue {
if let Some(user_id) =
cookies.get_private("userid").map(|u| u.value().parse::<u64>().ok()).flatten()
{
if user.timezone.parse::<Tz>().is_ok() {
let _ = sqlx::query!(
"UPDATE users SET timezone = ? WHERE user = ?",
user.timezone,
user_id,
)
.execute(pool.inner())
.await;
json!({})
} else {
json!({"error": "Timezone not recognized"})
}
} else {
json!({"error": "Not authorized"})
}
}
#[get("/api/user/guilds")]
pub async fn get_user_guilds(cookies: &CookieJar<'_>, reqwest_client: &State<Client>) -> JsonValue {
offline!(json!(vec![GuildInfo { id: "1".to_string(), name: "Guild".to_string() }]));
if let Some(access_token) = cookies.get_private("access_token") {
let request_res = reqwest_client
.get(format!("{}/users/@me/guilds", DISCORD_API))
.bearer_auth(access_token.value())
.send()
.await;
match request_res {
Ok(response) => {
let guilds_res = response.json::<Vec<PartialGuild>>().await;
match guilds_res {
Ok(guilds) => {
let reduced_guilds = guilds
.iter()
.filter(|g| {
g.owner
|| g.permissions.as_ref().map_or(false, |p| {
let permissions =
Permissions::from_bits_truncate(p.parse().unwrap());
permissions.manage_messages()
|| permissions.manage_guild()
|| permissions.administrator()
})
})
.map(|g| GuildInfo { id: g.id.to_string(), name: g.name.to_string() })
.collect::<Vec<GuildInfo>>();
json!(reduced_guilds)
}
Err(e) => {
warn!("Error constructing user from request: {:?}", e);
json!({"error": "Could not get user details"})
}
}
}
Err(e) => {
warn!("Error getting user guilds: {:?}", e);
json!({"error": "Could not reach Discord"})
}
}
} else {
json!({"error": "Not authorized"})
}
}

View File

@@ -1,157 +0,0 @@
use log::warn;
use oauth2::{
basic::BasicClient, reqwest::async_http_client, AuthorizationCode, CsrfToken,
PkceCodeChallenge, PkceCodeVerifier, Scope, TokenResponse,
};
use reqwest::Client;
use rocket::{
http::{private::cookie::Expiration, Cookie, CookieJar, SameSite},
response::{Flash, Redirect},
uri, State,
};
use serenity::model::user::User;
use crate::{consts::DISCORD_API, routes};
#[get("/discord")]
pub async fn discord_login(
oauth2_client: &State<BasicClient>,
cookies: &CookieJar<'_>,
) -> Redirect {
let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256();
let (auth_url, csrf_token) = oauth2_client
.authorize_url(CsrfToken::new_random)
// Set the desired scopes.
.add_scope(Scope::new("identify".to_string()))
.add_scope(Scope::new("guilds".to_string()))
// Set the PKCE code challenge.
.set_pkce_challenge(pkce_challenge)
.url();
// store the pkce secret to verify the authorization later
cookies.add_private(
Cookie::build("verify", pkce_verifier.secret().to_string())
.http_only(true)
.path("/login")
.same_site(SameSite::Lax)
.expires(Expiration::Session)
.finish(),
);
// store the csrf token to verify no interference
cookies.add_private(
Cookie::build("csrf", csrf_token.secret().to_string())
.http_only(true)
.path("/login")
.same_site(SameSite::Lax)
.expires(Expiration::Session)
.finish(),
);
Redirect::to(auth_url.to_string())
}
#[get("/discord/logout")]
pub async fn discord_logout(cookies: &CookieJar<'_>) -> Redirect {
cookies.remove_private(Cookie::named("username"));
cookies.remove_private(Cookie::named("userid"));
cookies.remove_private(Cookie::named("access_token"));
Redirect::to(uri!(routes::index))
}
#[get("/discord/authorized?<code>&<state>")]
pub async fn discord_callback(
code: &str,
state: &str,
cookies: &CookieJar<'_>,
oauth2_client: &State<BasicClient>,
reqwest_client: &State<Client>,
) -> Result<Redirect, Flash<Redirect>> {
if let (Some(pkce_secret), Some(csrf_token)) =
(cookies.get_private("verify"), cookies.get_private("csrf"))
{
if state == csrf_token.value() {
let token_result = oauth2_client
.exchange_code(AuthorizationCode::new(code.to_string()))
// Set the PKCE code verifier.
.set_pkce_verifier(PkceCodeVerifier::new(pkce_secret.value().to_string()))
.request_async(async_http_client)
.await;
cookies.remove_private(Cookie::named("verify"));
cookies.remove_private(Cookie::named("csrf"));
match token_result {
Ok(token) => {
cookies.add_private(
Cookie::build("access_token", token.access_token().secret().to_string())
.secure(true)
.http_only(true)
.path("/dashboard")
.finish(),
);
let request_res = reqwest_client
.get(format!("{}/users/@me", DISCORD_API))
.bearer_auth(token.access_token().secret())
.send()
.await;
match request_res {
Ok(response) => {
let user_res = response.json::<User>().await;
match user_res {
Ok(user) => {
let user_name = format!("{}#{}", user.name, user.discriminator);
let user_id = user.id.as_u64().to_string();
cookies.add_private(Cookie::new("username", user_name));
cookies.add_private(Cookie::new("userid", user_id));
Ok(Redirect::to(uri!(super::return_to_same_site("dashboard"))))
}
Err(e) => {
warn!("Error constructing user from request: {:?}", e);
Err(Flash::new(
Redirect::to(uri!(super::return_to_same_site(""))),
"danger",
"Failed to contact Discord",
))
}
}
}
Err(e) => {
warn!("Error getting user info: {:?}", e);
Err(Flash::new(
Redirect::to(uri!(super::return_to_same_site(""))),
"danger",
"Failed to contact Discord",
))
}
}
}
Err(e) => {
warn!("Error in discord callback: {:?}", e);
Err(Flash::new(
Redirect::to(uri!(super::return_to_same_site(""))),
"warning",
"Your login request was rejected. The server may be misconfigured. Please retry or alert us in Discord.",
))
}
}
} else {
Err(Flash::new(Redirect::to(uri!(super::return_to_same_site(""))), "danger", "Your request failed to validate, and so has been rejected (CSRF Validation Failure)"))
}
} else {
Err(Flash::new(Redirect::to(uri!(super::return_to_same_site(""))), "warning", "Your request was missing information, and so has been rejected (CSRF Validation Tokens Missing)"))
}
}

View File

@@ -1,110 +0,0 @@
pub mod admin;
pub mod dashboard;
pub mod login;
pub mod report;
use std::collections::HashMap;
use rocket::{request::FlashMessage, serde::json::Value as JsonValue};
use rocket_dyn_templates::Template;
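// Handlers return JSON either way; `Err` carries an error payload rather than an
// HTTP error type, so routes can early-return error bodies with `?`-style flows.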
pub type JsonResult = Result<JsonValue, JsonValue>;
#[get("/")]
pub async fn index(flash: Option<FlashMessage<'_>>) -> Template {
let mut map: HashMap<&str, String> = HashMap::new();
if let Some(message) = flash {
map.insert("flashed_message", message.message().to_string());
map.insert("flashed_grade", message.kind().to_string());
}
Template::render("index", &map)
}
#[get("/ret?<to>")]
pub async fn return_to_same_site(to: &str) -> Template {
let mut map: HashMap<&str, String> = HashMap::new();
map.insert("to", to.to_string());
Template::render("return", &map)
}
#[get("/cookies")]
pub async fn cookies() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("cookies", &map)
}
#[get("/privacy")]
pub async fn privacy() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("privacy", &map)
}
#[get("/terms")]
pub async fn terms() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("terms", &map)
}
#[get("/")]
pub async fn help() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("help", &map)
}
#[get("/timezone")]
pub async fn help_timezone() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("support/timezone", &map)
}
#[get("/create_reminder")]
pub async fn help_create_reminder() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("support/create_reminder", &map)
}
#[get("/delete_reminder")]
pub async fn help_delete_reminder() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("support/delete_reminder", &map)
}
#[get("/timers")]
pub async fn help_timers() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("support/timers", &map)
}
#[get("/todo_lists")]
pub async fn help_todo_lists() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("support/todo_lists", &map)
}
#[get("/macros")]
pub async fn help_macros() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("support/macros", &map)
}
#[get("/intervals")]
pub async fn help_intervals() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("support/intervals", &map)
}
#[get("/dashboard")]
pub async fn help_dashboard() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("support/dashboard", &map)
}
#[get("/iemanager")]
pub async fn help_iemanager() -> Template {
let map: HashMap<&str, String> = HashMap::new();
Template::render("support/iemanager", &map)
}

View File

@@ -1,48 +0,0 @@
use rocket::{
http::CookieJar,
serde::{
json::{json, Json},
Deserialize,
},
};
use crate::routes::JsonResult;
#[derive(Deserialize)]
pub struct ClientError {
#[serde(rename = "reporterId")]
reporter_id: String,
url: String,
#[serde(rename = "relativeTimestamp")]
relative_timestamp: i64,
#[serde(rename = "errorMessage")]
error_message: String,
#[serde(rename = "errorLine")]
error_line: u64,
#[serde(rename = "errorFile")]
error_file: String,
#[serde(rename = "errorType")]
error_type: String,
}
#[post("/report", data = "<client_error>")]
pub async fn report_error(cookies: &CookieJar<'_>, client_error: Json<ClientError>) -> JsonResult {
if let Some(user_id) = cookies.get_private("userid") {
error!(
"User {} reports a client-side error.
{}, {}:{} at {}ms
{}: {}
Chain: {}",
user_id,
client_error.url,
client_error.error_file,
client_error.error_line,
client_error.relative_timestamp,
client_error.error_type,
client_error.error_message,
client_error.reporter_id
);
}
Ok(json!({}))
}

File diff suppressed because one or more lines are too long

View File

@@ -1,91 +0,0 @@
.date-selector-wrapper {
width: 200px;
padding: 3px;
background-color: #fff;
box-shadow: 1px 1px 10px 1px #5c5c5c;
position: absolute;
font-size: 12px;
-webkit-user-select: none;
-khtml-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
-o-user-select: none;
/* user-select: none; */
}
.cal-header, .cal-row {
display: flex;
width: 100%;
height: 30px;
line-height: 30px;
text-align: center;
}
.cal-cell, .cal-nav {
cursor: pointer;
}
.cal-day-names {
height: 25px;
line-height: 25px;
}
.cal-day-names .cal-cell {
cursor: default;
font-weight: bold;
}
.cal-cell-prev, .cal-cell-next {
color: #777;
}
.cal-months .cal-row, .cal-years .cal-row {
height: 60px;
line-height: 60px;
}
.cal-nav-prev, .cal-nav-next {
flex: 0.15;
}
.cal-nav-current {
flex: 0.75;
font-weight: bold;
}
.cal-months .cal-cell, .cal-years .cal-cell {
flex: 0.25;
}
.cal-days .cal-cell {
flex: 0.143;
}
.cal-value {
color: #fff;
background-color: #286090;
}
.cal-cell:hover, .cal-nav:hover {
background-color: #eee;
}
.cal-value:hover {
background-color: #204d74;
}
/* time footer */
.cal-time {
display: flex;
justify-content: flex-start;
height: 27px;
line-height: 27px;
}
.cal-time-label, .cal-time-value {
flex: 0.12;
text-align: center;
}
.cal-time-slider {
flex: 0.77;
background-image: linear-gradient(to right, #d1d8dd, #d1d8dd);
background-repeat: no-repeat;
background-size: 100% 1px;
background-position: left 50%;
height: 100%;
}
.cal-time-slider input {
width: 100%;
-webkit-appearance: none;
background: 0 0;
cursor: pointer;
height: 100%;
outline: 0;
user-select: auto;
}

Some files were not shown because too many files have changed in this diff.