Compare commits

12 commits
main ... logger

18 changed files with 230 additions and 337 deletions

View file

@@ -18,5 +18,4 @@
import-self,
relative-beyond-top-level,
too-many-instance-attributes,
duplicate-code,
too-many-nested-blocks
duplicate-code

View file

@@ -58,7 +58,7 @@ jobs:
npx -p "@getmeli/cli" meli upload ./site \
--url "https://pages.coastalcommits.com" \
--site "${{ vars.MELI_SITE_ID }}" \
--token "${{ secrets.MELI_SECRET }}" \
--token "${{ secrets.MELI_SITE_SECRET }}" \
--release "$CI_ACTION_REF_NAME_SLUG/${{ env.GITHUB_SHA }}" \
--branch "$CI_ACTION_REF_NAME_SLUG"

View file

@@ -19,7 +19,8 @@ from redbot.core import app_commands, commands, data_manager
from redbot.core.app_commands import Choice
from redbot.core.bot import Red
from redbot.core.commands.converter import parse_relativedelta, parse_timedelta
from redbot.core.utils.chat_formatting import box, error, humanize_list, humanize_timedelta, warning
from redbot.core.utils.chat_formatting import (box, error, humanize_list,
humanize_timedelta, warning)
from aurora.importers.aurora import ImportAuroraView
from aurora.importers.galacticbot import ImportGalacticBotView
@@ -28,10 +29,19 @@ from aurora.menus.guild import Guild
from aurora.menus.immune import Immune
from aurora.menus.overrides import Overrides
from aurora.utilities.config import config, register_config
from aurora.utilities.database import connect, create_guild_table, fetch_case, mysql_log
from aurora.utilities.factory import addrole_embed, case_factory, changes_factory, evidenceformat_factory, guild_embed, immune_embed, message_factory, overrides_embed
from aurora.utilities.database import (connect, create_guild_table, fetch_case,
mysql_log)
from aurora.utilities.factory import (addrole_embed, case_factory,
changes_factory, evidenceformat_factory,
guild_embed, immune_embed,
message_factory, overrides_embed)
from aurora.utilities.logger import logger
from aurora.utilities.utils import check_moddable, check_permissions, convert_timedelta_to_str, fetch_channel_dict, fetch_user_dict, generate_dict, get_footer_image, log, send_evidenceformat, timedelta_from_relativedelta
from aurora.utilities.utils import (check_moddable, check_permissions,
convert_timedelta_to_str,
fetch_channel_dict, fetch_user_dict,
generate_dict, get_footer_image, log,
send_evidenceformat,
timedelta_from_relativedelta)
class Aurora(commands.Cog):
@@ -40,7 +50,7 @@ class Aurora(commands.Cog):
This cog stores all of its data in an SQLite database."""
__author__ = ["SeaswimmerTheFsh"]
__version__ = "2.1.3"
__version__ = "2.1.2"
__documentation__ = "https://seacogs.coastalcommits.com/aurora/"
async def red_delete_data_for_user(self, *, requester, user_id: int):

View file

@@ -381,10 +381,6 @@ async def evidenceformat_factory(interaction: Interaction, case_dict: dict) -> s
content = f"Case: {case_dict['moderation_id']:,} ({str.title(case_dict['moderation_type'])})\nTarget: {target_name} ({target_user['id']})\nModerator: {moderator_name} ({moderator_user['id']})"
if case_dict["role_id"] != "0":
role = interaction.guild.get_role(int(case_dict["role_id"]))
content += "\nRole: " + (role.name if role is not None else case_dict["role_id"])
if case_dict["duration"] != "NULL":
hours, minutes, seconds = map(int, case_dict["duration"].split(":"))
td = timedelta(hours=hours, minutes=minutes, seconds=seconds)

View file

@@ -100,7 +100,7 @@ class Backup(commands.Cog):
except (json.JSONDecodeError, IndexError):
try:
export = json.loads(await ctx.message.reference.resolved.attachments[0].read())
except (json.JSONDecodeError, IndexError, AttributeError):
except (json.JSONDecodeError, IndexError):
await ctx.send(error("Please provide a valid JSON export file."))
return

View file

@@ -62,7 +62,7 @@ class EmojiInfo(commands.Cog):
else:
emoji_url = emoji.url
if emoji.id is not None:
if emoji.id:
emoji_id = f"{bold('ID:')} `{emoji.id}`\n"
markdown = f"`<{'a' if emoji.animated else ''}:{emoji.name}:{emoji.id}>`"
name = f"{bold('Name:')} {emoji.name}\n"
@@ -91,14 +91,13 @@
emoji="What emoji would you like to get information on?",
ephemeral="Would you like the response to be hidden?"
)
async def emoji_slash(self, interaction: discord.Interaction, emoji: str, ephemeral: bool = True) -> None:
async def emoji_slash(self, interaction: discord.Interaction, emoji: str, ephemeral: bool = False) -> None:
"""Retrieve information about an emoji."""
await interaction.response.defer(ephemeral=ephemeral)
try:
emoji: PartialEmoji = PartialEmoji.from_str(self, value=emoji)
string, emoji_url, = await self.get_emoji_info(emoji)
self.logger.verbose(f"Emoji:\n{string}")
except (IndexError, UnboundLocalError):
return await interaction.followup.send("Please provide a valid emoji!")
@@ -116,7 +115,6 @@ class EmojiInfo(commands.Cog):
try:
emoji: PartialEmoji = PartialEmoji.from_str(self, value=emoji)
string, emoji_url, = await self.get_emoji_info(emoji)
self.logger.verbose(f"Emoji:\n{string}")
except (IndexError, UnboundLocalError):
return await ctx.send("Please provide a valid emoji!")

View file

@@ -81,7 +81,6 @@ class PartialEmoji(discord.PartialEmoji):
with open(path, "r", encoding="UTF-8") as file:
emojis: dict = json.load(file)
emoji_aliases = []
emoji_group = None
for dict_name, group in emojis.items():
for k, v in group.items():
if v == value:

View file

@@ -3,7 +3,7 @@
"SeaswimmerTheFsh (seasw.)"
],
"install_msg": "Thanks for installing my repo!\n\nIf you have any issues with any of the cogs, please create an issue [here](https://coastalcommits.com/SeaswimmerTheFsh/SeaCogs/issues) or join my [Discord Server](https://discord.gg/eMUMe77Yb8 ).",
"index_name": "sea-cogs",
"name": "SeaCogs",
"short": "Various cogs for Red, by SeaswimmerTheFsh (seasw.)",
"description": "Various cogs for Red, by SeaswimmerTheFsh (seasw.)"
}

7 logger/__init__.py Normal file
View file

@@ -0,0 +1,7 @@
from redbot.core.bot import Red
from .logger import Logger
async def setup(bot: Red) -> None:
await bot.add_cog(Logger(bot))

106 logger/config.py Normal file
View file

@@ -0,0 +1,106 @@
from redbot.core import Config
config: Config = Config.get_conf(None, identifier=34236413658947743, cog_name="Logger", force_registration=True)
def register_config():
config.register_guild(
guild_update = False,
guild_update_channel = None,
channel_update = False,
channel_update_channel = None,
channel_delete = False,
channel_delete_channel = None,
overwrite_create = False,
overwrite_create_channel = None,
overwrite_update = False,
overwrite_update_channel = None,
overwrite_delete = False,
overwrite_delete_channel = None,
kick = False,
kick_channel = None,
member_prune = False,
member_prune_channel = None,
ban = False,
ban_channel = None,
unban = False,
unban_channel = None,
member_update = False,
member_update_channel = None,
member_role_update = False,
member_role_update_channel = None,
member_move = False,
member_move_channel = None,
member_disconnect = False,
member_disconnect_channel = None,
bot_add = False,
bot_add_channel = None,
role_create = False,
role_create_channel = None,
role_update = False,
role_update_channel = None,
role_delete = False,
role_delete_channel = None,
invite_create = False,
invite_create_channel = None,
invite_delete = False,
invite_delete_channel = None,
webhook_create = False,
webhook_create_channel = None,
webhook_update = False,
webhook_update_channel = None,
webhook_delete = False,
webhook_delete_channel = None,
emoji_create = False,
emoji_create_channel = None,
emoji_update = False,
emoji_update_channel = None,
emoji_delete = False,
emoji_delete_channel = None,
message_delete = False,
message_delete_channel = 967981200549494804,
message_edit = False,
message_edit_channel = 967981200549494804,
message_pin = False,
message_pin_channel = None,
message_unpin = False,
message_unpin_channel = None,
message_ignored_channels = [],
integration_create = False,
integration_create_channel = None,
integration_update = False,
integration_update_channel = None,
integration_delete = False,
integration_delete_channel = None,
stage_instance_create = False,
stage_instance_create_channel = None,
stage_instance_update = False,
stage_instance_update_channel = None,
stage_instance_delete = False,
stage_instance_delete_channel = None,
sticker_create = False,
sticker_create_channel = None,
sticker_update = False,
sticker_update_channel = None,
sticker_delete = False,
sticker_delete_channel = None,
scheduled_event_create = False,
scheduled_event_create_channel = None,
scheduled_event_update = False,
scheduled_event_update_channel = None,
scheduled_event_delete = False,
scheduled_event_delete_channel = None,
thread_create = False,
thread_create_channel = None,
thread_update = False,
thread_update_channel = None,
thread_delete = False,
thread_delete_channel = None,
app_command_permission_update = False,
app_command_permission_update_channel = None,
automod_rule_create = False,
automod_rule_create_channel = None,
automod_rule_update = False,
automod_rule_update_channel = None,
automod_rule_delete = False,
automod_rule_delete_channel = None,
)
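The listener added in logger/logger.py below only reads message_delete_channel and message_ignored_channels directly. A minimal sketch of the general toggle/channel read pattern this registration implies, assuming Red's standard Config accessors (Group.get_attr returning an awaitable Value); the helper name and event-string convention are illustrative, not part of this branch:

import discord
from redbot.core.bot import Red

from .config import config


async def get_log_channel(bot: Red, guild: discord.Guild, event: str) -> discord.abc.GuildChannel | None:
    # Illustrative only: looks up the "<event>" toggle and "<event>_channel" keys
    # registered above, e.g. event = "message_delete".
    if not await config.guild(guild).get_attr(event)():
        return None
    channel_id = await config.guild(guild).get_attr(f"{event}_channel")()
    return bot.get_channel(channel_id) if channel_id else None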

17 logger/info.json Normal file
View file

@@ -0,0 +1,17 @@
{
"author" : ["SeaswimmerTheFsh (seasw.)"],
"install_msg" : "Thank you for installing Logger!",
"name" : "Logger",
"short" : "Log events configurably.",
"description" : "Logger logs events to a channel of your choice. You can configure which events to log and which channel to log them to.",
"end_user_data_statement" : "This cog does not store end user data.",
"hidden": false,
"disabled": false,
"min_bot_version": "3.5.0",
"min_python_version": [3, 10, 0],
"tags": [
"utility",
"moderation",
"logging"
]
}

48 logger/logger.py Normal file
View file

@@ -0,0 +1,48 @@
from datetime import UTC, datetime
import discord
from redbot.core import commands
from redbot.core.bot import Red
from redbot.core.utils.chat_formatting import bold
from .config import config, register_config
class Logger(commands.Cog):
def __init__(self, bot: Red) -> None:
self.bot: Red = bot
register_config()
@commands.Cog.listener()
async def on_raw_message_delete(self, payload: discord.RawMessageDeleteEvent) -> None:
if payload.guild_id:
guild = self.bot.get_guild(payload.guild_id)
if guild is None:
return
else:
return
if await self.bot.cog_disabled_in_guild(self, guild):
return
if payload.channel_id in await config.guild(guild).message_ignored_channels():
return
if payload.cached_message:
if payload.cached_message.author.bot:
return
content = f">>> {payload.cached_message.content}"
author = payload.cached_message.author
c = await config.guild(guild).message_delete_channel()
if c:
channel = self.bot.get_channel(c)
if channel:
embed = discord.Embed(color=discord.Color.from_str(value="#ff470f"), timestamp=datetime.now(tz=UTC))
embed.set_author(name=f"{author.name}", icon_url=author.display_avatar.url)
embed.description = bold(text=f"Message sent by {author.mention} deleted in {payload.cached_message.channel.mention}\n", escape_formatting=False) + content
embed.set_footer(text=f"Author: {author.id} | Message ID: {payload.message_id}", icon_url=guild.icon.url)
await channel.send(embed=embed)
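Nothing in this branch writes these settings yet. A hedged sketch of how a setter could persist the toggle and destination used by the listener above, assuming Red's Value.set; the command name and checks are hypothetical:

# Hypothetical command; it would live on the Logger cog alongside the listener above.
@commands.guild_only()
@commands.admin_or_permissions(manage_guild=True)
@commands.command(name="setdeletelog")
async def set_delete_log(self, ctx: commands.Context, channel: discord.TextChannel) -> None:
    await config.guild(ctx.guild).message_delete.set(True)
    await config.guild(ctx.guild).message_delete_channel.set(channel.id)
    await ctx.send(f"Deleted messages will now be logged in {channel.mention}.")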

30 logger/menu.py Normal file
View file

@@ -0,0 +1,30 @@
from discord import Embed, Message, ui
from redbot.core import commands
from .config import config
async def get_config(ctx: commands.Context) -> dict:
return dict(sorted(await config.guild(ctx.guild).all().items()))
async def get_embed(ctx: commands.Context, type: str = None):
conf = await get_config(ctx)
if not type or type not in conf.keys():
embed = Embed(
title="Logger Configuration - Message Delete",
description="Please select a configuration option below.",
color=await ctx.embed_color(),
)
class ConfigMenu(ui.View):
def __init__(self, ctx: commands.Context, message: Message, type: str = None, timeout: int = None):
super().__init__()
self.ctx = ctx
self.message = message
self.type = type
self.timeout = timeout
async def on_timeout(self):
await self.message.edit(view=None)
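get_embed and ConfigMenu are not wired to any command in this diff; a hypothetical usage sketch, assuming get_embed is eventually made to return the Embed it builds:

# Hypothetical wiring, e.g. from a [p]logger settings command; not part of this branch.
async def open_config_menu(ctx: commands.Context) -> None:
    embed = await get_embed(ctx)  # assumes get_embed returns its Embed
    message = await ctx.send(embed=embed)
    view = ConfigMenu(ctx, message, timeout=180)
    await message.edit(view=view)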

49 poetry.lock generated
View file

@@ -228,27 +228,6 @@ files = [
[package.extras]
dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
[[package]]
name = "beautifulsoup4"
version = "4.12.3"
description = "Screen-scraping library"
optional = false
python-versions = ">=3.6.0"
files = [
{file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"},
{file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"},
]
[package.dependencies]
soupsieve = ">1.2"
[package.extras]
cchardet = ["cchardet"]
chardet = ["chardet"]
charset-normalizer = ["charset-normalizer"]
html5lib = ["html5lib"]
lxml = ["lxml"]
[[package]]
name = "brotli"
version = "1.1.0"
@@ -911,21 +890,6 @@ profiling = ["gprof2dot"]
rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
[[package]]
name = "markdownify"
version = "0.12.1"
description = "Convert HTML to markdown."
optional = false
python-versions = "*"
files = [
{file = "markdownify-0.12.1-py3-none-any.whl", hash = "sha256:a3805abd8166dbb7b27783c5599d91f54f10d79894b2621404d85b333c7ce561"},
{file = "markdownify-0.12.1.tar.gz", hash = "sha256:1fb08c618b30e0ee7a31a39b998f44a18fb28ab254f55f4af06b6d35a2179e27"},
]
[package.dependencies]
beautifulsoup4 = ">=4.9,<5"
six = ">=1.15,<2"
[[package]]
name = "markupsafe"
version = "2.1.5"
@@ -2147,17 +2111,6 @@ files = [
{file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"},
]
[[package]]
name = "soupsieve"
version = "2.5"
description = "A modern CSS selector implementation for Beautiful Soup."
optional = false
python-versions = ">=3.8"
files = [
{file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
{file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
]
[[package]]
name = "tinycss2"
version = "1.2.1"
@@ -2498,4 +2451,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = ">=3.11,<3.12"
content-hash = "229d7fd39618cf708f3cd5409dde2e6e25b822e4f936e14b3ade9800bf00daab"
content-hash = "0ac382e0399d9c23c5f89a0ffeb3aae056dc8b28e864b22f815c0e3eb34175bd"

View file

@@ -15,8 +15,6 @@ websockets = "^12.0"
pillow = "^10.3.0"
numpy = "^1.26.4"
colorthief = "^0.2.1"
beautifulsoup4 = "^4.12.3"
markdownify = "^0.12.1"
[tool.poetry.group.dev]
optional = true

View file

@@ -1,5 +0,0 @@
from .seautils import SeaUtils
async def setup(bot):
await bot.add_cog(SeaUtils(bot))

View file

@@ -1,13 +0,0 @@
{
"author" : ["SeaswimmerTheFsh (seasw.)"],
"install_msg" : "Thank you for installing SeaUtils!\nYou can find the source code of this cog [here](https://coastalcommits.com/SeaswimmerTheFsh/SeaCogs).",
"name" : "SeaUtils",
"short" : "A collection of useful utilities.",
"description" : "A collection of useful utilities.",
"end_user_data_statement" : "This cog does not store end user data.",
"hidden": true,
"disabled": false,
"min_bot_version": "3.5.0",
"min_python_version": [3, 8, 0],
"requirements": ["beautifulsoup4", "markdownify"]
}

View file

@@ -1,250 +0,0 @@
# _____ _
# / ____| (_)
# | (___ ___ __ _ _____ ___ _ __ ___ _ __ ___ ___ _ __
# \___ \ / _ \/ _` / __\ \ /\ / / | '_ ` _ \| '_ ` _ \ / _ \ '__|
# ____) | __/ (_| \__ \\ V V /| | | | | | | | | | | | __/ |
# |_____/ \___|\__,_|___/ \_/\_/ |_|_| |_| |_|_| |_| |_|\___|_|
import asyncio
import inspect
import operator
import re
from asyncio.subprocess import Process
from functools import partial, partialmethod
from typing import Any
import aiohttp
import yaml
from bs4 import BeautifulSoup
from discord import Color, Embed, app_commands
from discord.utils import CachedSlotProperty, cached_property
from markdownify import MarkdownConverter
from redbot.core import commands
from redbot.core.bot import Red
from redbot.core.dev_commands import cleanup_code
from redbot.core.utils import chat_formatting as cf
from redbot.core.utils.views import SimpleMenu
def md(soup: BeautifulSoup, **options) -> Any | str:
return MarkdownConverter(**options).convert_soup(soup=soup)
def format_rfc_text(text: str, number: int) -> str:
one: str = re.sub(r"\(\.\/rfc(\d+)", r"(https://www.rfc-editor.org/rfc/rfc\1.html", text)
two: str = re.sub(r"\((#(?:section|page)-\d+(?:.\d+)?)\)", f"(https://www.rfc-editor.org/rfc/rfc{number}.html\1)", one)
three: str = re.sub(r"\n{3,}", "\n\n", two)
return three
class SeaUtils(commands.Cog):
"""A collection of random utilities."""
__author__ = ["SeaswimmerTheFsh"]
__version__ = "1.0.0"
def __init__(self, bot: Red) -> None:
self.bot = bot
def format_help_for_context(self, ctx: commands.Context) -> str:
pre_processed = super().format_help_for_context(ctx=ctx) or ""
n = "\n" if "\n\n" not in pre_processed else ""
text = [
f"{pre_processed}{n}",
f"Cog Version: **{self.__version__}**",
f"Author: {cf.humanize_list(items=self.__author__)}"
]
return "\n".join(text)
def format_src(self, obj: Any) -> str:
"""A large portion of this code is repurposed from Zephyrkul's RTFS cog.
https://github.com/Zephyrkul/FluffyCogs/blob/master/rtfs/rtfs.py"""
obj = inspect.unwrap(func=obj)
src: Any = getattr(obj, "__func__", obj)
if isinstance(obj, (commands.Command, app_commands.Command)):
src = obj.callback
elif isinstance(obj, (partial, partialmethod)):
src = obj.func
elif isinstance(obj, property):
src = obj.fget
elif isinstance(obj, (cached_property, CachedSlotProperty)):
src = obj.function
return inspect.getsource(object=src)
@commands.command(aliases=["source", "src", "code", "showsource"])
@commands.is_owner()
async def showcode(self, ctx: commands.Context, *, object: str) -> None: # pylint: disable=redefined-builtin
"""Show the code for a particular object."""
try:
if object.startswith("/") and (obj := ctx.bot.tree.get_command(object[1:])):
text = self.format_src(obj)
elif obj := ctx.bot.get_cog(object):
text = self.format_src(type(obj))
elif obj := ctx.bot.get_command(object):
text = self.format_src(obj)
else:
raise AttributeError
temp_content = cf.pagify(
text=cleanup_code(text),
escape_mass_mentions=True,
page_length = 1977
)
content = []
max_i = operator.length_hint(temp_content)
i = 1
for page in temp_content:
content.append(f"**Page {i}/{max_i}**\n{cf.box(page, lang='py')}")
i += 1
await SimpleMenu(pages=content, disable_after_timeout=True, timeout=180).start(ctx)
except (OSError, AttributeError, UnboundLocalError):
if ctx.embed_requested():
embed = Embed(title="Object not found!", color=await ctx.embed_color())
await ctx.send(embed=embed, reference=ctx.message.to_reference(fail_if_not_exists=False))
else:
await ctx.send(content="Object not found!", reference=ctx.message.to_reference(fail_if_not_exists=False))
@commands.command(name='dig', aliases=['dnslookup', 'nslookup'])
@commands.is_owner()
async def dig(self, ctx: commands.Context, name: str, record_type: str | None = None, server: str | None = None, port: int = 53) -> None:
"""Retrieve DNS information for a domain.
Uses `dig` to perform a DNS query. Will fall back to `nslookup` if `dig` is not installed on the system.
`nslookup` does not provide as much information as `dig`, so only the `name` parameter will be used if `nslookup` is used.
Will return the A, AAAA, and CNAME records for a domain by default. You can specify a different record type with the `type` parameter."""
command_opts: list[str | int] = ['dig']
query_types: list[str] = [record_type] if record_type else ['A', 'AAAA', 'CNAME']
if server:
command_opts.extend(['@', server])
for query_type in query_types:
command_opts.extend([name, query_type])
command_opts.extend(['-p', str(port), '+yaml'])
try:
process: Process = await asyncio.create_subprocess_exec(*command_opts, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
stdout, stderr = await process.communicate()
if stderr:
await ctx.maybe_send_embed(message="An error was encountered!\n" + cf.box(text=stderr.decode()))
else:
data = yaml.safe_load(stdout.decode())
message_data: dict = data[0]['message']
response_data: dict = message_data['response_message_data']
if ctx.embed_requested():
embed = Embed(
title="DNS Query Result",
color=await ctx.embed_color(),
timestamp=message_data['response_time']
)
embed.add_field(name="Response Address", value=message_data['response_address'], inline=True)
embed.add_field(name="Response Port", value=message_data['response_port'], inline=True)
embed.add_field(name="Query Address", value=message_data['query_address'], inline=True)
embed.add_field(name="Query Port", value=message_data['query_port'], inline=True)
embed.add_field(name="Status", value=response_data['status'], inline=True)
embed.add_field(name="Flags", value=response_data['flags'], inline=True)
if response_data.get('status') != 'NOERROR':
embed.colour = Color.red()
embed.description = cf.error("Dig query did not return `NOERROR` status.")
questions = []
answers = []
authorities = []
for m in data:
response = m['message']['response_message_data']
if 'QUESTION_SECTION' in response:
for question in response['QUESTION_SECTION']:
if question not in questions:
questions.append(question)
if 'ANSWER_SECTION' in response:
for answer in response['ANSWER_SECTION']:
if answer not in answers:
answers.append(answer)
if 'AUTHORITY_SECTION' in response:
for authority in response['AUTHORITY_SECTION']:
if authority not in authorities:
authorities.append(authority)
if questions:
question_section = "\n".join(questions)
embed.add_field(name="Question Section", value=f"{cf.box(text=question_section, lang='prolog')}", inline=False)
if answers:
answer_section = "\n".join(answers)
if len(answer_section) > 1024:
embed.description = cf.warning("Answer section is too long to fit within embed field, falling back to description.") + cf.box(answer_section)
else:
embed.add_field(name="Answer Section", value=f"{cf.box(text=answer_section, lang='prolog')}", inline=False)
if authorities:
authority_section = "\n".join(authorities)
embed.add_field(name="Authority Section", value=f"{cf.box(text=authority_section, lang='prolog')}", inline=False)
await ctx.send(embed=embed)
else:
await ctx.send(content=cf.box(text=stdout, lang='yaml'))
except (FileNotFoundError):
try:
ns_process = await asyncio.create_subprocess_exec('nslookup', name, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
ns_stdout, ns_stderr = await ns_process.communicate()
if ns_stderr:
await ctx.maybe_send_embed(message="An error was encountered!\n" + cf.box(text=ns_stderr.decode()))
else:
warning = cf.warning("`dig` is not installed! Defaulting to `nslookup`.\nThis command provides more information when `dig` is installed on the system.\n")
if await ctx.embed_requested():
embed = Embed(
title="DNS Query Result",
color=await ctx.embed_color(),
timestamp=ctx.message.created_at
)
embed.description = warning + cf.box(text=ns_stdout.decode())
await ctx.send(embed=embed)
else:
await ctx.send(content = warning + cf.box(text=ns_stdout.decode()))
except (FileNotFoundError):
await ctx.maybe_send_embed(message=cf.error("Neither `dig` nor `nslookup` are installed on the system. Unable to resolve DNS query."))
@commands.command()
async def rfc(self, ctx: commands.Context, number: int) -> None:
"""Retrieve the text of an RFC document.
This command uses the [RFC Editor website](https://www.rfc-editor.org/) to fetch the text of an RFC document.
A [Request for Comments (RFC)](https://en.wikipedia.org/wiki/Request_for_Comments) is a publication in a series from the principal technical development and standards-setting bodies for the [Internet](https://en.wikipedia.org/wiki/Internet), most prominently the [Internet Engineering Task Force](https://en.wikipedia.org/wiki/Internet_Engineering_Task_Force). An RFC is authored by individuals or groups of engineers and [computer scientists](https://en.wikipedia.org/wiki/Computer_scientist) in the form of a [memorandum](https://en.wikipedia.org/wiki/Memorandum) describing methods, behaviors, research, or innovations applicable to the working of the Internet and Internet-connected systems. It is submitted either for [peer review](https://en.wikipedia.org/wiki/Peer_review) or to convey new concepts, information, or, occasionally, engineering humor.""" # noqa: E501
url = f"https://www.rfc-editor.org/rfc/rfc{number}.html"
datatracker_url = f"https://datatracker.ietf.org/doc/rfc{number}"
async with aiohttp.ClientSession() as session:
async with session.get(url=url) as response:
if response.status == 200:
html = await response.text()
soup = BeautifulSoup(html, 'html.parser')
pre_tags = soup.find_all('pre')
content: list[Embed | str] = []
for pre_tag in pre_tags:
text = format_rfc_text(md(pre_tag), number)
if len(text) > 4096:
pagified_text = cf.pagify(text, delims=["\n\n"], page_length=4096)
for page in pagified_text:
if await ctx.embed_requested():
embed = Embed(
title=f"RFC Document {number}",
url=datatracker_url,
description=page,
color=await ctx.embed_color()
)
content.append(embed)
else:
content.append(page)
else:
if await ctx.embed_requested():
embed = Embed(
title=f"RFC Document {number}",
url=datatracker_url,
description=text,
color=await ctx.embed_color()
)
content.append(embed)
else:
content.append(text)
if await ctx.embed_requested():
for embed in content:
embed.set_footer(text=f"Page {content.index(embed) + 1}/{len(content)}")
await SimpleMenu(pages=content, disable_after_timeout=True, timeout=300).start(ctx)
else:
await ctx.maybe_send_embed(content=cf.error(f"An error occurred while fetching RFC {number}. Status code: {response.status}."))