Compare commits

...

2 Commits

SHA1        Message                       Date
9b66061ee7  Reformat TODO comments        2023-12-09 17:35:04 +01:00
            (All checks were successful: Build Heidi Docker image / build-docker (push), Successful in 15s)
c6608e4695  Remove rocm stuff from flake  2023-12-09 17:34:54 +01:00
5 changed files with 9 additions and 29 deletions
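
The first commit is a mechanical comment reformat: every "# TODO:" marker in the Python sources becomes "# @todo". A minimal sketch of how such a rewrite could be scripted is shown below; the helper and its name are illustrative assumptions, not necessarily how this commit was produced.

import pathlib
import re

# Hypothetical helper: rewrite "# TODO:" comment markers to "# @todo" in all
# Python files below a directory. Illustrative only; not part of this diff.
TODO_PATTERN = re.compile(r"#\s*TODO:\s*")

def reformat_todos(root: str = ".") -> None:
    for path in pathlib.Path(root).rglob("*.py"):
        text = path.read_text(encoding="utf-8")
        new_text = TODO_PATTERN.sub("# @todo ", text)
        if new_text != text:
            path.write_text(new_text, encoding="utf-8")

if __name__ == "__main__":
    reformat_todos()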

bot.py (10 changed lines)
View File

@@ -11,9 +11,9 @@ from heidi_client import *
 install(show_locals=True)
-# TODO: Only post in heidi-spam channel
-# TODO: yt-dlp music support
-# TODO: Somehow upload voicelines more easily (from discord voice message?)
+# @todo Only post in heidi-spam channel
+# @todo yt-dlp music support
+# @todo Somehow upload voicelines more easily (from discord voice message?)
 # Log to file
@@ -124,7 +124,7 @@ def user_entrance_sound_autocomplete(
         for board in boards
     } # These are all sounds, organized per board
-    # TODO: Initially only suggest boards, because there are too many sounds to show them all
+    # @todo Initially only suggest boards, because there are too many sounds to show them all
     completions: List[Choice[str]] = []
     for (
         board,
@@ -224,7 +224,7 @@ async def magic_shell(interaction: Interaction, question: str) -> None:
 )
-# TODO: Allow , separated varargs, need to parse manually as slash commands don't support varargs
+# @todo Allow , separated varargs, need to parse manually as slash commands don't support varargs
 @client.tree.command(name="wähle", description="Heidi trifft die Wahl!")
 @app_commands.rename(option_a="entweder")
 @app_commands.describe(option_a="Ist es vielleicht dies?")

View File

@@ -14,27 +14,6 @@
     overlays = [ devshell.overlays.default ];
   };
-  # TODO: Originally it was nixpkgs.fetchurl but that didn't work, pkgs.fetchurl did...
-  # Determine the difference between nixpkgs and pkgs
-  # Taken from: https://github.com/gbtb/nix-stable-diffusion/blob/master/flake.nix
-  # Overlay: https://nixos.wiki/wiki/Overlays
-  # FetchURL: https://ryantm.github.io/nixpkgs/builders/fetchers/
-  torch-rocm = pkgs.hiPrio (pkgs.python310Packages.torch-bin.overrideAttrs (old: {
-    src = pkgs.fetchurl {
-      name = "torch-1.12.1+rocm5.1.1-cp310-cp310-linux_x86_64.whl";
-      url = "https://download.pytorch.org/whl/rocm5.1.1/torch-1.12.1%2Brocm5.1.1-cp310-cp310-linux_x86_64.whl";
-      hash = "sha256-kNShDx88BZjRQhWgnsaJAT8hXnStVMU1ugPNMEJcgnA=";
-    };
-  }));
-  torchvision-rocm = pkgs.hiPrio (pkgs.python310Packages.torchvision-bin.overrideAttrs (old: {
-    src = pkgs.fetchurl {
-      name = "torchvision-0.13.1+rocm5.1.1-cp310-cp310-linux_x86_64.whl";
-      url = "https://download.pytorch.org/whl/rocm5.1.1/torchvision-0.13.1%2Brocm5.1.1-cp310-cp310-linux_x86_64.whl";
-      hash = "sha256-mYk4+XNXU6rjpgWfKUDq+5fH/HNPQ5wkEtAgJUDN/Jg=";
-    };
-  }));
   myPython = pkgs.python311.withPackages (p: with p; [
     # Basic
     rich

View File

@@ -8,6 +8,7 @@ from heidi_constants import *
 print("Debug: Importing heidi_helpers.py")
+# @todo Normalize volume when playing
 async def play_voice_line(
     interaction: Union[Interaction, None],
     voice_channel: VoiceChannel,

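The @todo added above hints at normalizing playback volume. One simple way to get a uniform playback level with discord.py is to wrap the audio source in a PCMVolumeTransformer; the sketch below is an assumption about how that could look, not code from this repository, and it applies a fixed gain rather than measured loudness normalization.

import discord

# Hypothetical sketch for the "@todo Normalize volume when playing" idea:
# wrap the FFmpeg source in a PCMVolumeTransformer so every voice line plays
# at the same gain. voice_client, path and volume are illustrative names.
def play_with_uniform_volume(voice_client: discord.VoiceClient, path: str, volume: float = 0.5) -> None:
    source = discord.PCMVolumeTransformer(discord.FFmpegPCMAudio(path), volume=volume)
    voice_client.play(source)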
View File

@@ -178,7 +178,7 @@ class LSTMTextGenerator(textgen):
     # Interface shit
-    # TODO: Also save/load generated prefixes
+    # @todo Also save/load generated prefixes
     def load(self):
         print(f"Loading \"{self.filename}\" LSTM model with {len(self.charbase)} characters from file.")

View File

@@ -8,8 +8,8 @@ from rich.traceback import install
 install()
 # NOTE: This is word based, not character based
-# TODO: Serialize and save/load model (don't train on the server)
-# TODO: Maybe extract sentence beginnings and use them as starters?
+# @todo Serialize and save/load model (don't train on the server)
+# @todo Maybe extract sentence beginnings and use them as starters?
 class MarkovTextGenerator(textgen):
     # The greater the order (prefix length), the lesser the variation in generation, but the better the sentences (generally).
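
The trailing comment in this hunk states the core trade-off of the generator: a higher Markov order (longer prefix) keeps the output close to the training text, while a lower order gives more variation at the cost of coherence. A minimal word-based Markov chain sketch, written independently of this repository's MarkovTextGenerator, to illustrate what the order controls:

import random
from collections import defaultdict

# Illustrative word-based Markov chain; names and structure are assumptions,
# not the repository's implementation.
def build_chain(text, order=2):
    words = text.split()
    chain = defaultdict(list)
    for i in range(len(words) - order):
        prefix = tuple(words[i:i + order])
        chain[prefix].append(words[i + order])
    return chain

def generate(chain, order=2, length=30):
    prefix = random.choice(list(chain))
    out = list(prefix)
    for _ in range(length):
        candidates = chain.get(tuple(out[-order:]))
        if not candidates:
            break
        out.append(random.choice(candidates))
    return " ".join(out)

With order=1 almost any word can follow any other, so output varies wildly; with order=3 or more, most prefixes have a single continuation and the generator largely replays sentences from the source text.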