2022-12-18 01:36:33 +01:00
|
|
|
import typing as t
|
|
|
|
|
2022-12-23 20:37:47 +01:00
|
|
|
from waifu.core.consts import PromptConstants
|
2022-12-18 01:36:33 +01:00
|
|
|
from waifu.datasets.kajiwoto import (KajiwotoDataset, generate_variants_for,
|
|
|
|
replace_special_tokens_in)
|
|
|
|
from waifu.modules import BaseModule
|
|
|
|
|
|
|
|
|
|
|
|
class KajiwotoVDM(BaseModule):
    '''A Vanilla Dialogue Module powered by the Kajiwoto dataset.'''

    def generator(self) -> t.Generator[list[str], None, None]:
        '''Yield dialogue episodes as lists of prompt-formatted turn strings.

        Each episode from the Kajiwoto dataset is flattened into alternating
        "USER_PREFIX: ..." / "BOT_TOKEN: ..." lines, special tokens are
        replaced, and one list of turns is yielded per generated variant of
        the processed episode.
        '''
        dataset = KajiwotoDataset()
        for episode in dataset:
            # Build alternating user/bot lines for this episode.
            # NOTE: was `t.List[str]`; unified on the builtin generic used by
            # the return annotation above (PEP 585, Python 3.9+).
            turns: list[str] = []
            for turn in episode:
                turns.append(
                    f"{PromptConstants.USER_PREFIX}: {turn.user_message}")
                turns.append(
                    f"{PromptConstants.BOT_TOKEN}: {turn.bot_response}")

            string = "\n".join(turns)
            processed_string = replace_special_tokens_in(string)

            # One processed episode may expand into several variants
            # (see generate_variants_for); yield one turn list per variant.
            for generated_string in generate_variants_for(processed_string):
                yield generated_string.split("\n")
|