feat: bring down target word count per episode

After tokenization, most episodes were exceeding the model's 2048-token context window, so bring the target down a little.
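For reference, a minimal sketch of the token-budget reasoning behind this change. The 1.3 tokens-per-word ratio, the 256-token prompt overhead, and the fits_in_context helper are illustrative assumptions, not values taken from the codebase; only the 2048-token window comes from the commit message.

# Rough token-budget check for the episode target (illustrative only).
TOKENS_PER_WORD = 1.3          # assumed average ratio, not measured for this tokenizer
CONTEXT_WINDOW = 2048          # model input limit cited in the commit message
PROMPT_OVERHEAD_TOKENS = 256   # assumed space reserved for prompt trickery + user message

def fits_in_context(target_word_count: int) -> bool:
    """Return True if an episode of this word count likely fits the window."""
    episode_tokens = int(target_word_count * TOKENS_PER_WORD)
    return episode_tokens + PROMPT_OVERHEAD_TOKENS <= CONTEXT_WINDOW

# 1536 words ~= 1996 tokens plus overhead: over budget.
# 1024 words ~= 1331 tokens plus overhead: fits.
print(fits_in_context(1536), fits_in_context(1024))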
11b 2022-12-26 17:31:28 -03:00
parent bcbf0910b4
commit 5dbde00d27
1 changed file with 1 addition and 1 deletion


@@ -10,7 +10,7 @@ class PromptConstants:
     # Global target word count. The word count is chosen in such a way that we
     # can fit all the required prompt trickery into the model's input, but still
     # leave enough space for the user's input message and the inference result.
-    TARGET_WORD_COUNT_PER_EPISODE = 1536
+    TARGET_WORD_COUNT_PER_EPISODE = 1024

     @staticmethod
     def pdm_prefix_for(name: str) -> str: