|
|
|
@@ -1,3 +1,4 @@
|
|
|
|
|
import sys
|
|
|
|
|
import argparse
|
|
|
|
|
from pathlib import Path
|
|
|
|
|
from itertools import zip_longest
|
|
|
|
@@ -73,10 +74,34 @@ def create_message(chat: ChatDB, args: argparse.Namespace) -> Message:
|
|
|
|
|
tags=args.output_tags, # FIXME
|
|
|
|
|
ai=args.AI,
|
|
|
|
|
model=args.model)
|
|
|
|
|
chat.cache_add([message])
|
|
|
|
|
# only write the message (as a backup), don't add it
|
|
|
|
|
# to the current chat history
|
|
|
|
|
chat.cache_write([message])
|
|
|
|
|
return message
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def make_request(ai: AI, chat: ChatDB, message: Message, args: argparse.Namespace) -> None:
    """
    Make an AI request with the given AI, chat history, message and CLI arguments.

    Print all answers.

    :param ai: the AI backend used to perform the request
    :param chat: the chat database; answers are written to its cache only,
                 they are NOT added to the current chat history
    :param message: the message (question) to send
    :param args: parsed CLI arguments; 'num_answers' and 'output_tags'
                 are forwarded to the AI request
    """
    # Show the current context before requesting: the AI configuration,
    # the (unpaged) chat history and the new message itself.
    ai.print()
    chat.print(paged=False)
    print(message.to_str() + '\n')

    response: AIResponse = ai.request(message,
                                      chat,
                                      args.num_answers,
                                      args.output_tags)

    # write all answers to the cache, don't add them to the chat history
    chat.cache_write(response.messages)

    # Print each answer with a 1-based header.
    for idx, msg in enumerate(response.messages, start=1):
        print(f"=== ANSWER {idx} ===")
        print(msg.answer)

    # Token usage is optional; only print it when the backend reported it.
    if response.tokens:
        print("===============")
        print(response.tokens)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def question_cmd(args: argparse.Namespace, config: Config) -> None:
|
|
|
|
|
"""
|
|
|
|
|
Handler for the 'question' command.
|
|
|
|
@@ -95,28 +120,29 @@ def question_cmd(args: argparse.Namespace, config: Config) -> None:
|
|
|
|
|
|
|
|
|
|
# create the correct AI instance
|
|
|
|
|
ai: AI = create_ai(args, config)
|
|
|
|
|
|
|
|
|
|
# === ASK ===
|
|
|
|
|
if args.ask:
|
|
|
|
|
ai.print()
|
|
|
|
|
chat.print(paged=False)
|
|
|
|
|
response: AIResponse = ai.request(message,
|
|
|
|
|
chat,
|
|
|
|
|
args.num_answers, # FIXME
|
|
|
|
|
args.output_tags) # FIXME
|
|
|
|
|
chat.msg_update([response.messages[0]])
|
|
|
|
|
chat.cache_add(response.messages[1:])
|
|
|
|
|
for idx, msg in enumerate(response.messages):
|
|
|
|
|
print(f"=== ANSWER {idx+1} ===")
|
|
|
|
|
print(msg.answer)
|
|
|
|
|
if response.tokens:
|
|
|
|
|
print("===============")
|
|
|
|
|
print(response.tokens)
|
|
|
|
|
make_request(ai, chat, message, args)
|
|
|
|
|
# === REPEAT ===
|
|
|
|
|
elif args.repeat is not None:
|
|
|
|
|
lmessage = chat.msg_latest()
|
|
|
|
|
assert lmessage
|
|
|
|
|
# TODO: repeat either the last question or the
|
|
|
|
|
# one(s) given in 'args.repeat' (overwrite
|
|
|
|
|
# existing ones if 'args.overwrite' is True)
|
|
|
|
|
pass
|
|
|
|
|
lmessage = chat.msg_latest(source='cache')
|
|
|
|
|
if lmessage is None:
|
|
|
|
|
print("No message found to repeat!")
|
|
|
|
|
sys.exit(1)
|
|
|
|
|
else:
|
|
|
|
|
print(f"Repeating message '{lmessage.msg_id()}':")
|
|
|
|
|
# overwrite the latest message if requested or empty
|
|
|
|
|
if lmessage.answer is None or args.overwrite is True:
|
|
|
|
|
lmessage.clear_answer()
|
|
|
|
|
make_request(ai, chat, lmessage, args)
|
|
|
|
|
# otherwise create a new one
|
|
|
|
|
else:
|
|
|
|
|
args.ask = [lmessage.question]
|
|
|
|
|
message = create_message(chat, args)
|
|
|
|
|
make_request(ai, chat, message, args)
|
|
|
|
|
|
|
|
|
|
# === PROCESS ===
|
|
|
|
|
elif args.process is not None:
|
|
|
|
|
# TODO: process either all questions without an
|
|
|
|
|
# answer or the one(s) given in 'args.process'
|
|
|
|
|