Merge pull request #352 from Pythagora-io/development
Development
LeonOstrez authored Dec 8, 2023
2 parents 14e0eea + c0e68ce commit 371d7fe
Showing 3 changed files with 7 additions and 6 deletions.
4 changes: 2 additions & 2 deletions pilot/helpers/agents/Developer.py
@@ -208,7 +208,7 @@ def step_human_intervention(self, convo, step: dict):
    if (self.project.ipc_client_instance is None or self.project.ipc_client_instance.client is None):
        human_intervention_description += color_yellow_bold('\n\nIf you want to run the app, just type "r" and press ENTER and that will run `' + self.run_command + '`')
    else:
-       print(self.run_command, type="run_command")
+       print(self.run_command, type='run_command')

    response = self.project.ask_for_human_intervention('I need human intervention:',
                                                       human_intervention_description,
@@ -447,7 +447,7 @@ def continue_development(self, iteration_convo, last_branch_name, continue_descr
    if self.project.ipc_client_instance is None or self.project.ipc_client_instance.client is None:
        user_description += color_yellow_bold('\n\nIf you want to run the app, just type "r" and press ENTER and that will run `' + self.run_command + '`')
    else:
-       print(self.run_command, type="run_command")
+       print(self.run_command, type='run_command')

    # continue_description = ''
    # TODO: Wait for a specific string in the output or timeout?
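Note on the two Developer.py hunks: the only change is the quoting style of the `type` argument (double quotes to single quotes). The `type=` keyword implies print is replaced by a project-specific function (the builtin print would reject it), so a connected IPC client such as the editor extension can treat tagged messages specially. The diff only shows the call sites; the helper sketched below is an assumption for illustration, not the project's actual implementation.

def tagged_print(message, type=None, ipc_client=None):
    # Hypothetical helper: with an IPC client attached, send a structured
    # message tagged with `type`; otherwise fall back to the plain console.
    if ipc_client is not None:
        ipc_client.send({'type': type or 'verbose', 'content': message})
    else:
        print(message)

# Usage mirroring the diff: tell the UI which shell command it may run.
tagged_print('npm start', type='run_command')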
3 changes: 1 addition & 2 deletions pilot/helpers/cli.py
@@ -511,8 +511,7 @@ def run_command_until_success(convo, command,
        'timeout': timeout,
        'command_id': command_id,
        'success_message': success_message,
-       'ask_before_debug': True,
-   }, user_input=cli_response, is_root_task=is_root_task)
+   },user_input=cli_response, is_root_task=is_root_task, ask_before_debug=True)
    return {'success': success, 'cli_response': cli_response}
except TooDeepRecursionError as e:
    # this is only to put appropriate message in the response after TooDeepRecursionError is raised
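Note on the cli.py hunk: `ask_before_debug` is moved out of the options dict and passed as an explicit keyword argument on the enclosing call (the callee itself sits outside the visible hunk), so the flag reaches the callee as a real parameter instead of travelling inside the payload. A rough before/after sketch with a hypothetical `debug(...)` signature, assumed purely for illustration:

def debug(convo, command_options, user_input=None, is_root_task=False, ask_before_debug=False):
    # Hypothetical callee: with the flag promoted to a keyword argument it can
    # be read directly instead of being fished out of `command_options`.
    if ask_before_debug:
        print('Would ask the user before starting a debug session.')
    return {'options': command_options, 'user_input': user_input, 'root': is_root_task}

# Before: debug(convo, {'ask_before_debug': True, ...}, user_input=..., is_root_task=...)
# After:
debug(None, {'timeout': 30, 'success_message': 'DONE'}, user_input='ok', is_root_task=True, ask_before_debug=True)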
6 changes: 4 additions & 2 deletions pilot/utils/llm_connection.py
@@ -10,7 +10,8 @@
from jsonschema import validate, ValidationError
from utils.style import color_red
from typing import List
-from const.llm import MIN_TOKENS_FOR_GPT_RESPONSE, MAX_GPT_MODEL_TOKENS
+from const.llm import MAX_GPT_MODEL_TOKENS
+from const.messages import AFFIRMATIVE_ANSWERS
from logger.logger import logger, logging
from helpers.exceptions import TokenLimitError, ApiKeyNotDefinedError
from utils.utils import fix_json, get_prompt
@@ -236,6 +237,7 @@ def wrapper(*args, **kwargs):
    logger.error(f'There was a problem with request to openai API: {err_str}')

    project = args[2]
+   print('yes/no', type='button')
    user_message = styled_text(
        project,
        'Do you want to try make the same request again? If yes, just press ENTER. Otherwise, type "no".',
@@ -247,7 +249,7 @@

    # TODO: take user's input into consideration - send to LLM?
    # https://github.com/Pythagora-io/gpt-pilot/issues/122
-   if user_message != '':
+   if user_message.lower() not in AFFIRMATIVE_ANSWERS:
        return {}
    return wrapper
return wrapper
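Note on the llm_connection.py hunks: the retry prompt previously aborted on any non-empty answer, so typing "yes" was treated the same as "no"; after this change only answers outside AFFIRMATIVE_ANSWERS return the empty dict, and the added `print('yes/no', type='button')` appears to tag the question so a connected UI can render it as buttons. A small sketch of the new check follows; the exact contents of AFFIRMATIVE_ANSWERS in const/messages.py are assumed here:

# Assumed shape of the constant; the real list lives in pilot/const/messages.py.
AFFIRMATIVE_ANSWERS = ['', 'y', 'yes', 'ok', 'okay', 'sure']

def should_retry(user_message):
    # Old check: retry only when the answer was empty (plain ENTER).
    # New check: retry for anything the project counts as affirmative.
    return user_message.lower() in AFFIRMATIVE_ANSWERS

print(should_retry(''))     # True  - ENTER still retries
print(should_retry('yes'))  # True  - now also retries
print(should_retry('no'))   # False - gives up, wrapper returns {}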
