Skip to content

Commit

Permalink
feat(Agent): ignore auto-generated temporary extension code
Browse files Browse the repository at this point in the history
Add util/extensions/ (auto-generated temporary code) to .gitignore and remove commented-out legacy implement/PassAssertionFilter code from dbgpt/util/code_utils.py
  • Loading branch information
yhjun1026 committed Dec 26, 2023
1 parent d4da6cd commit baec12e
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 64 deletions.
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ __pycache__/
*.so

message/

util/extensions/
.env*
.vscode
.idea
Expand Down
63 changes: 0 additions & 63 deletions dbgpt/util/code_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -464,66 +464,3 @@ def eval_function_completions(

_FUNC_COMPLETION_PROMPT = "# Python 3{definition}"
_FUNC_COMPLETION_STOP = ["\nclass", "\ndef", "\nif", "\nprint"]
# _IMPLEMENT_CONFIGS = [
# {"model": FAST_MODEL, "prompt": _FUNC_COMPLETION_PROMPT, "temperature": 0, "cache_seed": 0},
# {"model": FAST_MODEL, "prompt": _FUNC_COMPLETION_PROMPT, "stop": _FUNC_COMPLETION_STOP, "n": 7, "cache_seed": 0},
# {"model": DEFAULT_MODEL, "prompt": _FUNC_COMPLETION_PROMPT, "temperature": 0, "cache_seed": 1},
# {"model": DEFAULT_MODEL, "prompt": _FUNC_COMPLETION_PROMPT, "stop": _FUNC_COMPLETION_STOP, "n": 2, "cache_seed": 2},
# {"model": DEFAULT_MODEL, "prompt": _FUNC_COMPLETION_PROMPT, "stop": _FUNC_COMPLETION_STOP, "n": 1, "cache_seed": 2},
# ]


# class PassAssertionFilter:
# def __init__(self, assertions):
# self._assertions = assertions
# self.cost = 0
# self.metrics = self.responses = None
#
# def pass_assertions(self, context, response, **_):
# """(openai<1) Check if the response passes the assertions."""
# responses = oai.Completion.extract_text(response)
# metrics = eval_function_completions(responses, context["definition"], assertions=self._assertions)
# self._assertions = metrics["assertions"]
# self.cost += metrics["gen_cost"]
# self.metrics = metrics
# self.responses = responses
# return metrics["succeed_assertions"]


# def implement(
# definition: str,
# configs: Optional[List[Dict]] = None,
# assertions: Optional[Union[str, Callable[[str], Tuple[str, float]]]] = generate_assertions,
# ) -> Tuple[str, float]:
# """(openai<1) Implement a function from a definition.
#
# Args:
# definition (str): The function definition, including the signature and docstr.
# configs (list): The list of configurations for completion.
# assertions (Optional, str or Callable): The assertion code which serves as a filter of the responses, or an assertion generator.
#
# Returns:
# str: The implementation.
# float: The cost of the implementation.
# int: The index of the configuration which generates the implementation.
# """
# cost = 0
# configs = configs or _IMPLEMENT_CONFIGS
# if len(configs) > 1 and callable(assertions):
# assertions, cost = assertions(definition)
# assertion_filter = PassAssertionFilter(assertions)
# response = oai.Completion.create(
# {"definition": definition}, config_list=configs, filter_func=assertion_filter.pass_assertions
# )
# cost += assertion_filter.cost + response["cost"]
# return assertion_filter.responses[assertion_filter.metrics["index_selected"]], cost, response["config_id"]

# for i, config in enumerate(configs):
# response = oai.Completion.create({"definition": definition}, **config)
# cost += oai.Completion.cost(response)
# responses = oai.Completion.extract_text(response)
# metrics = eval_function_completions(responses, definition, assertions=assertions)
# assertions = metrics["assertions"]
# cost += metrics["gen_cost"]
# if metrics["succeed_assertions"] or i == len(configs) - 1:
# return responses[metrics["index_selected"]], cost, i

0 comments on commit baec12e

Please sign in to comment.