# Generated by CodiumAI
import os

import gpt_engineer.applications.cli.main as main

from gpt_engineer.core.default.disk_execution_env import DiskExecutionEnv
from gpt_engineer.core.default.paths import ENTRYPOINT_FILE, META_DATA_REL_PATH
from tests.caching_ai import CachingAI

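# Swap the CLI's AI class for the CachingAI test double so these tests replay cached
# model responses rather than calling a live LLM.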
main.AI = CachingAI


def simplified_main(path: str, mode: str = ""):
    model = "gpt-4-1106-preview"
    lite_mode = False
    clarify_mode = False
    improve_mode = False
    self_heal_mode = False
    azure_endpoint = ""
    verbose = False
    if mode == "lite":
        lite_mode = True
    elif mode == "clarify":
        clarify_mode = True
    elif mode == "improve":
        improve_mode = True
    elif mode == "self-heal":
        self_heal_mode = True
    main.main(
        path,
        model=model,
        lite_mode=lite_mode,
        clarify_mode=clarify_mode,
        improve_mode=improve_mode,
        self_heal_mode=self_heal_mode,
        azure_endpoint=azure_endpoint,
        use_custom_preprompts=False,
        verbose=verbose,
    )


def input_generator():
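    """Answer "y" to the first mocked input() call and "n" to every call after that."""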
    yield "y"  # First response
    while True:
        yield "n"  # Subsequent responses


prompt_text = "Make a python program that writes 'hello' to a file called 'output.txt'"


class TestMain:
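    """End-to-end tests that drive gpt_engineer.applications.cli.main in each CLI mode."""
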
    #  Runs gpt-engineer with default settings and generates a project in the specified path.
    def test_default_settings_generate_project(self, tmp_path, monkeypatch):
        gen = input_generator()
        monkeypatch.setattr("builtins.input", lambda _: next(gen))
        p = tmp_path / "projects/example"
        p.mkdir(parents=True)
        (p / "prompt").write_text(prompt_text)
        simplified_main(str(p), "")
        ex_env = DiskExecutionEnv(path=p)
        ex_env.run(f"bash {ENTRYPOINT_FILE}")
        assert (p / "output.txt").exists()
        text = (p / "output.txt").read_text().strip()
        assert text == "hello"

    #  Runs gpt-engineer with improve mode and improves an existing project in the specified path.
    def test_improve_existing_project(self, tmp_path, monkeypatch):
        gen = input_generator()  # same "y" then "n" responses as the other tests
        monkeypatch.setattr("builtins.input", lambda _: next(gen))
        p = tmp_path / "projects/example"
        p.mkdir(parents=True)
        (p / "prompt").write_text(prompt_text)
        (p / "main.py").write_text("The program will be written in this file")
        meta_p = p / META_DATA_REL_PATH
        meta_p.mkdir(parents=True)
        (meta_p / "file_selection.toml").write_text(
            """
        [files."main.py"]
        selected = true
                    """
        )
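        # GPTE_TEST_MODE is assumed to keep the improve flow non-interactive during the test.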
        os.environ["GPTE_TEST_MODE"] = "True"
        simplified_main(str(p), "improve")
        DiskExecutionEnv(path=p)
        del os.environ["GPTE_TEST_MODE"]

    #  Runs gpt-engineer with lite mode and generates a project with only the main prompt.
    def test_lite_mode_generate_project(self, tmp_path, monkeypatch):
        gen = input_generator()
        monkeypatch.setattr("builtins.input", lambda _: next(gen))
        p = tmp_path / "projects/example"
        p.mkdir(parents=True)
        (p / "prompt").write_text(prompt_text)
        simplified_main(str(p), "lite")
        ex_env = DiskExecutionEnv(path=p)
        ex_env.run(f"bash {ENTRYPOINT_FILE}")
        assert (p / "output.txt").exists()
        text = (p / "output.txt").read_text().strip()
        assert text == "hello"

    #  Runs gpt-engineer with clarify mode and generates a project after discussing the specification with the AI.
    def test_clarify_mode_generate_project(self, tmp_path, monkeypatch):
        gen = input_generator()
        monkeypatch.setattr("builtins.input", lambda _: next(gen))
        p = tmp_path / "projects/example"
        p.mkdir(parents=True)
        (p / "prompt").write_text(prompt_text)
        simplified_main(str(p), "clarify")
        ex_env = DiskExecutionEnv(path=p)
        ex_env.run(f"bash {ENTRYPOINT_FILE}")
        assert (p / "output.txt").exists()
        text = (p / "output.txt").read_text().strip()
        assert text == "hello"

    #  Runs gpt-engineer with self-heal mode and generates a project, letting the AI retry and fix the code if it fails to run.
    def test_self_heal_mode_generate_project(self, tmp_path, monkeypatch):
        monkeypatch.setattr("builtins.input", lambda _: next(input_generator()))
        p = tmp_path / "projects/example"
        p.mkdir(parents=True)
        (p / "prompt").write_text(prompt_text)
        simplified_main(str(p), "self-heal")
        ex_env = DiskExecutionEnv(path=p)
        ex_env.run(f"bash {ENTRYPOINT_FILE}")
        assert (p / "output.txt").exists()
        text = (p / "output.txt").read_text().strip()
        assert text == "hello"