Commit graph

1473 commits

Author SHA1 Message Date
Paul Gauthier
a3075d2da0 Added networkx==3.1 2023-06-05 08:27:39 -07:00
Paul Gauthier
140b03314e Merge branch 'main' into call-graph 2023-06-04 15:39:42 -07:00
Paul Gauthier
6398e5ef18 Support worktrees 2023-06-04 15:37:20 -07:00
Paul Gauthier
6b62756703 Merge branch 'main' into call-graph 2023-06-04 15:03:18 -07:00
Paul Gauthier
c0bf25d05b undo trailing newline "fix" 2023-06-04 15:03:13 -07:00
Paul Gauthier
53e8c65745 Return files without any tags at all at the tail of the ranked_tags list 2023-06-04 14:53:14 -07:00
Paul Gauthier
dfcf813ad6 include individual files in the ranked tags list, following specific ranked idents 2023-06-04 14:45:57 -07:00
Paul Gauthier
dedf3fbebb Merge branch 'main' into call-graph 2023-06-04 14:30:27 -07:00
Paul Gauthier
f47f611e65 Handle empty replacement text too 2023-06-04 14:30:22 -07:00
Paul Gauthier
1cf29a0166 Merge branch 'main' into call-graph 2023-06-04 14:28:19 -07:00
Paul Gauthier
fe1271114f Avoid stripping a newline when doing nearest search/replace 2023-06-04 14:28:03 -07:00
Paul Gauthier
d12d8ff69a lint 2023-06-04 14:16:06 -07:00
Paul Gauthier
5a61e6acd2 aider: Added a second file to test_get_repo_map_with_identifiers that imports MyClass and calls both its methods.
# Context:
USER: in `test_get_repo_map_with_identifiers` make a 2nd file that imports MyClass and calls both its methods. add that to `other_files`
ASSISTANT: Here are the changes you need to make:

1. Modify `test_get_repo_map_with_identifiers` in `tests/test_repomap.py` to add a new file that imports `MyClass` and calls both its methods:

```python
tests/test_repomap.py
<<<<<<< ORIGINAL
    def test_get_repo_map_with_identifiers(self):
        # Create a temporary directory with a sample Python file containing identifiers
        test_file = "test_file_with_identifiers.py"
        file_content = """\
class MyClass:
    def my_method(self, arg1, arg2):
        return arg1 + arg2

def my_function(arg1, arg2):
    return arg1 * arg2
"""

        with tempfile.TemporaryDirectory() as temp_dir:
            with open(os.path.join(temp_dir, test_file), "w") as f:
                f.write(file_content)

            repo_map = RepoMap(root=temp_dir)
            other_files = [os.path.join(temp_dir, test_file), os.path.join(temp_dir, "test_file_import.py")]
            result = repo_map.get_repo_map([], other_files)

            # Check if the result contains the expected tags map with identifiers
            self.assertIn("test_file_with_identifiers.py", result)
            self.assertIn("MyClass", result)
            self.assertIn("my_method", result)
            self.assertIn("my_function", result)
=======
    def test_get_repo_map_with_identifiers(self):
        # Create a temporary directory with a sample Python file containing identifiers
        test_file1 = "test_file_with_identifiers.py"
        file_content1 = """\
class MyClass:
    def my_method(self, arg1, arg2):
        return arg1 + arg2

def my_function(arg1, arg2):
    return arg1 * arg2
"""

        test_file2 = "test_file_import.py"
        file_content2 = """\
from test_file_with_identifiers import MyClass, my_function

obj = MyClass()
print(obj.my_method(1, 2))
print(my_function(3, 4))
"""

        with tempfile.TemporaryDirectory() as temp_dir:
            with open(os.path.join(temp_dir, test_file1), "w") as f:
                f.write(file_content1)

            with open(os.path.join(temp_dir, test_file2), "w") as f:
                f.write(file_content2)

            repo_map = RepoMap(root=temp_dir)
            other_files = [os.path.join(temp_dir, test_file1), os.path.join(temp_dir, test_file2)]
            result = repo_map.get_repo_map([], other_files)

            # Check if the result contains the expected tags map with identifiers
            self.assertIn("test_file_with_identifiers.py", result)
            self.assertIn("MyClass", result)
            self.assertIn("my_method", result)
            self.assertIn("my_function", result)
>>>>>>> UPDATED
```

2. Run the tests to make sure everything is working as expected.
2023-06-04 14:15:25 -07:00
Paul Gauthier
95cb482938 bugfix 2023-06-04 14:13:56 -07:00
Paul Gauthier
676dcd8fd1 fixed some tests 2023-06-04 14:07:50 -07:00
Paul Gauthier
5cc28bae15 Merge branch 'main' into call-graph 2023-06-04 13:20:44 -07:00
Paul Gauthier
87df24bcd9 period 2023-06-04 13:20:35 -07:00
Paul Gauthier
12512df707 Handle the case where there is no rank flow 2023-06-04 09:53:09 -07:00
Paul Gauthier
91f48dfdf1 aider: Added a check for an empty tags list in to_tree function.
# Context:
USER:  File "/Users/gauthier/Projects/aider/aider/repomap.py", line 107, in choose_files_listing
    files_listing = self.get_ranked_tags_map(chat_files, other_files)
                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/gauthier/Projects/aider/aider/repomap.py", line 318, in get_ranked_tags_map
    tree = to_tree(ranked_tags[:middle])
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/gauthier/Projects/aider/aider/repomap.py", line 26, in to_tree
    last = [None] * len(tags[0])
                        ~~~~^^^
ASSISTANT: I see the code snippets from the `aider/repomap.py` file. Please let me know what changes you would like to make, and make sure to give me *read-write* access to the file.
USER: I added these *read-write* files: aider/repomap.py
ASSISTANT: Great! Now, please let me know what changes you would like to make in the `aider/repomap.py` file.
USER:   File "/Users/gauthier/Projects/aider/aider/repomap.py", line 318, in get_ranked_tags_map
    tree = to_tree(ranked_tags[:middle])
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/gauthier/Projects/aider/aider/repomap.py", line 26, in to_tree
    last = [None] * len(tags[0])
                        ~~~~^^^
IndexError: list index out of range
ASSISTANT: It seems like you are encountering an IndexError due to an empty `tags` list. To fix this issue, you can add a check to ensure that the `tags` list is not empty before accessing its elements. Here's the change you need to make in the `aider/repomap.py` file:

```python
aider/repomap.py
<<<<<<< ORIGINAL
def to_tree(tags):
    tags = sorted(tags)

    output = ""
    last = [None] * len(tags[0])
=======
def to_tree(tags):
    if not tags:
        return ""

    tags = sorted(tags)

    output = ""
    last = [None] * len(tags[0])
>>>>>>> UPDATED
```

This change adds a check for an empty `tags` list and returns an empty string if the list is empty, preventing the IndexError.
2023-06-04 06:43:30 -07:00
Paul Gauthier
6da3c3943e Merge branch 'main' into call-graph 2023-06-04 06:33:48 -07:00
Paul Gauthier
8aba2376ce added backoff==2.2.1 2023-06-04 06:22:26 -07:00
Paul Gauthier
106eab7a7a Merge branch 'main' into call-graph 2023-06-04 06:10:22 -07:00
Paul Gauthier
84421d8651 aider: Replaced patching of aider.coder.backoff.expo with patching of the print function and updated the assertions accordingly.
# Context:
USER: instead of patching and checking that aider.coder.backoff.expo, patch and check "print"
ASSISTANT: To patch and check the `print` function instead of `aider.coder.backoff.expo`, you can modify the `test_send_with_retries_rate_limit_error` and `test_send_with_retries_connection_error` test cases in the `tests/test_coder.py` file. Here are the changes:

```python
tests/test_coder.py
<<<<<<< ORIGINAL
    @patch("aider.coder.openai.ChatCompletion.create")
    @patch("aider.coder.backoff.expo")
    def test_send_with_retries_rate_limit_error(self, mock_backoff_expo, mock_chat_completion_create):
=======
    @patch("aider.coder.openai.ChatCompletion.create")
    @patch("builtins.print")
    def test_send_with_retries_rate_limit_error(self, mock_print, mock_chat_completion_create):
>>>>>>> UPDATED
```

```python
tests/test_coder.py
<<<<<<< ORIGINAL
        # Assert that backoff.expo was called once
        mock_backoff_expo.assert_called_once()
=======
        # Assert that print was called once
        mock_print.assert_called_once()
>>>>>>> UPDATED
```

```python
tests/test_coder.py
<<<<<<< ORIGINAL
    @patch("aider.coder.openai.ChatCompletion.create")
    @patch("aider.coder.backoff.expo")
    def test_send_with_retries_connection_error(self, mock_backoff_expo, mock_chat_completion_create):
=======
    @patch("aider.coder.openai.ChatCompletion.create")
    @patch("builtins.print")
    def test_send_with_retries_connection_error(self, mock_print, mock_chat_completion_create):
>>>>>>> UPDATED
```

```python
tests/test_coder.py
<<<<<<< ORIGINAL
        # Assert that backoff.expo was called once
        mock_backoff_expo.assert_called_once()
=======
        # Assert that print was called once
        mock_print.assert_called_once()
>>>>>>> UPDATED
```

These changes replace the patching of `aider.coder.backoff.expo` with patching of the `print` function and update the assertions accordingly.
2023-06-04 06:05:48 -07:00
Paul Gauthier
0abbf43466 just print 2023-06-04 06:04:32 -07:00
Paul Gauthier
091bcaa1a2 aider: Updated test_send_with_retries tests to use backoff library instead of time.sleep.
# Context:
USER: `tests/test_coder.py` `aider/coder.py` Fix the test_send_with_retries tests since we no longer use time.sleep but backoff instead
ASSISTANT: To fix the test_send_with_retries tests in `tests/test_coder.py`, we need to update the tests to use the backoff library instead of time.sleep. Here's how to do it:

1. Replace the `@patch("aider.coder.time.sleep")` with `@patch("aider.coder.backoff.expo")` in both test_send_with_retries_rate_limit_error and test_send_with_retries_connection_error tests.
2. Update the test_send_with_retries_rate_limit_error and test_send_with_retries_connection_error tests to assert that backoff.expo was called once instead of time.sleep.

Here are the changes:

```python
tests/test_coder.py
<<<<<<< ORIGINAL
    @patch("aider.coder.time.sleep")
    def test_send_with_retries_rate_limit_error(self, mock_sleep, mock_chat_completion_create):
=======
    @patch("aider.coder.backoff.expo")
    def test_send_with_retries_rate_limit_error(self, mock_backoff_expo, mock_chat_completion_create):
>>>>>>> UPDATED
```

```python
tests/test_coder.py
<<<<<<< ORIGINAL
        # Assert that time.sleep was called once
        mock_sleep.assert_called_once()
=======
        # Assert that backoff.expo was called once
        mock_backoff_expo.assert_called_once()
>>>>>>> UPDATED
```

```python
tests/test_coder.py
<<<<<<< ORIGINAL
    @patch("aider.coder.time.sleep")
    def test_send_with_retries_connection_error(self, mock_sleep, mock_chat_completion_create):
=======
    @patch("aider.coder.backoff.expo")
    def test_send_with_retries_connection_error(self, mock_backoff_expo, mock_chat_completion_create):
>>>>>>> UPDATED
```

```python
tests/test_coder.py
<<<<<<< ORIGINAL
        # Assert that time.sleep was called once
        mock_sleep.assert_called_once()
=======
        # Assert that backoff.expo was called once
        mock_backoff_expo.assert_called_once()
>>>>>>> UPDATED
```

These changes should fix the test_send_with_retries tests in `tests/test_coder.py`.
2023-06-04 05:58:06 -07:00
Paul Gauthier
df91e9ab1c aider: Implemented backoff library to handle rate limit issues with exponential backoff and a maximum of 5 retries.
# Context:
USER: Hello there,

thanks for making this open source!
I have a request: due to being still on the free credits of OpenAI API, I run into timeouts.

fetch_starred_repos.py
Add these files to the chat? y
RateLimitError: You exceeded your current quota, please check your plan and billing details.
Retry in 1 seconds.
...
Is it possible for you to implement this library to omit that?
https://pypi.org/project/backoff/

Thanks in advance.
Greetings from Germany

ASSISTANT: Sure, I can help you implement the `backoff` library to handle the rate limit issue. First, please make the `aider/coder.py` file *read-write* so I can propose changes to it.
USER: I added these *read-write* files: aider/coder.py
ASSISTANT: Great! First, you need to install the `backoff` library. You can do this by running:

```
pip install backoff
```

Next, I will show you the changes needed in the `aider/coder.py` file to implement the `backoff` library for handling rate limit issues.

1. Import the `backoff` library at the beginning of the file.
2. Replace the `send_with_retries` method with a new method that uses the `backoff` library.

Here are the changes:

```python
aider/coder.py
<<<<<<< ORIGINAL
import os
import sys
import time
import traceback
=======
import os
import sys
import traceback
import backoff
>>>>>>> UPDATED
```

```python
aider/coder.py
<<<<<<< ORIGINAL
    def send_with_retries(self, model, messages):
        while True:
            try:
                return openai.ChatCompletion.create(
                    model=model,
                    messages=messages,
                    temperature=0,
                    stream=True,
                )
            except RateLimitError as err:
                self.io.tool_error(f"RateLimitError: {err}")
            except requests.exceptions.ConnectionError as err:
                self.io.tool_error(f"ConnectionError: {err}")

            retry_after = 1
            self.io.tool_error(f"Retry in {retry_after} seconds.")
            time.sleep(retry_after)
=======
    @backoff.on_exception(
        backoff.expo,
        (RateLimitError, requests.exceptions.ConnectionError),
        max_tries=5,
        on_backoff=lambda details: self.io.tool_error(f"Retry in {details['wait']} seconds."),
    )
    def send_with_retries(self, model, messages):
        return openai.ChatCompletion.create(
            model=model,
            messages=messages,
            temperature=0,
            stream=True,
        )
>>>>>>> UPDATED
```

These changes will implement the `backoff` library to handle rate limit issues with exponential backoff and a maximum of 5 retries. If you want to adjust the number of retries, you can change the `max_tries` parameter in the `backoff.on_exception` decorator.
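For reference, here is a minimal standalone sketch of `backoff.on_exception` (the `fetch` helper and its use of `requests` below are hypothetical, not aider code). One detail worth noting: a lambda written at class-definition time, as above, has no `self` in scope when backoff later invokes it, so a module-level handler that uses only the `details` dict is the safer pattern:

```python
import backoff
import requests


def log_backoff(details):
    # backoff passes a details dict to on_backoff handlers; 'wait' is the
    # upcoming sleep in seconds and 'tries' is the number of attempts so far.
    print(f"Retry in {details['wait']:.1f} seconds (attempt {details['tries']}).")


@backoff.on_exception(
    backoff.expo,
    requests.exceptions.ConnectionError,
    max_tries=5,
    on_backoff=log_backoff,
)
def fetch(url):
    # Hypothetical helper: any callable that can raise ConnectionError works here.
    return requests.get(url, timeout=10)
```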
2023-06-04 05:53:06 -07:00
Paul Gauthier
b4fc6c2caa sssh 2023-06-03 18:11:45 -07:00
Paul Gauthier
ca8d13f194 scrub --ctags from docs 2023-06-03 18:10:02 -07:00
Paul Gauthier
14289ab44f tweaked --map-tokens help 2023-06-03 18:07:01 -07:00
Paul Gauthier
ef22f44d19 final details on --map-tokens 2023-06-03 18:06:14 -07:00
Paul Gauthier
1f9e9be64d aider: Replaced use_ctags with map_tokens parameter in RepoMap class.
# Context:
USER: update `aider/repomap.py`
ASSISTANT: Please provide the specific changes you would like me to make in the `aider/repomap.py` file.
USER: to use map_tokens not `use_ctags`
ASSISTANT: I will update the `aider/repomap.py` file to use `map_tokens` instead of `use_ctags`.

```python
aider/repomap.py
<<<<<<< ORIGINAL
    def __init__(self, use_ctags=None, root=None, main_model="gpt-4", io=None):
        self.io = io

        if not root:
            root = os.getcwd()
        self.root = root

        self.load_ident_cache()
        self.load_tags_cache()

        if use_ctags is None:
            self.use_ctags = self.check_for_ctags()
        else:
            self.use_ctags = use_ctags

        self.tokenizer = tiktoken.encoding_for_model(main_model)
=======
    def __init__(self, map_tokens=1024, root=None, main_model="gpt-4", io=None):
        self.io = io

        if not root:
            root = os.getcwd()
        self.root = root

        self.load_ident_cache()
        self.load_tags_cache()

        self.map_tokens = map_tokens

        self.tokenizer = tiktoken.encoding_for_model(main_model)
>>>>>>> UPDATED
```

This change replaces the `use_ctags` parameter with the `map_tokens` parameter in the `__init__` method of the `RepoMap` class. Make sure to update any other parts of the code that use `use_ctags` to use `map_tokens` instead.
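As a purely hypothetical illustration (the real call sites in `aider/repomap.py` may differ), a method that previously gated on `self.use_ctags` could instead treat `map_tokens` as a budget, with zero disabling the repo map:

```python
# Hypothetical sketch only -- not the actual aider implementation.
class RepoMap:
    def __init__(self, map_tokens=1024, root=None):
        self.map_tokens = map_tokens
        self.root = root

    def choose_files_listing(self, chat_files, other_files):
        # Previously: if not self.use_ctags: return
        if not self.map_tokens:
            # Assumption: a token budget of zero disables the repo map.
            return None
        # ... otherwise build the ranked tags map within self.map_tokens ...
```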
2023-06-03 17:48:45 -07:00
Paul Gauthier
d996fbe1c3 aider: Updated the Coder class in aider/coder.py to accept the map_tokens parameter and pass it to the RepoMap class instead of use_ctags.
# Context:
USER: I added these *read-write* files: aider/coder.py
ASSISTANT: Great! Now I will modify the `aider/coder.py` file to accept and use the `map_tokens` parameter instead of `use_ctags`.

```python
aider/coder.py
<<<<<<< ORIGINAL
    def __init__(
        self,
        io,
        main_model="gpt-4",
        fnames=None,
        pretty=True,
        show_diffs=False,
        auto_commits=True,
        dirty_commits=True,
        dry_run=False,
        use_ctags=False,
        verbose=False,
        openai_api_key=None,
    ):
        ...
        self.repo_map = RepoMap(use_ctags, self.root, self.main_model, rm_io)
=======
    def __init__(
        self,
        io,
        main_model="gpt-4",
        fnames=None,
        pretty=True,
        show_diffs=False,
        auto_commits=True,
        dirty_commits=True,
        dry_run=False,
        map_tokens=1024,
        verbose=False,
        openai_api_key=None,
    ):
        ...
        self.repo_map = RepoMap(map_tokens, self.root, self.main_model, rm_io)
>>>>>>> UPDATED
```

This change updates the `Coder` class in `aider/coder.py` to accept the `map_tokens` parameter and pass it to the `RepoMap` class instead of `use_ctags`.
2023-06-03 17:47:45 -07:00
Paul Gauthier
15f34e2864 aider: Changed the use_ctags parameter to map_tokens in main.py and passed it to the Coder class.
# Context:
USER: pass it to Coder as map_tokens instead of `use_ctags`
ASSISTANT: I will modify the `aider/main.py` file to pass `map_tokens` to the `Coder` class instead of `use_ctags`.

```python
aider/main.py
<<<<<<< ORIGINAL
    coder = Coder(
        io,
        main_model=args.model,
        fnames=args.files,
        pretty=args.pretty,
        show_diffs=args.show_diffs,
        auto_commits=args.auto_commits,
        dirty_commits=args.dirty_commits,
        dry_run=args.dry_run,
        use_ctags=args.ctags,
        verbose=args.verbose,
        openai_api_key=args.openai_api_key,
    )
=======
    coder = Coder(
        io,
        main_model=args.model,
        fnames=args.files,
        pretty=args.pretty,
        show_diffs=args.show_diffs,
        auto_commits=args.auto_commits,
        dirty_commits=args.dirty_commits,
        dry_run=args.dry_run,
        map_tokens=args.map_tokens,
        verbose=args.verbose,
        openai_api_key=args.openai_api_key,
    )
>>>>>>> UPDATED
```

Now, the `map_tokens` value is passed to the `Coder` class instead of `use_ctags`. Please make sure to update the `Coder` class in `aider/coder.py` to accept and use the `map_tokens` parameter.
2023-06-03 17:47:05 -07:00
Paul Gauthier
dcdad393c8 aider: Changed the --ctags switch into --map-tokens, which is an int that defaults to 1024.
# Context:
USER: change the --ctags switch into --map-tokens, which is an int that defaults to 1024
ASSISTANT: To make this change, I need to modify the `aider/main.py` file. Please make it *read-write* and let me know when it's done.
USER: I added these *read-write* files: aider/main.py
ASSISTANT: Great! I will now modify the `aider/main.py` file to change the `--ctags` switch into `--map-tokens`, which is an int that defaults to 1024.

```python
aider/main.py
<<<<<<< ORIGINAL
    parser.add_argument(
        "--ctags",
        type=lambda x: (str(x).lower() == "true"),
        nargs="?",
        const=True,
        default=None,
        help=(
            "Add ctags to the chat to help GPT understand the codebase (default: check for ctags"
            " executable)"
        ),
    )
=======
    parser.add_argument(
        "--map-tokens",
        type=int,
        default=1024,
        help="Number of tokens to map for code understanding (default: 1024)",
    )
>>>>>>> UPDATED
```

This change replaces the `--ctags` argument with the `--map-tokens` argument, which takes an integer value and defaults to 1024.
2023-06-03 17:46:11 -07:00
Paul Gauthier
05e3d2bfdb Merge branch 'main' into call-graph 2023-06-03 17:43:45 -07:00
paul-gauthier
23f972fb2e Merge pull request #12 from paul-gauthier/handle-openai-exceptions 2023-06-03 06:31:00 -07:00
Paul Gauthier
5263367af0 added import 2023-06-03 06:23:07 -07:00
Paul Gauthier
2d1fc9f6da aider: Added a new test case test_send_with_retries_connection_error to ensure that send_with_retries handles the ConnectionError correctly and calls time.sleep once before retrying.
# Context:
USER: make another test like that, but raise requests.exceptions.ConnectionError
ASSISTANT: To add another test case for `coder.send_with_retries(model, messages)` that raises `requests.exceptions.ConnectionError`, we will modify the `tests/test_coder.py` file. We will create a new test method called `test_send_with_retries_connection_error` and use `patch` to mock the `openai.ChatCompletion.create` and `time.sleep` methods.

Here's the code change for the new test case:

```python
tests/test_coder.py
<<<<<<< ORIGINAL
if __name__ == "__main__":
    unittest.main()
=======
    @patch("aider.coder.openai.ChatCompletion.create")
    @patch("aider.coder.time.sleep")
    def test_send_with_retries_connection_error(self, mock_sleep, mock_chat_completion_create):
        # Mock the IO object
        mock_io = MagicMock()

        # Initialize the Coder object with the mocked IO and mocked repo
        coder = Coder(io=mock_io, openai_api_key="fake_key")

        # Set up the mock to raise ConnectionError on the first call and return None on the second call
        mock_chat_completion_create.side_effect = [requests.exceptions.ConnectionError("Connection error"), None]

        # Call the send_with_retries method
        coder.send_with_retries("model", ["message"])

        # Assert that time.sleep was called once
        mock_sleep.assert_called_once()

if __name__ == "__main__":
    unittest.main()
>>>>>>> UPDATED
```

This test case will ensure that `send_with_retries` handles the `ConnectionError` correctly and calls `time.sleep` once before retrying.
2023-06-03 06:22:33 -07:00
Paul Gauthier
3bdddf4b36 added import 2023-06-03 06:21:33 -07:00
Paul Gauthier
2a8e5a727c aider: Added a test case for coder.send_with_retries(model, messages) to handle RateLimitError and call time.sleep once.
# Context:
USER: add a test case for coder.send_with_retries(model, messages). it should patch openai.ChatCompletion.create so that on the first call it raises openai.error.RateLimitError, and on the 2nd call it returns None. the test should confirm that time.sleep is called 1 time.
ASSISTANT: To add the test case for `coder.send_with_retries(model, messages)`, we will modify the `tests/test_coder.py` file. We will create a new test method called `test_send_with_retries_rate_limit_error` and use `patch` to mock the `openai.ChatCompletion.create` and `time.sleep` methods.

Here's the code change for the new test case:

```python
tests/test_coder.py
<<<<<<< ORIGINAL
if __name__ == "__main__":
    unittest.main()
=======
    @patch("aider.coder.openai.ChatCompletion.create")
    @patch("aider.coder.time.sleep")
    def test_send_with_retries_rate_limit_error(self, mock_sleep, mock_chat_completion_create):
        # Mock the IO object
        mock_io = MagicMock()

        # Initialize the Coder object with the mocked IO and mocked repo
        coder = Coder(io=mock_io, openai_api_key="fake_key")

        # Set up the mock to raise RateLimitError on the first call and return None on the second call
        mock_chat_completion_create.side_effect = [openai.error.RateLimitError("Rate limit exceeded"), None]

        # Call the send_with_retries method
        coder.send_with_retries("model", ["message"])

        # Assert that time.sleep was called once
        mock_sleep.assert_called_once()

if __name__ == "__main__":
    unittest.main()
>>>>>>> UPDATED
```

This test case will ensure that `send_with_retries` handles the `RateLimitError` correctly and calls `time.sleep` once before retrying.
2023-06-03 06:20:56 -07:00
Paul Gauthier
c27483193b refac 2023-06-03 06:18:45 -07:00
Paul Gauthier
1882cf55db Handle ConnectionError 2023-06-03 06:16:33 -07:00
Paul Gauthier
575c6e2db6 Stop closing 2023-06-02 16:40:21 -07:00
Paul Gauthier
c5ed12e0c3 aider: Replaced shelve with diskcache for a persistent dictionary-like object.
# Context:
USER: is there another persistent dictionary besides shelve?
ASSISTANT: Yes, there are several alternatives to `shelve` for creating persistent dictionaries in Python. Some popular options include:

1. `pickle`: A built-in Python module for object serialization and deserialization. You can use it to store dictionaries in binary format and load them back into memory when needed.

2. `json`: Another built-in Python module for working with JSON data. You can use it to store dictionaries in a human-readable format and load them back into memory when needed.

3. `sqlite3`: A built-in Python module for working with SQLite databases. You can use it to create a simple key-value store with SQL queries.

4. `TinyDB`: A lightweight, document-oriented database for Python
^C KeyboardInterrupt
USER: i want something which presents as a dict though
ASSISTANT: In that case, you can use the `sqlite3` module to create a custom class that behaves like a dictionary. Here's an example of how you can create a persistent dictionary using SQLite:

```python
import sqlite3

class
^C KeyboardInterrupt
USER: i don't want to build something. i want an existing package.
ASSISTANT: I apologize for the confusion. In that case, you can use the `diskcache` package, which provides a dictionary-like interface for disk-based storage. It is easy to use and has good performance.

To install `diskcache`, you can use pip:

```
pip install diskcache
```

Here's an example of how to use `diskcache`:

```python
from diskcache import Cache

cache = Cache('path/to/cache/directory')

# Set a key-value pair
cache['key'] = 'value'

# Get the value for a key
value = cache['key']

# Check if a key is in the cache
if 'key' in cache:
    print('Key exists')

# Delete a key-value pair
del cache['key']

# Close the cache when done
cache.close()
```

This will create a persistent dictionary-like object using the `diskcache` package. The data will be stored on disk in the specified directory, and you can use the standard dictionary methods to interact with it.
USER: great. use that.
ASSISTANT: To use `diskcache` in the `aider/repomap.py` file, you'll need to replace the `shelve` usage with `diskcache`. Here are the changes you need to make:

1. Install `diskcache` using pip:

```
pip install diskcache
```

2. Modify the `aider/repomap.py` file:

```python
aider/repomap.py
<<<<<<< ORIGINAL
import colorsys
import json
import os
import random
import subprocess
import sys
import tempfile
from collections import Counter, defaultdict

# import shelve
=======
import colorsys
import json
import os
import random
import subprocess
import sys
import tempfile
from collections import Counter, defaultdict
from diskcache import Cache

>>>>>>> UPDATED
```

```python
aider/repomap.py
<<<<<<< ORIGINAL
    IDENT_CACHE_FILE = ".aider.ident.cache"
    TAGS_CACHE_FILE = ".aider.tags.cache"
=======
    IDENT_CACHE_DIR = ".aider.ident.cache"
    TAGS_CACHE_DIR = ".aider.tags.cache"
>>>>>>> UPDATED
```

```python
aider/repomap.py
<<<<<<< ORIGINAL
    def load_tags_cache(self):
        self.TAGS_CACHE = dict()  # shelve.open(self.TAGS_CACHE_FILE)

    def save_tags_cache(self):
        # self.TAGS_CACHE.sync()
        pass

    def load_ident_cache(self):
        self.IDENT_CACHE = dict()  # shelve.open(self.IDENT_CACHE_FILE)

    def save_ident_cache(self):
        # self.IDENT_CACHE.sync()
        pass
=======
    def load_tags_cache(self):
        self.TAGS_CACHE = Cache(self.TAGS_CACHE_DIR)

    def save_tags_cache(self):
        self.TAGS_CACHE.close()

    def load_ident_cache(self):
        self.IDENT_CACHE = Cache(self.IDENT_CACHE_DIR)

    def save_ident_cache(self):
        self.IDENT_CACHE.close()
>>>>>>> UPDATED
```

These changes will replace the usage of `shelve` with `diskcache` for the `IDENT_CACHE` and `TAGS_CACHE` dictionaries. Make sure to install the `diskcache` package before running the updated code.
2023-06-02 16:36:03 -07:00
Paul Gauthier
100deccbff Merge branch 'main' into call-graph 2023-06-02 16:30:26 -07:00
Paul Gauthier
4f1e1181c4 Tweaked prompt 2023-06-02 16:30:19 -07:00
Paul Gauthier
eb38a470a3 Merge branch 'main' into call-graph 2023-06-02 13:28:42 -07:00
Paul Gauthier
f5b68a4f69 personalization 2023-06-02 13:28:40 -07:00
Paul Gauthier
68434cbaa1 cleanup 2023-06-02 13:24:29 -07:00
Paul Gauthier
47293c5ba9 removed graph 2023-06-02 13:22:25 -07:00