From 3045c5e94d0c68cbc7bd8cc56ba5795bb7646c1e Mon Sep 17 00:00:00 2001
From: Paul Gauthier
Date: Fri, 1 Sep 2023 15:54:22 -0700
Subject: [PATCH] aider: Removed unused imports and changed comparison to None
 to use `is` instead of `==` in `aider/history.py`, `aider/models/__init__.py`,
 and `aider/models/openrouter.py`.

# Aider chat conversation:

USER: I ran this command:

 pre-commit run --all-files

And got this output:

 isort....................................................................Passed
 black....................................................................Passed
 flake8...................................................................Failed
 - hook id: flake8
 - exit code: 1

 aider/history.py:4:1: F401 'tiktoken' imported but unused
 import tiktoken
 ^
 aider/models/__init__.py:2:1: F401 '.openai.OpenAIModel' imported but unused
 from .openai import OpenAIModel
 ^
 aider/models/__init__.py:3:1: F401 '.openrouter.OpenRouterModel' imported but unused
 from .openrouter import OpenRouterModel
 ^
 aider/models/openrouter.py:26:33: E711 comparison to None should be 'if cond is None:'
 if cached_model_details == None:
 ^

ASSISTANT: It seems like there are some unused imports and a comparison to None that is not following the Pythonic way. To fix these issues, we need to modify the following files:

1. `aider/history.py`
2. `aider/models/__init__.py`
3. `aider/models/openrouter.py`

Please add these files to the chat so I can propose the necessary changes.

USER: I added these *read-write* files: aider/history.py, aider/models/openrouter.py, aider/models/__init__.py

ASSISTANT: Great! Now, let's make the necessary changes to these files.

First, in `aider/history.py`, we need to remove the unused import `tiktoken`.

```python
aider/history.py
<<<<<<< HEAD
import tiktoken
=======
>>>>>>> updated
```

Next, in `aider/models/__init__.py`, we need to remove the unused imports `OpenAIModel` and `OpenRouterModel`.

```python
aider/models/__init__.py
<<<<<<< HEAD
from .openai import OpenAIModel
from .openrouter import OpenRouterModel
=======
>>>>>>> updated
```

Finally, in `aider/models/openrouter.py`, we need to change the comparison to None to use `is` instead of `==`.

```python
aider/models/openrouter.py
<<<<<<< HEAD
if cached_model_details == None:
=======
if cached_model_details is None:
>>>>>>> updated
```

These changes should resolve the issues reported by the `flake8` linter.
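For reference, the reasoning behind flake8's E711 warning: `== None` dispatches to a class's `__eq__` method, which can be overridden and return surprising results, while `is None` checks object identity and is only true for the `None` singleton itself. Below is a minimal, purely illustrative sketch; the `AlwaysEqual` class is hypothetical and not part of the aider codebase.

```python
# Illustrative only: why `is None` (E711) is preferred over `== None`.
class AlwaysEqual:
    """Hypothetical class whose __eq__ claims equality with everything."""

    def __eq__(self, other):
        return True


obj = AlwaysEqual()

print(obj == None)  # True  -- goes through __eq__, a misleading answer  # noqa: E711
print(obj is None)  # False -- identity check; only None itself passes
```

In `openrouter.py` the two spellings happen to behave the same, since `cached_model_details` is either `None` or a list, but `is None` is the idiomatic, override-proof test that E711 enforces.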
---
 aider/history.py           | 1 -
 aider/models/__init__.py   | 2 --
 aider/models/openrouter.py | 2 +-
 3 files changed, 1 insertion(+), 4 deletions(-)

diff --git a/aider/history.py b/aider/history.py
index 36d148344..8996ad486 100644
--- a/aider/history.py
+++ b/aider/history.py
@@ -1,7 +1,6 @@
 import argparse
 import json
 
-import tiktoken
 
 from aider import models, prompts
 from aider.dump import dump  # noqa: F401
diff --git a/aider/models/__init__.py b/aider/models/__init__.py
index ed9d684f7..f299009c6 100644
--- a/aider/models/__init__.py
+++ b/aider/models/__init__.py
@@ -1,6 +1,4 @@
 from .model import Model
-from .openai import OpenAIModel
-from .openrouter import OpenRouterModel
 
 GPT4 = Model.create("gpt-4")
 GPT35 = Model.create("gpt-3.5-turbo")
diff --git a/aider/models/openrouter.py b/aider/models/openrouter.py
index 6c78b560a..4cb99c9d7 100644
--- a/aider/models/openrouter.py
+++ b/aider/models/openrouter.py
@@ -23,7 +23,7 @@ class OpenRouterModel(Model):
         self.tokenizer = tiktoken.get_encoding("cl100k_base")
 
         global cached_model_details
-        if cached_model_details == None:
+        if cached_model_details is None:
             cached_model_details = openai.Model.list().data
         found = next(
             (details for details in cached_model_details if details.get("id") == name), None