mirror of https://github.com/Aider-AI/aider.git
synced 2025-06-09 06:05:00 +00:00

Fix some LLM-generated mixups

This commit is contained in:
parent 9b56e1f099
commit e9c0774f1f

2 changed files with 46 additions and 67 deletions
@@ -1500,27 +1500,6 @@ Just reply with fixed versions of the {blocks} above that failed to match.


     # ------------------- Helper for finding occurrences -------------------
-    def _find_occurrences(self, content, pattern, near_context=None):
-        """Find all occurrences of pattern, optionally filtered by near_context."""
-        occurrences = []
-        start = 0
-        while True:
-            index = content.find(pattern, start)
-            if index == -1:
-                break
-
-            if near_context:
-                # Check if near_context is within a window around the match
-                window_start = max(0, index - 200)
-                window_end = min(len(content), index + len(pattern) + 200)
-                window = content[window_start:window_end]
-                if near_context in window:
-                    occurrences.append(index)
-            else:
-                occurrences.append(index)
-
-            start = index + 1  # Move past this occurrence's start
-        return occurrences
     # ------------------- Granular Editing Tools -------------------

@@ -1533,4 +1512,3 @@ Just reply with fixed versions of the {blocks} above that failed to match.



-
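For reference, the helper removed above scans `content` with repeated `str.find` calls and, when `near_context` is given, keeps only matches whose surrounding ±200-character window also contains that context string. A minimal standalone sketch of the same behavior (rewritten here as a module-level function; the `window` parameter and the example text are additions for illustration, not part of the diff):

```python
def find_occurrences(content, pattern, near_context=None, window=200):
    """Return the start index of every match of pattern in content.

    If near_context is given, keep only matches whose surrounding
    window of characters also contains near_context.
    """
    occurrences = []
    start = 0
    while True:
        index = content.find(pattern, start)
        if index == -1:
            break
        if near_context:
            lo = max(0, index - window)
            hi = min(len(content), index + len(pattern) + window)
            if near_context in content[lo:hi]:
                occurrences.append(index)
        else:
            occurrences.append(index)
        start = index + 1  # move past this occurrence's start
    return occurrences


# Two identical lines; only the second sits near the "beta section" marker.
text = "alpha section\nvalue = 1\n" + "#" * 300 + "\nbeta section\nvalue = 1\n"
print(find_occurrences(text, "value = 1"))                       # [14, 338]
print(find_occurrences(text, "value = 1", near_context="beta"))  # [338]
```

Because `start` advances only one character past each match's start, overlapping occurrences are also reported.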
@@ -247,6 +247,50 @@ class RepoMap:
             self.io.tool_warning(f"File not found error: {fname}")

     def get_tags(self, fname, rel_fname):
+        # Check if the file is in the cache and if the modification time has not changed
+        file_mtime = self.get_mtime(fname)
+        if file_mtime is None:
+            return []
+
+        cache_key = fname
+        try:
+            val = self.TAGS_CACHE.get(cache_key)  # Issue #1308
+        except SQLITE_ERRORS as e:
+            self.tags_cache_error(e)
+            val = self.TAGS_CACHE.get(cache_key)
+
+        if val is not None and val.get("mtime") == file_mtime:
+            try:
+                # Get the cached data
+                data = self.TAGS_CACHE[cache_key]["data"]
+
+                # Let our Tag class handle compatibility with old cache formats
+                # No need for special handling as TagBase.__new__ will supply default specific_kind
+
+                return data
+            except SQLITE_ERRORS as e:
+                self.tags_cache_error(e)
+                return self.TAGS_CACHE[cache_key]["data"]
+            except (TypeError, AttributeError) as e:
+                # If we hit an error related to missing fields in old cached Tag objects,
+                # force a cache refresh for this file
+                if self.verbose:
+                    self.io.tool_warning(f"Cache format error for {fname}, refreshing: {e}")
+                # Return empty list to trigger cache refresh
+                return []
+
+        # miss!
+        data = list(self.get_tags_raw(fname, rel_fname))
+
+        # Update the cache
+        try:
+            self.TAGS_CACHE[cache_key] = {"mtime": file_mtime, "data": data}
+            self.save_tags_cache()
+        except SQLITE_ERRORS as e:
+            self.tags_cache_error(e)
+            self.TAGS_CACHE[cache_key] = {"mtime": file_mtime, "data": data}
+
+        return data
+
     def get_symbol_definition_location(self, file_path, symbol_name):
         """
         Finds the unique definition location (start/end line) for a symbol in a file.
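The body restored to `get_tags` above follows an mtime-keyed caching pattern: the tags for a file are recomputed only when the file's modification time differs from the one stored alongside the cached data, and SQLite errors from the persistent cache are routed through `tags_cache_error` before retrying the lookup. A stripped-down sketch of that pattern, independent of `RepoMap` (the `MtimeCache` class, its plain-dict storage, and the line-counting example are illustrative stand-ins, not aider code):

```python
import os


class MtimeCache:
    """Cache a per-file computation, invalidated when the file's mtime changes."""

    def __init__(self, compute):
        self.compute = compute  # compute(fname) -> data
        self.cache = {}         # plain dict standing in for the persistent TAGS_CACHE

    def get(self, fname):
        mtime = self._mtime(fname)
        if mtime is None:       # file is gone: nothing to cache or return
            return []

        entry = self.cache.get(fname)
        if entry is not None and entry.get("mtime") == mtime:
            return entry["data"]            # hit: file unchanged since last compute

        data = self.compute(fname)          # miss (or stale): recompute
        self.cache[fname] = {"mtime": mtime, "data": data}
        return data

    @staticmethod
    def _mtime(fname):
        try:
            return os.path.getmtime(fname)
        except FileNotFoundError:
            return None


# Usage: count lines per file, recomputing only after the file changes on disk.
line_counts = MtimeCache(lambda fname: sum(1 for _ in open(fname)))
print(line_counts.get(__file__))
```

On top of this, the real method adds an `except (TypeError, AttributeError)` branch so that cache entries written with an older `Tag` layout return `[]` instead of raising, which the in-diff comments describe as triggering a cache refresh for that file.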
@@ -293,49 +337,6 @@ class RepoMap:
         definition_tag = definitions[0]
         return definition_tag.start_line, definition_tag.end_line
         # Check if the file is in the cache and if the modification time has not changed
-        file_mtime = self.get_mtime(fname)
-        if file_mtime is None:
-            return []
-
-        cache_key = fname
-        try:
-            val = self.TAGS_CACHE.get(cache_key)  # Issue #1308
-        except SQLITE_ERRORS as e:
-            self.tags_cache_error(e)
-            val = self.TAGS_CACHE.get(cache_key)
-
-        if val is not None and val.get("mtime") == file_mtime:
-            try:
-                # Get the cached data
-                data = self.TAGS_CACHE[cache_key]["data"]
-
-                # Let our Tag class handle compatibility with old cache formats
-                # No need for special handling as TagBase.__new__ will supply default specific_kind
-
-                return data
-            except SQLITE_ERRORS as e:
-                self.tags_cache_error(e)
-                return self.TAGS_CACHE[cache_key]["data"]
-            except (TypeError, AttributeError) as e:
-                # If we hit an error related to missing fields in old cached Tag objects,
-                # force a cache refresh for this file
-                if self.verbose:
-                    self.io.tool_warning(f"Cache format error for {fname}, refreshing: {e}")
-                # Return empty list to trigger cache refresh
-                return []
-
-        # miss!
-        data = list(self.get_tags_raw(fname, rel_fname))
-
-        # Update the cache
-        try:
-            self.TAGS_CACHE[cache_key] = {"mtime": file_mtime, "data": data}
-            self.save_tags_cache()
-        except SQLITE_ERRORS as e:
-            self.tags_cache_error(e)
-            self.TAGS_CACHE[cache_key] = {"mtime": file_mtime, "data": data}
-
-        return data

     def get_tags_raw(self, fname, rel_fname):
         lang = filename_to_lang(fname)
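The "TagBase.__new__ will supply default specific_kind" comment in the moved code refers to the standard trick for keeping previously cached namedtuples loadable after a field is added: override `__new__` in a subclass and default the new field. A minimal illustration of that pattern (the field names and the fallback-to-`kind` default are assumptions for the sketch, not necessarily aider's exact `Tag` definition):

```python
from collections import namedtuple

# Newer schema: "specific_kind" did not exist when older cache entries were written.
TagBase = namedtuple(
    "TagBase", ("rel_fname", "fname", "line", "name", "kind", "specific_kind")
)


class Tag(TagBase):
    __slots__ = ()

    def __new__(cls, rel_fname, fname, line, name, kind, specific_kind=None):
        # Old cache entries omit specific_kind; supply a default so they can
        # still be reconstructed without special-casing the cache reader.
        if specific_kind is None:
            specific_kind = kind
        return super().__new__(cls, rel_fname, fname, line, name, kind, specific_kind)


# An "old format" construction without specific_kind still works:
tag = Tag("aider/repomap.py", "/abs/aider/repomap.py", 247, "get_tags", "def")
print(tag.specific_kind)  # -> "def"
```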