Mirror of https://github.com/Aider-AI/aider.git (synced 2025-05-30 17:24:59 +00:00)
personalization

commit f5b68a4f69 (parent 68434cbaa1)
1 changed file with 17 additions and 18 deletions
@@ -219,21 +219,21 @@ class RepoMap:
         personalization = dict()
 
         fnames = set(chat_fnames).union(set(other_fnames))
+        chat_rel_fnames = set()
 
-        show_fnames = set()
         for fname in sorted(fnames):
             dump(fname)
-            show_fname = os.path.relpath(fname, self.root)
-            show_fnames.add(show_fname)
+            rel_fname = os.path.relpath(fname, self.root)
 
-            if ".venv" not in show_fname:
-                personalization[show_fname] = 1.0
+            if fname in chat_fnames:
+                personalization[rel_fname] = 1.0
+                chat_rel_fnames.add(rel_fname)
 
             data = self.run_ctags(fname)
 
             for tag in data:
                 ident = tag["name"]
-                defines[ident].add(show_fname)
+                defines[ident].add(rel_fname)
 
                 scope = tag.get("scope")
                 kind = tag.get("kind")
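A minimal sketch of what this first hunk sets up, pulled out of the diff for readability: the PageRank personalization vector is seeded with the files currently in the chat, keyed by repo-relative path. The standalone helper name build_personalization is illustrative only; in the repo this logic sits inline in the RepoMap method shown above.

import os

def build_personalization(chat_fnames, other_fnames, root):
    # Hypothetical helper (not in the repo): give each chat file a
    # personalization weight of 1.0, keyed by its repo-relative path,
    # and remember those paths so their tags can be skipped later.
    personalization = dict()
    chat_rel_fnames = set()
    for fname in sorted(set(chat_fnames) | set(other_fnames)):
        rel_fname = os.path.relpath(fname, root)
        if fname in chat_fnames:
            personalization[rel_fname] = 1.0
            chat_rel_fnames.add(rel_fname)
    return personalization, chat_rel_fnames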
@@ -244,19 +244,19 @@ class RepoMap:
                 if signature:
                     last += " " + signature
 
-                res = [show_fname]
+                res = [rel_fname]
                 if scope:
                     res.append(scope)
                 res += [kind, last]
 
-                key = (show_fname, ident)
+                key = (rel_fname, ident)
                 definitions[key].add(tuple(res))
-                # definitions[key].add((show_fname,))
+                # definitions[key].add((rel_fname,))
 
             idents = self.get_name_identifiers(fname, uniq=False)
             for ident in idents:
                 # dump("ref", fname, ident)
-                references[ident].append(show_fname)
+                references[ident].append(rel_fname)
 
         idents = set(defines.keys()).intersection(set(references.keys()))
 
@@ -270,15 +270,12 @@ class RepoMap:
                         continue
                     G.add_edge(referencer, definer, weight=num_refs, ident=ident)
 
-        # personalization = dict()
-        # personalization["utils.py"] = 1.0
+        if personalization:
+            pers_args = dict(personalization=personalization, dangling=personalization)
+        else:
+            pers_args = dict()
 
-        ranked = nx.pagerank(
-            G,
-            weight="weight",
-            # personalization=personalization,
-            # dangling=personalization,
-        )
+        ranked = nx.pagerank(G, weight="weight", **pers_args)
 
         top_rank = sorted([(rank, node) for (node, rank) in ranked.items()], reverse=True)
         # Print the PageRank of each node
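The hunk above swaps the plain nx.pagerank call for a personalized one. Below is a sketch of the same pattern on a toy graph; the file names and weights are made up, and only the networkx keyword arguments (weight, personalization, dangling) are taken from the diff.

import networkx as nx

# Toy reference graph (made-up data): an edge points from the file that
# references an identifier to the file that defines it.
G = nx.DiGraph()
G.add_edge("main.py", "utils.py", weight=2)
G.add_edge("main.py", "io.py", weight=1)

# The file currently in the chat carries all the personalization mass.
personalization = {"main.py": 1.0}

# Same conditional as in the diff: only pass the personalization and
# dangling vectors when there is something to bias the ranking toward.
if personalization:
    pers_args = dict(personalization=personalization, dangling=personalization)
else:
    pers_args = dict()

ranked = nx.pagerank(G, weight="weight", **pers_args)
print(sorted(ranked.items(), key=lambda kv: kv[1], reverse=True))

With the chat file as the teleport target, rank flows from it along the weighted reference edges, so utils.py ends up ahead of io.py here; without a personalization vector the teleport mass would be spread uniformly over all nodes.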
@@ -300,6 +297,8 @@ class RepoMap:
         ranked_definitions = sorted(ranked_definitions.items(), reverse=True, key=lambda x: x[1])
         for (fname, ident), rank in ranked_definitions:
             print(f"{rank:.03f} {fname} {ident}")
+            if fname in chat_rel_fnames:
+                continue
             ranked_tags += list(definitions.get((fname, ident), []))
 
         return ranked_tags
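The final hunk adds a guard that skips definitions living in files that are already in the chat, presumably because those files are sent to the model in full and do not need repo-map tags. A self-contained illustration with made-up ranks and definitions:

# Made-up inputs illustrating the effect of the new guard.
chat_rel_fnames = {"main.py"}
ranked_definitions = [
    (("main.py", "main"), 0.54),
    (("utils.py", "dump"), 0.31),
    (("io.py", "read"), 0.15),
]
definitions = {
    ("main.py", "main"): {("main.py", "function", "def main():")},
    ("utils.py", "dump"): {("utils.py", "function", "def dump(*vals):")},
    ("io.py", "read"): {("io.py", "function", "def read(path):")},
}

ranked_tags = []
for (fname, ident), rank in ranked_definitions:
    if fname in chat_rel_fnames:
        continue  # chat files are already in the context verbatim; skip their tags
    ranked_tags += list(definitions.get((fname, ident), []))

# ranked_tags now holds only tags from files outside the chat:
# [("utils.py", "function", "def dump(*vals):"), ("io.py", "function", "def read(path):")]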