mirror of https://github.com/Aider-AI/aider.git, synced 2025-05-29 16:54:59 +00:00
feat: Compress long timestamp gaps to 0.5 seconds in asciinema cast processing
parent 1bed4e8972
commit 1bb3041298
1 changed file with 31 additions and 3 deletions

redact.py | 34
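For context, the script operates on asciinema v2 cast files: the first line is a JSON header object, and every later line is a JSON array [elapsed_seconds, event_type, text]. A minimal sketch of that input shape (the file name and event contents are invented for illustration):

import json

# First line of a .cast file is a JSON header; each following line is one event.
header = {"version": 2, "width": 80, "height": 24}
events = [
    [0.10, "o", "hello"],
    [9.80, "o", "world"],  # a long pause that this commit compresses to 0.5s
]

with open("demo.cast", "w", encoding="utf-8") as f:
    f.write(json.dumps(header) + "\n")
    for record in events:
        f.write(json.dumps(record) + "\n")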
@@ -13,7 +13,9 @@ def process_file(input_path, output_path):
     If a text field contains "\u001b[ROW;COL]H" followed by "Atuin", skip it and all subsequent
     records until finding a text with "\u001b[ROW;(COL-1)H".
 
-    Maintains consistent timestamps by not advancing time during skip sections.
+    Maintains consistent timestamps by:
+    1. Not advancing time during skip sections
+    2. Compressing any long gaps to 0.5 seconds maximum
     """
     skip_mode = False
     target_pattern = None
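To make rule 2 concrete, a worked sketch of the compression arithmetic with invented timestamps (this mirrors the loop in the hunks below, not redact.py verbatim):

max_gap = 0.5
last_timestamp = 0.0
time_offset = 0.0

for t in [1.0, 1.2, 9.2, 9.4]:
    adjusted = t - time_offset
    if last_timestamp > 0:
        gap = adjusted - last_timestamp
        if gap > max_gap:
            # Keep only max_gap of the pause; shift everything after it left
            excess = gap - max_gap
            time_offset += excess
            adjusted -= excess
    adjusted = max(adjusted, last_timestamp)  # never go backward
    last_timestamp = adjusted
    print(adjusted)  # ~1.0, 1.2, 1.7, 1.9 (up to float rounding)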
@@ -21,6 +23,7 @@ def process_file(input_path, output_path):
     is_first_line = True
     last_timestamp = 0.0
     time_offset = 0.0  # Accumulator for time to subtract
+    max_gap = 0.5  # Maximum allowed time gap between events
 
     with open(input_path, 'r', encoding='utf-8') as infile, open(output_path, 'w', encoding='utf-8') as outfile:
         for line in infile:
@@ -56,7 +59,20 @@ def process_file(input_path, output_path):
                 continue  # Skip this record
 
             # If we're not skipping, write the record with adjusted timestamp
-            adjusted_timestamp = max(current_timestamp - time_offset, last_timestamp)
+            # First, adjust for skipped sections
+            adjusted_timestamp = current_timestamp - time_offset
+
+            # Then, check if there's a long gap to compress
+            if last_timestamp > 0:
+                time_gap = adjusted_timestamp - last_timestamp
+                if time_gap > max_gap:
+                    # Compress the gap and add the excess to time_offset
+                    excess_time = time_gap - max_gap
+                    time_offset += excess_time
+                    adjusted_timestamp -= excess_time
+
+            # Ensure timestamps never go backward
+            adjusted_timestamp = max(adjusted_timestamp, last_timestamp)
             last_timestamp = adjusted_timestamp
             record[0] = adjusted_timestamp
             outfile.write(json.dumps(record) + '\n')
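The same compress-then-clamp sequence is repeated in the second write path below. If one were refactoring, it could be pulled into a helper; a hypothetical sketch (compress_gap is an invented name, not part of this commit):

def compress_gap(current_timestamp, last_timestamp, time_offset, max_gap=0.5):
    """Return (adjusted_timestamp, updated_time_offset) with long gaps capped."""
    adjusted = current_timestamp - time_offset
    if last_timestamp > 0:
        gap = adjusted - last_timestamp
        if gap > max_gap:
            excess = gap - max_gap
            time_offset += excess
            adjusted -= excess
    # Clamp so timestamps never go backward
    return max(adjusted, last_timestamp), time_offset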
@@ -72,7 +88,19 @@ def process_file(input_path, output_path):
                 last_timestamp += 0.5
 
             # Write this record with adjusted timestamp
-            adjusted_timestamp = max(current_timestamp - time_offset, last_timestamp)
+            adjusted_timestamp = current_timestamp - time_offset
+
+            # Check if there's a long gap to compress
+            if last_timestamp > 0:
+                time_gap = adjusted_timestamp - last_timestamp
+                if time_gap > max_gap:
+                    # Compress the gap and add the excess to time_offset
+                    excess_time = time_gap - max_gap
+                    time_offset += excess_time
+                    adjusted_timestamp -= excess_time
+
+            # Ensure timestamps never go backward
+            adjusted_timestamp = max(adjusted_timestamp, last_timestamp)
             last_timestamp = adjusted_timestamp
             record[0] = adjusted_timestamp
             outfile.write(json.dumps(record) + '\n')
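A sketch of driving the updated function over the demo.cast from the earlier example (the __main__ wiring is assumed; this diff only shows process_file):

# Hypothetical driver; redact.py's real entry point is not shown in this diff.
if __name__ == "__main__":
    import sys
    process_file(sys.argv[1], sys.argv[2])

On the demo.cast above, the event at t=9.80 would come out at roughly t=0.60 (0.10 + max_gap), since the 9.7-second pause exceeds the 0.5-second cap.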