style: Format code according to linter rules
This commit is contained in:
parent 8c78e09f03
commit aad6c63206

1 changed file with 40 additions and 25 deletions
@@ -29,49 +29,50 @@ def get_downloads_from_bigquery(credentials_path=None, package_name="aider-chat"
         credentials = None
         if credentials_path:
             credentials = service_account.Credentials.from_service_account_file(
-                credentials_path,
-                scopes=["https://www.googleapis.com/auth/cloud-platform"]
+                credentials_path, scopes=["https://www.googleapis.com/auth/cloud-platform"]
             )
 
         # Create a client
         client = bigquery.Client(credentials=credentials)
 
         # Query to get total downloads for the package
         query = f"""
         SELECT COUNT(*) as total_downloads
         FROM `bigquery-public-data.pypi.file_downloads`
         WHERE file.project = '{package_name}'
         """
 
         # Execute the query
         query_job = client.query(query)
         results = query_job.result()
 
         # Get the first (and only) row
         for row in results:
             return row.total_downloads
 
         return 0
     except Exception as e:
         print(f"Error fetching download statistics from BigQuery: {e}", file=sys.stderr)
         return None
 
 
-def get_total_downloads(api_key=None, package_name="aider-chat", use_bigquery=False, credentials_path=None):
+def get_total_downloads(
+    api_key=None, package_name="aider-chat", use_bigquery=False, credentials_path=None
+):
     """
     Fetch total downloads for a Python package
 
     If use_bigquery is True, fetches from BigQuery.
     Otherwise uses pepy.tech API (requires api_key).
     """
     if use_bigquery:
         return get_downloads_from_bigquery(credentials_path, package_name)
 
     # Fall back to pepy.tech API
     if not api_key:
         print("API key not provided for pepy.tech", file=sys.stderr)
         sys.exit(1)
 
     url = f"https://api.pepy.tech/api/v2/projects/{package_name}"
     headers = {"X-API-Key": api_key}
 
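For orientation, the BigQuery path in this hunk reduces to a single COUNT(*) over the public PyPI downloads table. Below is a minimal standalone sketch of that call, assuming the google-cloud-bigquery package is installed and a GCP project is available via the credentials or the environment; count_pypi_downloads is a hypothetical name used only for illustration, not part of the script.

# Illustrative sketch only; mirrors the query used by get_downloads_from_bigquery.
from google.cloud import bigquery
from google.oauth2 import service_account


def count_pypi_downloads(credentials_path=None, package_name="aider-chat"):
    credentials = None
    if credentials_path:
        # Explicit service-account key file; otherwise the client falls back
        # to Application Default Credentials from the environment.
        credentials = service_account.Credentials.from_service_account_file(
            credentials_path, scopes=["https://www.googleapis.com/auth/cloud-platform"]
        )
    client = bigquery.Client(credentials=credentials)

    # Same public dataset and filter as the script above.
    query = f"""
        SELECT COUNT(*) AS total_downloads
        FROM `bigquery-public-data.pypi.file_downloads`
        WHERE file.project = '{package_name}'
    """
    rows = client.query(query).result()
    return next(iter(rows)).total_downloads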
@@ -209,14 +210,17 @@ def get_badges_md():
     # Check if we should use BigQuery
     use_bigquery = os.environ.get("USE_BIGQUERY", "false").lower() in ("true", "1", "yes")
     credentials_path = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")
 
     # Get API key from environment variable if not using BigQuery
     api_key = None
     if not use_bigquery:
         api_key = os.environ.get("PEPY_API_KEY")
         if not api_key:
             print(
-                "API key not provided and BigQuery not enabled. Please set PEPY_API_KEY environment variable",
+                (
+                    "API key not provided and BigQuery not enabled. Please set PEPY_API_KEY"
+                    " environment variable"
+                ),
                 file=sys.stderr,
             )
             sys.exit(1)
@@ -244,14 +248,17 @@ def get_badges_html():
     # Check if we should use BigQuery
     use_bigquery = os.environ.get("USE_BIGQUERY", "false").lower() in ("true", "1", "yes")
     credentials_path = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")
 
     # Get API key from environment variable if not using BigQuery
     api_key = None
     if not use_bigquery:
         api_key = os.environ.get("PEPY_API_KEY")
         if not api_key:
             print(
-                "API key not provided and BigQuery not enabled. Please set PEPY_API_KEY environment variable",
+                (
+                    "API key not provided and BigQuery not enabled. Please set PEPY_API_KEY"
+                    " environment variable"
+                ),
                 file=sys.stderr,
             )
             sys.exit(1)
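get_badges_md and get_badges_html both repeat the same USE_BIGQUERY check shown in these two hunks. As a reading aid only, the truthiness test can be expressed as a small helper; env_flag is hypothetical and not part of the script.

import os


def env_flag(name, default="false"):
    # "true", "1", and "yes" (any case) count as enabled, matching the
    # USE_BIGQUERY check in the hunks above.
    return os.environ.get(name, default).lower() in ("true", "1", "yes")


use_bigquery = env_flag("USE_BIGQUERY")
credentials_path = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")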
@@ -327,19 +334,23 @@ def main():
     )
     parser.add_argument("--markdown", action="store_true", help="Generate markdown badges block")
     parser.add_argument(
-        "--use-bigquery", action="store_true",
-        help="Use BigQuery to fetch download statistics instead of pepy.tech"
+        "--use-bigquery",
+        action="store_true",
+        help="Use BigQuery to fetch download statistics instead of pepy.tech",
     )
     parser.add_argument(
-        "--credentials-path",
-        help="Path to Google Cloud service account credentials JSON file"
+        "--credentials-path", help="Path to Google Cloud service account credentials JSON file"
     )
     args = parser.parse_args()
 
     # Determine whether to use BigQuery
-    use_bigquery = args.use_bigquery or os.environ.get("USE_BIGQUERY", "false").lower() in ("true", "1", "yes")
+    use_bigquery = args.use_bigquery or os.environ.get("USE_BIGQUERY", "false").lower() in (
+        "true",
+        "1",
+        "yes",
+    )
     credentials_path = args.credentials_path or os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")
 
     # Check for required parameters
     api_key = None
     if not use_bigquery:
@@ -347,15 +358,19 @@ def main():
         api_key = args.api_key or os.environ.get("PEPY_API_KEY")
         if not api_key:
             print(
-                "API key not provided and BigQuery not enabled. Please set PEPY_API_KEY environment variable, "
-                "use --api-key, or enable BigQuery with --use-bigquery",
+                (
+                    "API key not provided and BigQuery not enabled. Please set PEPY_API_KEY"
+                    " environment variable, use --api-key, or enable BigQuery with --use-bigquery"
+                ),
                 file=sys.stderr,
             )
             sys.exit(1)
     elif use_bigquery and not credentials_path and not args.credentials_path:
         print(
-            "BigQuery enabled but no credentials provided. Please set GOOGLE_APPLICATION_CREDENTIALS "
-            "environment variable or use --credentials-path",
+            (
+                "BigQuery enabled but no credentials provided. Please set"
+                " GOOGLE_APPLICATION_CREDENTIALS environment variable or use --credentials-path"
+            ),
            file=sys.stderr,
        )
        # Continue execution - BigQuery might work without explicit credentials in some environments
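The pepy.tech fallback referenced throughout is an authenticated GET against the v2 projects endpoint shown in the first hunk. A rough sketch follows, assuming the requests package is available; the "total_downloads" field name is an assumption about the response JSON, not something confirmed by this diff.

import sys

import requests


def downloads_from_pepy(api_key, package_name="aider-chat"):
    # Endpoint and header name come from the script above; the response field
    # "total_downloads" is an assumption about pepy.tech's v2 JSON shape.
    url = f"https://api.pepy.tech/api/v2/projects/{package_name}"
    response = requests.get(url, headers={"X-API-Key": api_key}, timeout=30)
    if response.status_code != 200:
        print(f"pepy.tech request failed: {response.status_code}", file=sys.stderr)
        return None
    return response.json().get("total_downloads")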