Skip to content

Commit

Permalink
Test and URL
Browse files Browse the repository at this point in the history
  • Loading branch information
Warsame committed Sep 27, 2025
1 parent 7c3f8b4 commit b5ea680
Show file tree
Hide file tree
Showing 24 changed files with 131 additions and 103 deletions.
Binary file modified requirements.txt
Binary file not shown.
14 changes: 10 additions & 4 deletions src/acmecli.egg-info/SOURCES.txt
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,13 @@ src/acmecli/metrics/license_metric.py
src/acmecli/metrics/performance_metric.py
src/acmecli/metrics/rampup_metric.py
src/acmecli/metrics/size_metric.py
tests/test_hf_handler.py
tests/test_metric_heuristics.py
tests/test_metrics_contract.py
tests/test_reporter_schema.py
tests/test_bus_factor_metric.py
tests/test_cli.py
tests/test_code_quality_metric.py
tests/test_dataset_and_code_metric.py
tests/test_dataset_quality_metric.py
tests/test_logging_env.py
tests/test_parallel_metrics.py
tests/test_performance_claims_metric.py
tests/test_ramp_up_metric.py
tests/test_size_metric.py
12 changes: 7 additions & 5 deletions src/acmecli/metrics/__init__.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
from .license_metric import LicenseMetric
from .rampup_metric import RampUpMetric
from .busfactor_metric import BusFactorMetric
from .performance_metric import PerformanceClaimsMetric
from .ramp_up_metric import RampUpMetric
from .bus_factor_metric import BusFactorMetric
from .performance_claims_metric import PerformanceClaimsMetric
from .size_metric import SizeMetric
from .dataset_code_metric import DatasetAndCodeMetric
from .dataset_and_code_metric import DatasetAndCodeMetric
from .dataset_quality_metric import DatasetQualityMetric
from .code_quality_metric import CodeQualityMetric
from .hf_downloads_metric import HFDownloadsMetric
from .hf_downloads_metric import HFDownloadsMetric
from .cli_metric import CLIMetric
from .logging_env_metric import LoggingEnvMetric
Binary file modified src/acmecli/metrics/__pycache__/__init__.cpython-312.pyc
Binary file not shown.
File renamed without changes.
24 changes: 24 additions & 0 deletions src/acmecli/metrics/cli_metric.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import time
from ..types import MetricValue
from .base import register

class CLIMetric:
    """Scores how well a repository documents CLI usage and scriptable automation.

    Heuristic keyword scan over ``meta["readme_text"]``; returns a value in [0, 1].
    """
    name = "cli"

    def score(self, meta: dict) -> MetricValue:
        start = time.perf_counter()
        text = meta.get("readme_text", "").lower()
        # Accumulate heuristic weights for each documented CLI aspect.
        total = 0.0
        if ("cli" in text) or ("command line" in text):
            total += 0.5
        if any(keyword in text for keyword in ("install", "test", "score")):
            total += 0.2
        if ("automation" in text) or ("script" in text):
            total += 0.3
        elapsed_ms = int((time.perf_counter() - start) * 1000)
        return MetricValue(self.name, min(1.0, total), elapsed_ms)

register(CLIMetric())
File renamed without changes.
23 changes: 23 additions & 0 deletions src/acmecli/metrics/logging_env_metric.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import time
from ..types import MetricValue
from .base import register

class LoggingEnvMetric:
    """Metric to assess logging configuration via environment variables.

    Scores higher when LOG_FILE / LOG_LEVEL appear in ``meta["env_vars"]``,
    or when the README discusses logging/debugging. Returns a value in [0, 1].
    """
    name = "logging_env"

    def score(self, meta: dict) -> MetricValue:
        t0 = time.perf_counter()
        score = 0.0
        env_vars = meta.get("env_vars", {})
        readme_text = meta.get("readme_text", "").lower()
        # Environment variables are conventionally upper-case (LOG_FILE,
        # LOG_LEVEL), so compare keys case-insensitively; the previous
        # lowercase-only membership test never matched upper-case keys.
        env_keys = {str(k).lower() for k in env_vars}
        if "log_file" in env_keys or "log_level" in env_keys:
            score += 0.5
        if "debug" in readme_text or "logging" in readme_text:
            score += 0.3
        value = min(1.0, score)
        latency_ms = int((time.perf_counter() - t0) * 1000)
        return MetricValue(self.name, value, latency_ms)

register(LoggingEnvMetric())
File renamed without changes.
File renamed without changes.
Empty file added tests/__init__.py
Empty file.
14 changes: 5 additions & 9 deletions tests/test_bus_factor_metric.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,12 @@
from acmecli.metrics.bus_factor_metric import BusFactorMetric

def test_bus_factor_range():
def test_bus_factor_dict_input():
metric = BusFactorMetric()
mv = metric.score({"contributors": 10})
# contributors dict: name -> count
mv = metric.score({"contributors": {"alice": 10, "bob": 5}})
assert 0.0 <= mv.value <= 1.0

def test_bus_factor_latency():
def test_bus_factor_zero_contrib():
metric = BusFactorMetric()
mv = metric.score({"contributors": 10})
assert mv.latency_ms >= 0

def test_bus_factor_edge_case_zero_contrib():
metric = BusFactorMetric()
mv = metric.score({"contributors": 0})
mv = metric.score({"contributors": {}})
assert mv.value == 0.0
18 changes: 0 additions & 18 deletions tests/test_cli.py

This file was deleted.

11 changes: 11 additions & 0 deletions tests/test_cli_metric.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from acmecli.metrics.cli_metric import CLIMetric

def test_cli_metric_documentation():
    # A README that mentions CLI workflows should score within the valid range.
    result = CLIMetric().score({"readme_text": "Supports install, test, score via CLI"})
    assert 0.0 <= result.value <= 1.0

def test_cli_metric_no_cli():
    # A README with no CLI-related keywords should score below the CLI weight.
    result = CLIMetric().score({"readme_text": "This project is for data analysis"})
    assert result.value < 0.5
5 changes: 0 additions & 5 deletions tests/test_code_quality_metric.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,6 @@ def test_code_quality_range():
mv = metric.score({"lint_score": 0.9})
assert 0.0 <= mv.value <= 1.0

def test_code_quality_latency():
metric = CodeQualityMetric()
mv = metric.score({"lint_score": 0.9})
assert mv.latency_ms >= 0

def test_code_quality_missing():
metric = CodeQualityMetric()
mv = metric.score({})
Expand Down
7 changes: 1 addition & 6 deletions tests/test_dataset_and_code_metric.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,9 @@

def test_dataset_and_code_range():
metric = DatasetAndCodeMetric()
mv = metric.score({"linked": True})
mv = metric.score({"readme_text": "data and code available"})
assert 0.0 <= mv.value <= 1.0

def test_dataset_and_code_latency():
metric = DatasetAndCodeMetric()
mv = metric.score({"linked": True})
assert mv.latency_ms >= 0

def test_dataset_and_code_missing():
metric = DatasetAndCodeMetric()
mv = metric.score({})
Expand Down
7 changes: 1 addition & 6 deletions tests/test_dataset_quality_metric.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,9 @@

def test_dataset_quality_range():
metric = DatasetQualityMetric()
mv = metric.score({"quality_score": 0.8})
mv = metric.score({"readme_text": "imagenet large-scale curated"})
assert 0.0 <= mv.value <= 1.0

def test_dataset_quality_latency():
metric = DatasetQualityMetric()
mv = metric.score({"quality_score": 0.8})
assert mv.latency_ms >= 0

def test_dataset_quality_missing():
metric = DatasetQualityMetric()
mv = metric.score({})
Expand Down
11 changes: 11 additions & 0 deletions tests/test_hf_downloads_metric.py (NOTE: committed without the .py extension — pytest will not collect it; rename the file)
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from acmecli.metrics.hf_downloads_metric import HFDownloadsMetric

def test_downloads_zero():
    # Zero downloads must map to the minimum score.
    result = HFDownloadsMetric().score({"downloads": 0})
    assert result.value == 0.0

def test_downloads_high():
    # A large download count must saturate at the maximum score.
    result = HFDownloadsMetric().score({"downloads": 20000})
    assert result.value == 1.0
11 changes: 11 additions & 0 deletions tests/test_license_metric.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from acmecli.metrics.license_metric import LicenseMetric

def test_license_metric_high_score():
    # A permissive, explicitly stated license should score near the top of the range.
    result = LicenseMetric().score({"license": "MIT", "readme_text": "MIT license"})
    assert 0.8 <= result.value <= 1.0

def test_license_metric_no_license():
    # With no license information at all, the score should be near zero.
    metric = LicenseMetric()
    mv = metric.score({"license": "", "readme_text": ""})
    # `mv.value == 0.0 or mv.value < 0.2` was redundant: 0.0 < 0.2 already,
    # so the single strict comparison is logically equivalent.
    assert mv.value < 0.2
27 changes: 15 additions & 12 deletions tests/test_logging_env.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
import os
import tempfile
from acmecli.metrics.logging_env_metric import LoggingEnvMetric

def test_logging_env(monkeypatch):
with tempfile.NamedTemporaryFile() as log_file:
monkeypatch.setenv("LOG_FILE", log_file.name)
monkeypatch.setenv("LOG_LEVEL", "DEBUG")
# You should call your project's logging function here if available
from acmecli import main_logger
main_logger.debug("test message")
log_file.seek(0)
contents = log_file.read().decode()
assert "test message" in contents
def test_logging_env_metric_env_vars():
    # Configured LOG_FILE/LOG_LEVEL env vars should yield a score in range.
    result = LoggingEnvMetric().score(
        {"env_vars": {"LOG_FILE": "log.txt", "LOG_LEVEL": "DEBUG"}}
    )
    assert 0.0 <= result.value <= 1.0

def test_logging_env_metric_readme():
    # README mentions of logging/debug should yield a score in range.
    result = LoggingEnvMetric().score(
        {"readme_text": "This project uses logging and debug level"}
    )
    assert 0.0 <= result.value <= 1.0

def test_logging_env_metric_missing():
    # With no metadata at all, the metric should report a zero score.
    result = LoggingEnvMetric().score({})
    assert result.value == 0.0
11 changes: 0 additions & 11 deletions tests/test_parallel_metrics.py

This file was deleted.

11 changes: 3 additions & 8 deletions tests/test_performance_claims_metric.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,11 @@
from acmecli.metrics.performance_claims_metric import PerformanceClaimsMetric

def test_performance_claims_range():
def test_performance_metric_range():
metric = PerformanceClaimsMetric()
mv = metric.score({"claims": 5})
mv = metric.score({"readme_text": "benchmark performance 99%"})
assert 0.0 <= mv.value <= 1.0

def test_performance_claims_latency():
metric = PerformanceClaimsMetric()
mv = metric.score({"claims": 5})
assert mv.latency_ms >= 0

def test_performance_claims_missing():
def test_performance_metric_missing():
metric = PerformanceClaimsMetric()
mv = metric.score({})
assert mv.value == 0.0
11 changes: 3 additions & 8 deletions tests/test_ramp_up_metric.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,11 @@
from acmecli.metrics.ramp_up_metric import RampUpMetric

def test_ramp_up_range():
def test_rampup_metric_range():
metric = RampUpMetric()
mv = metric.score({"readme_size": 1000})
mv = metric.score({"readme_text": "Install and usage quickstart", "pushed_at": "2025-09-01T00:00:00Z"})
assert 0.0 <= mv.value <= 1.0

def test_ramp_up_latency():
metric = RampUpMetric()
mv = metric.score({"readme_size": 1000})
assert mv.latency_ms >= 0

def test_ramp_up_missing_readme():
def test_rampup_metric_missing():
metric = RampUpMetric()
mv = metric.score({})
assert mv.value == 0.0
17 changes: 6 additions & 11 deletions tests/test_size_metric.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,11 @@
from acmecli.metrics.size_metric import SizeMetric

def test_size_range():
def test_size_metric_range():
metric = SizeMetric()
mv = metric.score({"size_kb": 100})
assert 0.0 <= mv.value <= 1.0
mv = metric.score({"size": 1000})
assert all(0.0 <= v <= 1.0 for v in mv.value.values())

def test_size_latency():
def test_size_metric_zero():
metric = SizeMetric()
mv = metric.score({"size_kb": 100})
assert mv.latency_ms >= 0

def test_size_edge_case_zero():
metric = SizeMetric()
mv = metric.score({"size_kb": 0})
assert mv.value == 0.0
mv = metric.score({"size": 0})
assert all(v == 0.5 for v in mv.value.values())

0 comments on commit b5ea680

Please sign in to comment.