Commit V3

Warsame committed Sep 27, 2025
1 parent 1144250 commit 7c3f8b4
Showing 19 changed files with 154 additions and 43 deletions.
Binary file modified requirements.txt (contents not shown).
Four additional binary files changed (contents not shown).
16 changes: 16 additions & 0 deletions tests/test_bus_factor_metric.py
@@ -0,0 +1,16 @@
from acmecli.metrics.bus_factor_metric import BusFactorMetric

def test_bus_factor_range():
    metric = BusFactorMetric()
    mv = metric.score({"contributors": 10})
    assert 0.0 <= mv.value <= 1.0

def test_bus_factor_latency():
    metric = BusFactorMetric()
    mv = metric.score({"contributors": 10})
    assert mv.latency_ms >= 0

def test_bus_factor_edge_case_zero_contrib():
    metric = BusFactorMetric()
    mv = metric.score({"contributors": 0})
    assert mv.value == 0.0
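
These metric tests share one contract: score(meta) returns a result whose value lies in [0, 1] and whose latency_ms is non-negative, with missing or zero inputs scoring 0.0. A minimal sketch of what acmecli/metrics/bus_factor_metric.py could look like under that contract is below; the MetricValue dataclass, its field names, and the saturate-at-10-contributors heuristic are assumptions, not code from this commit.

# Hypothetical sketch only; the real BusFactorMetric is not part of this diff.
import time
from dataclasses import dataclass

@dataclass
class MetricValue:
    name: str
    value: float
    latency_ms: int

class BusFactorMetric:
    name = "bus_factor"

    def score(self, meta: dict) -> MetricValue:
        start = time.perf_counter()
        contributors = meta.get("contributors", 0)
        # Assumed heuristic: scale contributor count onto [0, 1], saturating at 10.
        value = min(max(contributors, 0) / 10.0, 1.0)
        latency_ms = int((time.perf_counter() - start) * 1000)
        return MetricValue(self.name, value, latency_ms)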
18 changes: 18 additions & 0 deletions tests/test_cli.py
@@ -0,0 +1,18 @@
import subprocess

def test_run_install():
    result = subprocess.run(["./run", "install"], capture_output=True)
    assert result.returncode == 0

def test_run_test():
    result = subprocess.run(["./run", "test"], capture_output=True)
    assert b"test cases passed" in result.stdout

def test_run_score_valid_url():
    result = subprocess.run(["./run", "score", "urls.txt"], capture_output=True)
    assert result.stdout.decode().strip().startswith("{")
    assert result.returncode == 0

def test_run_score_invalid_url():
    result = subprocess.run(["./run", "score", "bad_urls.txt"], capture_output=True)
    assert result.returncode == 1 or "error" in result.stderr.decode().lower()
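
These CLI tests exercise a ./run executable with install, test, and score URL_FILE subcommands: exit code 0 on success, a summary containing "test cases passed" from ./run test, and output whose first line starts with "{" from ./run score. A rough dispatcher satisfying those expectations might look like the sketch below; the pip/pytest wiring and the acmecli.cli entry point are assumptions, since the actual run script is not part of this diff.

#!/usr/bin/env python3
# Hypothetical sketch of ./run; command wiring is assumed, not taken from this commit.
import subprocess
import sys

def main(argv: list[str]) -> int:
    if not argv:
        print("usage: ./run install|test|score URL_FILE", file=sys.stderr)
        return 1
    if argv[0] == "install":
        return subprocess.call([sys.executable, "-m", "pip", "install", "-r", "requirements.txt"])
    if argv[0] == "test":
        result = subprocess.run([sys.executable, "-m", "pytest", "-q"], capture_output=True, text=True)
        # The test above only checks that stdout mentions "test cases passed".
        print(f"{'all' if result.returncode == 0 else 'not all'} test cases passed")
        return result.returncode
    if argv[0] == "score" and len(argv) > 1:
        from acmecli.cli import main as cli_main  # assumed entry point
        return cli_main(argv[1])
    print(f"error: unknown command {argv!r}", file=sys.stderr)
    return 1

if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))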
16 changes: 16 additions & 0 deletions tests/test_code_quality_metric.py
@@ -0,0 +1,16 @@
from acmecli.metrics.code_quality_metric import CodeQualityMetric

def test_code_quality_range():
    metric = CodeQualityMetric()
    mv = metric.score({"lint_score": 0.9})
    assert 0.0 <= mv.value <= 1.0

def test_code_quality_latency():
    metric = CodeQualityMetric()
    mv = metric.score({"lint_score": 0.9})
    assert mv.latency_ms >= 0

def test_code_quality_missing():
    metric = CodeQualityMetric()
    mv = metric.score({})
    assert mv.value == 0.0
16 changes: 16 additions & 0 deletions tests/test_dataset_and_code_metric.py
@@ -0,0 +1,16 @@
from acmecli.metrics.dataset_and_code_metric import DatasetAndCodeMetric

def test_dataset_and_code_range():
    metric = DatasetAndCodeMetric()
    mv = metric.score({"linked": True})
    assert 0.0 <= mv.value <= 1.0

def test_dataset_and_code_latency():
    metric = DatasetAndCodeMetric()
    mv = metric.score({"linked": True})
    assert mv.latency_ms >= 0

def test_dataset_and_code_missing():
    metric = DatasetAndCodeMetric()
    mv = metric.score({})
    assert mv.value == 0.0
16 changes: 16 additions & 0 deletions tests/test_dataset_quality_metric.py
@@ -0,0 +1,16 @@
from acmecli.metrics.dataset_quality_metric import DatasetQualityMetric

def test_dataset_quality_range():
    metric = DatasetQualityMetric()
    mv = metric.score({"quality_score": 0.8})
    assert 0.0 <= mv.value <= 1.0

def test_dataset_quality_latency():
    metric = DatasetQualityMetric()
    mv = metric.score({"quality_score": 0.8})
    assert mv.latency_ms >= 0

def test_dataset_quality_missing():
    metric = DatasetQualityMetric()
    mv = metric.score({})
    assert mv.value == 0.0
9 changes: 0 additions & 9 deletions tests/test_hf_handler.py

This file was deleted.

13 changes: 13 additions & 0 deletions tests/test_logging_env.py
@@ -0,0 +1,13 @@
import os
import tempfile

def test_logging_env(monkeypatch):
    with tempfile.NamedTemporaryFile() as log_file:
        monkeypatch.setenv("LOG_FILE", log_file.name)
        monkeypatch.setenv("LOG_LEVEL", "DEBUG")
        # You should call your project's logging function here if available
        from acmecli import main_logger
        main_logger.debug("test message")
        log_file.seek(0)
        contents = log_file.read().decode()
        assert "test message" in contents
7 changes: 0 additions & 7 deletions tests/test_metric_heuristics.py

This file was deleted.

11 changes: 0 additions & 11 deletions tests/test_metrics_contract.py

This file was deleted.

11 changes: 11 additions & 0 deletions tests/test_parallel_metrics.py
@@ -0,0 +1,11 @@
import time
from acmecli.cli import score_all_metrics_parallel

def test_metrics_run_parallel():
    start = time.time()
    # Simulate input that exercises several metrics at once
    report = score_all_metrics_parallel({"contributors": 10, "size_kb": 100, "linked": True})
    duration = time.time() - start
    # With parallel execution, total duration should be less than the sum of the individual metric durations
    assert isinstance(report, dict)
    assert duration < 2.0  # Example threshold; adjust to your actual metric timings
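
The parallel test only checks that the report is a dict and that wall-clock time stays under a rough threshold, so any concurrent fan-out over the metric objects would satisfy it. A sketch using a thread pool is below; the metric registry, the name attribute, and the return shape are assumptions, since score_all_metrics_parallel itself is not shown in this diff.

# Hypothetical sketch of score_all_metrics_parallel in acmecli/cli.py.
from concurrent.futures import ThreadPoolExecutor

from acmecli.metrics.bus_factor_metric import BusFactorMetric
from acmecli.metrics.dataset_and_code_metric import DatasetAndCodeMetric
from acmecli.metrics.size_metric import SizeMetric

METRICS = [BusFactorMetric(), DatasetAndCodeMetric(), SizeMetric()]  # assumed registry

def score_all_metrics_parallel(meta: dict) -> dict:
    # Run every metric's score() concurrently so total latency approaches
    # the slowest single metric rather than the sum of all of them.
    with ThreadPoolExecutor(max_workers=len(METRICS)) as pool:
        futures = {pool.submit(m.score, meta): m for m in METRICS}
        return {metric.name: future.result() for future, metric in futures.items()}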
16 changes: 16 additions & 0 deletions tests/test_performance_claims_metric.py
@@ -0,0 +1,16 @@
from acmecli.metrics.performance_claims_metric import PerformanceClaimsMetric

def test_performance_claims_range():
    metric = PerformanceClaimsMetric()
    mv = metric.score({"claims": 5})
    assert 0.0 <= mv.value <= 1.0

def test_performance_claims_latency():
    metric = PerformanceClaimsMetric()
    mv = metric.score({"claims": 5})
    assert mv.latency_ms >= 0

def test_performance_claims_missing():
    metric = PerformanceClaimsMetric()
    mv = metric.score({})
    assert mv.value == 0.0
16 changes: 16 additions & 0 deletions tests/test_ramp_up_metric.py
@@ -0,0 +1,16 @@
from acmecli.metrics.ramp_up_metric import RampUpMetric

def test_ramp_up_range():
    metric = RampUpMetric()
    mv = metric.score({"readme_size": 1000})
    assert 0.0 <= mv.value <= 1.0

def test_ramp_up_latency():
    metric = RampUpMetric()
    mv = metric.score({"readme_size": 1000})
    assert mv.latency_ms >= 0

def test_ramp_up_missing_readme():
    metric = RampUpMetric()
    mv = metric.score({})
    assert mv.value == 0.0
16 changes: 0 additions & 16 deletions tests/test_reporter_schema.py

This file was deleted.

16 changes: 16 additions & 0 deletions tests/test_size_metric.py
@@ -0,0 +1,16 @@
from acmecli.metrics.size_metric import SizeMetric

def test_size_range():
    metric = SizeMetric()
    mv = metric.score({"size_kb": 100})
    assert 0.0 <= mv.value <= 1.0

def test_size_latency():
    metric = SizeMetric()
    mv = metric.score({"size_kb": 100})
    assert mv.latency_ms >= 0

def test_size_edge_case_zero():
    metric = SizeMetric()
    mv = metric.score({"size_kb": 0})
    assert mv.value == 0.0
