test_feature_extraction: use baseline fixture
Signed-off-by: U. Artie Eoff <[email protected]>
uartie committed Feb 20, 2025
1 parent aa0b422 commit 091e8bc
Showing 2 changed files with 22 additions and 1 deletion.
tests/baselines/fixture/tests/test_feature_extraction.json (10 additions, 0 deletions)
@@ -0,0 +1,10 @@
+{
+    "tests/test_feature_extraction.py::GaudiFeatureExtractionTester::test_latency_graph_bf16": {
+        "gaudi1": {
+            "time_per_iter": 0.7987
+        },
+        "gaudi2": {
+            "time_per_iter": 0.6812
+        }
+    }
+}
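The new fixture file keys each reference value by the full pytest node id and then by the device name (gaudi1, gaudi2), with the metric stored under the same keyword name the test reports (time_per_iter, in milliseconds). A minimal sketch of reading one entry, assuming only the JSON layout above; the helper name, the sample measurement, and the 5% tolerance applied here mirror the test below but are otherwise illustrative:

import json

def read_reference(path, test_id, device, metric):
    # Walk the baseline JSON: pytest node id -> device -> metric name.
    with open(path) as f:
        return json.load(f)[test_id][device][metric]

ref_ms = read_reference(
    "tests/baselines/fixture/tests/test_feature_extraction.json",
    "tests/test_feature_extraction.py::GaudiFeatureExtractionTester::test_latency_graph_bf16",
    "gaudi2",
    "time_per_iter",
)
measured_ms = 0.70  # hypothetical measurement
assert measured_ms < 1.05 * ref_ms  # same 5% tolerance as the test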
tests/test_feature_extraction.py (12 additions, 1 deletion)
@@ -94,6 +94,13 @@ class GaudiFeatureExtractionTester(TestCase):
     Tests for Supabase/gte-small feature extraction on Gaudi
     """
 
+    @pytest.fixture(autouse=True)
+    def _use_(self, baseline):
+        """
+        https://docs.pytest.org/en/stable/how-to/unittest.html#using-autouse-fixtures-and-accessing-other-fixtures
+        """
+        self.baseline = baseline
+
     def test_inference_default(self):
         """
         Tests for equivalent CPU and HPU outputs
@@ -135,4 +142,8 @@ def test_latency_graph_bf16(self):
         torch.hpu.synchronize()
         end_time = time.time()
         time_per_iter = (end_time - start_time) * 1000 / test_iters # time in ms
-        self.assertLess(time_per_iter, 1.05 * LATENCY_GTE_SMALL_BF16_GRAPH_BASELINE)
+        self.baseline.assertRef(
+            compare=lambda actual, ref: actual < (1.05 * ref),
+            context=[OH_TEST_DEVICE],
+            time_per_iter=time_per_iter,
+        )
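
For context on the calling convention: the new assertion passes a compare callable, a context list that narrows the baseline record to the current device (OH_TEST_DEVICE comes from the test module and is not shown in this diff), and the measured metric as a keyword argument. A minimal sketch of a fixture that could satisfy this convention, assuming the JSON layout added above; it is not the repository's actual implementation:

import json

import pytest


class Baseline:
    # Stand-in for the object the real `baseline` fixture yields.
    def __init__(self, nodeid, fixture_path):
        self.nodeid = nodeid
        with open(fixture_path) as f:
            self.refs = json.load(f)

    def assertRef(self, compare, context, **metrics):
        # Select this test's record, narrow it by the context keys
        # (e.g. the device name), then check every supplied metric
        # against its stored reference value.
        record = self.refs[self.nodeid]
        for key in context:
            record = record[key]
        for name, actual in metrics.items():
            assert compare(actual, record[name]), (
                f"{name}={actual} failed comparison against reference {record[name]}"
            )


@pytest.fixture
def baseline(request):
    # Hypothetical wiring: use the pytest node id as the lookup key and
    # load the fixture file that matches this test module.
    return Baseline(
        request.node.nodeid,
        "tests/baselines/fixture/tests/test_feature_extraction.json",
    )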
