# tests/test_neuromorphic_analytics/test_npae.py
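"""Unit tests for the NeuromorphicPredictiveAnalyticsEngine (NPAE).

The engine's collaborators (the DataPipeline and the SpikingNeuralNetworkModel)
are mocked out so that only the engine's own orchestration logic is exercised.

Assuming the `src` and `tests` packages are importable from the repository root,
the suite can be run with:

    python -m unittest tests.test_neuromorphic_analytics.test_npae
"""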

import unittest
from unittest.mock import patch

from src.neuromorphic_analytics.model import SpikingNeuralNetworkModel
from src.neuromorphic_analytics.npae import NeuromorphicPredictiveAnalyticsEngine


class TestNPAE(unittest.TestCase):
    def setUp(self):
        """Set up the Neuromorphic Predictive Analytics Engine for testing."""
        model_params = {'num_neurons': 10, 'threshold': 1.0, 'decay': 0.9}
        data_sources = ['source1', 'source2']
        self.npae = NeuromorphicPredictiveAnalyticsEngine(model_params, data_sources)

    @patch('src.neuromorphic_analytics.data_pipeline.DataPipeline.collect_data')
    @patch('src.neuromorphic_analytics.data_pipeline.DataPipeline.preprocess_data')
    def test_process_data(self, mock_preprocess_data, mock_collect_data):
        """Test the data processing functionality with the data pipeline mocked out."""
        # Stacked @patch decorators inject mocks bottom-up, so the innermost
        # decorator (preprocess_data) maps to the first mock argument.
        mock_collect_data.return_value = [0.5, 0.7, 0.2]
        mock_preprocess_data.return_value = [0.5, 0.7, 0.2]

        predictions = self.npae.process_data()
        self.assertEqual(len(predictions), 3)  # Expecting one prediction per preprocessed input value
        self.assertTrue(all(isinstance(pred, int) for pred in predictions))  # Predictions should be integers (0 or 1)

    @patch('src.neuromorphic_analytics.model.SpikingNeuralNetworkModel.evaluate')
    def test_evaluate_model(self, mock_evaluate):
        """Test the model evaluation functionality."""
        mock_evaluate.return_value = 0.85  # Simulate an accuracy of 85%
        accuracy = self.npae.evaluate_model([[0.5]], [1])
        self.assertEqual(accuracy, 0.85)  # Check that the returned accuracy matches the mocked value


if __name__ == '__main__':
    unittest.main()