"""cpdbench.examples.Example_VeryLargeDataset"""

 1from cpdbench.examples.ExampleDatasets import get_extreme_large_dataset_from_file
 2from cpdbench.examples.ExampleAlgorithms import numpy_array_accesses
 3from cpdbench.examples.ExampleMetrics import metric_accuracy_in_allowed_windows
 4from cpdbench.CPDBench import CPDBench
 5import pathlib
 6
 7cpdb = CPDBench()
 8
 9
@cpdb.dataset
def get_large_dataset():
    """Provide the very large example dataset.

    Loaded via ``get_extreme_large_dataset_from_file`` — see the note above
    ``__main__``: the backing file must be generated first.
    """
    return get_extreme_large_dataset_from_file()
13
14
@cpdb.algorithm
def execute_algorithm(dataset, *, array_indexes):
    """Run the example array-access algorithm on *dataset*.

    ``array_indexes`` is keyword-only (presumably injected by the CPDBench
    configuration — verify against the framework) and is forwarded to
    ``numpy_array_accesses``.
    """
    return numpy_array_accesses(dataset, array_indexes)
18
19
@cpdb.metric
def compute_metric(indexes, confidences, ground_truths):
    """Score detected changepoints against the ground truth.

    Delegates to ``metric_accuracy_in_allowed_windows`` with a fixed
    ``window_size`` of 20 around each ground-truth changepoint.
    """
    return metric_accuracy_in_allowed_windows(indexes, confidences, ground_truths, window_size=20)
23
24
# IMPORTANT!
# To run this example, the file "data/very_big_numpy_file" has to be generated first.
# To do this first run the script "data/generate_very_big_numpy_file.dat.py"

if __name__ == '__main__':
    # Resolve the config file relative to this script so the example works
    # regardless of the current working directory.
    config_path = pathlib.Path(__file__).parent.resolve() / "configs" / "VeryLargeDatasetConfig.yml"
    cpdb.start(config_file=str(config_path))