diff --git a/CHANGELOG.md b/CHANGELOG.md
index b50bf09ecb..1c6f07555a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -15,10 +15,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

 ### Changed

+- Add duration of experiments in seconds in the benchmark CSV result by [mzweilin](https://github.com/mzweilin) in https://github.com/openvinotoolkit/anomalib/pull/2392
+
 ### Deprecated

 ### Fixed

+- Make single GPU benchmarking 5x more efficient by [mzweilin](https://github.com/mzweilin) in https://github.com/openvinotoolkit/anomalib/pull/2390
+
 ### New Contributors

 **Full Changelog**:
diff --git a/docs/source/markdown/get_started/anomalib.md b/docs/source/markdown/get_started/anomalib.md
index 37af563b3e..4580c7fae5 100644
--- a/docs/source/markdown/get_started/anomalib.md
+++ b/docs/source/markdown/get_started/anomalib.md
@@ -17,7 +17,7 @@ The installer can be installed using the following commands:
 :::{tab-item} API
 :sync: label-1

-```{literalinclude} ../../snippets/install/pypi.txt
+```{literalinclude} /snippets/install/pypi.txt
 :language: bash
 ```

@@ -26,7 +26,7 @@ The installer can be installed using the following commands:
 :::{tab-item} Source
 :sync: label-2

-```{literalinclude} ../../snippets/install/source.txt
+```{literalinclude} /snippets/install/source.txt
 :language: bash
 ```

@@ -42,7 +42,7 @@ The next section demonstrates how to install the full package using the CLI inst
 :::::{dropdown} Installing the Full Package
 After installing anomalib, you can install the full package using the following commands:

-```{literalinclude} ../../snippets/install/anomalib_help.txt
+```{literalinclude} /snippets/install/anomalib_help.txt
 :language: bash
 ```

@@ -50,14 +50,14 @@ As can be seen above, the only available sub-command is `install` at the moment.

 The `install` sub-command has options to install either the full package or the
 specific components of the package.

-```{literalinclude} ../../snippets/install/anomalib_install_help.txt
+```{literalinclude} /snippets/install/anomalib_install_help.txt
 :language: bash
 ```

 By default the `install` sub-command installs the full package. If you want to
 install only the specific components of the package, you can use the `--option` flag.

-```{literalinclude} ../../snippets/install/anomalib_install.txt
+```{literalinclude} /snippets/install/anomalib_install.txt
 :language: bash
 ```

@@ -66,13 +66,15 @@ After following these steps, your environment will be ready to use anomalib!

 ## {octicon}`mortar-board` Training

-Anomalib supports both API and CLI-based training. The API is more flexible and allows for more customization, while the CLI training utilizes command line interfaces, and might be easier for those who would like to use anomalib off-the-shelf.
+Anomalib supports both API and CLI-based training. The API is more flexible
+and allows for more customization, while the CLI training utilizes command line
+interfaces, and might be easier for those who would like to use anomalib off-the-shelf.

 ::::{tab-set}

 :::{tab-item} API

-```{literalinclude} ../../snippets/train/api/default.txt
+```{literalinclude} /snippets/train/api/default.txt
 :language: python
 ```

@@ -80,7 +82,7 @@ Anomalib supports both API and CLI-based training. The API is more flexible and

 :::{tab-item} CLI

-```{literalinclude} ../../snippets/train/cli/default.txt
+```{literalinclude} /snippets/train/cli/default.txt
 :language: bash
 ```

@@ -100,7 +102,7 @@ Anomalib includes multiple inferencing scripts, including Torch, Lightning, Grad
 :::{tab-item} API
 :sync: label-1

-```{literalinclude} ../../snippets/inference/api/lightning.txt
+```{literalinclude} /snippets/inference/api/lightning.txt
 :language: python
 ```

@@ -109,7 +111,7 @@ Anomalib includes multiple inferencing scripts, including Torch, Lightning, Grad
 :::{tab-item} CLI
 :sync: label-2

-```{literalinclude} ../../snippets/inference/cli/lightning.txt
+```{literalinclude} /snippets/inference/cli/lightning.txt
 :language: bash
 ```

@@ -201,7 +203,7 @@ Anomalib supports hyper-parameter optimization using [wandb](https://wandb.ai/)

 :::{tab-item} CLI

-```{literalinclude} ../../snippets/pipelines/hpo/cli.txt
+```{literalinclude} /snippets/pipelines/hpo/cli.txt
 :language: bash
 ```

@@ -209,7 +211,7 @@ Anomalib supports hyper-parameter optimization using [wandb](https://wandb.ai/)

 :::{tab-item} API

-```{literalinclude} ../../snippets/pipelines/hpo/api.txt
+```{literalinclude} /snippets/pipelines/hpo/api.txt
 :language: bash
 ```

@@ -233,7 +235,7 @@ To run a training experiment with experiment tracking, you will need the followi

 By using the configuration file above, you can run the experiment with the following command:

-```{literalinclude} ../../snippets/logging/cli.txt
+```{literalinclude} /snippets/logging/cli.txt
 :language: bash
 ```

@@ -241,7 +243,7 @@ By using the configuration file above, you can run the experiment with the follo

 :::{tab-item} API

-```{literalinclude} ../../snippets/logging/api.txt
+```{literalinclude} /snippets/logging/api.txt
 :language: bash
 ```

diff --git a/docs/source/snippets/train/api/default.txt b/docs/source/snippets/train/api/default.txt
index 30293cf501..1fe6cb895c 100644
--- a/docs/source/snippets/train/api/default.txt
+++ b/docs/source/snippets/train/api/default.txt
@@ -1,12 +1,15 @@
 # Import the required modules
 from anomalib.data import MVTec
-from anomalib.models import Patchcore
 from anomalib.engine import Engine
+from anomalib.models import EfficientAd

 # Initialize the datamodule, model and engine
-datamodule = MVTec()
-model = Patchcore()
-engine = Engine()
+datamodule = MVTec(train_batch_size=1)
+model = EfficientAd()
+engine = Engine(max_epochs=5)

 # Train the model
 engine.fit(datamodule=datamodule, model=model)
+
+# Continue from a checkpoint
+engine.fit(datamodule=datamodule, model=model, ckpt_path="path/to/checkpoint.ckpt")
diff --git a/docs/source/snippets/train/cli/default.txt b/docs/source/snippets/train/cli/default.txt
index 3f64f687ad..1990dbf97e 100644
--- a/docs/source/snippets/train/cli/default.txt
+++ b/docs/source/snippets/train/cli/default.txt
@@ -2,10 +2,13 @@
 anomalib train -h

 # Train by using the default values.
-anomalib train --model Patchcore --data anomalib.data.MVTec
+anomalib train --model EfficientAd --data anomalib.data.MVTec --data.train_batch_size 1

 # Train by overriding arguments.
-anomalib train --model Patchcore --data anomalib.data.MVTec --data.category transistor
+anomalib train --model EfficientAd --data anomalib.data.MVTec --data.train_batch_size 1 --data.category transistor

 # Train by using a config file.
 anomalib train --config <path/to/config>
+
+# Continue training from a checkpoint
+anomalib train --config <path/to/config> --ckpt_path <path/to/checkpoint.ckpt>
diff --git a/src/anomalib/pipelines/benchmark/job.py b/src/anomalib/pipelines/benchmark/job.py
index ab443cfa8a..01822d5f29 100644
--- a/src/anomalib/pipelines/benchmark/job.py
+++ b/src/anomalib/pipelines/benchmark/job.py
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: Apache-2.0

 import logging
+import time
 from datetime import datetime
 from pathlib import Path
 from tempfile import TemporaryDirectory
@@ -48,6 +49,7 @@ def run(
         task_id: int | None = None,
     ) -> dict[str, Any]:
         """Run the benchmark."""
+        job_start_time = time.time()
         devices: str | list[int] = "auto"
         if task_id is not None:
             devices = [task_id]
@@ -59,8 +61,16 @@ def run(
                 devices=devices,
                 default_root_dir=temp_dir,
             )
+            fit_start_time = time.time()
             engine.fit(self.model, self.datamodule)
+            test_start_time = time.time()
             test_results = engine.test(self.model, self.datamodule)
+        job_end_time = time.time()
+        durations = {
+            "job_duration": job_end_time - job_start_time,
+            "fit_duration": test_start_time - fit_start_time,
+            "test_duration": job_end_time - test_start_time,
+        }
         # TODO(ashwinvaidya17): Restore throughput
         # https://github.com/openvinotoolkit/anomalib/issues/2054
         output = {
@@ -69,6 +79,7 @@ def run(
             "model": self.model.__class__.__name__,
             "data": self.datamodule.__class__.__name__,
             "category": self.datamodule.category,
+            **durations,
             **test_results[0],
         }
         logger.info(f"Completed with result {output}")
diff --git a/src/anomalib/pipelines/benchmark/pipeline.py b/src/anomalib/pipelines/benchmark/pipeline.py
index 730b3ecccc..f68ee5e2a1 100644
--- a/src/anomalib/pipelines/benchmark/pipeline.py
+++ b/src/anomalib/pipelines/benchmark/pipeline.py
@@ -20,11 +20,12 @@ def _setup_runners(args: dict) -> list[Runner]:
         accelerators = args["accelerator"] if isinstance(args["accelerator"], list) else [args["accelerator"]]
         runners: list[Runner] = []
         for accelerator in accelerators:
-            if accelerator == "cpu":
-                runners.append(SerialRunner(BenchmarkJobGenerator("cpu")))
-            elif accelerator == "cuda":
-                runners.append(ParallelRunner(BenchmarkJobGenerator("cuda"), n_jobs=torch.cuda.device_count()))
-            else:
+            if accelerator not in {"cpu", "cuda"}:
                 msg = f"Unsupported accelerator: {accelerator}"
                 raise ValueError(msg)
+            device_count = torch.cuda.device_count()
+            if device_count <= 1:
+                runners.append(SerialRunner(BenchmarkJobGenerator(accelerator)))
+            else:
+                runners.append(ParallelRunner(BenchmarkJobGenerator(accelerator), n_jobs=device_count))
         return runners
diff --git a/src/anomalib/utils/logging.py b/src/anomalib/utils/logging.py
index 21f7994fbf..d73ef440c4 100644
--- a/src/anomalib/utils/logging.py
+++ b/src/anomalib/utils/logging.py
@@ -74,10 +74,8 @@ def redirect_logs(log_file: str) -> None:
     """
     Path(log_file).parent.mkdir(exist_ok=True, parents=True)
     logger_file_handler = logging.FileHandler(log_file)
-    root_logger = logging.getLogger()
-    root_logger.setLevel(logging.DEBUG)
     format_string = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
-    logging.basicConfig(format=format_string, level=logging.DEBUG, handlers=[logger_file_handler])
+    logging.basicConfig(format=format_string, handlers=[logger_file_handler])
     logging.captureWarnings(capture=True)
     # remove other handlers from all loggers
     loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict]
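
The duration columns added in `BenchmarkJob.run` end up in the benchmark CSV next to the metric columns. A minimal sketch of inspecting them, assuming pandas is available; `results.csv` is a placeholder path, not a location guaranteed by anomalib:

```python
# Load the benchmark CSV and look at the timing columns added above.
# "results.csv" is a hypothetical path; point it at the pipeline's output.
import pandas as pd

df = pd.read_csv("results.csv")

# The three columns appended by BenchmarkJob.run, all in seconds.
print(df[["model", "data", "category", "fit_duration", "test_duration", "job_duration"]])

# job_duration spans the whole job, so whatever it exceeds fit + test by
# is setup cost (seeding, Engine construction, temporary directories).
overhead = df["job_duration"] - df["fit_duration"] - df["test_duration"]
print(overhead.describe())
```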
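
The `pipeline.py` change reads as a simple decision rule: validate the accelerator first, then run serially unless more than one CUDA device is visible, since a one-worker pool only adds process spawn and serialization overhead. A self-contained sketch of that rule; `pick_runner` and its string return values are illustrative stand-ins, not anomalib API:

```python
def pick_runner(accelerator: str, cuda_device_count: int) -> str:
    """Mirror the branch structure of Benchmark._setup_runners above."""
    if accelerator not in {"cpu", "cuda"}:
        msg = f"Unsupported accelerator: {accelerator}"
        raise ValueError(msg)
    # A parallel pool only pays off with multiple GPUs; with one device
    # it merely wraps the same work in extra process management.
    if cuda_device_count <= 1:
        return "serial"
    return f"parallel(n_jobs={cuda_device_count})"


assert pick_runner("cuda", 1) == "serial"  # the single-GPU case this PR speeds up
assert pick_runner("cuda", 4) == "parallel(n_jobs=4)"
assert pick_runner("cpu", 0) == "serial"
```

One consequence worth noting: the CUDA device count now drives the choice for the `cpu` accelerator as well, so on a multi-GPU host CPU benchmarks would also fan out through the parallel runner.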
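
With the forced `DEBUG` level gone from `redirect_logs`, the root logger keeps Python's default `WARNING` threshold unless the caller raises it. A minimal sketch of opting back into verbose file logs, standard library only; the log path is arbitrary:

```python
import logging

from anomalib.utils.logging import redirect_logs

# redirect_logs() no longer forces DEBUG on the root logger, so pick the
# verbosity explicitly; WARNING remains the default otherwise.
redirect_logs("logs/run.log")
logging.getLogger().setLevel(logging.DEBUG)

# Per-namespace control still works as usual:
logging.getLogger("anomalib").setLevel(logging.INFO)
```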