From 279dfddee6ab347fc31f2e620344dc9ad0d2e6ac Mon Sep 17 00:00:00 2001 From: Holger Bruch Date: Wed, 25 Oct 2023 08:47:42 +0200 Subject: [PATCH] fix: correct doc --- pipeline/assets/sharing.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/pipeline/assets/sharing.py b/pipeline/assets/sharing.py index 15e322b..d58ad84 100644 --- a/pipeline/assets/sharing.py +++ b/pipeline/assets/sharing.py @@ -64,8 +64,8 @@ def vehicles(context, lamassu: LamassuResource) -> pd.DataFrame: ''' -Default execution mode (which could be overriden for the whole code location) -is multiprocess, resulting in a new process startet for every new job execution. -That results in a large overhead for launching aa new process, initializing db connections etc., -so we want high frequency jobs to be execucted in process. +Default execution mode (which could be overridden for the whole code location) +is multiprocess, resulting in a new process started for every new job execution. +That results in a large overhead for launching a new process, initializing db connections etc., +so we want high frequency jobs to be executed in process. Note: this config has to be provided for job definitions and for RunRequests. ''' @@ -85,8 +85,7 @@ def vehicles(context, lamassu: LamassuResource) -> pd.DataFrame: @schedule(job=stations_and_vehicles_job, cron_schedule='* * * * *', default_status=DefaultScheduleStatus.RUNNING) def update_stations_and_vehicles_minutely(context): """ - For currently registered systems (which we treat as partition), - the stations_and_vehicles_job is run on the provided schedule (minutely). + Run stations_and_vehicles_job in process on the provided schedule (minutely). """ return [ RunRequest(run_config=in_process_job_config)