GH-44058: [CI][Integration] Group logs on GitHub Actions (apache#44060)

### Rationale for this change

The current log is difficult to read.

### What changes are included in this PR?

Group related log output into GitHub Actions log groups.
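
GitHub Actions collapses everything printed between the `::group::<title>` and `::endgroup::` workflow commands into one collapsible section of the job log. A minimal sketch of that mechanism (the group title is taken from this PR; the build command is illustrative):

```python
# Everything printed between these two workflow commands is collapsed
# under a single "Integration: Build: C++" heading in the Actions log viewer.
print("::group::Integration: Build: C++", flush=True)
print("cmake --build .")  # any amount of build output goes here
print("::endgroup::", flush=True)
```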

### Are these changes tested?

Yes.

### Are there any user-facing changes?

No.
* GitHub Issue: apache#44058

Authored-by: Sutou Kouhei <kou@clear-code.com>
Signed-off-by: Sutou Kouhei <kou@clear-code.com>
kou authored Sep 13, 2024
1 parent 9d56569 commit 8e891a3
Showing 6 changed files with 160 additions and 67 deletions.
ci/scripts/integration_arrow.sh (7 additions, 1 deletion)
@@ -17,7 +17,7 @@
# specific language governing permissions and limitations
# under the License.

set -ex
set -e

arrow_dir=${1}
build_dir=${2}
@@ -30,15 +30,21 @@ gold_dir=$arrow_dir/testing/data/arrow-ipc-stream/integration
: ${ARROW_INTEGRATION_JAVA:=ON}
: ${ARROW_INTEGRATION_JS:=ON}

. ${arrow_dir}/ci/scripts/util_log.sh

github_actions_group_begin "Integration: Prepare: Archery"
pip install -e $arrow_dir/dev/archery[integration]
github_actions_group_end

github_actions_group_begin "Integration: Prepare: Dependencies"
# For C Data Interface testing
if [ "${ARROW_INTEGRATION_CSHARP}" == "ON" ]; then
pip install pythonnet
fi
if [ "${ARROW_INTEGRATION_JAVA}" == "ON" ]; then
pip install jpype1
fi
github_actions_group_end

export ARROW_BUILD_ROOT=${build_dir}

ci/scripts/integration_arrow_build.sh (17 additions, 1 deletion)
@@ -17,7 +17,7 @@
# specific language governing permissions and limitations
# under the License.

set -ex
set -e

arrow_dir=${1}
build_dir=${2}
@@ -28,30 +28,46 @@ build_dir=${2}
: ${ARROW_INTEGRATION_JAVA:=ON}
: ${ARROW_INTEGRATION_JS:=ON}

. ${arrow_dir}/ci/scripts/util_log.sh

github_actions_group_begin "Integration: Build: Rust"
${arrow_dir}/ci/scripts/rust_build.sh ${arrow_dir} ${build_dir}
github_actions_group_end

github_actions_group_begin "Integration: Build: nanoarrow"
${arrow_dir}/ci/scripts/nanoarrow_build.sh ${arrow_dir} ${build_dir}
github_actions_group_end

github_actions_group_begin "Integration: Build: C++"
if [ "${ARROW_INTEGRATION_CPP}" == "ON" ]; then
${arrow_dir}/ci/scripts/cpp_build.sh ${arrow_dir} ${build_dir}
fi
github_actions_group_end

github_actions_group_begin "Integration: Build: C#"
if [ "${ARROW_INTEGRATION_CSHARP}" == "ON" ]; then
${arrow_dir}/ci/scripts/csharp_build.sh ${arrow_dir} ${build_dir}
fi
github_actions_group_end

github_actions_group_begin "Integration: Build: Go"
if [ "${ARROW_INTEGRATION_GO}" == "ON" ]; then
${arrow_dir}/ci/scripts/go_build.sh ${arrow_dir} ${build_dir}
fi
github_actions_group_end

github_actions_group_begin "Integration: Build: Java"
if [ "${ARROW_INTEGRATION_JAVA}" == "ON" ]; then
export ARROW_JAVA_CDATA="ON"
export JAVA_JNI_CMAKE_ARGS="-DARROW_JAVA_JNI_ENABLE_DEFAULT=OFF -DARROW_JAVA_JNI_ENABLE_C=ON"

${arrow_dir}/ci/scripts/java_jni_build.sh ${arrow_dir} ${ARROW_HOME} ${build_dir} /tmp/dist/java
${arrow_dir}/ci/scripts/java_build.sh ${arrow_dir} ${build_dir} /tmp/dist/java
fi
github_actions_group_end

github_actions_group_begin "Integration: Build: JavaScript"
if [ "${ARROW_INTEGRATION_JS}" == "ON" ]; then
${arrow_dir}/ci/scripts/js_build.sh ${arrow_dir} ${build_dir}
fi
github_actions_group_end
ci/scripts/util_log.sh (26 additions, 0 deletions)
@@ -0,0 +1,26 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

github_actions_group_begin() {
echo "::group::$1"
set -x
}

github_actions_group_end() {
set +x
echo "::endgroup::"
}
dev/archery/archery/docker/cli.py (12 additions, 8 deletions)
@@ -21,6 +21,7 @@
import click

from ..utils.cli import validate_arrow_sources
from ..utils.logger import group
from .core import DockerCompose, UndefinedImage


@@ -82,11 +83,12 @@ def docker(ctx, src, dry_run, using_legacy_docker_compose, using_docker_cli,
using_docker_cli |= using_docker_buildx
compose_bin = ("docker-compose" if using_legacy_docker_compose
else "docker compose")
compose = DockerCompose(config_path, params=os.environ,
using_docker=using_docker_cli,
using_buildx=using_docker_buildx,
debug=ctx.obj.get('debug', False),
compose_bin=compose_bin)
with group("Docker: Preppare"):
compose = DockerCompose(config_path, params=os.environ,
using_docker=using_docker_cli,
using_buildx=using_docker_buildx,
debug=ctx.obj.get('debug', False),
compose_bin=compose_bin)
if dry_run:
_mock_compose_calls(compose)
ctx.obj['compose'] = compose
@@ -229,10 +231,12 @@ def docker_run(obj, image, command, *, env, user, force_pull, force_build,
env = dict(kv.split('=', 1) for kv in env)
try:
if force_pull:
compose.pull(image, pull_leaf=use_leaf_cache)
with group("Docker: Pull"):
compose.pull(image, pull_leaf=use_leaf_cache)
if force_build:
compose.build(image, use_cache=use_cache,
use_leaf_cache=use_leaf_cache)
with group("Docker: Build"):
compose.build(image, use_cache=use_cache,
use_leaf_cache=use_leaf_cache)
if build_only:
return
compose.run(
dev/archery/archery/integration/runner.py (75 additions, 57 deletions)
@@ -17,6 +17,7 @@

from collections import namedtuple
from concurrent.futures import ThreadPoolExecutor
import contextlib
from functools import partial
import glob
import gzip
@@ -39,6 +40,7 @@
from .tester_nanoarrow import NanoarrowTester
from .util import guid, printer
from .util import SKIP_C_ARRAY, SKIP_C_SCHEMA, SKIP_FLIGHT, SKIP_IPC
from ..utils.logger import group as group_raw
from ..utils.source import ARROW_ROOT_DEFAULT
from . import datagen

@@ -49,6 +51,12 @@
log = printer.print


@contextlib.contextmanager
def group(name):
with group_raw(name, log):
yield


class Outcome:
def __init__(self):
self.failure = None
@@ -91,20 +99,22 @@ def run_ipc(self):
self._compare_ipc_implementations(
producer, consumer, self._produce_consume,
self.json_files)

if self.gold_dirs:
for gold_dir, consumer in itertools.product(
self.gold_dirs,
filter(lambda t: t.CONSUMER, self.testers)):
log('\n')
log('******************************************************')
log('Tests against golden files in {}'.format(gold_dir))
log('******************************************************')

def run_gold(_, consumer, test_case: datagen.File):
return self._run_gold(gold_dir, consumer, test_case)
self._compare_ipc_implementations(
consumer, consumer, run_gold,
self._gold_tests(gold_dir))
with group(f"Integration: Test: IPC: Gold: {consumer.name}"):
log('\n')
log('******************************************************')
log('Tests against golden files in {}'.format(gold_dir))
log('******************************************************')

def run_gold(_, consumer, test_case: datagen.File):
return self._run_gold(gold_dir, consumer, test_case)
self._compare_ipc_implementations(
consumer, consumer, run_gold,
self._gold_tests(gold_dir))
log('\n')

def run_flight(self):
@@ -233,14 +243,15 @@ def _compare_ipc_implementations(
"""
Compare Arrow IPC for two implementations (one producer, one consumer).
"""
log('##########################################################')
log('IPC: {0} producing, {1} consuming'
.format(producer.name, consumer.name))
log('##########################################################')
with group(f"Integration: Test: IPC: {producer.name} -> {consumer.name}"):
log('##########################################################')
log('IPC: {0} producing, {1} consuming'
.format(producer.name, consumer.name))
log('##########################################################')

case_runner = partial(self._run_ipc_test_case,
producer, consumer, run_binaries)
self._run_test_cases(case_runner, test_cases)
case_runner = partial(self._run_ipc_test_case,
producer, consumer, run_binaries)
self._run_test_cases(case_runner, test_cases)

def _run_ipc_test_case(
self,
@@ -357,14 +368,15 @@ def _compare_flight_implementations(
producer: Tester,
consumer: Tester
):
log('##########################################################')
log('Flight: {0} serving, {1} requesting'
.format(producer.name, consumer.name))
log('##########################################################')
with group(f"Integration: Test: Flight: {producer.name} -> {consumer.name}"):
log('##########################################################')
log('Flight: {0} serving, {1} requesting'
.format(producer.name, consumer.name))
log('##########################################################')

case_runner = partial(self._run_flight_test_case, producer, consumer)
self._run_test_cases(
case_runner, self.json_files + self.flight_scenarios)
case_runner = partial(self._run_flight_test_case, producer, consumer)
self._run_test_cases(
case_runner, self.json_files + self.flight_scenarios)

def _run_flight_test_case(self,
producer: Tester,
@@ -415,27 +427,32 @@ def _compare_c_data_implementations(
producer: Tester,
consumer: Tester
):
log('##########################################################')
log(f'C Data Interface: '
f'{producer.name} exporting, {consumer.name} importing')
log('##########################################################')

# Serial execution is required for proper memory accounting
serial = True

with producer.make_c_data_exporter() as exporter:
with consumer.make_c_data_importer() as importer:
case_runner = partial(self._run_c_schema_test_case,
producer, consumer,
exporter, importer)
self._run_test_cases(case_runner, self.json_files, serial=serial)

if producer.C_DATA_ARRAY_EXPORTER and consumer.C_DATA_ARRAY_IMPORTER:
case_runner = partial(self._run_c_array_test_cases,
with group("Integration: Test: C Data Interface: "
f"{producer.name} -> {consumer.name}"):
log('##########################################################')
log(f'C Data Interface: '
f'{producer.name} exporting, {consumer.name} importing')
log('##########################################################')

# Serial execution is required for proper memory accounting
serial = True

with producer.make_c_data_exporter() as exporter:
with consumer.make_c_data_importer() as importer:
case_runner = partial(self._run_c_schema_test_case,
producer, consumer,
exporter, importer)
self._run_test_cases(case_runner, self.json_files, serial=serial)

if producer.C_DATA_ARRAY_EXPORTER and \
consumer.C_DATA_ARRAY_IMPORTER:
case_runner = partial(self._run_c_array_test_cases,
producer, consumer,
exporter, importer)
self._run_test_cases(case_runner,
self.json_files,
serial=serial)

def _run_c_schema_test_case(self,
producer: Tester, consumer: Tester,
exporter: CDataExporter,
@@ -657,22 +674,23 @@ def run_all_tests(with_cpp=True, with_java=True, with_js=True,
if run_c_data:
runner.run_c_data()

fail_count = 0
if runner.failures:
log("################# FAILURES #################")
for test_case, producer, consumer, exc_info in runner.failures:
fail_count += 1
log("FAILED TEST:", end=" ")
log(test_case.name, producer.name, "producing, ",
consumer.name, "consuming")
if exc_info:
exc_type, exc_value, exc_tb = exc_info
log(f'{exc_type}: {exc_value}')
log()

log(f"{fail_count} failures, {len(runner.skips)} skips")
if fail_count > 0:
sys.exit(1)
with group("Integration: Test: Result"):
fail_count = 0
if runner.failures:
log("################# FAILURES #################")
for test_case, producer, consumer, exc_info in runner.failures:
fail_count += 1
log("FAILED TEST:", end=" ")
log(test_case.name, producer.name, "producing, ",
consumer.name, "consuming")
if exc_info:
exc_type, exc_value, exc_tb = exc_info
log(f'{exc_type}: {exc_value}')
log()

log(f"{fail_count} failures, {len(runner.skips)} skips")
if fail_count > 0:
sys.exit(1)


def write_js_test_json(directory):
dev/archery/archery/utils/logger.py (23 additions, 0 deletions)
@@ -15,7 +15,9 @@
# specific language governing permissions and limitations
# under the License.

import contextlib
import logging
import os

""" Global logger. """
logger = logging.getLogger("archery")
@@ -27,3 +29,24 @@ def __init__(self, quiet=False):


ctx = LoggingContext()

in_github_actions = (os.environ.get("GITHUB_ACTIONS") == "true")


@contextlib.contextmanager
def group(name, output=None):
"""
Group outputs in the given with block.
This does nothing in non-GitHub Actions environments for now.
"""
if output is None:
def output(message):
print(message, flush=True)
if in_github_actions:
output(f"::group::{name}")
try:
yield
finally:
if in_github_actions:
output("::endgroup::")
