Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-04 21:30:07 +06:00)
extract warnings in GH workflows (#20487)
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent bbcd5eea3b
commit e8d448edcf
.github/workflows/self-scheduled.yml: 53 changed lines
@@ -398,6 +398,56 @@ jobs:
           name: ${{ matrix.machine_type }}_run_tests_torch_cuda_extensions_gpu_test_reports
           path: /workspace/transformers/reports/${{ matrix.machine_type }}_tests_torch_cuda_extensions_gpu
 
+  run_extract_warnings:
+    name: Extract warnings in CI artifacts
+    runs-on: ubuntu-latest
+    if: always()
+    needs: [
+      check_runner_status,
+      check_runners,
+      setup,
+      run_tests_single_gpu,
+      run_tests_multi_gpu,
+      run_examples_gpu,
+      run_pipelines_tf_gpu,
+      run_pipelines_torch_gpu,
+      run_all_tests_torch_cuda_extensions_gpu
+    ]
+    steps:
+      - name: Checkout transformers
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 2
+
+      - name: Install transformers
+        run: pip install transformers
+
+      - name: Show installed libraries and their versions
+        run: pip freeze
+
+      - name: Create output directory
+        run: mkdir warnings_in_ci
+
+      - uses: actions/download-artifact@v2
+        with:
+          path: warnings_in_ci
+
+      - name: Show artifacts
+        run: echo "$(python3 -c 'import os; d = os.listdir(); print(d)')"
+        working-directory: warnings_in_ci
+
+      - name: Extract warnings in CI artifacts
+        run: |
+          python3 utils/extract_warnings.py --workflow_run_id ${{ github.run_id }} --output_dir warnings_in_ci --token ${{ secrets.ACCESS_REPO_INFO_TOKEN }} --from_gh
+          echo "$(python3 -c 'import os; import json; fp = open("warnings_in_ci/selected_warnings.json"); d = json.load(fp); d = "\n".join(d) ;print(d)')"
+
+      - name: Upload artifact
+        if: ${{ always() }}
+        uses: actions/upload-artifact@v2
+        with:
+          name: warnings_in_ci
+          path: warnings_in_ci/selected_warnings.json
+
   send_results:
     name: Send results to webhook
     runs-on: ubuntu-latest
@@ -411,7 +461,8 @@ jobs:
       run_examples_gpu,
       run_pipelines_tf_gpu,
       run_pipelines_torch_gpu,
-      run_all_tests_torch_cuda_extensions_gpu
+      run_all_tests_torch_cuda_extensions_gpu,
+      run_extract_warnings
    ]
     steps:
       - name: Preliminary job status
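For readability, the `python3 -c` one-liner in the "Extract warnings in CI artifacts" step above is roughly equivalent to the following sketch (illustrative only, not part of the commit; it assumes the step has already written warnings_in_ci/selected_warnings.json as a JSON list of warning strings):

# Pretty-print the warnings collected by utils/extract_warnings.py.
import json

with open("warnings_in_ci/selected_warnings.json") as fp:
    selected_warnings = json.load(fp)  # expected to be a list of warning strings

print("\n".join(selected_warnings))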
utils/extract_warnings.py

@@ -11,40 +11,54 @@ from transformers import logging
 logger = logging.get_logger(__name__)
 
 
-def extract_warnings_from_single_artifact(artifact_zip_path, targets):
+def extract_warnings_from_single_artifact(artifact_path, targets):
     """Extract warnings from a downloaded artifact (in .zip format)"""
     selected_warnings = set()
     buffer = []
 
-    try:
-        with zipfile.ZipFile(artifact_zip_path) as z:
-            for filename in z.namelist():
-                if not os.path.isdir(filename):
-                    # read the file
-                    if filename != "warnings.txt":
-                        continue
-                    with z.open(filename) as f:
-                        for line in f:
-                            line = line.decode("UTF-8")
-                            if "warnings summary (final)" in line:
-                                continue
-                            # This means we are outside the body of a warning
-                            elif not line.startswith(" "):
-                                # process a single warning and move it to `selected_warnings`.
-                                if len(buffer) > 0:
-                                    warning = "\n".join(buffer)
-                                    # Only keep the warnings specified in `targets`
-                                    if any(f": {x}: " in warning for x in targets):
-                                        selected_warnings.add(warning)
-                                    buffer = []
-                                continue
-                            else:
-                                line = line.strip()
-                                buffer.append(line)
-    except Exception:
-        logger.warning(
-            f"{artifact_zip_path} is either an invalid zip file or something else wrong. This file is skipped."
-        )
+    def parse_line(fp):
+        for line in fp:
+            if isinstance(line, bytes):
+                line = line.decode("UTF-8")
+            if "warnings summary (final)" in line:
+                continue
+            # This means we are outside the body of a warning
+            elif not line.startswith(" "):
+                # process a single warning and move it to `selected_warnings`.
+                if len(buffer) > 0:
+                    warning = "\n".join(buffer)
+                    # Only keep the warnings specified in `targets`
+                    if any(f": {x}: " in warning for x in targets):
+                        selected_warnings.add(warning)
+                    buffer.clear()
+                continue
+            else:
+                line = line.strip()
+                buffer.append(line)
+
+    if from_gh:
+        for filename in os.listdir(artifact_path):
+            file_path = os.path.join(artifact_path, filename)
+            if not os.path.isdir(file_path):
+                # read the file
+                if filename != "warnings.txt":
+                    continue
+                with open(file_path) as fp:
+                    parse_line(fp)
+    else:
+        try:
+            with zipfile.ZipFile(artifact_path) as z:
+                for filename in z.namelist():
+                    if not os.path.isdir(filename):
+                        # read the file
+                        if filename != "warnings.txt":
+                            continue
+                        with z.open(filename) as fp:
+                            parse_line(fp)
+        except Exception:
+            logger.warning(
+                f"{artifact_path} is either an invalid zip file or something else wrong. This file is skipped."
+            )
 
     return selected_warnings
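To make the `parse_line` logic above concrete, here is a small self-contained sketch (illustrative only, not from the commit; the test names, paths, and warning messages are invented) that applies the same buffering and `targets` filter to a fake pytest warnings summary:

# Indented lines belong to the current warning body; a non-indented line closes it,
# and only warnings whose category appears in `targets` are kept.
targets = ["DeprecationWarning", "FutureWarning"]

sample = """\
tests/models/bert/test_modeling_bert.py::BertModelTest::test_forward
  /usr/lib/python3/site-packages/foo.py:10: DeprecationWarning: `foo` is deprecated
    warnings.warn("`foo` is deprecated")
tests/models/bert/test_modeling_bert.py::BertModelTest::test_config
  /usr/lib/python3/site-packages/bar.py:20: UserWarning: something harmless
    warnings.warn("something harmless")
-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
""".splitlines()

selected_warnings = set()
buffer = []
for line in sample:
    if "warnings summary (final)" in line:
        continue
    elif not line.startswith(" "):
        # a non-indented line means the previous warning body (if any) is complete
        if len(buffer) > 0:
            warning = "\n".join(buffer)
            if any(f": {x}: " in warning for x in targets):
                selected_warnings.add(warning)
            buffer.clear()
    else:
        buffer.append(line.strip())

print(selected_warnings)  # only the DeprecationWarning entry passes the filter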
@@ -54,7 +68,7 @@ def extract_warnings(artifact_dir, targets):
 
     selected_warnings = set()
 
-    paths = [os.path.join(artifact_dir, p) for p in os.listdir(artifact_dir) if p.endswith(".zip")]
+    paths = [os.path.join(artifact_dir, p) for p in os.listdir(artifact_dir) if (p.endswith(".zip") or from_gh)]
     for p in paths:
         selected_warnings.update(extract_warnings_from_single_artifact(p, targets))
 
@@ -81,30 +95,41 @@ if __name__ == "__main__":
     parser.add_argument(
         "--token", default=None, type=str, required=True, help="A token that has actions:read permission."
     )
+    # optional parameters
     parser.add_argument(
         "--targets",
         default="DeprecationWarning,UserWarning,FutureWarning",
         type=list_str,
         help="Comma-separated list of target warning(s) which we want to extract.",
     )
+    parser.add_argument(
+        "--from_gh",
+        action="store_true",
+        help="If running from a GitHub action workflow and collecting warnings from its artifacts.",
+    )
 
     args = parser.parse_args()
 
-    os.makedirs(args.output_dir, exist_ok=True)
+    from_gh = args.from_gh
+    if from_gh:
+        # The artifacts have to be downloaded using `actions/download-artifact@v2`
+        pass
+    else:
+        os.makedirs(args.output_dir, exist_ok=True)
 
-    # get download links
-    artifacts = get_artifacts_links(args.workflow_run_id)
-    with open(os.path.join(args.output_dir, "artifacts.json"), "w", encoding="UTF-8") as fp:
-        json.dump(artifacts, fp, ensure_ascii=False, indent=4)
+        # get download links
+        artifacts = get_artifacts_links(args.workflow_run_id)
+        with open(os.path.join(args.output_dir, "artifacts.json"), "w", encoding="UTF-8") as fp:
+            json.dump(artifacts, fp, ensure_ascii=False, indent=4)
 
-    # download artifacts
-    for idx, (name, url) in enumerate(artifacts.items()):
-        print(name)
-        print(url)
-        print("=" * 80)
-        download_artifact(name, url, args.output_dir, args.token)
-        # Be gentle to GitHub
-        time.sleep(1)
+        # download artifacts
+        for idx, (name, url) in enumerate(artifacts.items()):
+            print(name)
+            print(url)
+            print("=" * 80)
+            download_artifact(name, url, args.output_dir, args.token)
+            # Be gentle to GitHub
+            time.sleep(1)
 
     # extract warnings from artifacts
     selected_warnings = extract_warnings(args.output_dir, args.targets)
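Usage note (illustrative, with placeholder values): run standalone as `python3 utils/extract_warnings.py --workflow_run_id <run_id> --output_dir warnings_in_ci --token <actions-read-token>`, in which case the script creates the output directory and downloads the run's artifacts itself; inside the workflow above, the same script is invoked with `--from_gh`, so it skips the download step and only parses the artifacts that `actions/download-artifact@v2` has already placed in the output directory.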
utils/notification_service.py

@@ -98,7 +98,9 @@ def dicts_to_sum(objects: Union[Dict[str, Dict], List[dict]]):
 
 
 class Message:
-    def __init__(self, title: str, ci_title: str, model_results: Dict, additional_results: Dict):
+    def __init__(
+        self, title: str, ci_title: str, model_results: Dict, additional_results: Dict, selected_warnings: List = None
+    ):
         self.title = title
         self.ci_title = ci_title
 
@@ -136,6 +138,10 @@ class Message:
 
         self.thread_ts = None
 
+        if selected_warnings is None:
+            selected_warnings = []
+        self.selected_warnings = selected_warnings
+
     @property
     def time(self) -> str:
         all_results = [*self.model_results.values(), *self.additional_results.values()]
@@ -198,6 +204,22 @@ class Message:
             },
         }
 
+    @property
+    def warnings(self) -> Dict:
+        return {
+            "type": "section",
+            "text": {
+                "type": "plain_text",
+                "text": f"There were {len(self.selected_warnings)} warnings being selected.",
+                "emoji": True,
+            },
+            "accessory": {
+                "type": "button",
+                "text": {"type": "plain_text", "text": "Check warnings", "emoji": True},
+                "url": f"{github_actions_job_links['Extract warnings in CI artifacts']}",
+            },
+        }
+
     @staticmethod
     def get_device_report(report, rjust=6):
         if "single" in report and "multi" in report:
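With made-up values, the block returned by the new `warnings` property would look roughly like this (a sketch for illustration; the warning count is invented and the URL is a placeholder for the real link taken from `github_actions_job_links`):

# Illustrative only: the Slack "section" block produced for, say, 42 selected warnings.
import json

warnings_block = {
    "type": "section",
    "text": {
        "type": "plain_text",
        "text": "There were 42 warnings being selected.",
        "emoji": True,
    },
    "accessory": {
        "type": "button",
        "text": {"type": "plain_text", "text": "Check warnings", "emoji": True},
        # placeholder for github_actions_job_links['Extract warnings in CI artifacts']
        "url": "https://github.com/huggingface/transformers/actions/runs/<run_id>",
    },
}
print(json.dumps(warnings_block, indent=2))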
@@ -384,6 +406,9 @@ class Message:
         if self.n_model_failures == 0 and self.n_additional_failures == 0:
             blocks.append(self.no_failures)
 
+        if len(self.selected_warnings) > 0:
+            blocks.append(self.warnings)
+
         return json.dumps(blocks)
 
     @staticmethod
@@ -910,7 +935,13 @@ if __name__ == "__main__":
                     {"line": line, "trace": stacktraces.pop(0)}
                 )
 
-    message = Message(title, ci_title, model_results, additional_results)
+    selected_warnings = []
+    if "warnings_in_ci" in available_artifacts:
+        directory = available_artifacts["warnings_in_ci"].paths[0]["path"]
+        with open(os.path.join(directory, "selected_warnings.json")) as fp:
+            selected_warnings = json.load(fp)
+
+    message = Message(title, ci_title, model_results, additional_results, selected_warnings=selected_warnings)
 
     # send report only if there is any failure (for push CI)
     if message.n_failures or ci_event != "push":