From af91f34c9201e324ab01832ceb80251f6f7be9d3 Mon Sep 17 00:00:00 2001
From: Aarni Koskela
Date: Mon, 20 Nov 2023 11:13:46 +0200
Subject: [PATCH] CI: update Ruff, use GitHub output format (and test that it
 works...)

---
 .github/workflows/ci.yml | 2 ++
 .pre-commit-config.yaml  | 2 +-
 inference-mistral.py     | 2 +-
 3 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 409474f..09ba1f7 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -15,3 +15,5 @@ jobs:
         with:
           python-version: "3.11"
       - uses: pre-commit/action@v3.0.0
+        env:
+          RUFF_OUTPUT_FORMAT: github
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index cfa9950..ed8fe51 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: "v0.1.3"
+    rev: "v0.1.6"
     hooks:
       - id: ruff
         args:
diff --git a/inference-mistral.py b/inference-mistral.py
index d8fb0b0..79caa19 100644
--- a/inference-mistral.py
+++ b/inference-mistral.py
@@ -18,7 +18,7 @@ def __init__(self, model_path: str, checkpoint_path: str) -> None:
             padding_side='left',
             add_eos_token=True,
         )
-        logger.info('Loading model...')
+        loggeer.info('Loading model...')
         model = transformers.AutoModelForCausalLM.from_pretrained(
             model_path,
             quantization_config=get_quantization_config(),