Skip to content

Commit 37672e5

Browse files
authored
Merge pull request #510 from guardrails-ai/independent-notebook-checks
run each notebook as its own independent check
2 parents cf18c6a + 4ad15d1 commit 37672e5

File tree

5 files changed

+95
-92
lines changed

5 files changed

+95
-92
lines changed

.github/workflows/ci.yml

Lines changed: 41 additions & 42 deletions
Original file line number · Diff line number · Diff line change
@@ -142,48 +142,47 @@ jobs:
142142

143143
Notebooks:
144144
runs-on: ubuntu-latest
145-
if: ${{ always() && contains(join(needs.*.result, ','), 'success') }}
146145
needs: [Linting, Typing, Pytests]
147-
146+
strategy:
147+
matrix:
148+
# this line is automatically generated by the script in .github/workflows/scripts/update_notebook_matrix.sh
149+
notebook: ["bug_free_python_code.ipynb","check_for_pii.ipynb","competitors_check.ipynb","extracting_entities.ipynb","generate_structured_data.ipynb","generate_structured_data_cohere.ipynb","guardrails_with_chat_models.ipynb","input_validation.ipynb","llamaindex-output-parsing.ipynb","no_secrets_in_generated_text.ipynb","provenance.ipynb","recipe_generation.ipynb","regex_validation.ipynb","response_is_on_topic.ipynb","secrets_detection.ipynb","select_choice_based_on_action.ipynb","streaming.ipynb","syntax_error_free_sql.ipynb","text_summarization_quality.ipynb","toxic_language.ipynb","translation_to_specific_language.ipynb","translation_with_quality_check.ipynb","valid_chess_moves.ipynb","value_within_distribution.ipynb"]
148150
env:
149-
COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }}
150-
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
151-
HUGGINGFACE_API_KEY: ${{ secrets.HUGGINGFACE_API_KEY }}
152-
151+
COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }}
152+
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
153+
HUGGINGFACE_API_KEY: ${{ secrets.HUGGINGFACE_API_KEY }}
154+
NLTK_DATA: /tmp/nltk_data
153155
steps:
154-
- name: Checkout repository
155-
uses: actions/checkout@v2
156-
157-
- name: Set up Python
158-
uses: actions/setup-python@v2
159-
with:
160-
python-version: 3.11.x
161-
162-
- name: Poetry cache
163-
uses: actions/cache@v3
164-
with:
165-
path: ~/.cache/pypoetry
166-
key: poetry-cache-${{ runner.os }}-${{ steps.setup_python.outputs.python-version }}-${{ env.POETRY_VERSION }}
167-
168-
- name: Install Poetry
169-
uses: snok/install-poetry@v1
170-
with:
171-
virtualenvs-create: true
172-
virtualenvs-in-project: true
173-
installer-parallel: true
174-
175-
- name: Install dependencies
176-
run: |
177-
make full;
178-
poetry add openai==0.28.1 jupyter nbconvert cohere;
179-
# pip install openai==0.28.1 jupyter nbconvert;
180-
# pip install .;
181-
182-
- name: Check for pypdfium2
183-
run: poetry run pip show pypdfium2
184-
185-
- name: Huggingface Hub Login
186-
run: poetry run huggingface-cli login --token $HUGGINGFACE_API_KEY
187-
188-
- name: Execute notebooks and check for errors
189-
run: ./.github/workflows/scripts/run_notebooks.sh
156+
- name: Checkout repository
157+
uses: actions/checkout@v2
158+
- name: Set up Python
159+
uses: actions/setup-python@v2
160+
with:
161+
python-version: 3.11.x
162+
- name: Poetry cache
163+
uses: actions/cache@v3
164+
with:
165+
path: ~/.cache/pypoetry
166+
key: poetry-cache-${{ runner.os }}-${{ steps.setup_python.outputs.python-version }}-${{ env.POETRY_VERSION }}
167+
- name: Install Poetry
168+
uses: snok/install-poetry@v1
169+
with:
170+
virtualenvs-create: true
171+
virtualenvs-in-project: true
172+
installer-parallel: true
173+
- name: Install dependencies
174+
run: |
175+
make full;
176+
poetry add openai==0.28.1 jupyter nbconvert cohere;
177+
- name: Check for pypdfium2
178+
run: poetry run pip show pypdfium2
179+
- name: Huggingface Hub Login
180+
run: poetry run huggingface-cli login --token $HUGGINGFACE_API_KEY
181+
- name: download nltk data
182+
run: |
183+
mkdir /tmp/nltk_data;
184+
poetry run python -m nltk.downloader -d /tmp/nltk_data punkt;
185+
- name: Use venv
186+
run: source .venv/bin/activate
187+
- name: Execute notebooks and check for errors
188+
run: bash ./.github/workflows/scripts/run_notebooks.sh ${{ matrix.notebook }}

.github/workflows/examples_check.yml

Lines changed: 18 additions & 19 deletions
Original file line number · Diff line number · Diff line change
@@ -8,48 +8,47 @@ on:
88
jobs:
99
execute_notebooks:
1010
runs-on: ubuntu-latest
11-
11+
strategy:
12+
matrix:
13+
# this line is automatically generated by the script in .github/workflows/scripts/update_notebook_matrix.sh
14+
notebook: ["bug_free_python_code.ipynb","check_for_pii.ipynb","competitors_check.ipynb","extracting_entities.ipynb","generate_structured_data.ipynb","generate_structured_data_cohere.ipynb","guardrails_with_chat_models.ipynb","input_validation.ipynb","llamaindex-output-parsing.ipynb","no_secrets_in_generated_text.ipynb","provenance.ipynb","recipe_generation.ipynb","regex_validation.ipynb","response_is_on_topic.ipynb","secrets_detection.ipynb","select_choice_based_on_action.ipynb","streaming.ipynb","syntax_error_free_sql.ipynb","text_summarization_quality.ipynb","toxic_language.ipynb","translation_to_specific_language.ipynb","translation_with_quality_check.ipynb","valid_chess_moves.ipynb","value_within_distribution.ipynb"]
1215
env:
1316
COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }}
1417
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
1518
HUGGINGFACE_API_KEY: ${{ secrets.HUGGINGFACE_API_KEY }}
16-
17-
19+
NLTK_DATA: /tmp/nltk_data
1820
steps:
1921
- name: Checkout repository
20-
uses: actions/checkout@v2
21-
22+
uses: actions/checkout@v2
2223
- name: Set up Python
2324
uses: actions/setup-python@v2
24-
with:
25+
with:
2526
python-version: 3.11.x
26-
2727
- name: Poetry cache
2828
uses: actions/cache@v3
2929
with:
3030
path: ~/.cache/pypoetry
3131
key: poetry-cache-${{ runner.os }}-${{ steps.setup_python.outputs.python-version }}-${{ env.POETRY_VERSION }}
32-
3332
- name: Install Poetry
3433
uses: snok/install-poetry@v1
3534
with:
36-
virtualenvs-create: true
37-
virtualenvs-in-project: true
38-
installer-parallel: true
39-
35+
virtualenvs-create: true
36+
virtualenvs-in-project: true
37+
installer-parallel: true
4038
- name: Install dependencies
4139
run: |
4240
make full;
4341
poetry add openai==0.28.1 jupyter nbconvert cohere;
44-
# pip install openai==0.28.1 jupyter nbconvert;
45-
# pip install .;
46-
4742
- name: Check for pypdfium2
4843
run: poetry run pip show pypdfium2
49-
5044
- name: Huggingface Hub Login
51-
run: huggingface-cli login --token $HUGGINGFACE_API_KEY
52-
45+
run: poetry run huggingface-cli login --token $HUGGINGFACE_API_KEY
46+
- name: download nltk data
47+
run: |
48+
mkdir /tmp/nltk_data;
49+
poetry run python -m nltk.downloader -d /tmp/nltk_data punkt;
50+
- name: Use venv
51+
run: source .venv/bin/activate
5352
- name: Execute notebooks and check for errors
54-
run: ./.github/workflows/scripts/run_notebooks.sh
53+
run: bash ./.github/workflows/scripts/run_notebooks.sh ${{ matrix.notebook }}
5554

Lines changed: 13 additions & 31 deletions
Original file line number · Diff line number · Diff line change
@@ -1,38 +1,20 @@
11
#!/bin/bash
2-
3-
mkdir /tmp/nltk_data;
4-
poetry run python -m nltk.downloader -d /tmp/nltk_data punkt;
52
export NLTK_DATA=/tmp/nltk_data;
63

74
cd docs/examples
85

9-
# Function to process a notebook
10-
process_notebook() {
11-
notebook="$1"
12-
invalid_notebooks=("valid_chess_moves.ipynb" "llamaindex-output-parsing.ipynb" "competitors_check.ipynb")
13-
if [[ ! " ${invalid_notebooks[@]} " =~ " ${notebook} " ]]; then
14-
echo "Processing $notebook..."
15-
poetry run jupyter nbconvert --to notebook --execute "$notebook"
16-
if [ $? -ne 0 ]; then
17-
echo "Error found in $notebook"
18-
echo "Error in $notebook. See logs for details." >> errors.txt
19-
fi
20-
fi
21-
}
22-
23-
export -f process_notebook # Export the function for parallel execution
24-
25-
# Create a file to collect errors
26-
> errors.txt
6+
# Get the notebook name from the matrix variable
7+
notebook="$1"
278

28-
# Run in parallel
29-
ls *.ipynb | parallel process_notebook
9+
# Check if the notebook should be processed
10+
invalid_notebooks=("valid_chess_moves.ipynb" "llamaindex-output-parsing.ipynb" "competitors_check.ipynb")
11+
if [[ ! " ${invalid_notebooks[@]} " =~ " ${notebook} " ]]; then
12+
echo "Processing $notebook..."
13+
poetry run jupyter nbconvert --to notebook --execute "$notebook"
14+
if [ $? -ne 0 ]; then
15+
echo "Error found in $notebook"
16+
echo "Error in $notebook. See logs for details." >> errors.txt
17+
fi
18+
fi
3019

31-
# Check if there were any errors
32-
if [ -s errors.txt ]; then
33-
echo "Some notebooks had errors"
34-
cat errors.txt
35-
exit 1
36-
else
37-
echo "All notebooks ran successfully."
38-
fi
20+
exit 0
Lines changed: 22 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,22 @@
1+
#!/bin/bash
2+
# Array to store notebook names
3+
notebook_names="["
4+
5+
# Compile list of file names
6+
for file in $(ls docs/examples/*.ipynb); do
7+
# Add the full filename with extension
8+
filename=$(basename "$file")
9+
10+
notebook_names+="\"$filename\","
11+
done
12+
notebook_names="${notebook_names%,}]"
13+
14+
# echo $notebook_names
15+
16+
17+
# find line that begins with "notebook:" and replace it with notebook: $notebook_names
18+
sed "s/notebook: \[.*\]/notebook: $notebook_names/" .github/workflows/examples_check.yml > .github/workflows/examples_check.yml.tmp
19+
mv .github/workflows/examples_check.yml.tmp .github/workflows/examples_check.yml
20+
21+
sed "s/notebook: \[.*\]/notebook: $notebook_names/" .github/workflows/ci.yml > .github/workflows/ci.yml.tmp
22+
mv .github/workflows/ci.yml.tmp .github/workflows/ci.yml

Makefile

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -71,3 +71,4 @@ precommit:
7171
# pytest -x -q --no-summary
7272
pyright guardrails/
7373
make lint
74+
./.github/workflows/scripts/update_notebook_matrix.sh

0 commit comments

Comments
 (0)