Skip to content

Commit

Permalink
updated workflow files
Browse files Browse the repository at this point in the history
  • Loading branch information
ersilia-bot committed Dec 16, 2024
1 parent b967b35 commit f13de84
Show file tree
Hide file tree
Showing 8 changed files with 174 additions and 99 deletions.
2 changes: 1 addition & 1 deletion .github/scripts/verify_model_outcome.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ def check_non_null_outcomes_in_output_csv(csv_file_path):
header = next(csv_reader)
row = next(csv_reader)
for val in row[2:]: # Skip the first two columns (Inchikey and input)
if val not in ['', None]:
if val not in ['', None]: # Returns if even one outcome is not null
return False
return True

Expand Down
38 changes: 30 additions & 8 deletions .github/workflows/post-model-upload.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ jobs:
data['DockerHub'] = 'https://hub.docker.com/r/ersiliaos/{0}'.format(data['Identifier'])
data['Docker Architecture'] = arch
with open('metadata.yml', 'w') as f:
yaml.dump(data, default_flow_style=False, sort_keys=False)
yaml.dump(data, f, default_flow_style=False, sort_keys=False)
"
rm arch.txt
Expand Down Expand Up @@ -105,20 +105,35 @@ jobs:
REPO_NAME: ${{ github.event.repository.name }}
AIRTABLE_API_KEY: ${{ secrets.AIRTABLE_API_KEY }}
run: |
source activate
pip install requests pyairtable
echo "Updating metadata to AirTable looking at owner: $USER_NAME"
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/update_metadata_to_airtable.py
python3 update_metadata_to_airtable.py $USER_NAME $REPO_NAME $BRANCH $AIRTABLE_API_KEY
rm update_metadata_to_airtable.py
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/airtableops.py
python3 airtableops.py airtable-update --user $USER_NAME --repo $REPO_NAME --branch $BRANCH --api-key $AIRTABLE_API_KEY
rm airtableops.py
- name: sync metadata to S3 JSON
id: sync-metadata-to-s3
env:
AIRTABLE_API_KEY: ${{ secrets.AIRTABLE_API_KEY }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
run: |
source activate
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/convert_airtable_to_json.py
pip install boto3 requests pyairtable
python convert_airtable_to_json.py $AIRTABLE_API_KEY $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY
rm convert_airtable_to_json.py
- name: Update README file
id: update-readme-file
env:
MODEL_ID: ${{ github.event.repository.name }}
run: |
echo "Updating README file with AirTable metadata for model: $MODEL_ID"
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/update_readme_from_airtable.py
python3 update_readme_from_airtable.py $MODEL_ID .
rm update_readme_from_airtable.py
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/airtableops.py
python3 airtableops.py readme-update --repo $MODEL_ID --path .
rm airtableops.py
less README.md
- name: Commit and push changes done to the README file
Expand Down Expand Up @@ -166,7 +181,14 @@ jobs:
assignees: |
${{ steps.shuffle.outputs.shuffled_assignee }}
body: |
This model is ready for testing. If you are assigned to this issue, please try it out using the CLI, Google Colab and DockerHub and let us know if it works!
This model has been newly incorporated into the Ersilia Model Hub or has recently been modified. If you are assigned to this issue, please try it out and ensure everything works!
To test a model, first fetch it to your local system (ideally from DockerHub) using the CLI commands:
```
ersilia -v fetch eosxxxx --from_dockerhub
ersilia serve eosxxxx
ersilia test
```
The test command will automatically check whether the model can handle null outputs and whether it produces consistent results. Please copy the result of the test command here. If it passes, simply close the issue as completed. If it fails, please detail at which step it failed and whether you have taken any steps to solve it. Please tag the original model contributor and one of Ersilia's maintainers for support.
labels: |
test
if: steps.check_existing_test_issue.outputs.issue_number == ''
19 changes: 2 additions & 17 deletions .github/workflows/test-model-pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,21 +54,6 @@ jobs:
conda init
python -m pip install git+https://github.com/ersilia-os/ersilia.git
- name: Check metadata before updating to AirTable
id: check-metadata
env:
USER_NAME: ${{ github.event.pull_request.head.repo.owner.login }}
BRANCH: "main"
REPO_NAME: ${{ github.event.repository.name }}
uses: nick-fields/retry@v3
with:
timeout_minutes: 10
max_attempts: 3
command: |
source activate
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/update_metadata_to_airtable.py
python3 update_metadata_to_airtable.py $USER_NAME $REPO_NAME $BRANCH
- name: Predict output
env:
MODEL_ID: ${{ github.event.repository.name }}
Expand All @@ -79,9 +64,9 @@ jobs:
command: |
source activate
echo "Sample model id selected: $MODEL_ID"
ersilia -v fetch $MODEL_ID --repo_path .
ersilia -v fetch $MODEL_ID --from_dir .
ersilia -v serve $MODEL_ID
ersilia sample -n 5 -f input.csv
ersilia example inputs -n 5 -f input.csv --predefined
ersilia -v api -i input.csv
ersilia close
Expand Down
66 changes: 48 additions & 18 deletions .github/workflows/test-model.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Model Test on push
name: Model test on push

on:
push:
Expand Down Expand Up @@ -72,10 +72,24 @@ jobs:
max_attempts: 3
command: |
source activate
pip install requests pyairtable
echo "Updating metadata to AirTable looking at owner: $USER_NAME"
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/update_metadata_to_airtable.py
python3 update_metadata_to_airtable.py $USER_NAME $REPO_NAME $BRANCH $AIRTABLE_API_KEY
rm update_metadata_to_airtable.py
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/airtableops.py
python3 airtableops.py airtable-update --user $USER_NAME --repo $REPO_NAME --branch $BRANCH --api-key $AIRTABLE_API_KEY
rm airtableops.py
- name: sync metadata to S3 JSON
id: sync-metadata-to-s3
env:
AIRTABLE_API_KEY: ${{ secrets.AIRTABLE_API_KEY }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
run: |
source activate
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/convert_airtable_to_json.py
pip install boto3 requests pyairtable
python convert_airtable_to_json.py $AIRTABLE_API_KEY $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY
rm convert_airtable_to_json.py
- name: Update README file
id: update-readme-file
Expand All @@ -84,9 +98,9 @@ jobs:
run: |
source activate
echo "Updating README file with AirTable metadata for model: $MODEL_ID"
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/update_readme_from_airtable.py
python3 update_readme_from_airtable.py $MODEL_ID .
rm update_readme_from_airtable.py
wget https://raw.githubusercontent.com/ersilia-os/ersilia/master/.github/scripts/airtableops.py
python3 airtableops.py readme-update --repo $MODEL_ID --path .
rm airtableops.py
less README.md
- name: Commit and push changes done to the README file
Expand All @@ -100,22 +114,38 @@ jobs:
amend: true
force: true

- name: Predict output
- name: Fetch model
env:
MODEL_ID: ${{ github.event.repository.name }}
uses: nick-fields/retry@v3
with:
timeout_minutes: 10
max_attempts: 3
command: |
run: |
source activate
ersilia -v fetch $MODEL_ID --from_dir .
FOLDER="$HOME/eos/repository/$MODEL_ID"
if [ ! -d "$FOLDER" ]; then
echo "Error: Folder '$FOLDER' does not exist." >&2
exit 1
fi
- name: Generate input and run model
env:
MODEL_ID: ${{ github.event.repository.name }}
run: |
source activate
echo "Sample model id selected: $MODEL_ID"
ersilia -v fetch $MODEL_ID --repo_path .
ersilia -v serve $MODEL_ID
ersilia sample -n 5 -f input.csv
ersilia -v run -i input.csv
ersilia example inputs -n 5 -f input.csv --predefined
ersilia -v run -i "input.csv" -o "output.csv"
ersilia close
cat output.csv
- name: Test output
run: |
output=$(python .github/scripts/verify_model_outcome.py output.csv)
if echo "$output" | grep -q "All outcomes are null"; then
echo "Error in model outcome, aborting test"
exit 1
fi
rm output.csv
- name: Upload log output
if: always()
uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb #pin v3.1.1
Expand Down
35 changes: 19 additions & 16 deletions .github/workflows/upload-bentoml.yml
Original file line number Diff line number Diff line change
Expand Up @@ -40,21 +40,21 @@ jobs:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}

# # This might stop working in the future, so we need to keep an eye on it
# - name: Free Disk Space (Ubuntu)
# uses: jlumbroso/free-disk-space@main
# with:
# # this might remove tools that are actually needed,
# # if set to "true" but frees about 6 GB
# tool-cache: true
# This might stop working in the future, so we need to keep an eye on it
- name: Free Disk Space (Ubuntu)
uses: jlumbroso/free-disk-space@main
with:
# this might remove tools that are actually needed,
# if set to "true" but frees about 6 GB
tool-cache: true

# # all of these default to true, but feel free to set to
# # "false" if necessary for your workflow
# android: true
# dotnet: true
# haskell: true
# large-packages: true
# swap-storage: true
# all of these default to true, but feel free to set to
# "false" if necessary for your workflow
android: true
dotnet: true
haskell: true
large-packages: true
swap-storage: true

- name: Setup conda
id: setupConda
Expand Down Expand Up @@ -97,10 +97,13 @@ jobs:
id: testBuiltImage
env:
PULL_IMAGE: n
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: us-east-1
run: |
ersilia -v fetch ${{ github.event.repository.name }} --from_dockerhub
ersilia -v serve ${{ github.event.repository.name }}
ersilia example -n 1 -f input.csv --predefined
ersilia -v serve ${{ github.event.repository.name }} --track #Added --track here
ersilia example inputs -n 1 -f input.csv --predefined
ersilia -v run -i "input.csv" -o "output.csv"
ersilia close
output=$(python .github/scripts/verify_model_outcome.py output.csv)
Expand Down
49 changes: 23 additions & 26 deletions .github/workflows/upload-ersilia-pack.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,10 @@ jobs:

- name: Checkout repository
uses: actions/checkout@v4
with:
lfs: true

- name: Check if we can use this workflow
run: |
if [[ -f install.yml ]]; then
echo "This workflow is not supported for this repository"
exit 1
fi
- run: git lfs pull
# https://github.com/docker/setup-qemu-action
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
Expand All @@ -36,21 +32,21 @@ jobs:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD }}

# # This might stop working in the future, so we need to keep an eye on it
# - name: Free Disk Space (Ubuntu)
# uses: jlumbroso/free-disk-space@main
# with:
# # this might remove tools that are actually needed,
# # if set to "true" but frees about 6 GB
# tool-cache: true
# This might stop working in the future, so we need to keep an eye on it
- name: Free Disk Space (Ubuntu)
uses: jlumbroso/free-disk-space@main
with:
# this might remove tools that are actually needed,
# if set to "true" but frees about 6 GB
tool-cache: true

# # all of these default to true, but feel free to set to
# # "false" if necessary for your workflow
# android: true
# dotnet: true
# haskell: true
# large-packages: true
# swap-storage: true
# all of these default to true, but feel free to set to
# "false" if necessary for your workflow
android: true
dotnet: true
haskell: true
large-packages: true
swap-storage: true

# Install ersilia-pack, requests, and ersilia to test the built image with ersilia CLI
- name: Setup Python for Ersilia Pack
Expand All @@ -70,7 +66,6 @@ jobs:
- name: Build only AMD64 Image for Testing
id: buildForTestErsiliaPack
continue-on-error: true # Allow this to fail;
uses: docker/build-push-action@v6.7.0
with:
context: ../ # We need to go back to the root directory to find the Dockerfile and copy the model repository
Expand All @@ -81,13 +76,15 @@ jobs:
id: testBuiltImageErsiliaPack
env:
PULL_IMAGE: n
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_DEFAULT_REGION: us-east-1
if: steps.buildForTestErsiliaPack.outcome == 'success'
continue-on-error: true # Allow this to fail
run: |
ersilia -v fetch ${{ github.event.repository.name }} --from_dockerhub
ersilia -v serve ${{ github.event.repository.name }}
ersilia example -n 1 -f input.csv --predefined
ersilia -v run -i "input.csv" -o "output.csv"
ersilia -v serve ${{ github.event.repository.name }} --track #Added --track here
ersilia example inputs -n 1 -f input.csv --predefined
ersilia -v run -i "input.csv" -o "output.csv"
ersilia close
output=$(python .github/scripts/verify_model_outcome.py output.csv)
if echo "$output" | grep -q "All outcomes are null"; then
Expand Down
11 changes: 6 additions & 5 deletions .github/workflows/upload-model-to-dockerhub.yml
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
name: Upload model to DockerHub
on:
workflow_dispatch:
# workflow_run:
# workflows: ["Upload model to S3"]
# types:
# - completed

workflow_run:
workflows: ["Upload model to S3"]
types:
- completed

jobs:
upload-ersilia-pack:
Expand All @@ -25,4 +26,4 @@ jobs:
uses: ./.github/workflows/upload-bentoml.yml
secrets: inherit
with:
version: legacy-bentoml
version: legacy-bentoml
Loading

0 comments on commit f13de84

Please sign in to comment.