import subprocess
import sys
import os
import urllib.request
import zipfile
import ssl

# Create an SSL context that doesn't verify certificates
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
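# NOTE: this context is only honored by calls that accept a context= argument
# (e.g. urllib.request.urlopen). A blunter alternative for a throwaway
# environment (assumption: that is acceptable here) is to relax the default
# HTTPS context globally:
#     ssl._create_default_https_context = ssl._create_unverified_context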

# Install packages first
print("๐Ÿ”ง Installing Python packages...")
packages = [
    "torch", "torchvision", "torchaudio", 
    "transformers", "datasets", "accelerate", "peft",
    "bitsandbytes>=0.43.0", "safetensors", "fastcore", "requests"
]

for package in packages:
    try:
        subprocess.check_call(
            [sys.executable, "-m", "pip", "install", "--user", package],
            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
        )
        print(f"✅ Installed {package}")
    except subprocess.CalledProcessError:
        print(f"❌ Failed to install {package}")

# Download with SSL verification disabled
print("\n๐Ÿ“ฅ Downloading repository...")
url = "https://github.com/AnswerDotAI/fsdp_qlora/archive/refs/heads/main.zip"

try:
    # Note: urllib.request.urlretrieve() has no context parameter, so this call
    # raises TypeError and the requests fallback below takes over (see the run
    # output). urllib.request.urlopen(url, context=ssl_context) would honor it.
    urllib.request.urlretrieve(url, "fsdp_qlora.zip", context=ssl_context)
    print("✅ Downloaded using urllib")
except Exception as e:
    print(f"❌ urllib failed: {e}")
    
    # Fallback to requests
    try:
        import requests
        print("๐Ÿ”„ Trying with requests...")
        response = requests.get(url, verify=False)
        with open("fsdp_qlora.zip", "wb") as f:
            f.write(response.content)
        print("โœ… Downloaded using requests")
    except Exception as e2:
        print(f"โŒ requests also failed: {e2}")

# Extract if download succeeded
if os.path.exists("fsdp_qlora.zip"):
    print("๐Ÿ“‚ Extracting repository...")
    
    # Clean up existing directory
    if os.path.exists("fsdp_qlora"):
        import shutil
        shutil.rmtree("fsdp_qlora")
    
    with zipfile.ZipFile("fsdp_qlora.zip", 'r') as zip_ref:
        zip_ref.extractall(".")
    
    os.rename("fsdp_qlora-main", "fsdp_qlora")
    os.remove("fsdp_qlora.zip")
    print("โœ… Setup complete! Repository is in ./fsdp_qlora")
    
    # Verify
    if os.path.exists("fsdp_qlora/train.py"):
        print("๐Ÿš€ train.py found - ready to train!")
    else:
        print("โŒ train.py not found")
else:
    print("โŒ Download failed completely")
🔧 Installing Python packages...
✅ Installed torch
✅ Installed torchvision
✅ Installed torchaudio
✅ Installed transformers
✅ Installed datasets
✅ Installed accelerate
✅ Installed peft
✅ Installed bitsandbytes>=0.43.0
✅ Installed safetensors
✅ Installed fastcore
✅ Installed requests

📥 Downloading repository...
❌ urllib failed: urlretrieve() got an unexpected keyword argument 'context'
🔄 Trying with requests...
✅ Downloaded using requests
📂 Extracting repository...
✅ Setup complete! Repository is in ./fsdp_qlora
🚀 train.py found - ready to train!
/usr/local/lib/python3.11/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host 'github.com'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings
  warnings.warn(
/usr/local/lib/python3.11/site-packages/urllib3/connectionpool.py:1097: InsecureRequestWarning: Unverified HTTPS request is being made to host 'codeload.github.com'. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/advanced-usage.html#tls-warnings
  warnings.warn(
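Before moving on, a quick sanity check (a minimal sketch; it only assumes the packages above landed in the user site-packages) confirms the core libraries import and that a GPU is visible:

import torch, transformers, peft
print("torch", torch.__version__, "| CUDA available:", torch.cuda.is_available())
print("transformers", transformers.__version__, "| peft", peft.__version__)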
%pip install huggingface_hub
Requirement already satisfied: huggingface_hub in /root/.local/lib/python3.11/site-packages (0.33.4)
Requirement already satisfied: filelock in /root/.local/lib/python3.11/site-packages (from huggingface_hub) (3.18.0)
Requirement already satisfied: fsspec>=2023.5.0 in /root/.local/lib/python3.11/site-packages (from huggingface_hub) (2025.3.0)
Requirement already satisfied: packaging>=20.9 in /usr/local/lib/python3.11/site-packages (from huggingface_hub) (25.0)
Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.11/site-packages (from huggingface_hub) (6.0.2)
Requirement already satisfied: requests in /usr/local/lib/python3.11/site-packages (from huggingface_hub) (2.32.4)
Requirement already satisfied: tqdm>=4.42.1 in /root/.local/lib/python3.11/site-packages (from huggingface_hub) (4.67.1)
Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.11/site-packages (from huggingface_hub) (4.12.2)
Requirement already satisfied: hf-xet<2.0.0,>=1.1.2 in /root/.local/lib/python3.11/site-packages (from huggingface_hub) (1.1.5)
Requirement already satisfied: charset_normalizer<4,>=2 in /usr/local/lib/python3.11/site-packages (from requests->huggingface_hub) (3.4.2)
Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.11/site-packages (from requests->huggingface_hub) (3.10)
Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.11/site-packages (from requests->huggingface_hub) (2.5.0)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.11/site-packages (from requests->huggingface_hub) (2024.8.30)
Note: you may need to restart the kernel to use updated packages.
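With huggingface_hub installed, the usual next step is authenticating and pre-fetching the base model weights. A rough sketch (the token and model id below are placeholders, not values from this run):

from huggingface_hub import login, snapshot_download
login(token="hf_...")                          # or set the HF_TOKEN environment variable
snapshot_download("meta-llama/Llama-2-7b-hf")  # gated models require accepted license terms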
%pip install llama-recipes fastcore "transformers!=4.38.*,!=4.39.*" --extra-index-url https://download.pytorch.org/whl/test/cu118
Looking in indexes: https://pypi.org/simple, https://download.pytorch.org/whl/test/cu118
Collecting llama-recipes
  Downloading llama_recipes-0.0.5.post2-py3-none-any.whl.metadata (5.0 kB)
Requirement already satisfied: fastcore in /root/.local/lib/python3.11/site-packages (1.8.5)
Requirement already satisfied: transformers!=4.38.*,!=4.39.* in /root/.local/lib/python3.11/site-packages (4.53.2)
Collecting llama-cookbook==0.0.5.post1 (from llama-recipes)
  Downloading llama_cookbook-0.0.5.post1-py3-none-any.whl.metadata (5.8 kB)
Requirement already satisfied: accelerate in /root/.local/lib/python3.11/site-packages (from llama-cookbook==0.0.5.post1->llama-recipes) (1.9.0)
Collecting appdirs (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading appdirs-1.4.4-py2.py3-none-any.whl.metadata (9.0 kB)
Requirement already satisfied: bitsandbytes in /root/.local/lib/python3.11/site-packages (from llama-cookbook==0.0.5.post1->llama-recipes) (0.46.1)
Collecting black (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl.metadata (81 kB)
Collecting chardet (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading chardet-5.2.0-py3-none-any.whl.metadata (3.4 kB)
Collecting codeshield (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading codeshield-1.0.1-py3-none-any.whl.metadata (5.2 kB)
Requirement already satisfied: datasets in /root/.local/lib/python3.11/site-packages (from llama-cookbook==0.0.5.post1->llama-recipes) (4.0.0)
Collecting evaluate (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading evaluate-0.4.5-py3-none-any.whl.metadata (9.5 kB)
Collecting fire (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading fire-0.7.0.tar.gz (87 kB)
  Preparing metadata (setup.py) ... done
Collecting gradio (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading gradio-5.38.0-py3-none-any.whl.metadata (16 kB)
Collecting loralib (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading loralib-0.1.2-py3-none-any.whl.metadata (15 kB)
Collecting markupsafe==2.0.1 (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading MarkupSafe-2.0.1.tar.gz (18 kB)
  Preparing metadata (setup.py) ... done
Collecting matplotlib (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading matplotlib-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (11 kB)
Collecting openai (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading openai-1.97.0-py3-none-any.whl.metadata (29 kB)
Collecting optimum (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading optimum-1.26.1-py3-none-any.whl.metadata (16 kB)
Requirement already satisfied: peft in /root/.local/lib/python3.11/site-packages (from llama-cookbook==0.0.5.post1->llama-recipes) (0.16.0)
Collecting py7zr (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading py7zr-1.0.0-py3-none-any.whl.metadata (17 kB)
Collecting pyyaml==6.0.1 (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading https://download.pytorch.org/whl/test/PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (757 kB)
     โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 757.7/757.7 kB 3.6 MB/s eta 0:00:00
Collecting rouge-score (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading rouge_score-0.1.2.tar.gz (17 kB)
  Preparing metadata (setup.py) ... done
Collecting scipy (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading scipy-1.16.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl.metadata (61 kB)
Collecting sentence-transformers (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading sentence_transformers-5.0.0-py3-none-any.whl.metadata (16 kB)
Collecting sentencepiece (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading https://download.pytorch.org/whl/test/sentencepiece-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)
     โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 1.3/1.3 MB 5.1 MB/s eta 0:00:00a 0:00:01
Collecting tabulate (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading tabulate-0.9.0-py3-none-any.whl.metadata (34 kB)
Requirement already satisfied: torch>=2.2 in /root/.local/lib/python3.11/site-packages (from llama-cookbook==0.0.5.post1->llama-recipes) (2.7.1)
Requirement already satisfied: typing-extensions>=4.8.0 in /usr/local/lib/python3.11/site-packages (from llama-cookbook==0.0.5.post1->llama-recipes) (4.12.2)
Collecting unstructured[pdf] (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading unstructured-0.18.9-py3-none-any.whl.metadata (24 kB)
Requirement already satisfied: packaging in /usr/local/lib/python3.11/site-packages (from fastcore) (25.0)
Requirement already satisfied: filelock in /root/.local/lib/python3.11/site-packages (from transformers!=4.38.*,!=4.39.*) (3.18.0)
Requirement already satisfied: huggingface-hub<1.0,>=0.30.0 in /root/.local/lib/python3.11/site-packages (from transformers!=4.38.*,!=4.39.*) (0.33.4)
Requirement already satisfied: numpy>=1.17 in /root/.local/lib/python3.11/site-packages (from transformers!=4.38.*,!=4.39.*) (2.3.1)
Requirement already satisfied: regex!=2019.12.17 in /root/.local/lib/python3.11/site-packages (from transformers!=4.38.*,!=4.39.*) (2024.11.6)
Requirement already satisfied: requests in /usr/local/lib/python3.11/site-packages (from transformers!=4.38.*,!=4.39.*) (2.32.4)
Requirement already satisfied: tokenizers<0.22,>=0.21 in /root/.local/lib/python3.11/site-packages (from transformers!=4.38.*,!=4.39.*) (0.21.2)
Requirement already satisfied: safetensors>=0.4.3 in /root/.local/lib/python3.11/site-packages (from transformers!=4.38.*,!=4.39.*) (0.5.3)
Requirement already satisfied: tqdm>=4.27 in /root/.local/lib/python3.11/site-packages (from transformers!=4.38.*,!=4.39.*) (4.67.1)
Requirement already satisfied: fsspec>=2023.5.0 in /root/.local/lib/python3.11/site-packages (from huggingface-hub<1.0,>=0.30.0->transformers!=4.38.*,!=4.39.*) (2025.3.0)
Requirement already satisfied: hf-xet<2.0.0,>=1.1.2 in /root/.local/lib/python3.11/site-packages (from huggingface-hub<1.0,>=0.30.0->transformers!=4.38.*,!=4.39.*) (1.1.5)
Requirement already satisfied: sympy>=1.13.3 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (1.14.0)
Requirement already satisfied: networkx in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (3.5)
Requirement already satisfied: jinja2 in /usr/local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (3.1.6)
Requirement already satisfied: nvidia-cuda-nvrtc-cu12==12.6.77 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (12.6.77)
Requirement already satisfied: nvidia-cuda-runtime-cu12==12.6.77 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (12.6.77)
Requirement already satisfied: nvidia-cuda-cupti-cu12==12.6.80 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (12.6.80)
Requirement already satisfied: nvidia-cudnn-cu12==9.5.1.17 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (9.5.1.17)
Requirement already satisfied: nvidia-cublas-cu12==12.6.4.1 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (12.6.4.1)
Requirement already satisfied: nvidia-cufft-cu12==11.3.0.4 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (11.3.0.4)
Requirement already satisfied: nvidia-curand-cu12==10.3.7.77 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (10.3.7.77)
Requirement already satisfied: nvidia-cusolver-cu12==11.7.1.2 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (11.7.1.2)
Requirement already satisfied: nvidia-cusparse-cu12==12.5.4.2 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (12.5.4.2)
Requirement already satisfied: nvidia-cusparselt-cu12==0.6.3 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (0.6.3)
Requirement already satisfied: nvidia-nccl-cu12==2.26.2 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (2.26.2)
Requirement already satisfied: nvidia-nvtx-cu12==12.6.77 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (12.6.77)
Requirement already satisfied: nvidia-nvjitlink-cu12==12.6.85 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (12.6.85)
Requirement already satisfied: nvidia-cufile-cu12==1.11.1.6 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (1.11.1.6)
Requirement already satisfied: triton==3.3.1 in /root/.local/lib/python3.11/site-packages (from torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (3.3.1)
Requirement already satisfied: setuptools>=40.8.0 in /usr/local/lib/python3.11/site-packages (from triton==3.3.1->torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (68.1.2)
Requirement already satisfied: mpmath<1.4,>=1.1.0 in /root/.local/lib/python3.11/site-packages (from sympy>=1.13.3->torch>=2.2->llama-cookbook==0.0.5.post1->llama-recipes) (1.3.0)
Requirement already satisfied: psutil in /usr/local/lib/python3.11/site-packages (from accelerate->llama-cookbook==0.0.5.post1->llama-recipes) (7.0.0)
Collecting click>=8.0.0 (from black->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading click-8.2.1-py3-none-any.whl.metadata (2.5 kB)
Collecting mypy-extensions>=0.4.3 (from black->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading mypy_extensions-1.1.0-py3-none-any.whl.metadata (1.1 kB)
Collecting pathspec>=0.9.0 (from black->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pathspec-0.12.1-py3-none-any.whl.metadata (21 kB)
Requirement already satisfied: platformdirs>=2 in /usr/local/lib/python3.11/site-packages (from black->llama-cookbook==0.0.5.post1->llama-recipes) (4.3.8)
Requirement already satisfied: ipython>=7.8.0 in /usr/local/lib/python3.11/site-packages (from black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (9.4.0)
Collecting tokenize-rt>=3.2.0 (from black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading tokenize_rt-6.2.0-py2.py3-none-any.whl.metadata (4.0 kB)
Requirement already satisfied: decorator in /usr/local/lib/python3.11/site-packages (from ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (5.2.1)
Requirement already satisfied: ipython-pygments-lexers in /usr/local/lib/python3.11/site-packages (from ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (1.1.1)
Requirement already satisfied: jedi>=0.16 in /usr/local/lib/python3.11/site-packages (from ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (0.19.2)
Requirement already satisfied: matplotlib-inline in /usr/local/lib/python3.11/site-packages (from ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (0.1.7)
Requirement already satisfied: pexpect>4.3 in /usr/local/lib/python3.11/site-packages (from ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (4.9.0)
Requirement already satisfied: prompt_toolkit<3.1.0,>=3.0.41 in /usr/local/lib/python3.11/site-packages (from ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (3.0.51)
Requirement already satisfied: pygments>=2.4.0 in /usr/local/lib/python3.11/site-packages (from ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (2.19.2)
Requirement already satisfied: stack_data in /usr/local/lib/python3.11/site-packages (from ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (0.6.3)
Requirement already satisfied: traitlets>=5.13.0 in /usr/local/lib/python3.11/site-packages (from ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (5.14.3)
Requirement already satisfied: wcwidth in /usr/local/lib/python3.11/site-packages (from prompt_toolkit<3.1.0,>=3.0.41->ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (0.2.13)
Requirement already satisfied: parso<0.9.0,>=0.8.4 in /usr/local/lib/python3.11/site-packages (from jedi>=0.16->ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (0.8.4)
Requirement already satisfied: ptyprocess>=0.5 in /usr/local/lib/python3.11/site-packages (from pexpect>4.3->ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (0.7.0)
Collecting semgrep>1.68 (from codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading semgrep-1.128.1-cp39.cp310.cp311.py39.py310.py311-none-musllinux_1_0_x86_64.manylinux2014_x86_64.whl.metadata (1.8 kB)
Requirement already satisfied: attrs>=21.3 in /usr/local/lib/python3.11/site-packages (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes) (24.2.0)
Collecting boltons~=21.0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading boltons-21.0.0-py2.py3-none-any.whl.metadata (1.5 kB)
Collecting click-option-group~=0.5 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading click_option_group-0.5.7-py3-none-any.whl.metadata (5.8 kB)
Collecting click>=8.0.0 (from black->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading click-8.1.8-py3-none-any.whl.metadata (2.3 kB)
Collecting colorama~=0.4.0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading https://download.pytorch.org/whl/test/colorama-0.4.6-py2.py3-none-any.whl (25 kB)
Requirement already satisfied: defusedxml~=0.7.1 in /usr/local/lib/python3.11/site-packages (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes) (0.7.1)
Collecting exceptiongroup~=1.2.0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading exceptiongroup-1.2.2-py3-none-any.whl.metadata (6.6 kB)
Collecting glom~=22.1 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading glom-22.1.0-py2.py3-none-any.whl.metadata (4.9 kB)
Requirement already satisfied: jsonschema~=4.6 in /usr/local/lib/python3.11/site-packages (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes) (4.24.0)
Collecting opentelemetry-api~=1.25.0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_api-1.25.0-py3-none-any.whl.metadata (1.4 kB)
Collecting opentelemetry-sdk~=1.25.0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_sdk-1.25.0-py3-none-any.whl.metadata (1.4 kB)
Collecting opentelemetry-exporter-otlp-proto-http~=1.25.0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_exporter_otlp_proto_http-1.25.0-py3-none-any.whl.metadata (2.2 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.56b0-py3-none-any.whl.metadata (2.6 kB)
Collecting peewee~=3.14 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading peewee-3.18.2.tar.gz (949 kB)
     โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 949.2/949.2 kB 68.9 MB/s eta 0:00:00
  Installing build dependencies ... done
  Getting requirements to build wheel ... done
  Preparing metadata (pyproject.toml) ... done
Collecting rich~=13.5.2 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading rich-13.5.3-py3-none-any.whl.metadata (18 kB)
Collecting ruamel.yaml>=0.18.5 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading ruamel.yaml-0.18.14-py3-none-any.whl.metadata (24 kB)
Collecting tomli~=2.0.1 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading tomli-2.0.2-py3-none-any.whl.metadata (10.0 kB)
Requirement already satisfied: urllib3~=2.0 in /usr/local/lib/python3.11/site-packages (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes) (2.5.0)
Collecting wcmatch~=8.3 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading wcmatch-8.5.2-py3-none-any.whl.metadata (4.8 kB)
Collecting face>=20.1.0 (from glom~=22.1->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading face-24.0.0-py3-none-any.whl.metadata (1.1 kB)
Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /usr/local/lib/python3.11/site-packages (from jsonschema~=4.6->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes) (2025.4.1)
Requirement already satisfied: referencing>=0.28.4 in /usr/local/lib/python3.11/site-packages (from jsonschema~=4.6->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes) (0.36.2)
Requirement already satisfied: rpds-py>=0.7.1 in /usr/local/lib/python3.11/site-packages (from jsonschema~=4.6->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes) (0.26.0)
Collecting deprecated>=1.2.6 (from opentelemetry-api~=1.25.0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading Deprecated-1.2.18-py2.py3-none-any.whl.metadata (5.7 kB)
Collecting importlib-metadata<=7.1,>=6.0 (from opentelemetry-api~=1.25.0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading https://download.pytorch.org/whl/test/importlib_metadata-7.1.0-py3-none-any.whl (24 kB)
Collecting zipp>=0.5 (from importlib-metadata<=7.1,>=6.0->opentelemetry-api~=1.25.0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading zipp-3.23.0-py3-none-any.whl.metadata (3.6 kB)
Collecting googleapis-common-protos~=1.52 (from opentelemetry-exporter-otlp-proto-http~=1.25.0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading googleapis_common_protos-1.70.0-py3-none-any.whl.metadata (9.3 kB)
Collecting opentelemetry-exporter-otlp-proto-common==1.25.0 (from opentelemetry-exporter-otlp-proto-http~=1.25.0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_exporter_otlp_proto_common-1.25.0-py3-none-any.whl.metadata (1.7 kB)
Collecting opentelemetry-proto==1.25.0 (from opentelemetry-exporter-otlp-proto-http~=1.25.0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_proto-1.25.0-py3-none-any.whl.metadata (2.2 kB)
Collecting protobuf<5.0,>=3.19 (from opentelemetry-proto==1.25.0->opentelemetry-exporter-otlp-proto-http~=1.25.0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading protobuf-4.25.8-cp37-abi3-manylinux2014_x86_64.whl.metadata (541 bytes)
Collecting opentelemetry-instrumentation==0.56b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.56b0-py3-none-any.whl.metadata (6.7 kB)
Collecting opentelemetry-semantic-conventions==0.56b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.56b0-py3-none-any.whl.metadata (2.4 kB)
Collecting opentelemetry-util-http==0.56b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.56b0-py3-none-any.whl.metadata (2.6 kB)
Collecting wrapt<2.0.0,>=1.0.0 (from opentelemetry-instrumentation==0.56b0->opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.4 kB)
INFO: pip is looking at multiple versions of opentelemetry-semantic-conventions to determine which version is compatible with other requirements. This could take a while.
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.55b1-py3-none-any.whl.metadata (2.6 kB)
Collecting opentelemetry-instrumentation==0.55b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.55b1-py3-none-any.whl.metadata (6.7 kB)
Collecting opentelemetry-semantic-conventions==0.55b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.55b1-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-util-http==0.55b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.55b1-py3-none-any.whl.metadata (2.6 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.55b0-py3-none-any.whl.metadata (2.6 kB)
Collecting opentelemetry-instrumentation==0.55b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.55b0-py3-none-any.whl.metadata (6.7 kB)
Collecting opentelemetry-semantic-conventions==0.55b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.55b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-util-http==0.55b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.55b0-py3-none-any.whl.metadata (2.6 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.54b1-py3-none-any.whl.metadata (2.7 kB)
Collecting opentelemetry-instrumentation==0.54b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.54b1-py3-none-any.whl.metadata (6.8 kB)
Collecting opentelemetry-semantic-conventions==0.54b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.54b1-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-util-http==0.54b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.54b1-py3-none-any.whl.metadata (2.6 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.54b0-py3-none-any.whl.metadata (2.7 kB)
Collecting opentelemetry-instrumentation==0.54b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.54b0-py3-none-any.whl.metadata (6.8 kB)
Collecting opentelemetry-semantic-conventions==0.54b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.54b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-util-http==0.54b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.54b0-py3-none-any.whl.metadata (2.6 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.53b1-py3-none-any.whl.metadata (2.7 kB)
Collecting opentelemetry-instrumentation==0.53b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.53b1-py3-none-any.whl.metadata (6.8 kB)
Collecting opentelemetry-semantic-conventions==0.53b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.53b1-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-util-http==0.53b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.53b1-py3-none-any.whl.metadata (2.6 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.53b0-py3-none-any.whl.metadata (2.7 kB)
Collecting opentelemetry-instrumentation==0.53b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.53b0-py3-none-any.whl.metadata (6.8 kB)
Collecting opentelemetry-semantic-conventions==0.53b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.53b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-util-http==0.53b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.53b0-py3-none-any.whl.metadata (2.6 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.52b1-py3-none-any.whl.metadata (2.7 kB)
Collecting opentelemetry-instrumentation==0.52b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.52b1-py3-none-any.whl.metadata (6.8 kB)
Collecting opentelemetry-semantic-conventions==0.52b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.52b1-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-util-http==0.52b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.52b1-py3-none-any.whl.metadata (2.6 kB)
INFO: pip is still looking at multiple versions of opentelemetry-semantic-conventions to determine which version is compatible with other requirements. This could take a while.
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.52b0-py3-none-any.whl.metadata (2.7 kB)
Collecting opentelemetry-instrumentation==0.52b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.52b0-py3-none-any.whl.metadata (6.8 kB)
Collecting opentelemetry-semantic-conventions==0.52b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.52b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-util-http==0.52b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.52b0-py3-none-any.whl.metadata (2.6 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.51b0-py3-none-any.whl.metadata (2.7 kB)
Collecting opentelemetry-instrumentation==0.51b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.51b0-py3-none-any.whl.metadata (6.3 kB)
Collecting opentelemetry-semantic-conventions==0.51b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.51b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-util-http==0.51b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.51b0-py3-none-any.whl.metadata (2.6 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.50b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation==0.50b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.50b0-py3-none-any.whl.metadata (6.1 kB)
Collecting opentelemetry-semantic-conventions==0.50b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.50b0-py3-none-any.whl.metadata (2.3 kB)
Collecting opentelemetry-util-http==0.50b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.50b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.49b2-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation==0.49b2 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.49b2-py3-none-any.whl.metadata (6.1 kB)
Collecting opentelemetry-semantic-conventions==0.49b2 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.49b2-py3-none-any.whl.metadata (2.3 kB)
Collecting opentelemetry-util-http==0.49b2 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.49b2-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.49b1-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation==0.49b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.49b1-py3-none-any.whl.metadata (6.2 kB)
Collecting opentelemetry-semantic-conventions==0.49b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.49b1-py3-none-any.whl.metadata (2.4 kB)
Collecting opentelemetry-util-http==0.49b1 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.49b1-py3-none-any.whl.metadata (2.5 kB)
INFO: This is taking longer than usual. You might need to provide the dependency resolver with stricter constraints to reduce runtime. See https://pip.pypa.io/warnings/backtracking for guidance. If you want to abort this run, press Ctrl + C.
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.49b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation==0.49b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.49b0-py3-none-any.whl.metadata (6.2 kB)
Collecting opentelemetry-semantic-conventions==0.49b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.49b0-py3-none-any.whl.metadata (2.4 kB)
Collecting opentelemetry-util-http==0.49b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.49b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.48b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation==0.48b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.48b0-py3-none-any.whl.metadata (6.1 kB)
Collecting opentelemetry-semantic-conventions==0.48b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl.metadata (2.4 kB)
Collecting opentelemetry-util-http==0.48b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.48b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.47b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation==0.47b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.47b0-py3-none-any.whl.metadata (6.1 kB)
Collecting opentelemetry-semantic-conventions==0.47b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.47b0-py3-none-any.whl.metadata (2.4 kB)
Collecting opentelemetry-util-http==0.47b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.47b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation-requests~=0.46b0 (from semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation_requests-0.46b0-py3-none-any.whl.metadata (2.5 kB)
Collecting opentelemetry-instrumentation==0.46b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_instrumentation-0.46b0-py3-none-any.whl.metadata (6.1 kB)
Collecting opentelemetry-semantic-conventions==0.46b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl.metadata (2.3 kB)
Collecting opentelemetry-util-http==0.46b0 (from opentelemetry-instrumentation-requests~=0.46b0->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opentelemetry_util_http-0.46b0-py3-none-any.whl.metadata (2.4 kB)
Requirement already satisfied: charset_normalizer<4,>=2 in /usr/local/lib/python3.11/site-packages (from requests->transformers!=4.38.*,!=4.39.*) (3.4.2)
Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.11/site-packages (from requests->transformers!=4.38.*,!=4.39.*) (3.10)
Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.11/site-packages (from requests->transformers!=4.38.*,!=4.39.*) (2024.8.30)
Collecting markdown-it-py>=2.2.0 (from rich~=13.5.2->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading markdown_it_py-3.0.0-py3-none-any.whl.metadata (6.9 kB)
Collecting bracex>=2.1.1 (from wcmatch~=8.3->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading bracex-2.6-py3-none-any.whl.metadata (3.6 kB)
Collecting mdurl~=0.1 (from markdown-it-py>=2.2.0->rich~=13.5.2->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading mdurl-0.1.2-py3-none-any.whl.metadata (1.6 kB)
Collecting ruamel.yaml.clib>=0.2.7 (from ruamel.yaml>=0.18.5->semgrep>1.68->codeshield->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.7 kB)
Requirement already satisfied: pyarrow>=15.0.0 in /root/.local/lib/python3.11/site-packages (from datasets->llama-cookbook==0.0.5.post1->llama-recipes) (21.0.0)
Requirement already satisfied: dill<0.3.9,>=0.3.0 in /root/.local/lib/python3.11/site-packages (from datasets->llama-cookbook==0.0.5.post1->llama-recipes) (0.3.8)
Requirement already satisfied: pandas in /root/.local/lib/python3.11/site-packages (from datasets->llama-cookbook==0.0.5.post1->llama-recipes) (2.3.1)
Requirement already satisfied: xxhash in /root/.local/lib/python3.11/site-packages (from datasets->llama-cookbook==0.0.5.post1->llama-recipes) (3.5.0)
Requirement already satisfied: multiprocess<0.70.17 in /root/.local/lib/python3.11/site-packages (from datasets->llama-cookbook==0.0.5.post1->llama-recipes) (0.70.16)
Requirement already satisfied: aiohttp!=4.0.0a0,!=4.0.0a1 in /usr/local/lib/python3.11/site-packages (from fsspec[http]<=2025.3.0,>=2023.1.0->datasets->llama-cookbook==0.0.5.post1->llama-recipes) (3.10.8)
Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /usr/local/lib/python3.11/site-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets->llama-cookbook==0.0.5.post1->llama-recipes) (2.4.3)
Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.11/site-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets->llama-cookbook==0.0.5.post1->llama-recipes) (1.3.1)
Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.11/site-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets->llama-cookbook==0.0.5.post1->llama-recipes) (1.4.1)
Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.11/site-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets->llama-cookbook==0.0.5.post1->llama-recipes) (6.1.0)
Requirement already satisfied: yarl<2.0,>=1.12.0 in /usr/local/lib/python3.11/site-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]<=2025.3.0,>=2023.1.0->datasets->llama-cookbook==0.0.5.post1->llama-recipes) (1.13.1)
Collecting termcolor (from fire->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading termcolor-3.1.0-py3-none-any.whl.metadata (6.4 kB)
Collecting aiofiles<25.0,>=22.0 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading aiofiles-24.1.0-py3-none-any.whl.metadata (10 kB)
Requirement already satisfied: anyio<5.0,>=3.0 in /usr/local/lib/python3.11/site-packages (from gradio->llama-cookbook==0.0.5.post1->llama-recipes) (4.9.0)
Collecting brotli>=1.1.0 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.5 kB)
Collecting fastapi<1.0,>=0.115.2 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading fastapi-0.116.1-py3-none-any.whl.metadata (28 kB)
Collecting ffmpy (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading ffmpy-0.6.0-py3-none-any.whl.metadata (2.9 kB)
Collecting gradio-client==1.11.0 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading gradio_client-1.11.0-py3-none-any.whl.metadata (7.1 kB)
Collecting groovy~=0.1 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading groovy-0.1.2-py3-none-any.whl.metadata (6.1 kB)
Requirement already satisfied: httpx<1.0,>=0.24.1 in /usr/local/lib/python3.11/site-packages (from gradio->llama-cookbook==0.0.5.post1->llama-recipes) (0.28.1)
Collecting orjson~=3.0 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading orjson-3.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (42 kB)
Requirement already satisfied: pillow<12.0,>=8.0 in /root/.local/lib/python3.11/site-packages (from gradio->llama-cookbook==0.0.5.post1->llama-recipes) (11.3.0)
Collecting pydantic<2.12,>=2.0 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pydantic-2.11.7-py3-none-any.whl.metadata (67 kB)
Collecting pydub (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pydub-0.25.1-py2.py3-none-any.whl.metadata (1.4 kB)
Collecting python-multipart>=0.0.18 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading python_multipart-0.0.20-py3-none-any.whl.metadata (1.8 kB)
Collecting ruff>=0.9.3 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading ruff-0.12.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (25 kB)
Collecting safehttpx<0.2.0,>=0.1.6 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading safehttpx-0.1.6-py3-none-any.whl.metadata (4.2 kB)
Collecting semantic-version~=2.0 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading semantic_version-2.10.0-py2.py3-none-any.whl.metadata (9.7 kB)
Collecting starlette<1.0,>=0.40.0 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading starlette-0.47.1-py3-none-any.whl.metadata (6.2 kB)
Collecting tomlkit<0.14.0,>=0.12.0 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading tomlkit-0.13.3-py3-none-any.whl.metadata (2.8 kB)
Collecting typer<1.0,>=0.12 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading typer-0.16.0-py3-none-any.whl.metadata (15 kB)
Collecting uvicorn>=0.14.0 (from gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading uvicorn-0.35.0-py3-none-any.whl.metadata (6.5 kB)
Collecting websockets<16.0,>=10.0 (from gradio-client==1.11.0->gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.8 kB)
Requirement already satisfied: sniffio>=1.1 in /usr/local/lib/python3.11/site-packages (from anyio<5.0,>=3.0->gradio->llama-cookbook==0.0.5.post1->llama-recipes) (1.3.1)
Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/site-packages (from httpx<1.0,>=0.24.1->gradio->llama-cookbook==0.0.5.post1->llama-recipes) (1.0.9)
Requirement already satisfied: h11>=0.16 in /usr/local/lib/python3.11/site-packages (from httpcore==1.*->httpx<1.0,>=0.24.1->gradio->llama-cookbook==0.0.5.post1->llama-recipes) (0.16.0)
Requirement already satisfied: python-dateutil>=2.8.2 in /usr/local/lib/python3.11/site-packages (from pandas->datasets->llama-cookbook==0.0.5.post1->llama-recipes) (2.9.0.post0)
Requirement already satisfied: pytz>=2020.1 in /root/.local/lib/python3.11/site-packages (from pandas->datasets->llama-cookbook==0.0.5.post1->llama-recipes) (2025.2)
Requirement already satisfied: tzdata>=2022.7 in /root/.local/lib/python3.11/site-packages (from pandas->datasets->llama-cookbook==0.0.5.post1->llama-recipes) (2025.2)
Collecting annotated-types>=0.6.0 (from pydantic<2.12,>=2.0->gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading annotated_types-0.7.0-py3-none-any.whl.metadata (15 kB)
Collecting pydantic-core==2.33.2 (from pydantic<2.12,>=2.0->gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.8 kB)
Collecting typing-inspection>=0.4.0 (from pydantic<2.12,>=2.0->gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading typing_inspection-0.4.1-py3-none-any.whl.metadata (2.6 kB)
Collecting shellingham>=1.3.0 (from typer<1.0,>=0.12->gradio->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading shellingham-1.5.4-py2.py3-none-any.whl.metadata (3.5 kB)
Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.11/site-packages (from python-dateutil>=2.8.2->pandas->datasets->llama-cookbook==0.0.5.post1->llama-recipes) (1.17.0)
Collecting contourpy>=1.0.1 (from matplotlib->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading contourpy-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.5 kB)
Collecting cycler>=0.10 (from matplotlib->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading https://download.pytorch.org/whl/test/cycler-0.12.1-py3-none-any.whl (8.3 kB)
Collecting fonttools>=4.22.0 (from matplotlib->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading fonttools-4.59.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl.metadata (107 kB)
Collecting kiwisolver>=1.3.1 (from matplotlib->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.2 kB)
Collecting pyparsing>=2.3.1 (from matplotlib->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pyparsing-3.2.3-py3-none-any.whl.metadata (5.0 kB)
Collecting distro<2,>=1.7.0 (from openai->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading distro-1.9.0-py3-none-any.whl.metadata (6.8 kB)
Collecting jiter<1,>=0.4.0 (from openai->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.2 kB)
Collecting texttable (from py7zr->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading texttable-1.7.0-py2.py3-none-any.whl.metadata (9.8 kB)
Collecting pycryptodomex>=3.20.0 (from py7zr->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.4 kB)
Collecting pyzstd>=0.16.1 (from py7zr->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.5 kB)
Collecting pyppmd<1.3.0,>=1.1.0 (from py7zr->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.4 kB)
Collecting pybcj<1.1.0,>=1.0.0 (from py7zr->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.7 kB)
Collecting multivolumefile>=0.2.3 (from py7zr->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading multivolumefile-0.2.3-py3-none-any.whl.metadata (6.3 kB)
Collecting inflate64<1.1.0,>=1.0.0 (from py7zr->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (4.4 kB)
Collecting typing-extensions>=4.8.0 (from llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading typing_extensions-4.14.1-py3-none-any.whl.metadata (3.0 kB)
Collecting absl-py (from rouge-score->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading absl_py-2.3.1-py3-none-any.whl.metadata (3.3 kB)
Collecting nltk (from rouge-score->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading nltk-3.9.1-py3-none-any.whl.metadata (2.9 kB)
Collecting joblib (from nltk->rouge-score->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading joblib-1.5.1-py3-none-any.whl.metadata (5.6 kB)
Collecting scikit-learn (from sentence-transformers->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading scikit_learn-1.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl.metadata (11 kB)
Collecting threadpoolctl>=3.1.0 (from scikit-learn->sentence-transformers->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading threadpoolctl-3.6.0-py3-none-any.whl.metadata (13 kB)
Requirement already satisfied: executing>=1.2.0 in /usr/local/lib/python3.11/site-packages (from stack_data->ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (2.2.0)
Requirement already satisfied: asttokens>=2.1.0 in /usr/local/lib/python3.11/site-packages (from stack_data->ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (3.0.0)
Requirement already satisfied: pure-eval in /usr/local/lib/python3.11/site-packages (from stack_data->ipython>=7.8.0->black[jupyter]->llama-cookbook==0.0.5.post1->llama-recipes) (0.2.3)
Collecting filetype (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading filetype-1.2.0-py2.py3-none-any.whl.metadata (6.5 kB)
Collecting python-magic (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading python_magic-0.4.27-py2.py3-none-any.whl.metadata (5.8 kB)
Collecting lxml (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading lxml-6.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl.metadata (6.6 kB)
Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.11/site-packages (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes) (4.13.4)
Collecting emoji (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading emoji-2.14.1-py3-none-any.whl.metadata (5.7 kB)
Collecting dataclasses-json (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading dataclasses_json-0.6.7-py3-none-any.whl.metadata (25 kB)
Collecting python-iso639 (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading python_iso639-2025.2.18-py3-none-any.whl.metadata (14 kB)
Collecting langdetect (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading langdetect-1.0.9.tar.gz (981 kB)
     โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 981.5/981.5 kB 190.2 MB/s eta 0:00:00
  Preparing metadata (setup.py) ... done
Collecting rapidfuzz (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (12 kB)
Collecting backoff (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading backoff-2.2.1-py3-none-any.whl.metadata (14 kB)
Collecting unstructured-client (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading unstructured_client-0.39.1-py3-none-any.whl.metadata (21 kB)
Collecting python-oxmsg (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading python_oxmsg-0.0.2-py3-none-any.whl.metadata (5.0 kB)
Collecting html5lib (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading html5lib-1.1-py2.py3-none-any.whl.metadata (16 kB)
Collecting onnx>=1.17.0 (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading onnx-1.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.9 kB)
Collecting onnxruntime>=1.19.0 (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl.metadata (4.6 kB)
Collecting pdf2image (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pdf2image-1.17.0-py3-none-any.whl.metadata (6.2 kB)
Collecting pdfminer.six (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pdfminer_six-20250506-py3-none-any.whl.metadata (4.2 kB)
Collecting pikepdf (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pikepdf-9.10.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl.metadata (8.1 kB)
Collecting pi-heif (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pi_heif-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.5 kB)
Collecting pypdf (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pypdf-5.8.0-py3-none-any.whl.metadata (7.1 kB)
Collecting google-cloud-vision (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading google_cloud_vision-3.10.2-py3-none-any.whl.metadata (9.6 kB)
Collecting effdet (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading effdet-0.4.1-py3-none-any.whl.metadata (33 kB)
Collecting unstructured-inference>=1.0.5 (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading unstructured_inference-1.0.5-py3-none-any.whl.metadata (5.3 kB)
Collecting unstructured.pytesseract>=0.3.12 (from unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading unstructured.pytesseract-0.3.15-py3-none-any.whl.metadata (11 kB)
Collecting coloredlogs (from onnxruntime>=1.19.0->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading coloredlogs-15.0.1-py2.py3-none-any.whl.metadata (12 kB)
Collecting flatbuffers (from onnxruntime>=1.19.0->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading flatbuffers-25.2.10-py2.py3-none-any.whl.metadata (875 bytes)
Collecting opencv-python!=4.7.0.68 (from unstructured-inference>=1.0.5->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading opencv_python-4.12.0.88-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl.metadata (19 kB)
Collecting timm (from unstructured-inference>=1.0.5->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading timm-1.0.17-py3-none-any.whl.metadata (59 kB)
Collecting pypdfium2 (from unstructured-inference>=1.0.5->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pypdfium2-4.30.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (48 kB)
Collecting numpy>=1.17 (from transformers!=4.38.*,!=4.39.*)
  Downloading numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (62 kB)
Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.11/site-packages (from beautifulsoup4->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes) (2.7)
Collecting humanfriendly>=9.1 (from coloredlogs->onnxruntime>=1.19.0->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading humanfriendly-10.0-py2.py3-none-any.whl.metadata (9.2 kB)
Collecting marshmallow<4.0.0,>=3.18.0 (from dataclasses-json->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading marshmallow-3.26.1-py3-none-any.whl.metadata (7.3 kB)
Collecting typing-inspect<1,>=0.4.0 (from dataclasses-json->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading https://download.pytorch.org/whl/test/typing_inspect-0.9.0-py3-none-any.whl.metadata (1.5 kB)
Requirement already satisfied: torchvision in /root/.local/lib/python3.11/site-packages (from effdet->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes) (0.22.1)
Collecting pycocotools>=2.0.2 (from effdet->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pycocotools-2.0.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (1.3 kB)
Collecting omegaconf>=2.0 (from effdet->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading https://download.pytorch.org/whl/test/omegaconf-2.3.0-py3-none-any.whl (79 kB)
Collecting antlr4-python3-runtime==4.9.* (from omegaconf>=2.0->effdet->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading https://download.pytorch.org/whl/test/antlr4_python3_runtime-4.9.3.tar.gz (117 kB)
  Preparing metadata (setup.py) ... done
Collecting google-api-core!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1 (from google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-cloud-vision->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading google_api_core-2.25.1-py3-none-any.whl.metadata (3.0 kB)
Collecting google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1 (from google-cloud-vision->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading google_auth-2.40.3-py2.py3-none-any.whl.metadata (6.2 kB)
Collecting proto-plus<2.0.0,>=1.22.3 (from google-cloud-vision->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading proto_plus-1.26.1-py3-none-any.whl.metadata (2.2 kB)
Collecting grpcio<2.0.0,>=1.33.2 (from google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-cloud-vision->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading grpcio-1.73.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.8 kB)
Collecting grpcio-status<2.0.0,>=1.33.2 (from google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-cloud-vision->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading grpcio_status-1.73.1-py3-none-any.whl.metadata (1.1 kB)
Collecting cachetools<6.0,>=2.0.0 (from google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-cloud-vision->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading cachetools-5.5.2-py3-none-any.whl.metadata (5.4 kB)
Collecting pyasn1-modules>=0.2.1 (from google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-cloud-vision->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pyasn1_modules-0.4.2-py3-none-any.whl.metadata (3.5 kB)
Collecting rsa<5,>=3.1.4 (from google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-cloud-vision->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading rsa-4.9.1-py3-none-any.whl.metadata (5.6 kB)
INFO: pip is looking at multiple versions of grpcio-status to determine which version is compatible with other requirements. This could take a while.
Collecting grpcio-status<2.0.0,>=1.33.2 (from google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0,>=1.34.1->google-cloud-vision->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading grpcio_status-1.73.0-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.72.2-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.72.1-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.71.2-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.71.0-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.70.0-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.69.0-py3-none-any.whl.metadata (1.1 kB)
INFO: pip is still looking at multiple versions of grpcio-status to determine which version is compatible with other requirements. This could take a while.
  Downloading grpcio_status-1.68.1-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.68.0-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.67.1-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.67.0-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.66.2-py3-none-any.whl.metadata (1.1 kB)
INFO: This is taking longer than usual. You might need to provide the dependency resolver with stricter constraints to reduce runtime. See https://pip.pypa.io/warnings/backtracking for guidance. If you want to abort this run, press Ctrl + C.
  Downloading grpcio_status-1.66.1-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.66.0-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.65.5-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.65.4-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.65.2-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.65.1-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.64.3-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.64.1-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.64.0-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.63.2-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.63.0-py3-none-any.whl.metadata (1.1 kB)
  Downloading grpcio_status-1.62.3-py3-none-any.whl.metadata (1.3 kB)
Collecting pyasn1>=0.1.3 (from rsa<5,>=3.1.4->google-auth!=2.24.0,!=2.25.0,<3.0.0,>=2.14.1->google-cloud-vision->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading pyasn1-0.6.1-py3-none-any.whl.metadata (8.4 kB)
Requirement already satisfied: webencodings in /usr/local/lib/python3.11/site-packages (from html5lib->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes) (0.5.1)
Collecting cryptography>=36.0.0 (from pdfminer.six->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl.metadata (5.7 kB)
Requirement already satisfied: cffi>=1.14 in /usr/local/lib/python3.11/site-packages (from cryptography>=36.0.0->pdfminer.six->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes) (1.17.1)
Requirement already satisfied: pycparser in /usr/local/lib/python3.11/site-packages (from cffi>=1.14->cryptography>=36.0.0->pdfminer.six->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes) (2.22)
Collecting olefile (from python-oxmsg->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading olefile-0.47-py2.py3-none-any.whl.metadata (9.7 kB)
Requirement already satisfied: nest-asyncio>=1.6.0 in /usr/local/lib/python3.11/site-packages (from unstructured-client->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes) (1.6.0)
Collecting requests-toolbelt>=1.0.0 (from unstructured-client->unstructured[pdf]->llama-cookbook==0.0.5.post1->llama-recipes)
  Downloading requests_toolbelt-1.0.0-py2.py3-none-any.whl.metadata (14 kB)
Downloading llama_recipes-0.0.5.post2-py3-none-any.whl (20 kB)
Downloading llama_cookbook-0.0.5.post1-py3-none-any.whl (70 kB)
Downloading appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)
Downloading black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl (1.7 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 1.7/1.7 MB 186.7 MB/s eta 0:00:00
Downloading mypy_extensions-1.1.0-py3-none-any.whl (5.0 kB)
Downloading pathspec-0.12.1-py3-none-any.whl (31 kB)
Downloading tokenize_rt-6.2.0-py2.py3-none-any.whl (6.0 kB)
Downloading chardet-5.2.0-py3-none-any.whl (199 kB)
Downloading codeshield-1.0.1-py3-none-any.whl (173 kB)
Downloading semgrep-1.128.1-cp39.cp310.cp311.py39.py310.py311-none-musllinux_1_0_x86_64.manylinux2014_x86_64.whl (48.2 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 48.2/48.2 MB 135.2 MB/s eta 0:00:00a 0:00:01
Downloading boltons-21.0.0-py2.py3-none-any.whl (193 kB)
Downloading click-8.1.8-py3-none-any.whl (98 kB)
Downloading click_option_group-0.5.7-py3-none-any.whl (11 kB)
Downloading exceptiongroup-1.2.2-py3-none-any.whl (16 kB)
Downloading glom-22.1.0-py2.py3-none-any.whl (100 kB)
Downloading opentelemetry_api-1.25.0-py3-none-any.whl (59 kB)
Downloading opentelemetry_exporter_otlp_proto_http-1.25.0-py3-none-any.whl (16 kB)
Downloading opentelemetry_exporter_otlp_proto_common-1.25.0-py3-none-any.whl (17 kB)
Downloading opentelemetry_proto-1.25.0-py3-none-any.whl (52 kB)
Downloading googleapis_common_protos-1.70.0-py3-none-any.whl (294 kB)
Downloading opentelemetry_instrumentation_requests-0.46b0-py3-none-any.whl (12 kB)
Downloading opentelemetry_instrumentation-0.46b0-py3-none-any.whl (29 kB)
Downloading opentelemetry_semantic_conventions-0.46b0-py3-none-any.whl (130 kB)
Downloading opentelemetry_util_http-0.46b0-py3-none-any.whl (6.9 kB)
Downloading opentelemetry_sdk-1.25.0-py3-none-any.whl (107 kB)
Downloading protobuf-4.25.8-cp37-abi3-manylinux2014_x86_64.whl (294 kB)
Downloading rich-13.5.3-py3-none-any.whl (239 kB)
Downloading tomli-2.0.2-py3-none-any.whl (13 kB)
Downloading wcmatch-8.5.2-py3-none-any.whl (39 kB)
Downloading wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (83 kB)
Downloading bracex-2.6-py3-none-any.whl (11 kB)
Downloading Deprecated-1.2.18-py2.py3-none-any.whl (10.0 kB)
Downloading face-24.0.0-py3-none-any.whl (54 kB)
Downloading markdown_it_py-3.0.0-py3-none-any.whl (87 kB)
Downloading mdurl-0.1.2-py3-none-any.whl (10.0 kB)
Downloading ruamel.yaml-0.18.14-py3-none-any.whl (118 kB)
Downloading ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (739 kB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 739.1/739.1 kB 145.7 MB/s eta 0:00:00
Downloading zipp-3.23.0-py3-none-any.whl (10 kB)
Downloading evaluate-0.4.5-py3-none-any.whl (84 kB)
Downloading gradio-5.38.0-py3-none-any.whl (59.6 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 59.6/59.6 MB 139.9 MB/s eta 0:00:00a 0:00:01
Downloading gradio_client-1.11.0-py3-none-any.whl (324 kB)
Downloading aiofiles-24.1.0-py3-none-any.whl (15 kB)
Downloading fastapi-0.116.1-py3-none-any.whl (95 kB)
Downloading groovy-0.1.2-py3-none-any.whl (14 kB)
Downloading orjson-3.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (127 kB)
Downloading pydantic-2.11.7-py3-none-any.whl (444 kB)
Downloading pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.0 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 2.0/2.0 MB 160.7 MB/s eta 0:00:00
Downloading safehttpx-0.1.6-py3-none-any.whl (8.7 kB)
Downloading semantic_version-2.10.0-py2.py3-none-any.whl (15 kB)
Downloading starlette-0.47.1-py3-none-any.whl (72 kB)
Downloading tomlkit-0.13.3-py3-none-any.whl (38 kB)
Downloading typer-0.16.0-py3-none-any.whl (46 kB)
Downloading websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (182 kB)
Downloading annotated_types-0.7.0-py3-none-any.whl (13 kB)
Downloading Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.9 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 2.9/2.9 MB 157.9 MB/s eta 0:00:00
Downloading python_multipart-0.0.20-py3-none-any.whl (24 kB)
Downloading ruff-0.12.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (11.3 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 11.3/11.3 MB 140.0 MB/s eta 0:00:00
Downloading shellingham-1.5.4-py2.py3-none-any.whl (9.8 kB)
Downloading typing_inspection-0.4.1-py3-none-any.whl (14 kB)
Downloading uvicorn-0.35.0-py3-none-any.whl (66 kB)
Downloading ffmpy-0.6.0-py3-none-any.whl (5.5 kB)
Downloading loralib-0.1.2-py3-none-any.whl (10 kB)
Downloading matplotlib-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (8.6 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 8.6/8.6 MB 141.7 MB/s eta 0:00:00
Downloading contourpy-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (326 kB)
Downloading fonttools-4.59.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl (5.0 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 5.0/5.0 MB 147.7 MB/s eta 0:00:00
Downloading kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.4 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 1.4/1.4 MB 170.7 MB/s eta 0:00:00
Downloading pyparsing-3.2.3-py3-none-any.whl (111 kB)
Downloading openai-1.97.0-py3-none-any.whl (764 kB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 765.0/765.0 kB 190.8 MB/s eta 0:00:00
Downloading distro-1.9.0-py3-none-any.whl (20 kB)
Downloading jiter-0.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (352 kB)
Downloading optimum-1.26.1-py3-none-any.whl (424 kB)
Downloading py7zr-1.0.0-py3-none-any.whl (69 kB)
Downloading inflate64-1.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (96 kB)
Downloading pybcj-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (50 kB)
Downloading pyppmd-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (141 kB)
Downloading multivolumefile-0.2.3-py3-none-any.whl (17 kB)
Downloading pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.3 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 2.3/2.3 MB 173.5 MB/s eta 0:00:00
Downloading pyzstd-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (412 kB)
Downloading typing_extensions-4.14.1-py3-none-any.whl (43 kB)
Downloading pydub-0.25.1-py2.py3-none-any.whl (32 kB)
Downloading absl_py-2.3.1-py3-none-any.whl (135 kB)
Downloading nltk-3.9.1-py3-none-any.whl (1.5 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 1.5/1.5 MB 168.6 MB/s eta 0:00:00
Downloading joblib-1.5.1-py3-none-any.whl (307 kB)
Downloading scipy-1.16.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl (35.3 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 35.3/35.3 MB 154.0 MB/s eta 0:00:00a 0:00:01
Downloading sentence_transformers-5.0.0-py3-none-any.whl (470 kB)
Downloading scikit_learn-1.7.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl (9.7 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 9.7/9.7 MB 149.0 MB/s eta 0:00:00
Downloading threadpoolctl-3.6.0-py3-none-any.whl (18 kB)
Downloading tabulate-0.9.0-py3-none-any.whl (35 kB)
Downloading termcolor-3.1.0-py3-none-any.whl (7.7 kB)
Downloading texttable-1.7.0-py2.py3-none-any.whl (10 kB)
Downloading unstructured-0.18.9-py3-none-any.whl (1.8 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 1.8/1.8 MB 163.8 MB/s eta 0:00:00
Downloading onnx-1.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (17.6 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 17.6/17.6 MB 153.1 MB/s eta 0:00:00
Downloading onnxruntime-1.22.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl (16.5 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 16.5/16.5 MB 151.8 MB/s eta 0:00:00
Downloading unstructured_inference-1.0.5-py3-none-any.whl (48 kB)
Downloading opencv_python-4.12.0.88-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl (67.0 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 67.0/67.0 MB 149.8 MB/s eta 0:00:00a 0:00:01
Downloading numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (16.8 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 16.8/16.8 MB 154.4 MB/s eta 0:00:00
Downloading unstructured.pytesseract-0.3.15-py3-none-any.whl (14 kB)
Downloading backoff-2.2.1-py3-none-any.whl (15 kB)
Downloading coloredlogs-15.0.1-py2.py3-none-any.whl (46 kB)
Downloading humanfriendly-10.0-py2.py3-none-any.whl (86 kB)
Downloading dataclasses_json-0.6.7-py3-none-any.whl (28 kB)
Downloading marshmallow-3.26.1-py3-none-any.whl (50 kB)
Downloading https://download.pytorch.org/whl/test/typing_inspect-0.9.0-py3-none-any.whl (8.8 kB)
Downloading effdet-0.4.1-py3-none-any.whl (112 kB)
Downloading pycocotools-2.0.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (477 kB)
Downloading timm-1.0.17-py3-none-any.whl (2.5 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 2.5/2.5 MB 163.9 MB/s eta 0:00:00
Downloading emoji-2.14.1-py3-none-any.whl (590 kB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 590.6/590.6 kB 191.4 MB/s eta 0:00:00
Downloading filetype-1.2.0-py2.py3-none-any.whl (19 kB)
Downloading flatbuffers-25.2.10-py2.py3-none-any.whl (30 kB)
Downloading google_cloud_vision-3.10.2-py3-none-any.whl (527 kB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 527.9/527.9 kB 184.8 MB/s eta 0:00:00
Downloading google_api_core-2.25.1-py3-none-any.whl (160 kB)
Downloading google_auth-2.40.3-py2.py3-none-any.whl (216 kB)
Downloading cachetools-5.5.2-py3-none-any.whl (10 kB)
Downloading grpcio-1.73.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (6.0 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 6.0/6.0 MB 144.1 MB/s eta 0:00:00
Downloading grpcio_status-1.62.3-py3-none-any.whl (14 kB)
Downloading proto_plus-1.26.1-py3-none-any.whl (50 kB)
Downloading rsa-4.9.1-py3-none-any.whl (34 kB)
Downloading pyasn1-0.6.1-py3-none-any.whl (83 kB)
Downloading pyasn1_modules-0.4.2-py3-none-any.whl (181 kB)
Downloading html5lib-1.1-py2.py3-none-any.whl (112 kB)
Downloading lxml-6.0.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl (5.2 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 5.2/5.2 MB 149.0 MB/s eta 0:00:00
Downloading pdf2image-1.17.0-py3-none-any.whl (11 kB)
Downloading pdfminer_six-20250506-py3-none-any.whl (5.6 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 5.6/5.6 MB 150.0 MB/s eta 0:00:00
Downloading cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl (4.5 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 4.5/4.5 MB 165.9 MB/s eta 0:00:00
Downloading pi_heif-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.2 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 1.2/1.2 MB 197.5 MB/s eta 0:00:00
Downloading pikepdf-9.10.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl (2.6 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 2.6/2.6 MB 144.1 MB/s eta 0:00:00
Downloading pypdf-5.8.0-py3-none-any.whl (309 kB)
Downloading pypdfium2-4.30.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.9 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 2.9/2.9 MB 140.6 MB/s eta 0:00:00
Downloading python_iso639-2025.2.18-py3-none-any.whl (167 kB)
Downloading python_magic-0.4.27-py2.py3-none-any.whl (13 kB)
Downloading python_oxmsg-0.0.2-py3-none-any.whl (31 kB)
Downloading olefile-0.47-py2.py3-none-any.whl (114 kB)
Downloading rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 3.1/3.1 MB 154.2 MB/s eta 0:00:00
Downloading unstructured_client-0.39.1-py3-none-any.whl (212 kB)
Downloading requests_toolbelt-1.0.0-py2.py3-none-any.whl (54 kB)
Building wheels for collected packages: markupsafe, peewee, fire, rouge-score, antlr4-python3-runtime, langdetect
  DEPRECATION: Building 'markupsafe' using the legacy setup.py bdist_wheel mechanism, which will be removed in a future version. pip 25.3 will enforce this behaviour change. A possible replacement is to use the standardized build interface by setting the `--use-pep517` option, (possibly combined with `--no-build-isolation`), or adding a `pyproject.toml` file to the source tree of 'markupsafe'. Discussion can be found at https://github.com/pypa/pip/issues/6334
  Building wheel for markupsafe (setup.py) ... done
  Created wheel for markupsafe: filename=MarkupSafe-2.0.1-py3-none-any.whl size=9745 sha256=7e2d66f9e7f03fb5c9650b1bb42b497988d9caf286498a83e97842c9bd37bfd8
  Stored in directory: /tmp/pip-ephem-wheel-cache-ysln68d0/wheels/ea/18/79/6266ea508b8164a77b95aa19534c77eb805f2878612c37efca
  Building wheel for peewee (pyproject.toml) ... done
  Created wheel for peewee: filename=peewee-3.18.2-py3-none-any.whl size=139106 sha256=74775c98fa5491eac6deed3b5a8d8c2e44da24939d4780973eaec27fdde604ca
  Stored in directory: /tmp/pip-ephem-wheel-cache-ysln68d0/wheels/28/84/61/758d1bd7b9c9d700158c8642a8aff2a9bf2e1ae69641c40784
  DEPRECATION: Building 'fire' using the legacy setup.py bdist_wheel mechanism, which will be removed in a future version. pip 25.3 will enforce this behaviour change. A possible replacement is to use the standardized build interface by setting the `--use-pep517` option, (possibly combined with `--no-build-isolation`), or adding a `pyproject.toml` file to the source tree of 'fire'. Discussion can be found at https://github.com/pypa/pip/issues/6334
  Building wheel for fire (setup.py) ... done
  Created wheel for fire: filename=fire-0.7.0-py3-none-any.whl size=114249 sha256=604933afdfa2c129d2a0233ffaccf9ce24822ded3a417ab33a6f781ddc81d7af
  Stored in directory: /tmp/pip-ephem-wheel-cache-ysln68d0/wheels/46/54/24/1624fd5b8674eb1188623f7e8e17cdf7c0f6c24b609dfb8a89
  DEPRECATION: Building 'rouge-score' using the legacy setup.py bdist_wheel mechanism, which will be removed in a future version. pip 25.3 will enforce this behaviour change. A possible replacement is to use the standardized build interface by setting the `--use-pep517` option, (possibly combined with `--no-build-isolation`), or adding a `pyproject.toml` file to the source tree of 'rouge-score'. Discussion can be found at https://github.com/pypa/pip/issues/6334
  Building wheel for rouge-score (setup.py) ... done
  Created wheel for rouge-score: filename=rouge_score-0.1.2-py3-none-any.whl size=24935 sha256=0831f9eb0a69648a284b2f8bc386bdee2e9c262f67eb5d95bd6bffb09eaec1fc
  Stored in directory: /tmp/pip-ephem-wheel-cache-ysln68d0/wheels/1e/19/43/8a442dc83660ca25e163e1bd1f89919284ab0d0c1475475148
  DEPRECATION: Building 'antlr4-python3-runtime' using the legacy setup.py bdist_wheel mechanism, which will be removed in a future version. pip 25.3 will enforce this behaviour change. A possible replacement is to use the standardized build interface by setting the `--use-pep517` option, (possibly combined with `--no-build-isolation`), or adding a `pyproject.toml` file to the source tree of 'antlr4-python3-runtime'. Discussion can be found at https://github.com/pypa/pip/issues/6334
  Building wheel for antlr4-python3-runtime (setup.py) ... done
  Created wheel for antlr4-python3-runtime: filename=antlr4_python3_runtime-4.9.3-py3-none-any.whl size=144555 sha256=187021b4cd7030f0ba2c29c20fdd1655628f2e6f082d8f5ae91f16dd343995bd
  Stored in directory: /tmp/pip-ephem-wheel-cache-ysln68d0/wheels/56/e9/6d/b5ab1c9ab438ad8897f796286bf23cd4ffc0f1ea8bc2200ecd
  DEPRECATION: Building 'langdetect' using the legacy setup.py bdist_wheel mechanism, which will be removed in a future version. pip 25.3 will enforce this behaviour change. A possible replacement is to use the standardized build interface by setting the `--use-pep517` option, (possibly combined with `--no-build-isolation`), or adding a `pyproject.toml` file to the source tree of 'langdetect'. Discussion can be found at https://github.com/pypa/pip/issues/6334
  Building wheel for langdetect (setup.py) ... done
  Created wheel for langdetect: filename=langdetect-1.0.9-py3-none-any.whl size=993225 sha256=a906ce17a25a949ae03647d998c1541f9c12b603f2c439fdef6983b2076421fc
  Stored in directory: /tmp/pip-ephem-wheel-cache-ysln68d0/wheels/0a/f2/b2/e5ca405801e05eb7c8ed5b3b4bcf1fcabcd6272c167640072e
Successfully built markupsafe peewee fire rouge-score antlr4-python3-runtime langdetect
Installing collected packages: texttable, sentencepiece, pydub, peewee, flatbuffers, filetype, brotli, boltons, appdirs, antlr4-python3-runtime, zipp, wrapt, websockets, unstructured.pytesseract, typing-extensions, tomlkit, tomli, tokenize-rt, threadpoolctl, termcolor, tabulate, shellingham, semantic-version, ruff, ruamel.yaml.clib, rapidfuzz, pyyaml, python-multipart, python-magic, python-iso639, pyppmd, pypdfium2, pypdf, pyparsing, pycryptodomex, pybcj, pyasn1, protobuf, pi-heif, pdf2image, pathspec, orjson, opentelemetry-util-http, olefile, numpy, mypy-extensions, multivolumefile, mdurl, marshmallow, markupsafe, lxml, loralib, langdetect, kiwisolver, joblib, jiter, inflate64, humanfriendly, html5lib, grpcio, groovy, fonttools, ffmpy, face, exceptiongroup, emoji, distro, cycler, colorama, click, chardet, cachetools, bracex, backoff, annotated-types, aiofiles, absl-py, wcmatch, uvicorn, typing-inspection, typing-inspect, scipy, ruamel.yaml, rsa, requests-toolbelt, pyzstd, python-oxmsg, pydantic-core, pycocotools, pyasn1-modules, proto-plus, opentelemetry-proto, opencv-python, onnx, omegaconf, nltk, markdown-it-py, importlib-metadata, googleapis-common-protos, glom, fire, deprecated, cryptography, contourpy, coloredlogs, click-option-group, black, starlette, scikit-learn, rouge-score, rich, pydantic, py7zr, pikepdf, pdfminer.six, opentelemetry-exporter-otlp-proto-common, opentelemetry-api, onnxruntime, matplotlib, grpcio-status, google-auth, dataclasses-json, unstructured-client, typer, safehttpx, opentelemetry-semantic-conventions, opentelemetry-instrumentation, openai, gradio-client, google-api-core, fastapi, unstructured, timm, sentence-transformers, optimum, opentelemetry-sdk, opentelemetry-instrumentation-requests, gradio, evaluate, unstructured-inference, opentelemetry-exporter-otlp-proto-http, google-cloud-vision, effdet, semgrep, codeshield, llama-cookbook, llama-recipes
  Attempting uninstall: typing-extensions
    Found existing installation: typing_extensions 4.12.2
    Uninstalling typing_extensions-4.12.2:
      Successfully uninstalled typing_extensions-4.12.2
  Attempting uninstall: pyyaml
    Found existing installation: PyYAML 6.0.2
    Uninstalling PyYAML-6.0.2:
      Successfully uninstalled PyYAML-6.0.2
  Attempting uninstall: protobuf
    Found existing installation: protobuf 5.29.5
    Uninstalling protobuf-5.29.5:
      Successfully uninstalled protobuf-5.29.5
  Attempting uninstall: numpy
    Found existing installation: numpy 2.3.1
    Uninstalling numpy-2.3.1:
      Successfully uninstalled numpy-2.3.1
  Attempting uninstall: markupsafe
    Found existing installation: MarkupSafe 3.0.2
    Uninstalling MarkupSafe-3.0.2:
      Successfully uninstalled MarkupSafe-3.0.2
   โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ”โ” 147/147 [llama-recipes]
Successfully installed absl-py-2.3.1 aiofiles-24.1.0 annotated-types-0.7.0 antlr4-python3-runtime-4.9.3 appdirs-1.4.4 backoff-2.2.1 black-25.1.0 boltons-21.0.0 bracex-2.6 brotli-1.1.0 cachetools-5.5.2 chardet-5.2.0 click-8.1.8 click-option-group-0.5.7 codeshield-1.0.1 colorama-0.4.6 coloredlogs-15.0.1 contourpy-1.3.2 cryptography-45.0.5 cycler-0.12.1 dataclasses-json-0.6.7 deprecated-1.2.18 distro-1.9.0 effdet-0.4.1 emoji-2.14.1 evaluate-0.4.5 exceptiongroup-1.2.2 face-24.0.0 fastapi-0.116.1 ffmpy-0.6.0 filetype-1.2.0 fire-0.7.0 flatbuffers-25.2.10 fonttools-4.59.0 glom-22.1.0 google-api-core-2.25.1 google-auth-2.40.3 google-cloud-vision-3.10.2 googleapis-common-protos-1.70.0 gradio-5.38.0 gradio-client-1.11.0 groovy-0.1.2 grpcio-1.73.1 grpcio-status-1.62.3 html5lib-1.1 humanfriendly-10.0 importlib-metadata-7.1.0 inflate64-1.0.3 jiter-0.10.0 joblib-1.5.1 kiwisolver-1.4.8 langdetect-1.0.9 llama-cookbook-0.0.5.post1 llama-recipes-0.0.5.post2 loralib-0.1.2 lxml-6.0.0 markdown-it-py-3.0.0 markupsafe-2.0.1 marshmallow-3.26.1 matplotlib-3.10.3 mdurl-0.1.2 multivolumefile-0.2.3 mypy-extensions-1.1.0 nltk-3.9.1 numpy-2.2.6 olefile-0.47 omegaconf-2.3.0 onnx-1.18.0 onnxruntime-1.22.1 openai-1.97.0 opencv-python-4.12.0.88 opentelemetry-api-1.25.0 opentelemetry-exporter-otlp-proto-common-1.25.0 opentelemetry-exporter-otlp-proto-http-1.25.0 opentelemetry-instrumentation-0.46b0 opentelemetry-instrumentation-requests-0.46b0 opentelemetry-proto-1.25.0 opentelemetry-sdk-1.25.0 opentelemetry-semantic-conventions-0.46b0 opentelemetry-util-http-0.46b0 optimum-1.26.1 orjson-3.11.0 pathspec-0.12.1 pdf2image-1.17.0 pdfminer.six-20250506 peewee-3.18.2 pi-heif-1.0.0 pikepdf-9.10.2 proto-plus-1.26.1 protobuf-4.25.8 py7zr-1.0.0 pyasn1-0.6.1 pyasn1-modules-0.4.2 pybcj-1.0.6 pycocotools-2.0.10 pycryptodomex-3.23.0 pydantic-2.11.7 pydantic-core-2.33.2 pydub-0.25.1 pyparsing-3.2.3 pypdf-5.8.0 pypdfium2-4.30.1 pyppmd-1.2.0 python-iso639-2025.2.18 python-magic-0.4.27 python-multipart-0.0.20 python-oxmsg-0.0.2 pyyaml-6.0.1 pyzstd-0.17.0 rapidfuzz-3.13.0 requests-toolbelt-1.0.0 rich-13.5.3 rouge-score-0.1.2 rsa-4.9.1 ruamel.yaml-0.18.14 ruamel.yaml.clib-0.2.12 ruff-0.12.4 safehttpx-0.1.6 scikit-learn-1.7.1 scipy-1.16.0 semantic-version-2.10.0 semgrep-1.128.1 sentence-transformers-5.0.0 sentencepiece-0.2.0 shellingham-1.5.4 starlette-0.47.1 tabulate-0.9.0 termcolor-3.1.0 texttable-1.7.0 threadpoolctl-3.6.0 timm-1.0.17 tokenize-rt-6.2.0 tomli-2.0.2 tomlkit-0.13.3 typer-0.16.0 typing-extensions-4.14.1 typing-inspect-0.9.0 typing-inspection-0.4.1 unstructured-0.18.9 unstructured-client-0.39.1 unstructured-inference-1.0.5 unstructured.pytesseract-0.3.15 uvicorn-0.35.0 wcmatch-8.5.2 websockets-15.0.1 wrapt-1.17.2 zipp-3.23.0
Note: you may need to restart the kernel to use updated packages.
%pip install bitsandbytes>=0.43.0
Note: you may need to restart the kernel to use updated packages.
from huggingface_hub import login
import getpass

# Get your token securely
hf_token = getpass.getpass("Enter your Hugging Face token: ")

# Log in programmatically
login(token=hf_token)

print("โœ… Successfully logged in to Hugging Face!")
/root/.local/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html
  from .autonotebook import tqdm as notebook_tqdm
Enter your Hugging Face token:  ยทยทยทยทยทยทยทยท
โœ… Successfully logged in to Hugging Face!
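If you want a quick sanity check that the token was accepted, a minimal sketch using huggingface_hub's whoami (it simply prints the account the cached token resolves to):

from huggingface_hub import whoami

# whoami() uses the token cached by login(); it raises if the token is invalid
user_info = whoami()
print(f"Logged in as: {user_info['name']}")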
import os
import subprocess

# Set environment variable
os.environ['BNB_CUDA_VERSION'] = '125'

# Install ONLY the essential fixes
commands = [
    ["pip", "install", "transformers==4.47.1", "--upgrade"],
    ["pip", "install", "bitsandbytes>=0.43.0", "--upgrade", "--force-reinstall"]
]

for cmd in commands:
    print(f"Running: {' '.join(cmd)}")
    try:
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=300)
        if result.returncode != 0:
            print(f"Error: {result.stderr}")
        else:
            print("โœ… Success")
    except subprocess.TimeoutExpired:
        print("โš ๏ธ Command timed out")

print("โœ… Essential dependencies updated")
Running: pip install transformers==4.47.1 --upgrade
โœ… Success
Running: pip install bitsandbytes>=0.43.0 --upgrade --force-reinstall
โœ… Success
โœ… Essential dependencies updated
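As a quick check that the pins actually landed (package metadata is read from disk, so this reflects what pip installed even if the running kernel still has older modules imported), a minimal sketch:

from importlib.metadata import version

# Report the installed versions of the two packages that were just pinned/upgraded
for pkg in ["transformers", "bitsandbytes"]:
    print(f"{pkg}: {version(pkg)}")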
import os
os.chdir("fsdp_qlora")

# Apply fixes to train.py
with open("train.py", "r") as f:
    content = f.read()

# Apply transformers fix
if "LLAMA_ATTENTION_CLASSES" in content:
    print("๐Ÿ”ง Applying transformers fix...")
    
    # Replace references to the *_ATTENTION_CLASSES dicts with the concrete attention classes
    content = content.replace(
        "LLAMA_ATTENTION_CLASSES,", 
        "LlamaAttention,"
    )
    content = content.replace(
        "MISTRAL_ATTENTION_CLASSES,", 
        "MistralAttention,"
    )
    content = content.replace(
        "(*LLAMA_ATTENTION_CLASSES.values(), *MISTRAL_ATTENTION_CLASSES.values())",
        "(LlamaAttention, MistralAttention)"
    )
    
    # Register the custom dataset as a valid --dataset choice in train.py
    if "uganda_clinical_guidelines" not in content:
        content = content.replace(
            '"orca_math"]) = "alpaca_sample",',
            '"orca_math", "uganda_clinical_guidelines"]) = "alpaca_sample",'
        )
    
    with open("train.py", "w") as f:
        f.write(content)
    
    print("โœ… train.py fixed")
    
๐Ÿ”ง Applying transformers fix...
โœ… train.py fixed
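To confirm the patch took effect, a small sketch that re-reads train.py and reports which attention symbols it now references:

# Re-read the patched file; the dict-based names should be gone after the replacements above
with open("train.py") as f:
    patched = f.read()

print("LLAMA_ATTENTION_CLASSES still present:", "LLAMA_ATTENTION_CLASSES" in patched)
print("LlamaAttention present:               ", "LlamaAttention" in patched)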
# Test if fixes work
try:
    import bitsandbytes
    print("โœ… Bitsandbytes works")
except Exception as e:
    print(f"โŒ Bitsandbytes issue: {e}")

print("Ready for training!")
WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=
โŒ Bitsandbytes issue: Failed to find C compiler. Please specify via CC environment variable.
Ready for training!
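The bitsandbytes failure above is about a missing C compiler (the error asks for a CC environment variable). Before retrying, a minimal sketch that just reports which compilers are visible on PATH; the next cell installs build-essential and points CC/CXX at gcc/g++:

import shutil

# Show which C/C++ compilers, if any, are currently on PATH
for tool in ["gcc", "g++", "cc"]:
    print(f"{tool}: {shutil.which(tool) or 'not found'}")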
import subprocess
import os

def setup_environment():
    """Setup the environment to avoid compiler issues"""
    
    print("๐Ÿ”ง Setting up environment for training...")
    
    # Step 1: Set environment variables
    os.environ['BNB_CUDA_VERSION'] = '125'
    os.environ['CC'] = '/usr/bin/gcc'
    os.environ['CXX'] = '/usr/bin/g++'
    os.environ['CUDA_VISIBLE_DEVICES'] = '0,1'
    
    # Step 2: Install build tools if possible
    try:
        print("๐Ÿ“ฆ Installing build tools...")
        subprocess.run(["apt", "update"], capture_output=True, timeout=60)
        result = subprocess.run(["apt", "install", "-y", "build-essential", "gcc", "g++"], 
                              capture_output=True, timeout=120)
        if result.returncode == 0:
            print("โœ… Build tools installed")
        else:
            print("โš ๏ธ Build tools installation failed, proceeding anyway...")
    except Exception as e:
        print(f"โš ๏ธ Could not install build tools: {e}")
    
    # Step 3: Test if bitsandbytes works now
    try:
        import bitsandbytes
        print("โœ… Bitsandbytes imports successfully")
        return True
    except Exception as e:
        print(f"โŒ Bitsandbytes still has issues: {e}")
        
        # Step 4: Try installing older version
        print("๐Ÿ”„ Trying older bitsandbytes version...")
        try:
            subprocess.run(["pip", "uninstall", "bitsandbytes", "-y"], capture_output=True)
            subprocess.run(["pip", "install", "bitsandbytes==0.41.3"], capture_output=True)
            
            import bitsandbytes
            print("โœ… Older bitsandbytes version works")
            return True
        except Exception as e2:
            print(f"โŒ Even older version failed: {e2}")
            return False

# Run the setup
if setup_environment():
    print("๐Ÿš€ Environment ready! Running training...")
    
    # Your training command
    cmd = [
        "python", "train.py",
        "--train_type", "bnb_dora",
        "--model_name", "meta-llama/Llama-2-7b-hf", 
        "--dataset", "ug_clinical_guidelines",  # Fixed dataset name
        "--dataset_samples", "10",
        "--batch_size", "1",
        "--context_length", "256",
        "--num_epochs", "1",
        "--save_model", "false",
        "--log_to", "stdout"
    ]
    
    print("๐Ÿงช Running test training...")
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
    
    try:
        for line in iter(process.stdout.readline, ''):
            if line:
                print(line.rstrip())
        process.wait()
        print(f"Test completed: {process.returncode}")
    except KeyboardInterrupt:
        print("Interrupted")
        process.terminate()
        
else:
    print("โŒ Could not setup environment properly")
๐Ÿ”ง Setting up environment for training...
๐Ÿ“ฆ Installing build tools...
โœ… Build tools installed
โœ… Bitsandbytes imports successfully
๐Ÿš€ Environment ready! Running training...
๐Ÿงช Running test training...
WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=

World size: 2
WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=

WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=


Generating train split:   0%|          | 0/130 [00:00<?, ? examples/s]
Generating train split: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 130/130 [00:00<00:00, 23549.26 examples/s]
Creating model 0

Downloading shards:   0%|          | 0/2 [00:00<?, ?it/s]
Downloading shards:   0%|          | 0/2 [00:00<?, ?it/s]
Downloading shards:  50%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ     | 1/2 [00:48<00:48, 48.18s/it]
Downloading shards:  50%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ     | 1/2 [00:48<00:48, 48.21s/it]
Downloading shards: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 2/2 [01:04<00:00, 29.56s/it]
Downloading shards: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 2/2 [01:04<00:00, 32.35s/it]
Loading model 0

Loading & Quantizing Model Shards:   0%|          | 0/2 [00:00<?, ?it/s]
Downloading shards: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 2/2 [01:04<00:00, 29.57s/it]
Downloading shards: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 2/2 [01:04<00:00, 32.36s/it]
WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=

WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=


Loading & Quantizing Model Shards:  50%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ     | 1/2 [00:15<00:15, 15.38s/it]WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=

WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=


Loading & Quantizing Model Shards: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 2/2 [00:25<00:00, 12.20s/it]
Loading & Quantizing Model Shards: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 2/2 [00:25<00:00, 12.68s/it]
/usr/local/lib/python3.11/site-packages/torch/distributed/distributed_c10d.py:4631: UserWarning: No device id is provided via `init_process_group` or `barrier `. Using the current device set by the user.
  warnings.warn(  # warn only once
/usr/local/lib/python3.11/site-packages/torch/distributed/distributed_c10d.py:4631: UserWarning: No device id is provided via `init_process_group` or `barrier `. Using the current device set by the user.
  warnings.warn(  # warn only once
Rank 0: Model created: 0.107 GiB
Using BNB DORA 0
Rank 0: LoRA layers added: 0.107 GiB
Wrapping model w/ FSDP 0
Rank 0: Wrapped model: 1.625 GiB
Applying activation checkpointing 0
Total Training Steps: 5

  0%|          | 0/5 [00:00<?, ?it/s]
Epoch 0, Loss 0.000:   0%|          | 0/5 [00:00<?, ?it/s]
Epoch 0, Loss 0.000:  20%|โ–ˆโ–ˆ        | 1/5 [00:07<00:28,  7.01s/it]
Epoch 0, Loss 1.388, LR 1.00e-05:  20%|โ–ˆโ–ˆ        | 1/5 [00:07<00:28,  7.01s/it]
Epoch 0, Loss 1.388, LR 1.00e-05:  40%|โ–ˆโ–ˆโ–ˆโ–ˆ      | 2/5 [00:09<00:12,  4.30s/it]
Epoch 0, Loss 1.477, LR 1.00e-05:  40%|โ–ˆโ–ˆโ–ˆโ–ˆ      | 2/5 [00:09<00:12,  4.30s/it]
Epoch 0, Loss 1.477, LR 1.00e-05:  60%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ    | 3/5 [00:11<00:06,  3.23s/it]
Epoch 0, Loss 1.187, LR 1.00e-05:  60%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ    | 3/5 [00:11<00:06,  3.23s/it]
Epoch 0, Loss 1.187, LR 1.00e-05:  80%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ  | 4/5 [00:13<00:02,  2.71s/it]
Epoch 0, Loss 1.041, LR 1.00e-05:  80%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ  | 4/5 [00:13<00:02,  2.71s/it]
Epoch 0, Loss 1.041, LR 1.00e-05: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 5/5 [00:15<00:00,  2.43s/it]
Epoch 0, Loss 1.475, LR 1.00e-05: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 5/5 [00:15<00:00,  2.43s/it]
Epoch 0, Loss 1.475, LR 1.00e-05: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 5/5 [00:15<00:00,  3.12s/it]
Finished training 0
CUDA event elapsed time: 15.2380859375 sec
time_taken: 15.2380859375
Rank 0: Before forward: 1.62 GiB
Rank 0: After forward: 2.46 GiB
Rank 0: After backward: 2.64 GiB
Rank 0: Peak allocated memory: 1.35 GiB
Rank 0: Peak reserved memory:  2.65 GiB
Using BNB DORA 1
Test completed: 0
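The reported step counts match samples divided by the global batch size (per-device batch size times world size), rounded up per epoch. A quick sanity check, assuming no gradient accumulation:

import math

def expected_steps(samples, batch_size, world_size, epochs):
    # steps per epoch = ceil(samples / global batch size), then multiplied by epochs
    return math.ceil(samples / (batch_size * world_size)) * epochs

print(expected_steps(10, 1, 2, 1))   # test run above: 5 steps
print(expected_steps(130, 2, 2, 3))  # full run below: 99 steps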
!ls
'Converting the State Dict.ipynb'   fsdp_multi_node.sh   tests
 LICENSE                hf_train.py      train.py
 PROFILING.md               nbs          train.sh
 README.md              profile.sh       train_hqq_bench.sh
 __pycache__                profiling_utils.py   train_sql.sh
 benchmarking               scripts
 benchmarks_03_2024.md          table1.sh
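Before kicking off the longer run, a small sketch to confirm that both GPUs are visible (the test run above reported World size: 2, i.e. one worker process per visible device):

import torch

# CUDA_VISIBLE_DEVICES='0,1' should expose two devices to this process
print("CUDA available:", torch.cuda.is_available())
print("Visible GPUs:  ", torch.cuda.device_count())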
import subprocess
import os

# Set environment
os.environ['BNB_CUDA_VERSION'] = '125'
os.environ['CUDA_VISIBLE_DEVICES'] = '0,1'

# FULL TRAINING with model saving
cmd = [
    "python", "train.py",
    "--train_type", "bnb_dora",
    "--model_name", "meta-llama/Llama-2-7b-hf", 
    "--dataset", "ug_clinical_guidelines",
    "--dataset_samples", "130",  # Use all your data
    "--batch_size", "2",
    "--context_length", "512",   # Longer context for medical text
    "--precision", "bf16",
    "--num_epochs", "3",         # More epochs for better training
    "--save_model", "true",      # ๐Ÿ”ฅ SAVE THE MODEL
    "--output_dir", "./uganda_clinical_model",  # Where to save
    "--log_to", "stdout"
]

print("๐Ÿฅ Training Uganda Clinical Model (FULL)...")
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)

try:
    for line in iter(process.stdout.readline, ''):
        if line:
            print(line.rstrip())
    process.wait()
    print(f"Training completed: {process.returncode}")
    
    # Check if model was saved
    if os.path.exists("uganda_clinical_model"):
        print("๐ŸŽ‰ Model saved successfully!")
        print("๐Ÿ“ Saved files:")
        for f in os.listdir("uganda_clinical_model"):
            print(f"  ๐Ÿ“„ {f}")
    
except KeyboardInterrupt:
    print("Interrupted")
    process.terminate()
๐Ÿฅ Training Uganda Clinical Model (FULL)...
WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=

World size: 2
WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=

WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=

Creating model 0
Loading model 0

Loading & Quantizing Model Shards:   0%|          | 0/2 [00:00<?, ?it/s]WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=

WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=

WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=


Loading & Quantizing Model Shards:  50%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ     | 1/2 [00:15<00:15, 15.16s/it]WARNING: BNB_CUDA_VERSION=125 environment variable detected; loading libbitsandbytes_cuda125.so.
This can be used to load a bitsandbytes version built with a CUDA version that is different from the PyTorch CUDA version.
If this was unintended set the BNB_CUDA_VERSION variable to an empty string: export BNB_CUDA_VERSION=


Loading & Quantizing Model Shards: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 2/2 [00:26<00:00, 12.62s/it]
Loading & Quantizing Model Shards: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 2/2 [00:26<00:00, 13.00s/it]
/usr/local/lib/python3.11/site-packages/torch/distributed/distributed_c10d.py:4631: UserWarning: No device id is provided via `init_process_group` or `barrier `. Using the current device set by the user.
  warnings.warn(  # warn only once
/usr/local/lib/python3.11/site-packages/torch/distributed/distributed_c10d.py:4631: UserWarning: No device id is provided via `init_process_group` or `barrier `. Using the current device set by the user.
  warnings.warn(  # warn only once
Rank 0: Model created: 0.107 GiB
Using BNB DORA 0
Rank 0: LoRA layers added: 0.107 GiB
Wrapping model w/ FSDP 0
Rank 0: Wrapped model: 1.625 GiB
Applying activation checkpointing 0
Total Training Steps: 99

  0%|          | 0/99 [00:00<?, ?it/s]
Epoch 0, Loss 0.000:   0%|          | 0/99 [00:00<?, ?it/s]
Epoch 0, Loss 0.000:   1%|          | 1/99 [00:08<13:29,  8.26s/it]
Epoch 0, Loss 1.426, LR 1.00e-05:   1%|          | 1/99 [00:08<13:29,  8.26s/it]
Epoch 0, Loss 1.426, LR 1.00e-05:   2%|โ–         | 2/99 [00:10<07:31,  4.66s/it]
Epoch 0, Loss 1.409, LR 1.00e-05:   2%|โ–         | 2/99 [00:10<07:31,  4.66s/it]
Epoch 0, Loss 1.409, LR 1.00e-05:   3%|โ–Ž         | 3/99 [00:12<05:27,  3.41s/it]
Epoch 0, Loss 1.369, LR 1.00e-05:   3%|โ–Ž         | 3/99 [00:12<05:27,  3.41s/it]
Epoch 0, Loss 1.369, LR 1.00e-05:   4%|โ–         | 4/99 [00:14<04:27,  2.82s/it]
Epoch 0, Loss 1.200, LR 1.00e-05:   4%|โ–         | 4/99 [00:14<04:27,  2.82s/it]
Epoch 0, Loss 1.200, LR 1.00e-05:   5%|โ–Œ         | 5/99 [00:16<03:55,  2.51s/it]
Epoch 0, Loss 1.123, LR 1.00e-05:   5%|โ–Œ         | 5/99 [00:16<03:55,  2.51s/it]
Epoch 0, Loss 1.123, LR 1.00e-05:   6%|โ–Œ         | 6/99 [00:18<03:37,  2.34s/it]
Epoch 0, Loss 1.126, LR 1.00e-05:   6%|โ–Œ         | 6/99 [00:18<03:37,  2.34s/it]
Epoch 0, Loss 1.126, LR 1.00e-05:   7%|โ–‹         | 7/99 [00:20<03:23,  2.21s/it]
Epoch 0, Loss 0.955, LR 1.00e-05:   7%|โ–‹         | 7/99 [00:20<03:23,  2.21s/it]
Epoch 0, Loss 0.955, LR 1.00e-05:   8%|โ–Š         | 8/99 [00:22<03:20,  2.21s/it]
Epoch 0, Loss 0.910, LR 1.00e-05:   8%|โ–Š         | 8/99 [00:22<03:20,  2.21s/it]
Epoch 0, Loss 0.910, LR 1.00e-05:   9%|โ–‰         | 9/99 [00:24<03:12,  2.14s/it]
Epoch 0, Loss 0.948, LR 1.00e-05:   9%|โ–‰         | 9/99 [00:24<03:12,  2.14s/it]
Epoch 0, Loss 0.948, LR 1.00e-05:  10%|โ–ˆ         | 10/99 [00:26<03:05,  2.08s/it]
Epoch 0, Loss 0.674, LR 1.00e-05:  10%|โ–ˆ         | 10/99 [00:26<03:05,  2.08s/it]
Epoch 0, Loss 0.674, LR 1.00e-05:  11%|โ–ˆ         | 11/99 [00:28<02:58,  2.03s/it]
Epoch 0, Loss 0.956, LR 1.00e-05:  11%|โ–ˆ         | 11/99 [00:28<02:58,  2.03s/it]
Epoch 0, Loss 0.956, LR 1.00e-05:  12%|โ–ˆโ–        | 12/99 [00:30<02:54,  2.01s/it]
Epoch 0, Loss 0.994, LR 1.00e-05:  12%|โ–ˆโ–        | 12/99 [00:30<02:54,  2.01s/it]
Epoch 0, Loss 0.994, LR 1.00e-05:  13%|โ–ˆโ–Ž        | 13/99 [00:32<02:52,  2.00s/it]
Epoch 0, Loss 0.803, LR 1.00e-05:  13%|โ–ˆโ–Ž        | 13/99 [00:32<02:52,  2.00s/it]
Epoch 0, Loss 0.803, LR 1.00e-05:  14%|โ–ˆโ–        | 14/99 [00:34<02:49,  2.00s/it]
Epoch 0, Loss 0.902, LR 1.00e-05:  14%|โ–ˆโ–        | 14/99 [00:34<02:49,  2.00s/it]
Epoch 0, Loss 0.902, LR 1.00e-05:  15%|โ–ˆโ–Œ        | 15/99 [00:36<02:53,  2.07s/it]
Epoch 0, Loss 1.091, LR 1.00e-05:  15%|โ–ˆโ–Œ        | 15/99 [00:36<02:53,  2.07s/it]
Epoch 0, Loss 1.091, LR 1.00e-05:  16%|โ–ˆโ–Œ        | 16/99 [00:38<02:49,  2.04s/it]
Epoch 0, Loss 0.834, LR 1.00e-05:  16%|โ–ˆโ–Œ        | 16/99 [00:38<02:49,  2.04s/it]
Epoch 0, Loss 0.834, LR 1.00e-05:  17%|โ–ˆโ–‹        | 17/99 [00:40<02:44,  2.00s/it]
Epoch 0, Loss 1.042, LR 1.00e-05:  17%|โ–ˆโ–‹        | 17/99 [00:40<02:44,  2.00s/it]
Epoch 0, Loss 1.042, LR 1.00e-05:  18%|โ–ˆโ–Š        | 18/99 [00:42<02:39,  1.97s/it]
Epoch 0, Loss 0.731, LR 1.00e-05:  18%|โ–ˆโ–Š        | 18/99 [00:42<02:39,  1.97s/it]
Epoch 0, Loss 0.731, LR 1.00e-05:  19%|โ–ˆโ–‰        | 19/99 [00:44<02:35,  1.95s/it]
Epoch 0, Loss 1.042, LR 1.00e-05:  19%|โ–ˆโ–‰        | 19/99 [00:44<02:35,  1.95s/it]
Epoch 0, Loss 1.042, LR 1.00e-05:  20%|โ–ˆโ–ˆ        | 20/99 [00:45<02:33,  1.94s/it]
Epoch 0, Loss 1.062, LR 1.00e-05:  20%|โ–ˆโ–ˆ        | 20/99 [00:46<02:33,  1.94s/it]
Epoch 0, Loss 1.062, LR 1.00e-05:  21%|โ–ˆโ–ˆ        | 21/99 [00:47<02:31,  1.95s/it]
Epoch 0, Loss 0.817, LR 1.00e-05:  21%|โ–ˆโ–ˆ        | 21/99 [00:47<02:31,  1.95s/it]
Epoch 0, Loss 0.817, LR 1.00e-05:  22%|โ–ˆโ–ˆโ–       | 22/99 [00:50<02:36,  2.04s/it]
Epoch 0, Loss 0.928, LR 1.00e-05:  22%|โ–ˆโ–ˆโ–       | 22/99 [00:50<02:36,  2.04s/it]
Epoch 0, Loss 0.928, LR 1.00e-05:  23%|โ–ˆโ–ˆโ–Ž       | 23/99 [00:52<02:32,  2.01s/it]
Epoch 0, Loss 1.187, LR 1.00e-05:  23%|โ–ˆโ–ˆโ–Ž       | 23/99 [00:52<02:32,  2.01s/it]
Epoch 0, Loss 1.187, LR 1.00e-05:  24%|โ–ˆโ–ˆโ–       | 24/99 [00:54<02:29,  1.99s/it]
Epoch 0, Loss 1.039, LR 1.00e-05:  24%|โ–ˆโ–ˆโ–       | 24/99 [00:54<02:29,  1.99s/it]
Epoch 0, Loss 1.039, LR 1.00e-05:  25%|โ–ˆโ–ˆโ–Œ       | 25/99 [00:56<02:27,  1.99s/it]
Epoch 0, Loss 0.800, LR 1.00e-05:  25%|โ–ˆโ–ˆโ–Œ       | 25/99 [00:56<02:27,  1.99s/it]
Epoch 0, Loss 0.800, LR 1.00e-05:  26%|โ–ˆโ–ˆโ–‹       | 26/99 [00:58<02:24,  1.98s/it]
Epoch 0, Loss 0.946, LR 1.00e-05:  26%|โ–ˆโ–ˆโ–‹       | 26/99 [00:58<02:24,  1.98s/it]
Epoch 0, Loss 0.946, LR 1.00e-05:  27%|โ–ˆโ–ˆโ–‹       | 27/99 [01:00<02:23,  1.99s/it]
Epoch 0, Loss 1.006, LR 1.00e-05:  27%|โ–ˆโ–ˆโ–‹       | 27/99 [01:00<02:23,  1.99s/it]
Epoch 0, Loss 1.006, LR 1.00e-05:  28%|โ–ˆโ–ˆโ–Š       | 28/99 [01:01<02:20,  1.98s/it]
Epoch 0, Loss 0.677, LR 1.00e-05:  28%|โ–ˆโ–ˆโ–Š       | 28/99 [01:01<02:20,  1.98s/it]
Epoch 0, Loss 0.677, LR 1.00e-05:  29%|โ–ˆโ–ˆโ–‰       | 29/99 [01:04<02:22,  2.04s/it]
Epoch 0, Loss 1.013, LR 1.00e-05:  29%|โ–ˆโ–ˆโ–‰       | 29/99 [01:04<02:22,  2.04s/it]
Epoch 0, Loss 1.013, LR 1.00e-05:  30%|โ–ˆโ–ˆโ–ˆ       | 30/99 [01:06<02:18,  2.01s/it]
Epoch 0, Loss 0.918, LR 1.00e-05:  30%|โ–ˆโ–ˆโ–ˆ       | 30/99 [01:06<02:18,  2.01s/it]
Epoch 0, Loss 0.918, LR 1.00e-05:  31%|โ–ˆโ–ˆโ–ˆโ–      | 31/99 [01:08<02:16,  2.01s/it]
Epoch 0, Loss 0.839, LR 1.00e-05:  31%|โ–ˆโ–ˆโ–ˆโ–      | 31/99 [01:08<02:16,  2.01s/it]
Epoch 0, Loss 0.839, LR 1.00e-05:  32%|โ–ˆโ–ˆโ–ˆโ–      | 32/99 [01:10<02:15,  2.02s/it]
Epoch 0, Loss 1.119, LR 1.00e-05:  32%|โ–ˆโ–ˆโ–ˆโ–      | 32/99 [01:10<02:15,  2.02s/it]
Epoch 0, Loss 1.119, LR 1.00e-05:  33%|โ–ˆโ–ˆโ–ˆโ–Ž      | 33/99 [01:12<02:13,  2.02s/it]
Epoch 0, Loss 0.769, LR 1.00e-05:  33%|โ–ˆโ–ˆโ–ˆโ–Ž      | 33/99 [01:12<02:13,  2.02s/it]
Epoch 1, Loss 0.769, LR 1.00e-05:  33%|โ–ˆโ–ˆโ–ˆโ–Ž      | 33/99 [01:12<02:13,  2.02s/it]
Epoch 1, Loss 0.769, LR 1.00e-05:  34%|โ–ˆโ–ˆโ–ˆโ–      | 34/99 [01:14<02:12,  2.03s/it]
Epoch 1, Loss 0.613, LR 1.00e-05:  34%|โ–ˆโ–ˆโ–ˆโ–      | 34/99 [01:14<02:12,  2.03s/it]
Epoch 1, Loss 0.613, LR 1.00e-05:  35%|โ–ˆโ–ˆโ–ˆโ–Œ      | 35/99 [01:16<02:09,  2.02s/it]
Epoch 1, Loss 0.661, LR 1.00e-05:  35%|โ–ˆโ–ˆโ–ˆโ–Œ      | 35/99 [01:16<02:09,  2.02s/it]
Epoch 1, Loss 0.661, LR 1.00e-05:  36%|โ–ˆโ–ˆโ–ˆโ–‹      | 36/99 [01:18<02:13,  2.13s/it]
Epoch 1, Loss 0.629, LR 1.00e-05:  36%|โ–ˆโ–ˆโ–ˆโ–‹      | 36/99 [01:18<02:13,  2.13s/it]
Epoch 1, Loss 0.629, LR 1.00e-05:  37%|โ–ˆโ–ˆโ–ˆโ–‹      | 37/99 [01:20<02:10,  2.10s/it]
Epoch 1, Loss 0.638, LR 1.00e-05:  37%|โ–ˆโ–ˆโ–ˆโ–‹      | 37/99 [01:20<02:10,  2.10s/it]
Epoch 1, Loss 0.638, LR 1.00e-05:  38%|โ–ˆโ–ˆโ–ˆโ–Š      | 38/99 [01:22<02:05,  2.05s/it]
Epoch 1, Loss 0.596, LR 1.00e-05:  38%|โ–ˆโ–ˆโ–ˆโ–Š      | 38/99 [01:22<02:05,  2.05s/it]
Epoch 1, Loss 0.596, LR 1.00e-05:  39%|โ–ˆโ–ˆโ–ˆโ–‰      | 39/99 [01:24<02:01,  2.02s/it]
Epoch 1, Loss 0.618, LR 1.00e-05:  39%|โ–ˆโ–ˆโ–ˆโ–‰      | 39/99 [01:24<02:01,  2.02s/it]
Epoch 1, Loss 0.618, LR 1.00e-05:  40%|โ–ˆโ–ˆโ–ˆโ–ˆ      | 40/99 [01:26<01:58,  2.01s/it]
Epoch 1, Loss 0.539, LR 1.00e-05:  40%|โ–ˆโ–ˆโ–ˆโ–ˆ      | 40/99 [01:26<01:58,  2.01s/it]
Epoch 1, Loss 0.539, LR 1.00e-05:  41%|โ–ˆโ–ˆโ–ˆโ–ˆโ–     | 41/99 [01:28<01:56,  2.02s/it]
Epoch 1, Loss 0.418, LR 1.00e-05:  41%|โ–ˆโ–ˆโ–ˆโ–ˆโ–     | 41/99 [01:28<01:56,  2.02s/it]
Epoch 1, Loss 0.418, LR 1.00e-05:  42%|โ–ˆโ–ˆโ–ˆโ–ˆโ–     | 42/99 [01:30<01:54,  2.01s/it]
Epoch 1, Loss 0.477, LR 1.00e-05:  42%|โ–ˆโ–ˆโ–ˆโ–ˆโ–     | 42/99 [01:30<01:54,  2.01s/it]
Epoch 1, Loss 0.477, LR 1.00e-05:  43%|โ–ˆโ–ˆโ–ˆโ–ˆโ–Ž     | 43/99 [01:32<01:56,  2.09s/it]
Epoch 1, Loss 0.350, LR 1.00e-05:  43%|โ–ˆโ–ˆโ–ˆโ–ˆโ–Ž     | 43/99 [01:32<01:56,  2.09s/it]
Epoch 1, Loss 0.350, LR 1.00e-05:  44%|โ–ˆโ–ˆโ–ˆโ–ˆโ–     | 44/99 [01:34<01:52,  2.04s/it]
Epoch 1, Loss 0.469, LR 1.00e-05:  44%|โ–ˆโ–ˆโ–ˆโ–ˆโ–     | 44/99 [01:34<01:52,  2.04s/it]
Epoch 1, Loss 0.469, LR 1.00e-05:  45%|โ–ˆโ–ˆโ–ˆโ–ˆโ–Œ     | 45/99 [01:36<01:49,  2.02s/it]
Epoch 1, Loss 0.488, LR 1.00e-05:  45%|โ–ˆโ–ˆโ–ˆโ–ˆโ–Œ     | 45/99 [01:36<01:49,  2.02s/it]
Epoch 1, Loss 0.488, LR 1.00e-05:  46%|โ–ˆโ–ˆโ–ˆโ–ˆโ–‹     | 46/99 [01:38<01:45,  1.99s/it]
Epoch 1, Loss 0.434, LR 1.00e-05:  46%|โ–ˆโ–ˆโ–ˆโ–ˆโ–‹     | 46/99 [01:38<01:45,  1.99s/it]
Epoch 1, Loss 0.434, LR 1.00e-05:  47%|โ–ˆโ–ˆโ–ˆโ–ˆโ–‹     | 47/99 [01:40<01:42,  1.98s/it]
Epoch 1, Loss 0.382, LR 1.00e-05:  47%|โ–ˆโ–ˆโ–ˆโ–ˆโ–‹     | 47/99 [01:40<01:42,  1.98s/it]
Epoch 1, Loss 0.382, LR 1.00e-05:  48%|โ–ˆโ–ˆโ–ˆโ–ˆโ–Š     | 48/99 [01:42<01:39,  1.95s/it]
Epoch 1, Loss 0.577, LR 1.00e-05:  48%|โ–ˆโ–ˆโ–ˆโ–ˆโ–Š     | 48/99 [01:42<01:39,  1.95s/it]
Epoch 1, Loss 0.577, LR 1.00e-05:  49%|โ–ˆโ–ˆโ–ˆโ–ˆโ–‰     | 49/99 [01:44<01:36,  1.93s/it]
Epoch 1, Loss 0.414, LR 1.00e-05:  49%|โ–ˆโ–ˆโ–ˆโ–ˆโ–‰     | 49/99 [01:44<01:36,  1.93s/it]
Epoch 1, Loss 0.414, LR 1.00e-05:  51%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ     | 50/99 [01:46<01:38,  2.01s/it]
Epoch 1, Loss 0.575, LR 1.00e-05:  51%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ     | 50/99 [01:46<01:38,  2.01s/it]
Epoch 1, Loss 0.575, LR 1.00e-05:  52%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–    | 51/99 [01:48<01:34,  1.97s/it]
Epoch 1, Loss 0.422, LR 1.00e-05:  52%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–    | 51/99 [01:48<01:34,  1.97s/it]
Epoch 1, Loss 0.422, LR 1.00e-05:  53%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž    | 52/99 [01:50<01:31,  1.94s/it]
Epoch 1, Loss 0.690, LR 1.00e-05:  53%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž    | 52/99 [01:50<01:31,  1.94s/it]
Epoch 1, Loss 0.690, LR 1.00e-05:  54%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž    | 53/99 [01:52<01:28,  1.91s/it]
Epoch 1, Loss 0.683, LR 1.00e-05:  54%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž    | 53/99 [01:52<01:28,  1.91s/it]
Epoch 1, Loss 0.683, LR 1.00e-05:  55%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–    | 54/99 [01:54<01:25,  1.91s/it]
Epoch 1, Loss 0.494, LR 1.00e-05:  55%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–    | 54/99 [01:54<01:25,  1.91s/it]
Epoch 1, Loss 0.494, LR 1.00e-05:  56%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ    | 55/99 [01:55<01:23,  1.90s/it]
Epoch 1, Loss 0.613, LR 1.00e-05:  56%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ    | 55/99 [01:55<01:23,  1.90s/it]
Epoch 1, Loss 0.613, LR 1.00e-05:  57%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹    | 56/99 [01:57<01:21,  1.90s/it]
Epoch 1, Loss 0.723, LR 1.00e-05:  57%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹    | 56/99 [01:57<01:21,  1.90s/it]
Epoch 1, Loss 0.723, LR 1.00e-05:  58%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š    | 57/99 [01:59<01:22,  1.97s/it]
Epoch 1, Loss 0.645, LR 1.00e-05:  58%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š    | 57/99 [01:59<01:22,  1.97s/it]
Epoch 1, Loss 0.645, LR 1.00e-05:  59%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š    | 58/99 [02:01<01:20,  1.96s/it]
Epoch 1, Loss 0.494, LR 1.00e-05:  59%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š    | 58/99 [02:01<01:20,  1.96s/it]
Epoch 1, Loss 0.494, LR 1.00e-05:  60%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰    | 59/99 [02:03<01:17,  1.94s/it]
Epoch 1, Loss 0.573, LR 1.00e-05:  60%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰    | 59/99 [02:03<01:17,  1.94s/it]
Epoch 1, Loss 0.573, LR 1.00e-05:  61%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ    | 60/99 [02:05<01:15,  1.94s/it]
Epoch 1, Loss 0.595, LR 1.00e-05:  61%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ    | 60/99 [02:05<01:15,  1.94s/it]
Epoch 1, Loss 0.595, LR 1.00e-05:  62%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–   | 61/99 [02:07<01:13,  1.94s/it]
Epoch 1, Loss 0.381, LR 1.00e-05:  62%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–   | 61/99 [02:07<01:13,  1.94s/it]
Epoch 1, Loss 0.381, LR 1.00e-05:  63%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž   | 62/99 [02:09<01:11,  1.93s/it]
Epoch 1, Loss 0.641, LR 1.00e-05:  63%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž   | 62/99 [02:09<01:11,  1.93s/it]
Epoch 1, Loss 0.641, LR 1.00e-05:  64%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž   | 63/99 [02:11<01:10,  1.95s/it]
Epoch 1, Loss 0.548, LR 1.00e-05:  64%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž   | 63/99 [02:11<01:10,  1.95s/it]
Epoch 1, Loss 0.548, LR 1.00e-05:  65%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–   | 64/99 [02:13<01:11,  2.04s/it]
Epoch 1, Loss 0.494, LR 1.00e-05:  65%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–   | 64/99 [02:13<01:11,  2.04s/it]
Epoch 1, Loss 0.494, LR 1.00e-05:  66%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ   | 65/99 [02:15<01:07,  1.98s/it]
Epoch 1, Loss 0.712, LR 1.00e-05:  66%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ   | 65/99 [02:15<01:07,  1.98s/it]
Epoch 1, Loss 0.712, LR 1.00e-05:  67%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹   | 66/99 [02:17<01:04,  1.95s/it]
Epoch 1, Loss 0.335, LR 1.00e-05:  67%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹   | 66/99 [02:17<01:04,  1.95s/it]
Epoch 2, Loss 0.335, LR 1.00e-05:  67%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹   | 66/99 [02:17<01:04,  1.95s/it]
Epoch 2, Loss 0.335, LR 1.00e-05:  68%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š   | 67/99 [02:19<01:01,  1.93s/it]
Epoch 2, Loss 0.411, LR 1.00e-05:  68%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š   | 67/99 [02:19<01:01,  1.93s/it]
Epoch 2, Loss 0.411, LR 1.00e-05:  69%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š   | 68/99 [02:21<01:00,  1.96s/it]
Epoch 2, Loss 0.455, LR 1.00e-05:  69%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š   | 68/99 [02:21<01:00,  1.96s/it]
Epoch 2, Loss 0.455, LR 1.00e-05:  70%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰   | 69/99 [02:23<00:57,  1.93s/it]
Epoch 2, Loss 0.369, LR 1.00e-05:  70%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰   | 69/99 [02:23<00:57,  1.93s/it]
Epoch 2, Loss 0.369, LR 1.00e-05:  71%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ   | 70/99 [02:25<00:55,  1.91s/it]
Epoch 2, Loss 0.384, LR 1.00e-05:  71%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ   | 70/99 [02:25<00:55,  1.91s/it]
Epoch 2, Loss 0.384, LR 1.00e-05:  72%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–  | 71/99 [02:27<00:55,  1.99s/it]
Epoch 2, Loss 0.370, LR 1.00e-05:  72%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–  | 71/99 [02:27<00:55,  1.99s/it]
Epoch 2, Loss 0.370, LR 1.00e-05:  73%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž  | 72/99 [02:29<00:53,  1.97s/it]
Epoch 2, Loss 0.396, LR 1.00e-05:  73%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž  | 72/99 [02:29<00:53,  1.97s/it]
Epoch 2, Loss 0.396, LR 1.00e-05:  74%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž  | 73/99 [02:31<00:50,  1.94s/it]
Epoch 2, Loss 0.337, LR 1.00e-05:  74%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž  | 73/99 [02:31<00:50,  1.94s/it]
Epoch 2, Loss 0.337, LR 1.00e-05:  75%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–  | 74/99 [02:33<00:48,  1.92s/it]
Epoch 2, Loss 0.206, LR 1.00e-05:  75%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–  | 74/99 [02:33<00:48,  1.92s/it]
Epoch 2, Loss 0.206, LR 1.00e-05:  76%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ  | 75/99 [02:34<00:45,  1.91s/it]
Epoch 2, Loss 0.247, LR 1.00e-05:  76%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ  | 75/99 [02:34<00:45,  1.91s/it]
Epoch 2, Loss 0.247, LR 1.00e-05:  77%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹  | 76/99 [02:36<00:43,  1.91s/it]
Epoch 2, Loss 0.183, LR 1.00e-05:  77%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹  | 76/99 [02:36<00:43,  1.91s/it]
Epoch 2, Loss 0.183, LR 1.00e-05:  78%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š  | 77/99 [02:38<00:41,  1.90s/it]
Epoch 2, Loss 0.236, LR 1.00e-05:  78%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š  | 77/99 [02:38<00:41,  1.90s/it]
Epoch 2, Loss 0.236, LR 1.00e-05:  79%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰  | 78/99 [02:40<00:42,  2.01s/it]
Epoch 2, Loss 0.220, LR 1.00e-05:  79%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰  | 78/99 [02:40<00:42,  2.01s/it]
Epoch 2, Loss 0.220, LR 1.00e-05:  80%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰  | 79/99 [02:42<00:40,  2.01s/it]
Epoch 2, Loss 0.186, LR 1.00e-05:  80%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰  | 79/99 [02:42<00:40,  2.01s/it]
Epoch 2, Loss 0.186, LR 1.00e-05:  81%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ  | 80/99 [02:44<00:37,  1.99s/it]
Epoch 2, Loss 0.251, LR 1.00e-05:  81%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ  | 80/99 [02:44<00:37,  1.99s/it]
Epoch 2, Loss 0.251, LR 1.00e-05:  82%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 81/99 [02:46<00:35,  1.95s/it]
Epoch 2, Loss 0.315, LR 1.00e-05:  82%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 81/99 [02:46<00:35,  1.95s/it]
Epoch 2, Loss 0.315, LR 1.00e-05:  83%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž | 82/99 [02:48<00:33,  1.94s/it]
Epoch 2, Loss 0.147, LR 1.00e-05:  83%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž | 82/99 [02:48<00:33,  1.94s/it]
Epoch 2, Loss 0.147, LR 1.00e-05:  84%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 83/99 [02:50<00:31,  1.94s/it]
Epoch 2, Loss 0.208, LR 1.00e-05:  84%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 83/99 [02:50<00:31,  1.94s/it]
Epoch 2, Loss 0.208, LR 1.00e-05:  85%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 84/99 [02:52<00:29,  1.94s/it]
Epoch 2, Loss 0.187, LR 1.00e-05:  85%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ– | 84/99 [02:52<00:29,  1.94s/it]
Epoch 2, Loss 0.187, LR 1.00e-05:  86%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ | 85/99 [02:54<00:28,  2.03s/it]
Epoch 2, Loss 0.394, LR 1.00e-05:  86%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ | 85/99 [02:54<00:28,  2.03s/it]
Epoch 2, Loss 0.394, LR 1.00e-05:  87%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹ | 86/99 [02:56<00:25,  1.99s/it]
Epoch 2, Loss 0.326, LR 1.00e-05:  87%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹ | 86/99 [02:56<00:25,  1.99s/it]
Epoch 2, Loss 0.326, LR 1.00e-05:  88%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š | 87/99 [02:58<00:23,  1.97s/it]
Epoch 2, Loss 0.227, LR 1.00e-05:  88%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š | 87/99 [02:58<00:23,  1.97s/it]
Epoch 2, Loss 0.227, LR 1.00e-05:  89%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰ | 88/99 [03:00<00:21,  1.96s/it]
Epoch 2, Loss 0.256, LR 1.00e-05:  89%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰ | 88/99 [03:00<00:21,  1.96s/it]
Epoch 2, Loss 0.256, LR 1.00e-05:  90%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰ | 89/99 [03:02<00:19,  1.96s/it]
Epoch 2, Loss 0.334, LR 1.00e-05:  90%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰ | 89/99 [03:02<00:19,  1.96s/it]
Epoch 2, Loss 0.334, LR 1.00e-05:  91%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ | 90/99 [03:04<00:17,  1.94s/it]
Epoch 2, Loss 0.330, LR 1.00e-05:  91%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ | 90/99 [03:04<00:17,  1.94s/it]
Epoch 2, Loss 0.330, LR 1.00e-05:  92%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–| 91/99 [03:06<00:15,  1.94s/it]
Epoch 2, Loss 0.259, LR 1.00e-05:  92%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–| 91/99 [03:06<00:15,  1.94s/it]
Epoch 2, Loss 0.259, LR 1.00e-05:  93%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž| 92/99 [03:08<00:14,  2.02s/it]
Epoch 2, Loss 0.283, LR 1.00e-05:  93%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Ž| 92/99 [03:08<00:14,  2.02s/it]
Epoch 2, Loss 0.283, LR 1.00e-05:  94%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–| 93/99 [03:11<00:13,  2.17s/it]
Epoch 2, Loss 0.271, LR 1.00e-05:  94%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–| 93/99 [03:11<00:13,  2.17s/it]
Epoch 2, Loss 0.271, LR 1.00e-05:  95%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–| 94/99 [03:21<00:23,  4.62s/it]
Epoch 2, Loss 0.180, LR 1.00e-05:  95%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–| 94/99 [03:21<00:23,  4.62s/it]
Epoch 2, Loss 0.180, LR 1.00e-05:  96%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ| 95/99 [03:23<00:15,  3.83s/it]
Epoch 2, Loss 0.325, LR 1.00e-05:  96%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Œ| 95/99 [03:23<00:15,  3.83s/it]
Epoch 2, Loss 0.325, LR 1.00e-05:  97%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹| 96/99 [03:25<00:09,  3.27s/it]
Epoch 2, Loss 0.280, LR 1.00e-05:  97%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‹| 96/99 [03:25<00:09,  3.27s/it]
Epoch 2, Loss 0.280, LR 1.00e-05:  98%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š| 97/99 [03:27<00:05,  2.90s/it]
Epoch 2, Loss 0.228, LR 1.00e-05:  98%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–Š| 97/99 [03:27<00:05,  2.90s/it]
Epoch 2, Loss 0.228, LR 1.00e-05:  99%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰| 98/99 [03:29<00:02,  2.65s/it]
Epoch 2, Loss 0.380, LR 1.00e-05:  99%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–‰| 98/99 [03:29<00:02,  2.65s/it]
Epoch 2, Loss 0.380, LR 1.00e-05: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 99/99 [03:31<00:00,  2.56s/it]
Epoch 2, Loss 0.106, LR 1.00e-05: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 99/99 [03:31<00:00,  2.56s/it]
/usr/local/lib/python3.11/site-packages/torch/distributed/fsdp/fully_sharded_data_parallel.py:680: FutureWarning: FSDP.state_dict_type() and FSDP.set_state_dict_type() are being deprecated. Please use APIs, get_state_dict() and set_state_dict(), which can support different parallelisms, FSDP1, FSDP2, DDP. API doc: https://pytorch.org/docs/stable/distributed.checkpoint.html#torch.distributed.checkpoint.state_dict.get_state_dict . Tutorial: https://pytorch.org/tutorials/recipes/distributed_checkpoint_recipe.html .
  warnings.warn(
Using BNB DORA 1

Epoch 2, Loss 0.106, LR 1.00e-05: 100%|โ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆโ–ˆ| 99/99 [03:33<00:00,  2.16s/it]
Finished training 0
CUDA event elapsed time: 211.839921875 sec
time_taken: 211.839921875
Rank 0: Before forward: 1.62 GiB
Rank 0: After forward: 2.46 GiB
Rank 0: After backward: 2.64 GiB
Rank 0: Peak allocated memory: 1.41 GiB
Rank 0: Peak reserved memory:  2.65 GiB
Saving trained LoRA weights.
Done 0
Training completed: 0
๐ŸŽ‰ Model saved successfully!
๐Ÿ“ Saved files:
  ๐Ÿ“„ model_state_dict.safetensors
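
As a rough sanity check, the elapsed time reported by the run lines up with the per-step rate shown in the progress bar (this snippet only re-uses the numbers printed above):

elapsed_s = 211.84   # "CUDA event elapsed time" reported by the run
steps = 99           # "Total Training Steps"
print(f"{elapsed_s / steps:.2f} s/step")  # ~2.14 s/step, consistent with the ~2 s/it in the progress bar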
import os
import zipfile

def download_model():
    """Package the model for download"""
    if os.path.exists("uganda_clinical_model"):
        print("๐Ÿ“ฆ Packaging model for download...")
        
        # Create a zip file
        with zipfile.ZipFile("uganda_clinical_qdora_model.zip", "w", zipfile.ZIP_DEFLATED) as zipf:
            for root, dirs, files in os.walk("uganda_clinical_model"):
                for file in files:
                    file_path = os.path.join(root, file)
                    zipf.write(file_path, file_path)
        
        print("โœ… Model packaged as uganda_clinical_qdora_model.zip")
        print(f"๐Ÿ“Š File size: {os.path.getsize('uganda_clinical_qdora_model.zip') / 1024 / 1024:.1f} MB")
        
        # In Jupyter, this will be available for download
        print("๐Ÿ’พ You can download this file from the Jupyter file browser")
        
    else:
        print("โŒ No model directory found. Run training with --save_model true first")

download_model()
๐Ÿ“ฆ Packaging model for download...
โœ… Model packaged as uganda_clinical_qdora_model.zip
๐Ÿ“Š File size: 217.9 MB
๐Ÿ’พ You can download this file from the Jupyter file browser
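
To confirm what actually went into the archive before pulling it down, a quick listing works. A minimal sketch, assuming the uganda_clinical_qdora_model.zip created by the cell above is still in the working directory:

import zipfile

# List the contents of the packaged model archive with per-file sizes.
with zipfile.ZipFile("uganda_clinical_qdora_model.zip") as zf:
    for info in zf.infolist():
        print(f"  {info.filename} ({info.file_size / 1024 / 1024:.1f} MB)")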
def test_model():
    """Test the trained model"""
    
    # Load the model for inference
    from transformers import AutoTokenizer, AutoModelForCausalLM
    import torch
    
    model_path = "./uganda_clinical_model"
    
    if not os.path.exists(model_path):
        print("โŒ Model not found. Train with --save_model true first")
        return
    
    print("๐Ÿ”„ Loading trained model...")
    
    try:
        # Load tokenizer and model
        tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")
        model = AutoModelForCausalLM.from_pretrained(
            model_path,
            torch_dtype=torch.float16,
            device_map="auto"
        )
        
        print("โœ… Model loaded successfully!")
        
        # Test with a medical question
        test_prompt = """Below is an instruction that describes a task. Write a response that appropriately completes the request.

### Instruction:
I have a fever, general body weakness, joint pains, and have been getting bitten by mosquitoes often. What could be the cause?

### Response:"""
        
        print("\n๐Ÿงช Testing model...")
        print("Question: What are the symptoms of malaria?")
        print("\nModel Response:")
        
        # Generate a response; move the inputs onto the model's device so this
        # also works when device_map places the model on GPU
        inputs = tokenizer(test_prompt, return_tensors="pt").to(model.device)
        
        with torch.no_grad():
            outputs = model.generate(
                **inputs,
                max_new_tokens=150,
                temperature=0.7,
                do_sample=True,
                pad_token_id=tokenizer.eos_token_id
            )
        
        response = tokenizer.decode(outputs[0], skip_special_tokens=True)
        # Extract just the response part
        response_only = response.split("### Response:")[-1].strip()
        print(response_only)
        
    except Exception as e:
        print(f"โŒ Error loading model: {e}")

# Run after you've saved the model
test_model()
๐Ÿ”„ Loading trained model...
โŒ Error loading model: Unrecognized model in ./uganda_clinical_model. Should have a `model_type` key in its config.json, or contain one of the following strings in its name: albert, align, altclip, audio-spectrogram-transformer, autoformer, bark, bart, beit, bert, bert-generation, big_bird, bigbird_pegasus, biogpt, bit, blenderbot, blenderbot-small, blip, blip-2, bloom, bridgetower, bros, camembert, canine, chameleon, chinese_clip, chinese_clip_vision_model, clap, clip, clip_text_model, clip_vision_model, clipseg, clvp, code_llama, codegen, cohere, conditional_detr, convbert, convnext, convnextv2, cpmant, ctrl, cvt, dac, data2vec-audio, data2vec-text, data2vec-vision, dbrx, deberta, deberta-v2, decision_transformer, deformable_detr, deit, depth_anything, deta, detr, dinat, dinov2, distilbert, donut-swin, dpr, dpt, efficientformer, efficientnet, electra, encodec, encoder-decoder, ernie, ernie_m, esm, falcon, falcon_mamba, fastspeech2_conformer, flaubert, flava, fnet, focalnet, fsmt, funnel, fuyu, gemma, gemma2, git, glm, glpn, gpt-sw3, gpt2, gpt_bigcode, gpt_neo, gpt_neox, gpt_neox_japanese, gptj, gptsan-japanese, granite, granitemoe, graphormer, grounding-dino, groupvit, hiera, hubert, ibert, idefics, idefics2, idefics3, ijepa, imagegpt, informer, instructblip, instructblipvideo, jamba, jetmoe, jukebox, kosmos-2, layoutlm, layoutlmv2, layoutlmv3, led, levit, lilt, llama, llava, llava_next, llava_next_video, llava_onevision, longformer, longt5, luke, lxmert, m2m_100, mamba, mamba2, marian, markuplm, mask2former, maskformer, maskformer-swin, mbart, mctct, mega, megatron-bert, mgp-str, mimi, mistral, mixtral, mllama, mobilebert, mobilenet_v1, mobilenet_v2, mobilevit, mobilevitv2, moshi, mpnet, mpt, mra, mt5, musicgen, musicgen_melody, mvp, nat, nemotron, nezha, nllb-moe, nougat, nystromformer, olmo, olmo2, olmoe, omdet-turbo, oneformer, open-llama, openai-gpt, opt, owlv2, owlvit, paligemma, patchtsmixer, patchtst, pegasus, pegasus_x, perceiver, persimmon, phi, phi3, phimoe, pix2struct, pixtral, plbart, poolformer, pop2piano, prophetnet, pvt, pvt_v2, qdqbert, qwen2, qwen2_audio, qwen2_audio_encoder, qwen2_moe, qwen2_vl, rag, realm, recurrent_gemma, reformer, regnet, rembert, resnet, retribert, roberta, roberta-prelayernorm, roc_bert, roformer, rt_detr, rt_detr_resnet, rwkv, sam, seamless_m4t, seamless_m4t_v2, segformer, seggpt, sew, sew-d, siglip, siglip_vision_model, speech-encoder-decoder, speech_to_text, speech_to_text_2, speecht5, splinter, squeezebert, stablelm, starcoder2, superpoint, swiftformer, swin, swin2sr, swinv2, switch_transformers, t5, table-transformer, tapas, time_series_transformer, timesformer, timm_backbone, trajectory_transformer, transfo-xl, trocr, tvlt, tvp, udop, umt5, unispeech, unispeech-sat, univnet, upernet, van, video_llava, videomae, vilt, vipllava, vision-encoder-decoder, vision-text-dual-encoder, visual_bert, vit, vit_hybrid, vit_mae, vit_msn, vitdet, vitmatte, vits, vivit, wav2vec2, wav2vec2-bert, wav2vec2-conformer, wavlm, whisper, xclip, xglm, xlm, xlm-prophetnet, xlm-roberta, xlm-roberta-xl, xlnet, xmod, yolos, yoso, zamba, zoedepth
import os

def inspect_saved_model():
    """Check what files were actually saved"""
    model_dir = "./uganda_clinical_model"
    
    if os.path.exists(model_dir):
        print("๐Ÿ“ Files in uganda_clinical_model:")
        for file in os.listdir(model_dir):
            file_path = os.path.join(model_dir, file)
            size = os.path.getsize(file_path) / 1024 / 1024  # MB
            print(f"  ๐Ÿ“„ {file} ({size:.1f} MB)")
        
        # Check for specific files
        expected_files = ["adapter_config.json", "adapter_model.bin", "adapter_model.safetensors"]
        for expected in expected_files:
            if os.path.exists(os.path.join(model_dir, expected)):
                print(f"โœ… Found: {expected}")
            else:
                print(f"โŒ Missing: {expected}")
    else:
        print("โŒ Model directory not found")

inspect_saved_model()
๐Ÿ“ Files in uganda_clinical_model:
  ๐Ÿ“„ model_state_dict.safetensors (275.4 MB)
โŒ Missing: adapter_config.json
โŒ Missing: adapter_model.bin
โŒ Missing: adapter_model.safetensors