Mamba Codestral#

Build#

Containerfile#
FROM docker.io/rocm/pytorch:rocm6.4.1_ubuntu24.04_py3.12_pytorch_release_2.6.0

# Pin mistral-inference; '~=2.6.0' keeps torch on the ROCm build that ships in
# the base image instead of pulling a newer (CUDA) wheel.
# --no-cache-dir keeps pip's download cache out of the image layer (DL3042).
RUN pip install --no-cache-dir 'mistral-inference==1.3.0' 'torch~=2.6.0'

# Build the mamba package from source so its extensions compile against the
# torch/ROCm toolchain present in this image; --no-build-isolation reuses the
# already-installed torch instead of fetching one into a build venv.
# Shallow clone + cleanup in the SAME layer keep the layer small.
RUN \
    git clone --depth=1 https://github.com/state-spaces/mamba.git \
    && cd mamba \
    && pip install --no-cache-dir --no-build-isolation . \
    && cd .. \
    && rm -rf ./mamba

# Same source-build treatment for causal-conv1d (used by mamba's fast path).
RUN \
    git clone --depth=1 https://github.com/Dao-AILab/causal-conv1d.git \
    && cd causal-conv1d \
    && pip install --no-cache-dir --no-build-isolation . \
    && cd .. \
    && rm -rf ./causal-conv1d

# Helper script to fetch the model weights at runtime (see download.py below).
COPY download.py /opt/

# Default: interactive chat against the weights mounted at /root/mistral_models.
CMD ["mistral-chat", "/root/mistral_models/Mamba-Codestral-7B-v0.1", "--instruct", "--max_tokens", "512"]
download.py#
#!/usr/bin/env python3
"""Download the Mamba-Codestral-7B-v0.1 weights into ~/mistral_models.

Reads an optional Hugging Face access token from the TOKEN environment
variable; the model repository is gated, so a token is normally required.
"""

import os
from pathlib import Path

from huggingface_hub import snapshot_download

# Target directory: ~/mistral_models/Mamba-Codestral-7B-v0.1 (the path the
# container's default mistral-chat command expects under /root).
mistral_models_path = Path.home().joinpath(
    'mistral_models',
    'Mamba-Codestral-7B-v0.1',
)
mistral_models_path.mkdir(parents=True, exist_ok=True)

snapshot_download(
    repo_id="mistralai/Mamba-Codestral-7B-v0.1",
    # Only the files mistral-inference needs, not the whole HF-format repo.
    allow_patterns=[
        "params.json",
        "consolidated.safetensors",
        "tokenizer.model.v3",
    ],
    local_dir=mistral_models_path,
    # Fall back to None (cached CLI login / anonymous) when TOKEN is unset or
    # empty; an empty-string token would be sent as invalid credentials.
    token=os.environ.get("TOKEN") or None,
)
# Build the container image from the Containerfile above and tag it
# codestral:mamba. NOTE(review): --device/--security-opt flags look copied
# from the run command; podman build accepts them, but whether the build
# itself needs GPU device access is unconfirmed here.
podman build \
   --cap-add=SYS_PTRACE \
   --security-opt seccomp=unconfined \
   --group-add keep-groups \
   --shm-size 8G \
   --device=/dev/kfd \
   --device=/dev/dri \
   --tag codestral:mamba \
   .

Téléchargement du modèle#

# Download the model weights into ${HOME}/codestral/mistral_models on the
# host (bind-mounted to /root/mistral_models, where the image's default CMD
# expects them). Replace <TOKEN> with a Hugging Face access token.
podman run --rm -it \
      --cap-add=SYS_PTRACE \
      --security-opt seccomp=unconfined \
      --group-add keep-groups \
      --shm-size 8G \
      --device=/dev/kfd \
      --device=/dev/dri \
      -v ${HOME}/codestral/mistral_models:/root/mistral_models \
      -e TOKEN="<TOKEN>" \
      codestral:mamba \
      python /opt/download.py

Prompt#

# Start the interactive chat (the image's default CMD runs mistral-chat
# against the weights previously downloaded into the mounted directory).
# /dev/kfd and /dev/dri expose the AMD GPU to the container.
podman run --rm -it \
      --cap-add=SYS_PTRACE \
      --security-opt seccomp=unconfined \
      --group-add keep-groups \
      --shm-size 8G \
      --device=/dev/kfd \
      --device=/dev/dri \
      -v ${HOME}/codestral/mistral_models:/root/mistral_models \
      codestral:mamba

Makefile#

Makefile#
# Default target: `make` with no arguments starts the chat container (run).
.PHONY: all
all: run

# Build the image and tag it codestral:mamba (same flags as the manual
# podman build command documented above).
.PHONY: build
build:
	podman build \
      --cap-add=SYS_PTRACE \
      --security-opt seccomp=unconfined \
      --group-add keep-groups \
      --shm-size 8G \
      --device=/dev/kfd \
      --device=/dev/dri \
      --tag codestral:mamba \
      .

# Fetch the model weights into ./mistral_models (bind-mounted into the
# container at /root/mistral_models). Pass a Hugging Face access token:
#   make download TOKEN=hf_xxx
# Previously the recipe hardcoded the literal placeholder "<TOKEN>", which
# was passed verbatim into the container.
.PHONY: download
download:
	podman run --rm -it \
      --cap-add=SYS_PTRACE \
      --security-opt seccomp=unconfined \
      --group-add keep-groups \
      --shm-size 8G \
      --device=/dev/kfd \
      --device=/dev/dri \
      -v $(shell pwd)/mistral_models:/root/mistral_models \
      -e TOKEN="$(TOKEN)" \
      codestral:mamba \
      python /opt/download.py

# Start the interactive chat using the image's default CMD, with the
# downloaded weights mounted from ./mistral_models.
.PHONY: run
run:
	podman run --rm -it \
      --cap-add=SYS_PTRACE \
      --security-opt seccomp=unconfined \
      --group-add keep-groups \
      --shm-size 8G \
      --device=/dev/kfd \
      --device=/dev/dri \
      -v $(shell pwd)/mistral_models:/root/mistral_models \
      codestral:mamba

# Open a bash shell inside the container (overrides the default CMD) for
# debugging, with the same devices and model mount as `run`.
.PHONY: shell
shell:
	podman run --rm -it \
      --cap-add=SYS_PTRACE \
      --security-opt seccomp=unconfined \
      --group-add keep-groups \
      --shm-size 8G \
      --device=/dev/kfd \
      --device=/dev/dri \
      -v $(shell pwd)/mistral_models:/root/mistral_models \
      codestral:mamba bash