Allow pushing from one type of AI Image to an OCI Image #245

Merged 1 commit on Oct 9, 2024
2 changes: 1 addition & 1 deletion .codespellrc
@@ -2,7 +2,7 @@
[codespell]

# Comma-separated list of files to skip.
skip = ./vendor,./.git #,bin,vendor,.git,go.sum,changelog.txt,.cirrus.yml,"RELEASE_NOTES.md,*.xz,*.gz,*.tar,*.tgz,bin2img,*ico,*.png,*.1,*.5,copyimg,*.orig,apidoc.go"
skip = ./logos,./vendor,./.git #,bin,vendor,.git,go.sum,changelog.txt,.cirrus.yml,"RELEASE_NOTES.md,*.xz,*.gz,*.tar,*.tgz,bin2img,*ico,*.png,*.1,*.5,copyimg,*.orig,apidoc.go"

# Comma separated list of words to be ignored. Words must be lowercased.
ignore-words-list = clos,creat,ro,hastable,shouldnot,mountns,passt
9 changes: 6 additions & 3 deletions ramalama/cli.py
@@ -323,14 +323,17 @@ def pull_cli(args):

def push_parser(subparsers):
    parser = subparsers.add_parser("push", help="push AI Model from local storage to remote registry")
    parser.add_argument("MODEL") # positional argument
    parser.add_argument("SOURCE") # positional argument
    parser.add_argument("TARGET") # positional argument
    parser.set_defaults(func=push_cli)


def push_cli(args):
    model = New(args.MODEL)
    model.push(args)
    smodel = New(args.SOURCE)
    source = smodel.path(args)

    model = New(args.TARGET)
    model.push(source, args)


def _name():
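With the two positional arguments, a push now names both a source and a target, so a single command can move a model from one backend into an OCI registry. A hypothetical invocation (the model name and registry are illustrative, not taken from this PR) might look like:

    ramalama push ollama://tinyllama oci://quay.io/example/tinyllama:latest

The Ollama backend resolves the source to the local model file via path(), and the OCI backend pushes that file to the target registry with omlmd.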
5 changes: 4 additions & 1 deletion ramalama/model.py
@@ -22,10 +22,13 @@ def login(self, args):
    def logout(self, args):
        raise NotImplementedError(f"ramalama logout for {self.type} not implemented")

    def path(self, args):
        raise NotImplementedError(f"ramalama path for {self.type} not implemented")

    def pull(self, args):
        raise NotImplementedError(f"ramalama pull for {self.type} not implemented")

    def push(self, args):
    def push(self, source, args):
        raise NotImplementedError(f"ramalama push for {self.type} not implemented")

    def is_symlink_to(self, file_path, target_path):
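With path() and the two-argument push() in the base class, a backend acts as a push source by resolving a model reference to a local file, and as a push target by accepting that file. A minimal sketch of a hypothetical backend, not part of this PR (the class name, prefix, and store layout are made up):

    import os

    class ExampleBackend:  # stands in for a subclass of the base model class above
        def __init__(self, model):
            self.model = model.removeprefix("example://")
            self.type = "Example"

        def path(self, args):
            # Resolve the reference to a file in the local store; fail if it is missing
            p = os.path.join(args.store, "models/example", self.model)
            if not os.path.exists(p):
                raise KeyError(f"{self.model} does not exist")
            return p

        def push(self, source, args):
            # Upload the already-resolved source file to this backend's registry
            raise NotImplementedError(f"ramalama push for {self.type} not implemented")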
22 changes: 7 additions & 15 deletions ramalama/oci.py
@@ -1,6 +1,4 @@
from pathlib import Path
import os
import re
import subprocess
import sys

@@ -36,10 +34,10 @@ def logout(self, args):
        conman_args.append(self.model)
        return exec_cmd(conman_args)

    def _target_decompose(self):
    def _target_decompose(self, model):
        # Remove the prefix and extract target details
        try:
            registry, reference = self.model.split("/", 1)
            registry, reference = model.split("/", 1)
        except Exception:
            raise KeyError(
                f"You must specify a registry for the model in the form "
@@ -49,23 +47,17 @@ def _target_decompose(self):
        reference_dir = reference.replace(":", "/")
        return registry, reference, reference_dir

    def push(self, args):
        registry, _, reference_dir = self._target_decompose()
        target = re.sub(r"^oci://", "", args.TARGET)

        # Validate the model exists locally
        local_model_path = os.path.join(args.store, "models/oci", registry, reference_dir)
        if not os.path.exists(local_model_path):
            raise KeyError(f"model {self.model} not found locally. Cannot push.")
    def push(self, source, args):
        target = args.TARGET.removeprefix("oci://")
        tregistry, _, treference_dir = self._target_decompose(target)

        model_file = Path(local_model_path).resolve()
        try:
            # Push the model using omlmd, using cwd the model's file parent directory
            run_cmd([self.omlmd, "push", target, str(model_file), "--empty-metadata"], cwd=model_file.parent)

            run_cmd([self.omlmd, "push", target, source, "--empty-metadata"])
        except subprocess.CalledProcessError as e:
            perror(f"Failed to push model to OCI: {e}")
            raise e
        return local_model_path

    def pull(self, args):
        try:
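A note on the prefix handling above: str.removeprefix (Python 3.9+) strips only the literal leading "oci://", whereas str.strip treats its argument as a set of characters to trim from both ends. A small illustration with a made-up reference:

    "oci://ollama.example.com/model".removeprefix("oci://")  # 'ollama.example.com/model'
    "oci://ollama.example.com/model".strip("oci://")         # 'llama.example.com/model' (also eats the leading 'o')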
21 changes: 16 additions & 5 deletions ramalama/ollama.py
@@ -84,17 +84,14 @@ def __init__(self, model):
        super().__init__(model.removeprefix("ollama://"))
        self.type = "Ollama"

    def pull(self, args):
        repos = args.store + "/repos/ollama"
    def _local(self, args):
        models = args.store + "/models/ollama"
        registry = "https://registry.ollama.ai"
        if "/" in self.model:
            model_full = self.model
            models = os.path.join(models, model_full.rsplit("/", 1)[0])
            self._models = os.path.join(models, model_full.rsplit("/", 1)[0])
        else:
            model_full = "library/" + self.model

        accept = "Accept: application/vnd.docker.distribution.manifest.v2+json"
        if ":" in model_full:
            model_name, model_tag = model_full.split(":", 1)
        else:
@@ -103,9 +100,23 @@ def pull(self, args):

        model_base = os.path.basename(model_name)
        symlink_path = os.path.join(models, f"{model_base}:{model_tag}")
        return symlink_path, models, model_base, model_name, model_tag

    def path(self, args):
        symlink_path, _, _, _, _ = self._local(args)
        if not os.path.exists(symlink_path):
            raise KeyError(f"{self.model} does not exist")

        return symlink_path

    def pull(self, args):
        repos = args.store + "/repos/ollama"
        symlink_path, models, model_base, model_name, model_tag = self._local(args)
        if os.path.exists(symlink_path):
            return symlink_path

        registry = "https://registry.ollama.ai"
        accept = "Accept: application/vnd.docker.distribution.manifest.v2+json"
        manifests = os.path.join(repos, "manifests", registry, model_name, model_tag)
        registry_head = f"{registry}/v2/{model_name}"
        return init_pull(
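For reference, the symlink that path() checks is derived entirely from the store path and the model reference, as in this worked example with hypothetical values (the store path and model name are illustrative; defaults hidden in the collapsed hunk are not assumed):

    import os

    # args.store = "/var/lib/ramalama", model = "tinyllama:latest"
    models = "/var/lib/ramalama" + "/models/ollama"      # /var/lib/ramalama/models/ollama
    model_full = "library/" + "tinyllama:latest"         # no "/" in the reference
    model_name, model_tag = model_full.split(":", 1)     # "library/tinyllama", "latest"
    model_base = os.path.basename(model_name)            # "tinyllama"
    symlink_path = os.path.join(models, f"{model_base}:{model_tag}")
    # -> /var/lib/ramalama/models/ollama/tinyllama:latest; path() raises KeyError if this link is missing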