Commit c40a5f4
Remove working_dir from examples, and fix small bug in docker_user
dongreenberg committed Jul 26, 2024
1 parent 2a392fc commit c40a5f4
Showing 9 changed files with 4 additions and 18 deletions.
1 change: 0 additions & 1 deletion examples/llama2-13b-ec2/llama2_ec2.py
@@ -110,7 +110,6 @@ def predict(self, prompt_text, **inf_kwargs):
     ],
     secrets=["huggingface"],  # Needed to download Llama 2
     name="llama2inference",
-    working_dir="./",
 )
 
 # Finally, we define our module and run it on the remote cluster. We construct it normally and then call
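For reference, a minimal sketch of what these example env definitions look like after the change, using the llama2 example's visible arguments (the reqs list is elided in the diff above, so a placeholder is used):

import runhouse as rh

# Sketch of the post-commit env definition: working_dir="./" is simply
# dropped; every other argument is unchanged.
env = rh.env(
    reqs=["placeholder-dependency"],  # hypothetical; the real list is elided in the diff
    secrets=["huggingface"],  # Needed to download Llama 2
    name="llama2inference",
)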
@@ -213,7 +213,6 @@ def restart_container(self):
     name="tgi_env",
     reqs=["docker"],
     secrets=["huggingface"],
-    working_dir="./",
 )
 
 # Finally, we define our module and run it on the remote cluster. We construct it normally and then call
1 change: 0 additions & 1 deletion examples/llama2-with-tgi-ec2/tgi_llama_ec2.py
@@ -184,7 +184,6 @@ def restart_container(self):
     name="tgi_env",
     reqs=["docker", "torch", "transformers"],
     secrets=["huggingface"],
-    working_dir="./",
 )
 
 # Finally, we define our module and run it on the remote cluster. We construct it normally and then call
1 change: 0 additions & 1 deletion examples/llama3-8b-ec2/llama3_ec2.py
@@ -126,7 +126,6 @@ def predict(self, prompt_text, **inf_kwargs):
     ],
     secrets=["huggingface"],  # Needed to download Llama 3 from HuggingFace
     name="llama3inference",
-    working_dir="./",
 )
 
 # Finally, we define our module and run it on the remote cluster. We construct it normally and then call
1 change: 0 additions & 1 deletion examples/llama3-8b-tgi-ec2/llama3_tgi_ec2.py
@@ -175,7 +175,6 @@ def deploy(self):
     name="tgi_env",
     reqs=["docker", "torch", "transformers"],
     secrets=["huggingface"],
-    working_dir="./",
 )
 
 # Finally, we define our module and run it on the remote cluster. We construct it normally and then call
1 change: 0 additions & 1 deletion examples/llama3-vllm-gcp/llama3_vllm_gcp.py
@@ -120,7 +120,6 @@ async def main():
     reqs=["vllm==0.2.7"],  # >=0.3.0 causes Pydantic version error
     secrets=["huggingface"],  # Needed to download Llama 3 from HuggingFace
     name="llama3inference",
-    working_dir="./",
 )
 
 # Finally, we define our module and run it on the remote cluster. We construct it normally and then call
11 changes: 1 addition & 10 deletions examples/mistral-aws-inferentia2/mistral_inferentia.py
@@ -151,15 +151,6 @@ def generate(self, messages: list, return_tensors="pt", sequence_length=256):
     ],
 )
 
-# Next, we define the environment for our module. This includes the required dependencies that need
-# to be installed on the remote machine.
-#
-# Learn more in the [Runhouse docs on envs](/docs/tutorials/api-envs).
-env = rh.env(
-    name="instruct_env",
-    working_dir="./",
-)
-
 # Finally, we define our module and run it on the remote cluster. We construct it normally and then call
 # `get_or_to` to run it on the remote cluster. Using `get_or_to` allows us to load the existing Module
 # by the name `mistral-instruct` if it was already put on the cluster. If we want to update the module each
@@ -168,7 +159,7 @@ def generate(self, messages: list, return_tensors="pt", sequence_length=256):
 # Note that we also pass the `env` object to the `get_or_to` method, which will ensure that the environment is
 # set up on the remote machine before the module is run.
 remote_instruct_model = MistralInstruct().get_or_to(
-    cluster, env=env, name="mistral-instruct"
+    cluster, name="mistral-instruct"
 )
 
 # ## Loading and prompting the model
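After this change, the Mistral example sends the module to the cluster without an explicit env; a minimal sketch of the updated call, assuming `cluster` and `MistralInstruct` are defined as in the example:

# Sketch: the module is now dispatched without an env argument, since the
# removed instruct_env only set a name and working_dir anyway.
remote_instruct_model = MistralInstruct().get_or_to(cluster, name="mistral-instruct")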
1 change: 0 additions & 1 deletion examples/mistral-with-tgi-ec2/tgi_mistral_ec2.py
@@ -159,7 +159,6 @@ def restart_container(self):
 env = rh.env(
     name="tgi_env",
     reqs=["docker", "openai", "torch", "transformers"],
-    working_dir="./",
 )
 
 # Finally, we define our module and run it on the remote cluster. We construct it normally and then call
4 changes: 3 additions & 1 deletion runhouse/resources/hardware/on_demand_cluster.py
@@ -148,7 +148,9 @@ def docker_user(self) -> str:
         if self._docker_user:
             return self._docker_user
 
-        if not self.image_id:
+        # TODO detect whether this is a k8s cluster properly, and handle the user setting / SSH properly
+        # (e.g. SkyPilot's new KubernetesCommandRunner)
+        if not self.image_id or "docker:" not in self.image_id or self.provider == "kubernetes":
             return None
 
         from runhouse.resources.hardware.sky_ssh_runner import get_docker_user
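The "small bug" in `docker_user` is visible in the guard: previously any non-empty `image_id` (a plain AMI id, say) was treated as a Docker image. A standalone sketch of the new predicate, with hypothetical inputs:

# Mirrors the patched guard in OnDemandCluster.docker_user (a sketch, not the
# actual method). A docker user is only resolved for "docker:<image>" ids on
# non-Kubernetes providers.
def resolves_docker_user(image_id: str, provider: str) -> bool:
    if not image_id or "docker:" not in image_id or provider == "kubernetes":
        return False
    return True

assert resolves_docker_user("docker:nvcr.io/nvidia/pytorch:23.10-py3", "aws")
assert not resolves_docker_user("ami-0123456789abcdef0", "aws")  # plain AMI, not Docker
assert not resolves_docker_user("docker:myimage:latest", "kubernetes")  # k8s handled separately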
