Commit 232f373
fix: naming and prompting
1 parent 3e0eadb

10 files changed (+63, -75 lines)

10 files changed

+63
-75
lines changed

cmd/project.go (+1, -1)

@@ -8,7 +8,7 @@ import (
 
 var projectCmd = &cobra.Command{
 	Use: "project [command]",
-	Short: "(NEW) Manage RunPod projects",
+	Short: "Manage RunPod projects",
 	Long: "Develop and deploy projects entirely on RunPod's infrastructure.",
 }

cmd/project/defaults.go (+2, -2)

@@ -5,9 +5,9 @@ func getDefaultModelName(modelType string) string {
 	switch modelType {
 	case "LLM":
 		return "google/flan-t5-base"
-	case "Stable Diffusion":
+	case "Stable_Diffusion":
 		return "stabilityai/sdxl-turbo"
-	case "Text to Audio":
+	case "Text_to_Audio":
 		return "facebook/musicgen-small"
 	}
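For context, a minimal, self-contained sketch of how the renamed switch behaves. The trailing `return ""` fallback and the calls in `main` are assumptions for illustration, not part of this commit; the point of the rename is that callers presumably now pass the underscore-form starter names (matching the template directory names used elsewhere in this commit), since the space-form strings no longer match any case.

package main

import "fmt"

// getDefaultModelName mirrors the patched switch above; the final
// return "" fallback is assumed so the sketch compiles on its own.
func getDefaultModelName(modelType string) string {
	switch modelType {
	case "LLM":
		return "google/flan-t5-base"
	case "Stable_Diffusion":
		return "stabilityai/sdxl-turbo"
	case "Text_to_Audio":
		return "facebook/musicgen-small"
	}
	return ""
}

func main() {
	fmt.Println(getDefaultModelName("Stable_Diffusion")) // stabilityai/sdxl-turbo
	fmt.Println(getDefaultModelName("Stable Diffusion")) // "" -- space form no longer matches
}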

cmd/project/project.go (+39, -31)

@@ -1,10 +1,13 @@
 package project
 
 import (
+	"bufio"
 	"cli/api"
 	"errors"
 	"fmt"
 	"os"
+	"path/filepath"
+	"strings"
 
 	"github.com/manifoldco/promptui"
 	"github.com/spf13/cobra"

@@ -24,10 +27,18 @@ var (
 const inputPromptPrefix string = " > "
 
 func prompt(message string) string {
-	var selection string = ""
-	for selection == "" {
-		fmt.Print(inputPromptPrefix + message)
-		fmt.Scanln(&selection)
+	reader := bufio.NewReader(os.Stdin)
+	fmt.Print(inputPromptPrefix + message)
+
+	selection, err := reader.ReadString('\n')
+	if err != nil {
+		fmt.Println("An error occurred while reading input. Please try again.", err)
+		return prompt(message)
+	}
+
+	selection = strings.TrimSpace(selection)
+	if selection == "" {
+		return prompt(message)
 	}
 	return selection
 }

@@ -124,10 +135,13 @@ func selectStarterTemplate() (template string, err error) {
 	}
 	options := []StarterTemplateOption{}
 	for _, template := range templates {
-		options = append(options, StarterTemplateOption{Name: template.Name(), Value: template.Name()})
+		// For the printed name, replace _ with spaces
+		var name = template.Name()
+		name = strings.Replace(name, "_", " ", -1)
+		options = append(options, StarterTemplateOption{Name: name, Value: template.Name()})
 	}
 	getStarterTemplate := promptui.Select{
-		Label: "Select a Starter Example:",
+		Label: "Select a Starter Project:",
 		Items: options,
 		Templates: promptTemplates,
 	}

@@ -157,15 +171,13 @@ var NewProjectCmd = &cobra.Command{
 
 		// Project Name
 		if projectName == "" {
-			fmt.Print("1. Project Name:\n")
-			fmt.Print(" Please enter the name of your project.\n")
+			fmt.Print("Provide a name for your project:\n")
 			projectName = prompt("")
 		}
 		fmt.Print("\n Project name set to '" + projectName + "'.\n\n")
 
 		// Project Examples
-		fmt.Print("2. Starter Example:\n")
-		fmt.Print(" Choose a starter example to begin with.\n")
+		fmt.Print("Select a starter project to begin with:\n")
 
 		if modelType == "" {
 			starterExample, err := selectStarterTemplate()

@@ -178,28 +190,21 @@
 		fmt.Println("")
 
 		// Model Name
-		if modelType != "Hello World" {
-			fmt.Print(" Model Name:\n")
-			fmt.Print(" Please enter the name of the Hugging Face model you would like to use.\n")
-			fmt.Print(" Leave blank to use the default model for the selected example.\n > ")
+		if modelType != "Hello_World" {
+			fmt.Print(" Enter the name of the Hugging Face model you would like to use:\n")
+			fmt.Print(" Leave blank to use the default model for the selected project.\n > ")
 			fmt.Scanln(&modelName)
 			fmt.Println("")
 		}
 
-		// Project Configuration
-		fmt.Print("3. Configuration:\n")
-		fmt.Print(" Let's configure the project environment.\n\n")
-
 		// CUDA Version
-		fmt.Println(" CUDA Version:")
-		cudaVersion := promptChoice(" Choose a CUDA version for your project.",
+		cudaVersion := promptChoice("Select a CUDA version for your project:",
 			[]string{"11.8.0", "12.1.0", "12.2.0"}, "11.8.0")
 
-		fmt.Println("\n Using CUDA version: " + cudaVersion)
+		fmt.Println("\n Using CUDA version: " + cudaVersion + "\n")
 
 		// Python Version
-		fmt.Println("\n Python Version:")
-		pythonVersion := promptChoice(" Choose a Python version for your project.",
+		pythonVersion := promptChoice("Select a Python version for your project:",
 			[]string{"3.8", "3.9", "3.10", "3.11"}, "3.10")
 
 		fmt.Println("\n Using Python version: " + pythonVersion)

@@ -208,7 +213,7 @@ var NewProjectCmd = &cobra.Command{
 		fmt.Println("\nProject Summary:")
 		fmt.Println("----------------")
 		fmt.Printf("- Project Name : %s\n", projectName)
-		fmt.Printf("- Starter Example : %s\n", modelType)
+		fmt.Printf("- Starter Project : %s\n", modelType)
 		fmt.Printf("- CUDA version : %s\n", cudaVersion)
 		fmt.Printf("- Python version : %s\n", pythonVersion)
 

@@ -219,15 +224,18 @@
 			return
 		}
 
-		fmt.Printf("\nThe project will be created in the current directory: \n%s\n\n", currentDir)
-		confirm := promptChoice("Proceed with creation?", []string{"yes", "no"}, "yes")
-		if confirm != "yes" {
-			fmt.Println("Project creation cancelled.")
-			return
+		projectDir := filepath.Join(currentDir, projectName)
+		if _, err := os.Stat(projectDir); !os.IsNotExist(err) {
+			fmt.Printf("\nA directory with the name '%s' already exists in the current path.\n", projectName)
+			confirm := promptChoice("Continue with overwrite?", []string{"yes", "no"}, "no")
+			if confirm != "yes" {
+				fmt.Println("Project creation cancelled.")
+				return
+			}
+		} else {
+			fmt.Printf("\nCreating project '%s' in directory '%s'\n", projectName, projectDir)
 		}
 
-		fmt.Println("\nCreating project...")
-
 		// Create Project
 		createNewProject(projectName, cudaVersion, pythonVersion, modelType, modelName, initCurrentDir)
 		fmt.Printf("\nProject %s created successfully! \nNavigate to your project directory with `cd %s`\n\n", projectName, projectName)

cmd/project/tomlBuilder.go (+21, -41)

@@ -11,20 +11,23 @@ import (
 func generateProjectToml(projectFolder, filename, projectName, cudaVersion, pythonVersion string) {
 	template := `# RunPod Project Configuration
 
-
 name = "%s"
 
-
 [project]
-uuid = "%s" # Unique identifier for the project. Generated automatically.
-
-# Base Docker image used for the project environment. Includes essential packages and CUDA support.
-# Use 'runpod/base' as a starting point. Customize only if you need additional packages or configurations.
+# uuid - Unique identifier for the project. Generated automatically.
+# volume_mount_path - Default volume mount path in serverless environment. Changing this may affect data persistence.
+# base_image - Base Docker image used for the project environment. Includes essential packages and CUDA support.
+#              Use 'runpod/base' as a starting point. Customize only if you need additional packages or configurations.
+# gpu_types - List of preferred GPU types for your development pod, ordered by priority.
+#             The pod will use the first available type from this list.
+#             For a full list of supported GPU types, visit: https://docs.runpod.io/references/gpu-types
+# gpu_count - Number of GPUs to allocate for the pod.
+# volume_mount_path - Default volume mount path in serverless environment. Changing this may affect data persistence.
+# ports - Ports to expose and their protocols. Configure as needed for your application's requirements.
+# container_disk_size_gb - Disk space allocated for the container. Adjust according to your project's needs.
+
+uuid = "%s"
 base_image = "runpod/base:0.5.0-cuda%s"
-
-# List of preferred GPU types for your development pod, ordered by priority.
-# The pod will use the first available type from this list.
-# For a full list of supported GPU types, visit: https://docs.runpod.io/references/gpu-types
 gpu_types = [
     "NVIDIA GeForce RTX 4080", # 16GB
     "NVIDIA RTX A4000", # 16GB

@@ -35,47 +38,26 @@ gpu_types = [
     "NVIDIA RTX A6000", # 48GB
     "NVIDIA A100 80GB PCIe", # 80GB
 ]
-
 gpu_count = 1
-
-# Default volume mount path in serverless environment. Changing this may affect data persistence.
 volume_mount_path = "/runpod-volume"
-
-# Ports to expose and their protocols. Configure as needed for your application's requirements.
-# The base image uses 4040 for FileBrowser, 8080 for FastAPI and 22 for SSH
-ports = "4040/http, 8080/http, 22/tcp"
-
-# Disk space allocated for the container. Adjust according to your project's needs.
+ports = "4040/http, 8080/http, 22/tcp" # FileBrowser, FastAPI, SSH
 container_disk_size_gb = 100
 
-
 [project.env_vars]
 # Environment variables for the pod.
+# For full list of base environment variables, visit: https://github.com/runpod/containers/blob/main/official-templates/base/Dockerfile
+# POD_INACTIVITY_TIMEOUT - Duration (in seconds) before terminating the pod after the last SSH session ends.
+# RUNPOD_DEBUG_LEVEL - Log level for RunPod. Set to 'debug' for detailed logs.
+# UVICORN_LOG_LEVEL - Log level for Uvicorn. Set to 'warning' for minimal logs.
 
-# Duration (in seconds) before terminating the pod after the last SSH session ends.
 POD_INACTIVITY_TIMEOUT = "120"
-
 RUNPOD_DEBUG_LEVEL = "debug"
 UVICORN_LOG_LEVEL = "warning"
 
-# Configurations for caching Hugging Face models and datasets to improve load times and reduce bandwidth.
-HF_HOME = "/runpod-volume/.cache/huggingface/"
-HF_DATASETS_CACHE = "/runpod-volume/.cache/huggingface/datasets/"
-DEFAULT_HF_METRICS_CACHE = "/runpod-volume/.cache/huggingface/metrics/"
-DEFAULT_HF_MODULES_CACHE = "/runpod-volume/.cache/huggingface/modules/"
-HUGGINGFACE_HUB_CACHE = "/runpod-volume/.cache/huggingface/hub/"
-HUGGINGFACE_ASSETS_CACHE = "/runpod-volume/.cache/huggingface/assets/"
-
-# Enable this to use the HF Hub transfer service for faster Hugging Face downloads.
-HF_HUB_ENABLE_HF_TRANSFER = "1" # Requires 'hf_transfer' Python package.
-
-# Directories for caching Python dependencies, speeding up subsequent installations.
-VIRTUALENV_OVERRIDE_APP_DATA = "/runpod-volume/.cache/virtualenv/"
-PIP_CACHE_DIR = "/runpod-volume/.cache/pip/"
-
-
 [runtime]
-# Runtime configuration for the project.
+# python_version - Python version to use for the project.
+# handler_path - Path to the handler file for the project.
+# requirements_path - Path to the requirements file for the project.
 
 python_version = "%s"
 handler_path = "src/handler.py"

@@ -90,7 +72,5 @@ requirements_path = "builder/requirements.txt"
 	err := os.WriteFile(tomlPath, []byte(content), 0644)
 	if err != nil {
 		fmt.Printf("Failed to write the TOML file: %s\n", err)
-	} else {
-		fmt.Println("TOML file generated successfully with dynamic content.")
 	}
 }
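The hunks above only touch the template string; the rest of generateProjectToml (not shown in this diff) fills in the %s placeholders and writes the file. A minimal sketch of that step, assuming the placeholders are filled in the order they appear in the template (project name, uuid, CUDA suffix for base_image, Python version); the uuid value and output path are illustrative, not taken from the commit.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Abridged version of the template above, keeping only the %s placeholders.
	template := `name = "%s"

[project]
uuid = "%s"
base_image = "runpod/base:0.5.0-cuda%s"

[runtime]
python_version = "%s"
`
	// Fill the placeholders in the order they appear: name, uuid, CUDA version, Python version.
	content := fmt.Sprintf(template, "my-project", "0f5e3b2a", "11.8.0", "3.10")

	// Illustrative output path; the real function receives projectFolder and filename.
	tomlPath := filepath.Join("my-project", "runpod.toml")
	if err := os.WriteFile(tomlPath, []byte(content), 0644); err != nil {
		fmt.Printf("Failed to write the TOML file: %s\n", err)
	}
}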
