Merge pull request #9 from yusufcanb/release/1.0
Release/1.0
yusufcanb authored Mar 2, 2024
2 parents 8469034 + 1b100a1 commit cca26d8
Showing 13 changed files with 295 additions and 106 deletions.
2 changes: 1 addition & 1 deletion LICENSE
@@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]
Copyright 2024 Yusuf Can Bayrak

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
10 changes: 9 additions & 1 deletion README.md
@@ -53,7 +53,7 @@ Download instructions can be followed at the following link: [https://ollama.com
docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama

# With GPU (Nvidia only)
docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
```

### Installation Script
@@ -107,3 +107,11 @@ On Linux and macOS:
```bash
rm /usr/local/bin/tlm
```

On Windows:

Remove the directory under:

```
C:\Users\<username>\AppData\Local\Programs\tlm
```
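The README's Docker commands above assume an Ollama endpoint on port 11434; a quick reachability probe, as a sketch (the response body shown is what current Ollama builds typically return, and is an assumption here):

```bash
# Any HTTP 200 on the default port means the Ollama server is reachable
curl http://localhost:11434
# Ollama is running        <- typical response body (assumption)
```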
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
1.0-rc3
1.0
18 changes: 12 additions & 6 deletions app/app.go
@@ -7,7 +7,9 @@ import (
"github.com/yusufcanb/tlm/config"
"github.com/yusufcanb/tlm/explain"
"github.com/yusufcanb/tlm/install"
"github.com/yusufcanb/tlm/shell"
"github.com/yusufcanb/tlm/suggest"
"os"
"runtime"

"github.com/urfave/cli/v2"
@@ -36,10 +38,14 @@ func New(version string) *TlmApp {
ins := install.New(o, suggestModelfile, explainModelfile)

cliApp := &cli.App{
Name: "tlm",
Usage: "local terminal companion powered by CodeLLaMa.",
Version: version,
HideHelpCommand: true,
Name: "tlm",
Usage: "terminal copilot, powered by CodeLLaMa.",
UsageText: "tlm explain <command>\ntlm suggest <prompt>",
Version: version,
CommandNotFound: func(context *cli.Context, s string) {
fmt.Println(shell.Err() + " command not found.")
os.Exit(-1)
},
Action: func(c *cli.Context) error {
return cli.ShowAppHelp(c)
},
@@ -51,9 +57,9 @@
{
Name: "version",
Aliases: []string{"v"},
Usage: "print version.",
Usage: "Prints tlm version.",
Action: func(c *cli.Context) error {
fmt.Printf("tlm version %s %s/%s", version, runtime.GOOS, runtime.GOARCH)
fmt.Printf("tlm %s (%s/%s)", version, runtime.GOOS, runtime.GOARCH)
return nil
},
},
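The net effect of the app.go changes on the CLI surface, as an illustrative session (the `ERR:` prefix stands in for whatever shell.Err() renders, and the linux/amd64 platform string is an assumption):

```bash
$ tlm v
tlm 1.0 (linux/amd64)

$ tlm bogus
ERR: command not found.    # unknown commands now hit CommandNotFound and exit non-zero
```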
Binary file added assets/deploy.gif
Binary file added assets/suggest2.gif
12 changes: 12 additions & 0 deletions assets/tapes/deploy.tape
@@ -0,0 +1,12 @@
Output deploy.gif

Set Theme "Cyberdyne"

Set Width 1200
Set Height 600
Set FontSize 32

Type "tlm deploy"
Sleep 500ms
Enter
Sleep 8s
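The .tape file is a charmbracelet/vhs script; rendering it locally regenerates the GIF added above (assumes vhs is installed and `tlm deploy` runs against a reachable Ollama):

```bash
# vhs writes deploy.gif next to the tape, per its Output directive
cd assets/tapes
vhs deploy.tape
```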
71 changes: 63 additions & 8 deletions config/cli.go
@@ -1,10 +1,12 @@
package config

import (
"errors"
"fmt"
"github.com/spf13/viper"
"github.com/urfave/cli/v2"
"github.com/yusufcanb/tlm/shell"
"net/url"
)

func (c *Config) Action(_ *cli.Context) error {
@@ -39,16 +41,69 @@ func (c *Config) Command() *cli.Command {
return &cli.Command{
Name: "config",
Aliases: []string{"c"},
Usage: "configure preferences.",
Usage: "Configures tlm preferences.",
Action: c.Action,
Subcommands: []*cli.Command{
{
Name: "set",
Usage: "set configuration",
Action: func(context *cli.Context) error {
return nil
},
},
c.subCommandGet(),
c.subCommandSet(),
},
}
}

func (c *Config) subCommandGet() *cli.Command {
return &cli.Command{
Name: "get",
Usage: "get configuration by key",
UsageText: "tlm config get <key>",
Action: func(c *cli.Context) error {
key := c.Args().Get(0)
value := viper.GetString(key)

if value == "" {
fmt.Println(fmt.Sprintf("%s <%s> is not a tlm parameter", shell.Err(), key))
return nil
}

fmt.Println(fmt.Sprintf("%s = %s", key, value))
return nil
},
}
}

func (c *Config) subCommandSet() *cli.Command {
return &cli.Command{
Name: "set",
Usage: "set configuration",
Action: func(c *cli.Context) error {
key := c.Args().Get(0)

switch key {
case "llm.host":
u, err := url.ParseRequestURI(c.Args().Get(1))
if err != nil {
return errors.New("Invalid url: " + c.Args().Get(1))
}
viper.Set(key, u.String())

case "llm.suggest", "llm.explain":
mode := c.Args().Get(1)
if mode != "stable" && mode != "balanced" && mode != "creative" {
return errors.New("Invalid mode: " + mode)
}
viper.Set(key, mode)
default:
fmt.Println(fmt.Sprintf("%s <%s> is not a tlm parameter", shell.Err(), key))
return nil
}

viper.Set(key, c.Args().Get(1))
err := viper.WriteConfig()
if err != nil {
return err
}

fmt.Println(fmt.Sprintf("%s = %s %s", key, c.Args().Get(1), shell.Ok()))
return nil
},
}
}
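Putting the new subcommands together, a usage sketch derived from the validation rules above (values are illustrative):

```bash
# Read a key back; unknown keys report "<key> is not a tlm parameter"
tlm config get llm.host

# llm.host must parse as a URL
tlm config set llm.host http://localhost:11434

# llm.suggest and llm.explain accept only: stable, balanced, creative
tlm config set llm.suggest balanced
tlm config set llm.explain stable
```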
28 changes: 23 additions & 5 deletions explain/cli.go
@@ -16,6 +16,22 @@ func (e *Explain) before(_ *cli.Context) error {
os.Exit(-1)
}

list, err := e.api.List(context.Background())
if err != nil {
fmt.Println(shell.Err() + " " + err.Error())
fmt.Println(shell.Err() + " Ollama connection failed. Please check your Ollama if it's running or configured correctly.")
os.Exit(-1)
}

for _, model := range list.Models {
    if model.Name == e.modelfile {
        return nil
    }
}

fmt.Println(shell.Err() + " " + "tlm explain:7b model not found.\n\nPlease run `tlm deploy` to deploy tlm models first.")
os.Exit(-1)

return nil
}

@@ -25,10 +41,12 @@ func (e *Explain) action(c *cli.Context) error {

func (e *Explain) Command() *cli.Command {
return &cli.Command{
Name: "explain",
Aliases: []string{"e"},
Usage: "explain a command.",
Before: e.before,
Action: e.action,
Name: "explain",
Aliases: []string{"e"},
Usage: "Explains a command.",
UsageText: "tlm explain <command>",
Description: "explains given shell command.",
Before: e.before,
Action: e.action,
}
}
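With the new Before hook, explain first verifies the Ollama connection and the deployed model, then explains the given shell command; for example (the command argument is illustrative):

```bash
tlm explain "find /var/log -name '*.log' -mtime +7"

# short alias
tlm e "find /var/log -name '*.log' -mtime +7"
```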
74 changes: 50 additions & 24 deletions install.ps1
@@ -16,58 +16,84 @@ if ($env:PROCESSOR_ARCHITECTURE -eq 'AMD64') {
}

# Download URL Construction
$version = "1.0-rc3"
$version = "1.0"
$base_url = "https://github.com/yusufcanb/tlm/releases/download"
$download_url = "${base_url}/${version}/tlm_${version}_${os}_${arch}.exe"

# Ollama check
$ollamaHost = $env:OLLAMA_HOST
if (-not $ollamaHost) {
$ollamaHost = "http://localhost:11434"
}

# Ollama check - For Windows, we'll assume Ollama is installed directly on the system
try {
Invoke-WebRequest -Uri "http://localhost:11434" -UseBasicParsing -ErrorAction Stop | Out-Null
Invoke-WebRequest -Uri $ollamaHost -UseBasicParsing -ErrorAction Stop | Out-Null
} catch {
Write-Host "ERR: Ollama not found." -ForegroundColor red
Write-Host "If you have Ollama installed, please make sure it's running and accessible at $ollamaHost" -ForegroundColor red
Write-Host "or configure OLLAMA_HOST environment variable." -ForegroundColor red
Write-Host ""
Write-Host ">>> If have Ollama on your system or network, you can set the OLLAMA_HOST like below;"
Write-Host " `$env:OLLAMA_HOST` = 'http://localhost:11434'"
Write-Host ""
Write-Host ""
Write-Host ">>> If you don't have Ollama installed, you can install it using the following methods;"
Write-Host ""
Write-Host "*** On Windows: ***" -ForegroundColor green
Write-Host " *** Windows: ***" -ForegroundColor green
Write-Host " Download instructions can be followed at the following link: https://ollama.com/download"
Write-Host ""
Write-Host "Ollama can run with GPU acceleration inside Docker containers for Nvidia GPUs."
Write-Host "To get started using the Docker image, please follow these steps:"
Write-Host " *** Official Docker Images: ***" -ForegroundColor green
Write-Host ""
Write-Host "1. *** CPU only: ***" -ForegroundColor green
Write-Host " docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
Write-Host " Ollama can run with GPU acceleration inside Docker containers for Nvidia GPUs."
Write-Host " To get started using the Docker image, please follow these steps:"
Write-Host ""
Write-Host "2. *** Nvidia GPU: ***" -ForegroundColor green
Write-Host " docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
Write-Host " 1. *** CPU only: ***" -ForegroundColor green
Write-Host " docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
Write-Host ""
Write-Host " 2. *** Nvidia GPU: ***" -ForegroundColor green
Write-Host " docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
Write-Host ""
Write-Host ""
Write-Host "Installation aborted." -ForegroundColor red
Write-Host "Please install Ollama using the methods above and try again." -ForegroundColor red
return -1
Write-Host "Please install or configure Ollama using the methods above and try again." -ForegroundColor red
return
}

# Create Application Directory
$install_directory = "C:\Users\$env:USERNAME\AppData\Local\Programs\tlm"
if (-not (Test-Path $install_directory)) {
New-Item -ItemType Directory -Path $install_directory | Out-Null
}

# Download the binary
Write-Host "Downloading tlm version ${version} for ${os}/${arch}..."
try {
Invoke-WebRequest -Uri $download_url -OutFile 'tlm.exe' -UseBasicParsing -ErrorAction Stop | Out-Null
Invoke-WebRequest -Uri $download_url -OutFile "$install_directory\tlm.exe" -UseBasicParsing -ErrorAction Stop | Out-Null
} catch {
Write-Error "Download failed. Please check your internet connection and try again."
return -1
}

# Move to installation directory
Write-Host "Installing tlm..."
#try {
# Move-Item -Path 'tlm.exe' -Destination 'C:\Windows\Program Files\tlm\' -Force
#} catch {
# Write-Error "Installation requires administrator permissions. Please elevate with rights and run the script again."
# exit 1
#}
# Add installation directory to PATH
$env:Path += ";$install_directory"

# Ollama deployment - specific to the original script, might need modification
# Configure tlm to use Ollama
try {
.\tlm.exe deploy
."$install_directory\tlm.exe" config set llm.host $ollamaHost
} catch {
Write-Error "tlm config set llm.host failed."
return 1
}

# Deploy tlm
try {
    ."$install_directory\tlm.exe" deploy
} catch {
Write-Error "tlm deploy failed."
return 1
}

Write-Host "Type '.\tlm.exe help' to get started."
Write-Host ""
Write-Host "Installation completed successfully."
Write-Host "Type 'tlm help' to get started."