Wip p2p enhancements
mudler committed Jul 5, 2024
1 parent 9280060 commit de52a87
Showing 4 changed files with 84 additions and 18 deletions.
3 changes: 3 additions & 0 deletions .github/release.yml
@@ -13,6 +13,9 @@ changelog:
labels:
- bug
- regression
- title: "🖧 P2P area"
labels:
- area/p2p
- title: Exciting New Features 🎉
labels:
- Semver-Minor
2 changes: 1 addition & 1 deletion core/cli/worker/worker_p2p.go
@@ -20,7 +20,7 @@ import (

type P2P struct {
WorkerFlags `embed:""`
Token string `env:"LOCALAI_TOKEN,TOKEN" help:"JSON list of galleries"`
Token string `env:"LOCALAI_TOKEN,LOCALAI_P2P_TOKEN,TOKEN" help:"P2P token to use"`
NoRunner bool `env:"LOCALAI_NO_RUNNER,NO_RUNNER" help:"Do not start the llama-cpp-rpc-server"`
RunnerAddress string `env:"LOCALAI_RUNNER_ADDRESS,RUNNER_ADDRESS" help:"Address of the llama-cpp-rpc-server"`
RunnerPort string `env:"LOCALAI_RUNNER_PORT,RUNNER_PORT" help:"Port of the llama-cpp-rpc-server"`
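The widened env tag lets the same flag be populated from LOCALAI_TOKEN, LOCALAI_P2P_TOKEN, or TOKEN. A minimal sketch of the fallback order those names imply (assuming the CLI flag parser tries them left to right; the real resolution happens in the flag library, not in code like this):

    package main

    import (
        "fmt"
        "os"
    )

    // resolveToken mirrors the precedence implied by
    // `env:"LOCALAI_TOKEN,LOCALAI_P2P_TOKEN,TOKEN"`:
    // the first variable that is set and non-empty wins.
    func resolveToken() string {
        for _, key := range []string{"LOCALAI_TOKEN", "LOCALAI_P2P_TOKEN", "TOKEN"} {
            if v, ok := os.LookupEnv(key); ok && v != "" {
                return v
            }
        }
        return ""
    }

    func main() {
        fmt.Println("token:", resolveToken())
    }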
54 changes: 43 additions & 11 deletions core/p2p/p2p.go
@@ -11,18 +11,18 @@ import (
"net"
"os"
"strings"
"sync"
"time"

"github.com/ipfs/go-log"
"github.com/libp2p/go-libp2p/core/peer"
"github.com/mudler/LocalAI/pkg/utils"
"github.com/mudler/edgevpn/pkg/config"
"github.com/mudler/edgevpn/pkg/node"
"github.com/mudler/edgevpn/pkg/protocol"
"github.com/mudler/edgevpn/pkg/services"
"github.com/mudler/edgevpn/pkg/types"
"github.com/phayes/freeport"

"github.com/ipfs/go-log"
"github.com/mudler/edgevpn/pkg/config"
"github.com/mudler/edgevpn/pkg/services"
zlog "github.com/rs/zerolog/log"

"github.com/mudler/edgevpn/pkg/logger"
@@ -34,6 +34,11 @@ func GenerateToken() string {
return newData.Base64()
}

func nodeID() string {
hostname, _ := os.Hostname()
return hostname
}

func allocateLocalService(ctx context.Context, node *node.Node, listenAddr, service string) error {

zlog.Info().Msgf("Allocating service '%s' on: %s", service, listenAddr)
@@ -135,6 +140,15 @@ func copyStream(closer chan struct{}, dst io.Writer, src io.Reader) {
io.Copy(dst, src)
}

var availableNodes = []NodeData{}
var mu sync.Mutex

func GetAvailableNodes() []NodeData {
mu.Lock()
defer mu.Unlock()
return availableNodes
}
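GetAvailableNodes returns the shared slice while holding the lock, so callers receive a slice header that aliases the internal backing array. Since the discoverer only ever appends, that is largely benign, but a copying variant (hypothetical, not part of this commit) would make the snapshot semantics explicit:

    func GetAvailableNodesSnapshot() []NodeData {
        mu.Lock()
        defer mu.Unlock()
        // Copy under the lock so the returned slice is a stable
        // snapshot, unaffected by later appends.
        nodes := make([]NodeData, len(availableNodes))
        copy(nodes, availableNodes)
        return nodes
    }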

// This is the main routine of the server, which keeps the env variable updated.
// It starts a goroutine that keeps LLAMACPP_GRPC_SERVERS in sync with the discovered services.
func LLamaCPPRPCServerDiscoverer(ctx context.Context, token string) error {
@@ -151,19 +165,22 @@ func LLamaCPPRPCServerDiscoverer(ctx context.Context, token string) error {
zlog.Error().Msg("Discoverer stopped")
return
case tunnel := <-tunnels:

totalTunnels = append(totalTunnels, tunnel)
totalTunnels = append(totalTunnels, tunnel.TunnelAddress)
os.Setenv("LLAMACPP_GRPC_SERVERS", strings.Join(totalTunnels, ","))
zlog.Debug().Msgf("setting LLAMACPP_GRPC_SERVERS to %s", strings.Join(totalTunnels, ","))
// Lock and unlock explicitly: this receive loop never returns, so a
// deferred unlock would never run and the mutex would stay held.
mu.Lock()
availableNodes = append(availableNodes, tunnel)
mu.Unlock()
zlog.Info().Msgf("Node %s available", tunnel.ID)
}
}
}()

return nil
}
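The net effect is that LLAMACPP_GRPC_SERVERS holds a comma-separated list of local tunnel endpoints, one per discovered worker. A sketch of how a consumer could read it back (format assumed from the strings.Join above):

    package main

    import (
        "fmt"
        "os"
        "strings"
    )

    func main() {
        // The discoverer maintains the variable as
        // "127.0.0.1:<port1>,127.0.0.1:<port2>,..."
        raw := os.Getenv("LLAMACPP_GRPC_SERVERS")
        for _, addr := range strings.Split(raw, ",") {
            if addr != "" {
                fmt.Println("rpc endpoint:", addr)
            }
        }
    }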

func discoveryTunnels(ctx context.Context, token string) (chan string, error) {
tunnels := make(chan string)
func discoveryTunnels(ctx context.Context, token string) (chan NodeData, error) {
tunnels := make(chan NodeData)

nodeOpts, err := newNodeOpts(token)
if err != nil {
@@ -196,18 +213,24 @@ func discoveryTunnels(ctx context.Context, token string) (chan string, error) {
zlog.Debug().Msg("Searching for workers")

data := ledger.LastBlock().Storage["services_localai"]
for k := range data {
for k, v := range data {
zlog.Info().Msgf("Found worker %s", k)
if _, found := emitted[k]; !found {
emitted[k] = true
nd := &NodeData{}
if err := v.Unmarshal(nd); err != nil {
zlog.Error().Msg("cannot unmarshal node data")
continue
}
//discoveredPeers <- k
port, err := freeport.GetFreePort()
if err != nil {
fmt.Print(err)
}
tunnelAddress := fmt.Sprintf("127.0.0.1:%d", port)
go allocateLocalService(ctx, n, tunnelAddress, k)
tunnels <- tunnelAddress
nd.TunnelAddress = tunnelAddress
tunnels <- *nd
}
}
}
@@ -217,6 +240,12 @@ func discoveryTunnels(ctx context.Context, token string) (chan string, error) {
return tunnels, err
}

type NodeData struct {
Name string
ID string
TunnelAddress string
}
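NodeData is filled in from both sides: the worker sets Name and ID when it announces itself on the ledger (see BindLLamaCPPWorker below), and the discoverer adds TunnelAddress once a local port has been bound. A compact sketch of that life cycle, with hypothetical values:

    package main

    import "fmt"

    type NodeData struct {
        Name          string // service name, set by the worker
        ID            string // worker hostname (nodeID()), set by the worker
        TunnelAddress string // local tunnel endpoint, set by the discoverer
    }

    func main() {
        // Worker side: announced on the ledger without a tunnel address.
        nd := NodeData{Name: "rpc-worker-1", ID: "host-1"}
        // Discoverer side: a free local port is bound and recorded.
        nd.TunnelAddress = "127.0.0.1:34567"
        fmt.Printf("%+v\n", nd)
    }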

// This is the P2P worker main
func BindLLamaCPPWorker(ctx context.Context, host, port, token string) error {
llger := logger.New(log.LevelFatal)
@@ -255,7 +284,10 @@ func BindLLamaCPPWorker(ctx context.Context, host, port, token string) error {
// If mismatch, update the blockchain
if !found {
updatedMap := map[string]interface{}{}
updatedMap[name] = "p2p"
updatedMap[name] = &NodeData{
Name: name,
ID: nodeID(),
}
ledger.Add("services_localai", updatedMap)
}
},
43 changes: 37 additions & 6 deletions docs/static/install.sh
@@ -76,6 +76,8 @@ DOCKER_INSTALL=${DOCKER_INSTALL:-$docker_found}
USE_AIO=${USE_AIO:-false}
API_KEY=${API_KEY:-}
CORE_IMAGES=${CORE_IMAGES:-false}
P2P_TOKEN=${P2P_TOKEN:-}
WORKER=${WORKER:-false}
# nprocs -1
if available nproc; then
procs=$(nproc)
@@ -132,15 +134,22 @@ configure_systemd() {

info "Adding current user to local-ai group..."
$SUDO usermod -a -G local-ai $(whoami)

STARTCOMMAND="run"
if [ "$WORKER" = true ]; then
if [ -n "$P2P_TOKEN" ]; then
STARTCOMMAND="worker p2p-llama-cpp-rpc"
else
STARTCOMMAND="worker llama-cpp-rpc"
fi
fi
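With these variables a worker install can be driven non-interactively; for example, `WORKER=true P2P_TOKEN=<token> sh install.sh` (hypothetical invocation, built from the variables defined above) makes the systemd unit created below start `local-ai worker p2p-llama-cpp-rpc` instead of `local-ai run`.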
info "Creating local-ai systemd service..."
cat <<EOF | $SUDO tee /etc/systemd/system/local-ai.service >/dev/null
[Unit]
Description=LocalAI Service
After=network-online.target
[Service]
ExecStart=$BINDIR/local-ai run
ExecStart=$BINDIR/local-ai $STARTCOMMAND
User=local-ai
Group=local-ai
Restart=always
@@ -159,6 +168,11 @@ EOF
$SUDO echo "THREADS=$THREADS" | $SUDO tee -a /etc/localai.env >/dev/null
$SUDO echo "MODELS_PATH=$MODELS_PATH" | $SUDO tee -a /etc/localai.env >/dev/null

if [ -n "$P2P_TOKEN" ]; then
$SUDO echo "LOCALAI_P2P_TOKEN=$P2P_TOKEN" | $SUDO tee -a /etc/localai.env >/dev/null
$SUDO echo "LOCALAI_P2P=true" | $SUDO tee -a /etc/localai.env >/dev/null
fi
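When a token is supplied, /etc/localai.env then carries lines like the following (values illustrative), which the service picks up on every start:

    THREADS=6
    MODELS_PATH=/path/to/models
    LOCALAI_P2P_TOKEN=<token>
    LOCALAI_P2P=true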

SYSTEMCTL_RUNNING="$(systemctl is-system-running || true)"
case $SYSTEMCTL_RUNNING in
running|degraded)
@@ -407,6 +421,19 @@ install_docker() {
# exit 0
fi

STARTCOMMAND="run"
if [ "$WORKER" = true ]; then
if [ -n "$P2P_TOKEN" ]; then
STARTCOMMAND="worker p2p-llama-cpp-rpc"
else
STARTCOMMAND="worker llama-cpp-rpc"
fi
fi
envs=""
if [ -n "$P2P_TOKEN" ]; then
envs="-e LOCALAI_P2P_TOKEN=$P2P_TOKEN -e LOCALAI_P2P=true"
fi

IMAGE_TAG=
if [ "$HAS_CUDA" ]; then
IMAGE_TAG=${VERSION}-cublas-cuda12-ffmpeg
@@ -430,7 +457,8 @@ install_docker() {
--restart=always \
-e API_KEY=$API_KEY \
-e THREADS=$THREADS \
-d -p $PORT:8080 --name local-ai localai/localai:$IMAGE_TAG
$envs \
-d -p $PORT:8080 --name local-ai localai/localai:$IMAGE_TAG $STARTCOMMAND
elif [ "$HAS_AMD" ]; then
IMAGE_TAG=${VERSION}-hipblas-ffmpeg
# CORE
@@ -448,7 +476,8 @@ install_docker() {
--restart=always \
-e API_KEY=$API_KEY \
-e THREADS=$THREADS \
-d -p $PORT:8080 --name local-ai localai/localai:$IMAGE_TAG
$envs \
-d -p $PORT:8080 --name local-ai localai/localai:$IMAGE_TAG $STARTCOMMAND
elif [ "$HAS_INTEL" ]; then
IMAGE_TAG=${VERSION}-sycl-f32-ffmpeg
# CORE
@@ -465,7 +494,8 @@ install_docker() {
--restart=always \
-e API_KEY=$API_KEY \
-e THREADS=$THREADS \
-d -p $PORT:8080 --name local-ai localai/localai:$IMAGE_TAG
$envs \
-d -p $PORT:8080 --name local-ai localai/localai:$IMAGE_TAG $STARTCOMMAND
else
IMAGE_TAG=${VERSION}-ffmpeg
# CORE
@@ -481,7 +511,8 @@ install_docker() {
-e MODELS_PATH=/models \
-e API_KEY=$API_KEY \
-e THREADS=$THREADS \
-d -p $PORT:8080 --name local-ai localai/localai:$IMAGE_TAG
$envs \
-d -p $PORT:8080 --name local-ai localai/localai:$IMAGE_TAG $STARTCOMMAND
fi

install_success
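The Docker path mirrors the systemd one: for example, `DOCKER_INSTALL=true WORKER=true P2P_TOKEN=<token> sh install.sh` (hypothetical invocation) injects the token into the container through $envs and appends the worker subcommand to the container's entrypoint via $STARTCOMMAND.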
