
Commit

Merge branch 'main' into itsliamdowd/main
# Conflicts:
#	poetry.lock
jaluma committed Sep 11, 2024
2 parents 9fbbc90 + 4262859 commit 277f810
Showing 20 changed files with 428 additions and 111 deletions.
16 changes: 16 additions & 0 deletions .docker/router.yml
@@ -0,0 +1,16 @@
http:
  services:
    ollama:
      loadBalancer:
        healthCheck:
          interval: 5s
          path: /
        servers:
          - url: http://ollama-cpu:11434
          - url: http://ollama-cuda:11434
          - url: http://host.docker.internal:11434

  routers:
    ollama-router:
      rule: "PathPrefix(`/`)"
      service: ollama
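
Traefik's load balancer health-checks all three backends, so requests land on whichever Ollama instance the active compose profile actually started. A minimal way to confirm the routing is to hit the entrypoint from inside the compose network; the sketch below assumes it is run in a container on that network (for example via `docker compose exec private-gpt-ollama python probe_ollama.py`) and uses only the Python standard library:

```python
# probe_ollama.py - minimal sketch; run it from a container attached to the same
# compose network as the Traefik "ollama" service.
import urllib.request

# Traefik's "web" entrypoint (:11434) forwards PathPrefix(`/`) to a healthy
# ollama-* backend, per this router.yml.
URL = "http://ollama:11434/"

try:
    with urllib.request.urlopen(URL, timeout=5) as resp:
        # Ollama normally answers its root path with "Ollama is running".
        print(resp.status, resp.read().decode(errors="replace").strip())
except OSError as exc:
    print(f"Ollama is not reachable through Traefik: {exc}")
```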
19 changes: 19 additions & 0 deletions .github/release_please/.release-please-config.json
@@ -0,0 +1,19 @@
{
  "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json",
  "release-type": "simple",
  "version-file": "version.txt",
  "extra-files": [
    {
      "type": "toml",
      "path": "pyproject.toml",
      "jsonpath": "$.tool.poetry.version"
    },
    {
      "type": "generic",
      "path": "docker-compose.yaml"
    }
  ],
  "packages": {
    ".": {}
  }
}
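
Besides version.txt, release-please is told to bump the version embedded in pyproject.toml (via the TOML updater and the `$.tool.poetry.version` path) and in docker-compose.yaml (via the generic updater, which rewrites lines carrying an `x-release-please-version` marker, as seen later in this diff). The sketch below only illustrates that rewrite, it is not the tool's actual implementation:

```python
# bump_annotated.py - illustrative sketch of what the "generic" extra-file
# updater does: rewrite the semver on lines tagged x-release-please-version.
import re
from pathlib import Path

NEW_VERSION = "0.6.2"  # in practice release-please derives this from the manifest
MARKER = "x-release-please-version"
SEMVER = re.compile(r"\d+\.\d+\.\d+")

path = Path("docker-compose.yaml")
updated = [
    SEMVER.sub(NEW_VERSION, line) if MARKER in line else line
    for line in path.read_text().splitlines(keepends=True)
]
path.write_text("".join(updated))
```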
3 changes: 3 additions & 0 deletions .github/release_please/.release-please-manifest.json
@@ -0,0 +1,3 @@
{
  ".": "0.6.2"
}
45 changes: 0 additions & 45 deletions .github/workflows/docker.yml

This file was deleted.

83 changes: 83 additions & 0 deletions .github/workflows/generate-release.yml
@@ -0,0 +1,83 @@
name: generate-release

on:
  release:
    types: [ published ]
  workflow_dispatch:

env:
  REGISTRY: docker.io
  IMAGE_NAME: zylonai/private-gpt
  platforms: linux/amd64,linux/arm64
  DEFAULT_TYPE: "ollama"

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest

    strategy:
      matrix:
        type: [ llamacpp-cpu, ollama ]

    permissions:
      contents: read
      packages: write

    outputs:
      version: ${{ steps.version.outputs.version }}

    steps:
      - name: Free Disk Space (Ubuntu)
        uses: jlumbroso/free-disk-space@main
        with:
          tool-cache: false
          android: true
          dotnet: true
          haskell: true
          large-packages: true
          docker-images: false
          swap-storage: true

      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=semver,pattern={{version}},enable=${{ matrix.type == env.DEFAULT_TYPE }}
            type=semver,pattern={{version}}-${{ matrix.type }}
            type=semver,pattern={{major}}.{{minor}},enable=${{ matrix.type == env.DEFAULT_TYPE }}
            type=semver,pattern={{major}}.{{minor}}-${{ matrix.type }}
            type=raw,value=latest,enable=${{ matrix.type == env.DEFAULT_TYPE }}
            type=sha
          flavor: |
            latest=false

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: Dockerfile.${{ matrix.type }}
          platforms: ${{ env.platforms }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

      - name: Version output
        id: version
        run: echo "version=${{ steps.meta.outputs.version }}" >> "$GITHUB_OUTPUT"
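
The tag rules above mean every matrix flavor gets version tags suffixed with its type, while only the default `ollama` flavor also receives the bare version tags and `latest`. A rough sketch of the resulting tag set (illustrative only; the real `docker/metadata-action` output also carries the registry/image prefix and a `type=sha` tag):

```python
# expected_tags.py - rough sketch of the tag set this workflow should produce
# for a given release version and matrix type (assumed example values).
def expected_tags(version: str, build_type: str, default_type: str = "ollama") -> list[str]:
    major, minor, _ = version.split(".")
    tags = [f"{version}-{build_type}", f"{major}.{minor}-{build_type}"]
    if build_type == default_type:
        # The default flavor also gets the unsuffixed and "latest" tags.
        tags += [version, f"{major}.{minor}", "latest"]
    return tags

print(expected_tags("0.6.2", "ollama"))        # ['0.6.2-ollama', '0.6-ollama', '0.6.2', '0.6', 'latest']
print(expected_tags("0.6.2", "llamacpp-cpu"))  # ['0.6.2-llamacpp-cpu', '0.6-llamacpp-cpu']
```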
7 changes: 4 additions & 3 deletions .github/workflows/release-please.yml
@@ -13,7 +13,8 @@ jobs:
  release-please:
    runs-on: ubuntu-latest
    steps:
      - uses: google-github-actions/release-please-action@v3
      - uses: google-github-actions/release-please-action@v4
        id: release
        with:
          release-type: simple
          version-file: version.txt
          config-file: .github/release_please/.release-please-config.json
          manifest-file: .github/release_please/.release-please-manifest.json
6 changes: 3 additions & 3 deletions .github/workflows/tests.yml
@@ -14,7 +14,7 @@ jobs:
  setup:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4
      - uses: ./.github/workflows/actions/install_dependencies

  checks:
@@ -28,7 +28,7 @@
          - ruff
          - mypy
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4
      - uses: ./.github/workflows/actions/install_dependencies
      - name: run ${{ matrix.quality-command }}
        run: make ${{ matrix.quality-command }}
@@ -38,7 +38,7 @@
    runs-on: ubuntu-latest
    name: test
    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4
      - uses: ./.github/workflows/actions/install_dependencies
      - name: run test
        run: make test-coverage
20 changes: 20 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,25 @@
# Changelog

## [0.6.2](https://github.com/zylon-ai/private-gpt/compare/v0.6.1...v0.6.2) (2024-08-08)


### Bug Fixes

* add numpy issue to troubleshooting ([#2048](https://github.com/zylon-ai/private-gpt/issues/2048)) ([4ca6d0c](https://github.com/zylon-ai/private-gpt/commit/4ca6d0cb556be7a598f7d3e3b00d2a29214ee1e8))
* auto-update version ([#2052](https://github.com/zylon-ai/private-gpt/issues/2052)) ([7fefe40](https://github.com/zylon-ai/private-gpt/commit/7fefe408b4267684c6e3c1a43c5dc2b73ec61fe4))
* publish image name ([#2043](https://github.com/zylon-ai/private-gpt/issues/2043)) ([b1acf9d](https://github.com/zylon-ai/private-gpt/commit/b1acf9dc2cbca2047cd0087f13254ff5cda6e570))
* update matplotlib to 3.9.1-post1 to fix win install ([b16abbe](https://github.com/zylon-ai/private-gpt/commit/b16abbefe49527ac038d235659854b98345d5387))

## [0.6.1](https://github.com/zylon-ai/private-gpt/compare/v0.6.0...v0.6.1) (2024-08-05)


### Bug Fixes

* add built image from DockerHub ([#2042](https://github.com/zylon-ai/private-gpt/issues/2042)) ([f09f6dd](https://github.com/zylon-ai/private-gpt/commit/f09f6dd2553077d4566dbe6b48a450e05c2f049e))
* Adding azopenai to model list ([#2035](https://github.com/zylon-ai/private-gpt/issues/2035)) ([1c665f7](https://github.com/zylon-ai/private-gpt/commit/1c665f7900658144f62814b51f6e3434a6d7377f))
* **deploy:** generate docker release when new version is released ([#2038](https://github.com/zylon-ai/private-gpt/issues/2038)) ([1d4c14d](https://github.com/zylon-ai/private-gpt/commit/1d4c14d7a3c383c874b323d934be01afbaca899e))
* **deploy:** improve Docker-Compose and quickstart on Docker ([#2037](https://github.com/zylon-ai/private-gpt/issues/2037)) ([dae0727](https://github.com/zylon-ai/private-gpt/commit/dae0727a1b4abd35d2b0851fe30e0a4ed67e0fbb))

## [0.6.0](https://github.com/zylon-ai/private-gpt/compare/v0.5.0...v0.6.0) (2024-08-02)


File renamed without changes.
File renamed without changes.
92 changes: 87 additions & 5 deletions docker-compose.yaml
@@ -1,19 +1,101 @@
services:
  private-gpt:

  #-----------------------------------
  #---- Private-GPT services ---------
  #-----------------------------------

  # Private-GPT service for the Ollama CPU and GPU modes
  # This service builds from an external Dockerfile and runs the Ollama mode.
  private-gpt-ollama:
    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-ollama # x-release-please-version
    build:
      dockerfile: Dockerfile.external
      context: .
      dockerfile: Dockerfile.ollama
    volumes:
      - ./local_data/:/home/worker/app/local_data
    ports:
      - 8001:8001
      - "8001:8001"
    environment:
      PORT: 8001
      PGPT_PROFILES: docker
      PGPT_MODE: ollama
      PGPT_EMBED_MODE: ollama
      PGPT_OLLAMA_API_BASE: http://ollama:11434
      HF_TOKEN: ${HF_TOKEN:-}
    profiles:
      - ""
      - ollama-cpu
      - ollama-cuda
      - ollama-api

  # Private-GPT service for the local mode
  # This service builds from a local Dockerfile and runs the application in local mode.
  private-gpt-llamacpp-cpu:
    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-llamacpp-cpu # x-release-please-version
    build:
      context: .
      dockerfile: Dockerfile.llamacpp-cpu
    volumes:
      - ./local_data/:/home/worker/app/local_data
      - ./models/:/home/worker/app/models
    entrypoint: sh -c ".venv/bin/python scripts/setup && .venv/bin/python -m private_gpt"
    ports:
      - "8001:8001"
    environment:
      PORT: 8001
      PGPT_PROFILES: local
      HF_TOKEN: ${HF_TOKEN}
    profiles:
      - llamacpp-cpu

  #-----------------------------------
  #---- Ollama services --------------
  #-----------------------------------

  # Traefik reverse proxy for the Ollama service
  # This will route requests to the Ollama service based on the profile.
  ollama:
    image: ollama/ollama:latest
    image: traefik:v2.10
    ports:
      - 11434:11434
      - "8081:8080"
    command:
      - "--providers.file.filename=/etc/router.yml"
      - "--log.level=ERROR"
      - "--api.insecure=true"
      - "--providers.docker=true"
      - "--providers.docker.exposedbydefault=false"
      - "--entrypoints.web.address=:11434"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
      - ./.docker/router.yml:/etc/router.yml:ro
    extra_hosts:
      - "host.docker.internal:host-gateway"
    profiles:
      - ""
      - ollama-cpu
      - ollama-cuda
      - ollama-api

  # Ollama service for the CPU mode
  ollama-cpu:
    image: ollama/ollama:latest
    volumes:
      - ./models:/root/.ollama
    profiles:
      - ""
      - ollama-cpu

  # Ollama service for the CUDA mode
  ollama-cuda:
    image: ollama/ollama:latest
    volumes:
      - ./models:/root/.ollama
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    profiles:
      - ollama-cuda
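
With these profiles, a command such as `docker compose --profile ollama-cuda up -d` brings up the CUDA-backed Ollama container alongside Traefik and the PrivateGPT service on port 8001. A small readiness poll against the published port, assuming only the port mapping shown above:

```python
# wait_for_pgpt.py - illustrative readiness poll for the stack defined above.
import time
import urllib.request

URL = "http://localhost:8001/"  # PrivateGPT port published by the compose file

for attempt in range(1, 31):
    try:
        with urllib.request.urlopen(URL, timeout=2) as resp:
            print(f"PrivateGPT answered with HTTP {resp.status} on attempt {attempt}")
            break
    except OSError:
        time.sleep(2)
else:
    print("PrivateGPT did not come up within ~60 seconds")
```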
9 changes: 9 additions & 0 deletions fern/docs.yml
@@ -10,6 +10,9 @@ tabs:
  overview:
    display-name: Overview
    icon: "fa-solid fa-home"
  quickstart:
    display-name: Quickstart
    icon: "fa-solid fa-rocket"
  installation:
    display-name: Installation
    icon: "fa-solid fa-download"
@@ -32,6 +35,12 @@ navigation:
        contents:
          - page: Introduction
            path: ./docs/pages/overview/welcome.mdx
  - tab: quickstart
    layout:
      - section: Getting started
        contents:
          - page: Quickstart
            path: ./docs/pages/quickstart/quickstart.mdx
  # How to install PrivateGPT, with FAQ and troubleshooting
  - tab: installation
    layout:
