From f98b92779f5048300dcac73cd279e7f82ecb898f Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Wed, 21 Aug 2024 08:40:35 +0000
Subject: [PATCH] chore(main): release 0.6.3

---
 .github/release_please/.release-please-manifest.json | 2 +-
 CHANGELOG.md                                          | 8 ++++++++
 docker-compose.yaml                                   | 4 ++--
 pyproject.toml                                        | 2 +-
 version.txt                                           | 2 +-
 5 files changed, 13 insertions(+), 5 deletions(-)

diff --git a/.github/release_please/.release-please-manifest.json b/.github/release_please/.release-please-manifest.json
index e3778b2c1..5c87ad823 100644
--- a/.github/release_please/.release-please-manifest.json
+++ b/.github/release_please/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.6.2"
+  ".": "0.6.3"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e37d34133..fa5c973ca 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## [0.6.3](https://github.com/zylon-ai/private-gpt/compare/v0.6.2...v0.6.3) (2024-08-21)
+
+
+### Bug Fixes
+
+* naming image and ollama-cpu ([#2056](https://github.com/zylon-ai/private-gpt/issues/2056)) ([89477ea](https://github.com/zylon-ai/private-gpt/commit/89477ea9d3a83181b0222b732a81c71db9edf142))
+* Rectify ffmpy poetry config; update version from 0.3.2 to 0.4.0 ([#2062](https://github.com/zylon-ai/private-gpt/issues/2062)) ([7603b36](https://github.com/zylon-ai/private-gpt/commit/7603b3627d91aed1cce2e1ae407fec11ca1ad132))
+
 ## [0.6.2](https://github.com/zylon-ai/private-gpt/compare/v0.6.1...v0.6.2) (2024-08-08)
 
 
diff --git a/docker-compose.yaml b/docker-compose.yaml
index c2ef0f6d7..661ba54bf 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -7,7 +7,7 @@ services:
   # Private-GPT service for the Ollama CPU and GPU modes
   # This service builds from an external Dockerfile and runs the Ollama mode.
   private-gpt-ollama:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-ollama # x-release-please-version
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.3}-ollama # x-release-please-version
     build:
       context: .
       dockerfile: Dockerfile.ollama
@@ -31,7 +31,7 @@
   # Private-GPT service for the local mode
   # This service builds from a local Dockerfile and runs the application in local mode.
   private-gpt-llamacpp-cpu:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-llamacpp-cpu # x-release-please-version
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.3}-llamacpp-cpu # x-release-please-version
     build:
       context: .
       dockerfile: Dockerfile.llamacpp-cpu
diff --git a/pyproject.toml b/pyproject.toml
index 17a7c6985..1dd3d010b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "private-gpt"
-version = "0.6.2"
+version = "0.6.3"
 description = "Private GPT"
 authors = ["Zylon "]
 
diff --git a/version.txt b/version.txt
index b61604874..844f6a91a 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-0.6.2
+0.6.3