diff --git a/.github/release_please/.release-please-manifest.json b/.github/release_please/.release-please-manifest.json
index ac0317144e..e3778b2c1e 100644
--- a/.github/release_please/.release-please-manifest.json
+++ b/.github/release_please/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.6.1"
+  ".": "0.6.2"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 34362b0a93..e37d34133f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,15 @@
 # Changelog
 
+## [0.6.2](https://github.com/zylon-ai/private-gpt/compare/v0.6.1...v0.6.2) (2024-08-08)
+
+
+### Bug Fixes
+
+* add numpy issue to troubleshooting ([#2048](https://github.com/zylon-ai/private-gpt/issues/2048)) ([4ca6d0c](https://github.com/zylon-ai/private-gpt/commit/4ca6d0cb556be7a598f7d3e3b00d2a29214ee1e8))
+* auto-update version ([#2052](https://github.com/zylon-ai/private-gpt/issues/2052)) ([7fefe40](https://github.com/zylon-ai/private-gpt/commit/7fefe408b4267684c6e3c1a43c5dc2b73ec61fe4))
+* publish image name ([#2043](https://github.com/zylon-ai/private-gpt/issues/2043)) ([b1acf9d](https://github.com/zylon-ai/private-gpt/commit/b1acf9dc2cbca2047cd0087f13254ff5cda6e570))
+* update matplotlib to 3.9.1-post1 to fix win install ([b16abbe](https://github.com/zylon-ai/private-gpt/commit/b16abbefe49527ac038d235659854b98345d5387))
+
 ## [0.6.1](https://github.com/zylon-ai/private-gpt/compare/v0.6.0...v0.6.1) (2024-08-05)
 
 
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 1698605dfd..be0ee63f10 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -7,7 +7,7 @@ services:
   # Private-GPT service for the Ollama CPU and GPU modes
   # This service builds from an external Dockerfile and runs the Ollama mode.
   private-gpt-ollama:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.1}-ollama # x-release-please-version
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-ollama # x-release-please-version
     build:
       context: .
       dockerfile: Dockerfile.ollama
@@ -31,7 +31,7 @@ services:
   # Private-GPT service for the local mode
   # This service builds from a local Dockerfile and runs the application in local mode.
   private-gpt-llamacpp-cpu:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.1}-llamacpp-cpu # x-release-please-version
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-llamacpp-cpu # x-release-please-version
     build:
       context: .
       dockerfile: Dockerfile.llamacpp-cpu
diff --git a/pyproject.toml b/pyproject.toml
index 10e3c2b5ca..f3912b1f65 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "private-gpt"
-version = "0.6.0"
+version = "0.6.2"
 description = "Private GPT"
 authors = ["Zylon "]
 
diff --git a/version.txt b/version.txt
index ee6cdce3c2..b616048743 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-0.6.1
+0.6.2