nixpkgs/pkgs/by-name/gp/gpt4all/package.nix

{ lib
, config
, stdenv
, fetchFromGitHub
, fetchurl
, cmake
, qt6
, fmt
, shaderc
, vulkan-headers
, wayland
, cudaSupport ? config.cudaSupport
, cudaPackages ? { }
, autoAddDriverRunpath
}:
stdenv.mkDerivation (finalAttrs: {
  pname = "gpt4all";
  version = "3.3.0";
  src = fetchFromGitHub {
    fetchSubmodules = true;
    hash = "sha256-aez/APsei30Tp1em/RDCuq+v8hOavHq4O9qZahrsF/g=";
    owner = "nomic-ai";
    repo = "gpt4all";
    rev = "v${finalAttrs.version}";
  };
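  # The text embedding model is fetched at build time and installed into
  # $out/resources in postInstall; embedding-local.patch presumably points the
  # application at this local copy instead of downloading it at run time.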
  embed_model = fetchurl {
    url = "https://gpt4all.io/models/gguf/nomic-embed-text-v1.5.f16.gguf";
    hash = "sha256-969vZoAvTfhu2hD+m7z8dcOVYr7Ujvas5xmiUc8cL9s=";
  };
  patches = [
    ./embedding-local.patch
  ];
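  # Build only the Qt chat application from the gpt4all-chat subdirectory of
  # the source tree.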
  sourceRoot = "${finalAttrs.src.name}/gpt4all-chat";
  nativeBuildInputs = [
    cmake
    qt6.wrapQtAppsHook
  ] ++ lib.optionals cudaSupport [
    cudaPackages.cuda_nvcc
    autoAddDriverRunpath
  ];
  buildInputs = [
    fmt
    qt6.qtwayland
    qt6.qtquicktimeline
    qt6.qtsvg
    qt6.qthttpserver
    qt6.qtwebengine
    qt6.qt5compat
    qt6.qttools
    shaderc
    vulkan-headers
    wayland
  ] ++ lib.optionals cudaSupport (
    with cudaPackages;
    [
      cuda_cccl
      cuda_cudart
      libcublas
    ]
  );
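  # Build the vendored Kompute backend against the vulkan-headers and fmt from
  # nixpkgs instead of its bundled copies, and skip its Vulkan version check.
  # The CUDA backend is only built when cudaSupport is enabled.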
  cmakeFlags = [
    "-DKOMPUTE_OPT_USE_BUILT_IN_VULKAN_HEADER=OFF"
    "-DKOMPUTE_OPT_DISABLE_VULKAN_VERSION_CHECK=ON"
    "-DKOMPUTE_OPT_USE_BUILT_IN_FMT=OFF"
  ] ++ lib.optionals (!cudaSupport) [
    "-DLLMODEL_CUDA=OFF"
  ];
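  # Remove development leftovers, rename the installed `chat` binary to match
  # meta.mainProgram, and install the embedding model together with the
  # desktop entry and icon shipped in the repository.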
  postInstall = ''
    rm -rf $out/include
    rm -rf $out/lib/*.a
    mv $out/bin/chat $out/bin/${finalAttrs.meta.mainProgram}
    install -D ${finalAttrs.embed_model} $out/resources/nomic-embed-text-v1.5.f16.gguf
    install -m 444 -D $src/gpt4all-chat/flatpak-manifest/io.gpt4all.gpt4all.desktop $out/share/applications/io.gpt4all.gpt4all.desktop
    install -m 444 -D $src/gpt4all-chat/icons/nomic_logo.svg $out/share/icons/hicolor/scalable/apps/io.gpt4all.gpt4all.svg
    substituteInPlace $out/share/applications/io.gpt4all.gpt4all.desktop \
      --replace-fail 'Exec=chat' 'Exec=${finalAttrs.meta.mainProgram}'
  '';
  meta = {
    changelog = "https://github.com/nomic-ai/gpt4all/releases/tag/v${finalAttrs.version}";
    description = "Free-to-use, locally running, privacy-aware chatbot. No GPU or internet required";
    homepage = "https://github.com/nomic-ai/gpt4all";
    license = lib.licenses.mit;
    mainProgram = "gpt4all";
    maintainers = with lib.maintainers; [ polygon titaniumtown ];
  };
})