builder: add ollama and ui
@@ -101,10 +101,17 @@
   };
 
   # Ollama used by open-webui as llm backend
-  # services.ollama = {
-  #   enable = true;
-  #   # acceleration = "rocm";
-  # };
+  services.ollama = {
+    enable = true;
+    # acceleration = "rocm";
+    openFirewall = true;
+  };
+
+  services.nextjs-ollama-llm-ui = {
+    enable = true;
+    hostname = "192.168.3.118";
+    port = 3001;
+  };
   # services.open-webui = {
   #   enable = true;
   #   port = 8080;
@@ -114,6 +121,7 @@
 
   networking.firewall.allowedTCPPorts = [
     80
+    3001 # ollama-ui
   ];
 
   services.openssh = {
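For reference, the added lines combine into roughly the following module snippet. This is a reconstruction from the new side of the diff; indentation, the surrounding file layout, and the descriptive comments are assumptions, not part of the commit itself.

  # Ollama used by open-webui as llm backend
  services.ollama = {
    enable = true;
    # acceleration = "rocm";  # left commented in the commit; would enable AMD GPU offload
    openFirewall = true;
  };

  # Web frontend for the Ollama backend
  services.nextjs-ollama-llm-ui = {
    enable = true;
    hostname = "192.168.3.118";
    port = 3001;
  };

  networking.firewall.allowedTCPPorts = [
    80
    3001 # ollama-ui
  ];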