From 4762da12cc2d736bcb5a19208422d21b494eddfa Mon Sep 17 00:00:00 2001
From: Mohamed Bassem
Date: Mon, 22 Dec 2025 13:52:14 +0000
Subject: docs: shuffle some docs around

---
 .../03-configuration/02-different-ai-providers.md  | 88 ++++++++++++++++++++++
 docs/docs/05-integrations/01-openai.md             | 11 ---
 .../05-integrations/04-different-ai-providers.md   | 88 ----------------------
 docs/docs/06-administration/03-openai.md           | 11 +++
 docs/docs/06-administration/03-troubleshooting.md  | 46 -----------
 docs/docs/06-administration/04-server-migration.md | 71 -----------------
 .../05-legacy-container-upgrade.md                 | 66 ----------------
 docs/docs/06-administration/05-troubleshooting.md  | 46 +++++++++++
 .../06-hoarder-to-karakeep-migration.md            | 28 -------
 docs/docs/06-administration/06-server-migration.md | 71 +++++++++++++++++
 .../07-legacy-container-upgrade.md                 | 66 ++++++++++++++++
 .../08-hoarder-to-karakeep-migration.md            | 28 +++++++
 12 files changed, 310 insertions(+), 310 deletions(-)
 create mode 100644 docs/docs/03-configuration/02-different-ai-providers.md
 delete mode 100644 docs/docs/05-integrations/01-openai.md
 delete mode 100644 docs/docs/05-integrations/04-different-ai-providers.md
 create mode 100644 docs/docs/06-administration/03-openai.md
 delete mode 100644 docs/docs/06-administration/03-troubleshooting.md
 delete mode 100644 docs/docs/06-administration/04-server-migration.md
 delete mode 100644 docs/docs/06-administration/05-legacy-container-upgrade.md
 create mode 100644 docs/docs/06-administration/05-troubleshooting.md
 delete mode 100644 docs/docs/06-administration/06-hoarder-to-karakeep-migration.md
 create mode 100644 docs/docs/06-administration/06-server-migration.md
 create mode 100644 docs/docs/06-administration/07-legacy-container-upgrade.md
 create mode 100644 docs/docs/06-administration/08-hoarder-to-karakeep-migration.md

diff --git a/docs/docs/03-configuration/02-different-ai-providers.md b/docs/docs/03-configuration/02-different-ai-providers.md
new file mode 100644
index 00000000..9a86e04f
--- /dev/null
+++ b/docs/docs/03-configuration/02-different-ai-providers.md
@@ -0,0 +1,88 @@
+# Configuring different AI Providers
+
+Karakeep uses LLM providers for AI tagging and summarization. We support OpenAI-compatible providers and Ollama. This guide will show you how to configure different providers.
+
+## OpenAI
+
+If you want to use OpenAI itself, you just need to set the `OPENAI_API_KEY` environment variable.
+
+```
+OPENAI_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+
+# You can change the default models by uncommenting the following lines and choosing your model.
+# INFERENCE_TEXT_MODEL=gpt-4.1-mini
+# INFERENCE_IMAGE_MODEL=gpt-4o-mini
+```
+
+## Ollama
+
+Ollama is a local LLM provider that you can use to run your own LLM server. You'll need to pass Ollama's address to Karakeep and ensure that it's reachable from within the Karakeep container (e.g. no localhost addresses).
+
+```
+# MAKE SURE YOU DON'T HAVE OPENAI_API_KEY set, otherwise it takes precedence.
+
+OLLAMA_BASE_URL=http://ollama.mylab.com:11434
+
+# Make sure to pull the models in ollama first. Example models:
+INFERENCE_TEXT_MODEL=gemma3
+INFERENCE_IMAGE_MODEL=llava
+
+# If the model you're using doesn't support structured output, you also need:
+# INFERENCE_OUTPUT_SCHEMA=plain
+```
+
+## Gemini
+
+Gemini has an OpenAI-compatible API. You'll need an API key from Google AI Studio.
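+
+Before wiring a provider into Karakeep, you can sanity-check the base URL and key with a quick request. This is a rough sketch: it assumes the provider exposes the usual OpenAI-style `/models` listing under the base URL, which not every provider does:
+
+```
+# Substitute the same values you'd use for OPENAI_BASE_URL and OPENAI_API_KEY.
+curl -H "Authorization: Bearer YOUR_API_KEY" YOUR_OPENAI_BASE_URL/models
+```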
+
+```
+OPENAI_BASE_URL=https://generativelanguage.googleapis.com/v1beta
+OPENAI_API_KEY=YOUR_API_KEY
+
+# Example models:
+INFERENCE_TEXT_MODEL=gemini-2.0-flash
+INFERENCE_IMAGE_MODEL=gemini-2.0-flash
+```
+
+## OpenRouter
+
+```
+OPENAI_BASE_URL=https://openrouter.ai/api/v1
+OPENAI_API_KEY=YOUR_API_KEY
+
+# Example models:
+INFERENCE_TEXT_MODEL=meta-llama/llama-4-scout
+INFERENCE_IMAGE_MODEL=meta-llama/llama-4-scout
+```
+
+## Perplexity
+
+```
+OPENAI_BASE_URL=https://api.perplexity.ai
+OPENAI_API_KEY=YOUR_PERPLEXITY_API_KEY
+INFERENCE_TEXT_MODEL=sonar-pro
+INFERENCE_IMAGE_MODEL=sonar-pro
+```
+
+## Azure
+
+Azure has an OpenAI-compatible API.
+
+You can get your API key from the Overview page of the Azure AI Foundry Portal or via "Keys + Endpoints" on the resource in the Azure Portal.
+
+:::warning
+The [model name is the deployment name](https://learn.microsoft.com/en-us/azure/ai-foundry/openai/how-to/switching-endpoints#keyword-argument-for-model) you specified when deploying the model, which may differ from the base model name.
+:::
+
+```
+# Deployed via Azure AI Foundry:
+OPENAI_BASE_URL=https://{your-azure-ai-foundry-resource-name}.cognitiveservices.azure.com/openai/v1/
+
+# Deployed via Azure OpenAI Service:
+OPENAI_BASE_URL=https://{your-azure-openai-resource-name}.openai.azure.com/openai/v1/
+
+OPENAI_API_KEY=YOUR_API_KEY
+INFERENCE_TEXT_MODEL=YOUR_DEPLOYMENT_NAME
+INFERENCE_IMAGE_MODEL=YOUR_DEPLOYMENT_NAME
+```
diff --git a/docs/docs/05-integrations/01-openai.md b/docs/docs/05-integrations/01-openai.md
deleted file mode 100644
index 32218da8..00000000
--- a/docs/docs/05-integrations/01-openai.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# OpenAI Costs
-
-This service uses OpenAI for automatic tagging. This means that you'll incur some costs if automatic tagging is enabled. There are two type of inferences that we do:
-
-## Text Tagging
-
-For text tagging, we use the `gpt-4.1-mini` model. This model is [extremely cheap](https://openai.com/api/pricing). Cost per inference varies depending on the content size per article. Though, roughly, You'll be able to generate tags for almost 3000+ bookmarks for less than $1.
-
-## Image Tagging
-
-For image uploads, we use the `gpt-4o-mini` model for extracting tags from the image. You can learn more about the costs of using this model [here](https://platform.openai.com/docs/guides/images?api-mode=chat#calculating-costs). To lower the costs, we're using the low resolution mode (fixed number of tokens regardless of image size). You'll be able to run inference for 1000+ images for less than a $1.
diff --git a/docs/docs/05-integrations/04-different-ai-providers.md b/docs/docs/05-integrations/04-different-ai-providers.md
deleted file mode 100644
index 9a86e04f..00000000
--- a/docs/docs/05-integrations/04-different-ai-providers.md
+++ /dev/null
@@ -1,88 +0,0 @@
-# Configuring different AI Providers
-
-Karakeep uses LLM providers for AI tagging and summarization. We support OpenAI-compatible providers and ollama. This guide will show you how to configure different providers.
-
-## OpenAI
-
-If you want to use OpenAI itself, you just need to pass in the OPENAI_API_KEY environment variable.
-
-```
-OPENAI_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
-
-# You can change the default models by uncommenting the following lines, and choosing your model.
-# INFERENCE_TEXT_MODEL=gpt-4.1-mini
-# INFERENCE_IMAGE_MODEL=gpt-4o-mini
-```
-
-## Ollama
-
-Ollama is a local LLM provider that you can use to run your own LLM server. You'll need to pass ollama's address to karakeep and you need to ensure that it's accessible from within the karakeep container (e.g. no localhost addresses).
-
-```
-# MAKE SURE YOU DON'T HAVE OPENAI_API_KEY set, otherwise it takes precedence.
-
-OLLAMA_BASE_URL=http://ollama.mylab.com:11434
-
-# Make sure to pull the models in ollama first. Example models:
-INFERENCE_TEXT_MODEL=gemma3
-INFERENCE_IMAGE_MODEL=llava
-
-# If the model you're using doesn't support structured output, you also need:
-# INFERENCE_OUTPUT_SCHEMA=plain
-```
-
-## Gemini
-
-Gemini has an OpenAI-compatible API. You need to get an api key from Google AI Studio.
-
-```
-
-OPENAI_BASE_URL=https://generativelanguage.googleapis.com/v1beta
-OPENAI_API_KEY=YOUR_API_KEY
-
-# Example models:
-INFERENCE_TEXT_MODEL=gemini-2.0-flash
-INFERENCE_IMAGE_MODEL=gemini-2.0-flash
-```
-
-## OpenRouter
-
-```
-OPENAI_BASE_URL=https://openrouter.ai/api/v1
-OPENAI_API_KEY=YOUR_API_KEY
-
-# Example models:
-INFERENCE_TEXT_MODEL=meta-llama/llama-4-scout
-INFERENCE_IMAGE_MODEL=meta-llama/llama-4-scout
-```
-
-## Perplexity
-
-```
-OPENAI_BASE_URL: https://api.perplexity.ai
-OPENAI_API_KEY: Your Perplexity API Key
-INFERENCE_TEXT_MODEL: sonar-pro
-INFERENCE_IMAGE_MODEL: sonar-pro
-```
-
-## Azure
-
-Azure has an OpenAI-compatible API.
-
-You can get your API key from the Overview page of the Azure AI Foundry Portal or via "Keys + Endpoints" on the resource in the Azure Portal.
-
-:::warning
-The [model name is the deployment name](https://learn.microsoft.com/en-us/azure/ai-foundry/openai/how-to/switching-endpoints#keyword-argument-for-model) you specified when deploying the model, which may differ from the base model name.
-:::
-
-```
-# Deployed via Azure AI Foundry:
-OPENAI_BASE_URL=https://{your-azure-ai-foundry-resource-name}.cognitiveservices.azure.com/openai/v1/
-
-# Deployed via Azure OpenAI Service:
-OPENAI_BASE_URL=https://{your-azure-openai-resource-name}.openai.azure.com/openai/v1/
-
-OPENAI_API_KEY=YOUR_API_KEY
-INFERENCE_TEXT_MODEL=YOUR_DEPLOYMENT_NAME
-INFERENCE_IMAGE_MODEL=YOUR_DEPLOYMENT_NAME
-```
diff --git a/docs/docs/06-administration/03-openai.md b/docs/docs/06-administration/03-openai.md
new file mode 100644
index 00000000..9247d065
--- /dev/null
+++ b/docs/docs/06-administration/03-openai.md
@@ -0,0 +1,11 @@
+# Tagging Costs
+
+This service uses OpenAI for automatic tagging. This means that you'll incur some costs if automatic tagging is enabled. There are two types of inference that we do:
+
+## Text Tagging
+
+For text tagging, we use the `gpt-4.1-mini` model. This model is [extremely cheap](https://openai.com/api/pricing). Cost per inference varies with the content size of each article, but roughly, you'll be able to generate tags for 3000+ bookmarks for less than $1.
+
+## Image Tagging
+
+For image uploads, we use the `gpt-4o-mini` model for extracting tags from the image. You can learn more about the costs of using this model [here](https://platform.openai.com/docs/guides/images?api-mode=chat#calculating-costs). To lower the costs, we're using the low resolution mode (fixed number of tokens regardless of image size). You'll be able to run inference for 1000+ images for less than $1.
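+
+As a back-of-envelope illustration of why this stays cheap (the per-token price and token count below are hypothetical placeholders, not quoted OpenAI prices):
+
+```
+# Assume ~$0.40 per 1M input tokens and ~800 tokens per bookmark:
+#   3,000 bookmarks x 800 tokens = 2.4M tokens
+#   2.4M tokens x $0.40 per 1M   = ~$0.96, i.e. under $1
+```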
diff --git a/docs/docs/06-administration/03-troubleshooting.md b/docs/docs/06-administration/03-troubleshooting.md
deleted file mode 100644
index 4072442b..00000000
--- a/docs/docs/06-administration/03-troubleshooting.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# Troubleshooting
-
-## SqliteError: no such table: user
-
-This usually means that there's something wrong with the database setup (more concretely, it means that the database is not initialized). This can be caused by multiple problems:
-1. **Wiped DATA_DIR:** Your `DATA_DIR` got wiped (or the backing storage dir changed). If you did this intentionally, restart the container so that it can re-initalize the database.
-2. **Missing DATA_DIR**: You're not using the default docker compose file, and you forgot to configure the `DATA_DIR` env var. This will result into the database getting set up in a different directory than the one used by the service.
-
-## Chrome Failed to Read DnsConfig
-
-If you see this error in the logs of the chrome container, it's a benign error and you can safely ignore it. Whatever problems you're having, is unrelated to this error.
-
-## AI Tagging not working (when using OpenAI)
-
-Check the logs of the container and this will usually tell you what's wrong. Common problems are:
-1. Typo in the env variable `OPENAI_API_KEY` name resulting into logs saying something like "skipping inference as it's not configured".
-2. You forgot to call `docker compose up` after configuring open ai.
-3. OpenAI requires pre-charging the account with credits before using it, otherwise you'll get an error like "insufficient funds".
-
-## AI Tagging not working (when using Ollama)
-
-Check the logs of the container and this will usually tell you what's wrong. Common problems are:
-1. Typo in the env variable `OLLAMA_BASE_URL` name resulting into logs saying something like "skipping inference as it's not configured".
-2. You forgot to call `docker compose up` after configuring ollama.
-3. You didn't change the `INFERENCE_TEXT_MODEL` env variable, resulting into karakeep attempting to use gpt models with ollama which won't work.
-4. Ollama server is not reachable by the karakeep container. This can be caused by:
-    1. Ollama server being in a different docker network than the karakeep container.
-    2. You're using `localhost` as the `OLLAMA_BASE_URL` instead of the actual address of the ollama server. `localhost` points to the container itself, not the docker host. Check this [stackoverflow answer](https://stackoverflow.com/questions/24319662/from-inside-of-a-docker-container-how-do-i-connect-to-the-localhost-of-the-mach) to find how to correctly point to the docker host address instead.
-
-## Crawling not working
-
-Check the logs of the container and this will usually tell you what's wrong. Common problems are:
-1. You changed the name of the chrome container but didn't change the `BROWSER_WEB_URL` env variable.
-
-## Upgrading Meilisearch - Migrating the Meilisearch db version
-
-[Meilisearch](https://www.meilisearch.com/) is the database used by karakeep for searching in your bookmarks. The version used by karakeep is `1.13.3` and it is advised not to upgrade it without good reasons. If you do, you might see errors like `Your database version (1.11.1) is incompatible with your current engine version (1.13.3). To migrate data between Meilisearch versions, please follow our guide on https://www.meilisearch.com/docs/learn/update_and_migration/updating.`.
-
-Luckily we can easily workaround this:
-1. Stop the Meilisearch container.
-2. Inside the Meilisearch volume bound to `/meili_data`, erase/rename the folder called `data.ms`.
-3. Launch Meilisearch again.
-4. Login to karakeep as administrator and go to (as of v0.24.1) `Admin Settings > Background Jobs` then click on `Reindex All Bookmarks`.
-5. When the reindexing has finished, Meilisearch should be working as usual.
-
-If you run into issues, the official documentation can be found [there](https://www.meilisearch.com/docs/learn/update_and_migration/updating).
diff --git a/docs/docs/06-administration/04-server-migration.md b/docs/docs/06-administration/04-server-migration.md
deleted file mode 100644
index 147ae1ec..00000000
--- a/docs/docs/06-administration/04-server-migration.md
+++ /dev/null
@@ -1,71 +0,0 @@
-# Migrating Between Servers
-
-This guide explains how to migrate all of your data from one Karakeep server to another using the official CLI.
-
-## What the command does
-
-The migration copies user-owned data from a source server to a destination server in this order:
-
-- User settings
-- Lists (preserving hierarchy and settings)
-- RSS feeds
-- AI prompts (custom prompts and their enabled state)
-- Webhooks (URL and events)
-- Tags (ensures tags by name exist)
-- Rule engine rules (IDs remapped to destination equivalents)
-- Bookmarks (links, text, and assets)
-  - After creation, attaches the correct tags and adds to the correct lists
-
-Notes:
-- Webhook tokens cannot be read via the API, so tokens are not migrated. Re‑add them on the destination if needed.
-- Asset bookmarks are migrated by downloading the original asset and re‑uploading it to the destination. Only images and PDFs are supported for asset bookmarks.
-- Link bookmarks on the destination may be de‑duplicated if the same URL already exists.
-
-## Prerequisites
-
-- Install the CLI:
-  - NPM: `npm install -g @karakeep/cli`
-  - Docker: `docker run --rm ghcr.io/karakeep-app/karakeep-cli:release --help`
-- Collect API keys and base URLs for both servers:
-  - Source: `--server-addr`, `--api-key`
-  - Destination: `--dest-server`, `--dest-api-key`
-
-## Quick start
-
-```
-karakeep --server-addr https://src.example.com --api-key <key> migrate \
-  --dest-server https://dest.example.com \
-  --dest-api-key <key>
-```
-
-The command is long‑running and shows live progress for each phase. You will be prompted for confirmation; pass `--yes` to skip the prompt.
-
-### Options
-
-- `--server-addr <url>`: Source server base URL
-- `--api-key <key>`: API key for the source server
-- `--dest-server <url>`: Destination server base URL
-- `--dest-api-key <key>`: API key for the destination server
-- `--batch-size <size>`: Page size for bookmark migration (default 50, max 100)
-- `-y`, `--yes`: Skip the confirmation prompt
-
-## What to expect
-
-- Lists are recreated parent‑first and retain their hierarchy.
-- Feeds, prompts, webhooks, and tags are recreated by value.
-- Rules are recreated after IDs (tags, lists, feeds) are remapped to their corresponding destination IDs.
-- After each bookmark is created, the command attaches the correct tags and adds it to the correct lists.
-
-## Caveats and tips
-
-- Webhook auth tokens must be re‑entered on the destination after migration.
-- If your destination already contains data, duplicate links may be de‑duplicated; tags and list membership are still applied to the existing bookmark.
-
-## Troubleshooting
-
-- If the command exits early, you can re‑run it, but note:
-  - Tags and lists that already exist are reused.
-  - Link de‑duplication avoids duplicate link bookmarks. Notes and assets will get re-created.
-  - Rules, webhooks, rss feeds will get re-created and you'll have to manually clean them up afterwards.
-  - The progress log indicates how far it got.
-- Use a smaller `--batch-size` if your source or destination is under heavy load.
diff --git a/docs/docs/06-administration/05-legacy-container-upgrade.md b/docs/docs/06-administration/05-legacy-container-upgrade.md
deleted file mode 100644
index d95c1c1e..00000000
--- a/docs/docs/06-administration/05-legacy-container-upgrade.md
+++ /dev/null
@@ -1,66 +0,0 @@
-# Legacy Container Upgrade
-
-Karakeep's 0.16 release consolidated the web and worker containers into a single container and also dropped the need for the redis container. The legacy containers will stop being supported soon, to upgrade to the new container do the following:
-
-1. Remove the redis container and its volume if it had one.
-2. Move the environment variables that you've set exclusively to the `workers` container to the `web` container.
-3. Delete the `workers` container.
-4. Rename the web container image from `hoarder-app/hoarder-web` to `hoarder-app/hoarder`.
-
-```diff
-diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
-index cdfc908..6297563 100644
---- a/docker/docker-compose.yml
-+++ b/docker/docker-compose.yml
-@@ -1,7 +1,7 @@
- version: "3.8"
- services:
-   web:
--    image: ghcr.io/hoarder-app/hoarder-web:${KARAKEEP_VERSION:-release}
-+    image: ghcr.io/karakeep-app/karakeep:${KARAKEEP_VERSION:-release}
-     restart: unless-stopped
-     volumes:
-       - data:/data
-@@ -10,14 +10,10 @@ services:
-     env_file:
-       - .env
-     environment:
--      REDIS_HOST: redis
-       MEILI_ADDR: http://meilisearch:7700
-+      BROWSER_WEB_URL: http://chrome:9222
-+      # OPENAI_API_KEY: ...
-       DATA_DIR: /data
--  redis:
--    image: redis:7.2-alpine
--    restart: unless-stopped
--    volumes:
--      - redis:/data
-   chrome:
-     image: gcr.io/zenika-hub/alpine-chrome:123
-     restart: unless-stopped
-@@ -37,24 +33,7 @@ services:
-       MEILI_NO_ANALYTICS: "true"
-     volumes:
-       - meilisearch:/meili_data
--  workers:
--    image: ghcr.io/hoarder-app/hoarder-workers:${KARAKEEP_VERSION:-release}
--    restart: unless-stopped
--    volumes:
--      - data:/data
--    env_file:
--      - .env
--    environment:
--      REDIS_HOST: redis
--      MEILI_ADDR: http://meilisearch:7700
--      BROWSER_WEB_URL: http://chrome:9222
--      DATA_DIR: /data
--      # OPENAI_API_KEY: ...
--    depends_on:
--      web:
--        condition: service_started
- 
- volumes:
--  redis:
-   meilisearch:
-   data:
-```
diff --git a/docs/docs/06-administration/05-troubleshooting.md b/docs/docs/06-administration/05-troubleshooting.md
new file mode 100644
index 00000000..4072442b
--- /dev/null
+++ b/docs/docs/06-administration/05-troubleshooting.md
@@ -0,0 +1,46 @@
+# Troubleshooting
+
+## SqliteError: no such table: user
+
+This usually means that there's something wrong with the database setup (more concretely, it means that the database is not initialized). This can be caused by multiple problems:
+1. **Wiped DATA_DIR:** Your `DATA_DIR` got wiped (or the backing storage dir changed). If you did this intentionally, restart the container so that it can re-initialize the database.
+2. **Missing DATA_DIR**: You're not using the default docker compose file, and you forgot to configure the `DATA_DIR` env var. This will result in the database getting set up in a different directory than the one used by the service.
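+
+For reference, the relevant wiring in the default docker compose file looks roughly like this (a sketch; the service and volume names are taken from the default compose file and may differ in your setup):
+
+```
+services:
+  web:
+    image: ghcr.io/karakeep-app/karakeep:release
+    environment:
+      DATA_DIR: /data      # must match the path the data volume is mounted at
+    volumes:
+      - data:/data
+volumes:
+  data:
+```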
+
+## Chrome Failed to Read DnsConfig
+
+If you see this error in the logs of the chrome container, it's a benign error and you can safely ignore it. Whatever problem you're having is unrelated to this error.
+
+## AI Tagging not working (when using OpenAI)
+
+Check the logs of the container and this will usually tell you what's wrong. Common problems are:
+1. A typo in the env variable `OPENAI_API_KEY` name, resulting in logs saying something like "skipping inference as it's not configured".
+2. You forgot to call `docker compose up` after configuring OpenAI.
+3. OpenAI requires pre-charging the account with credits before using it; otherwise you'll get an error like "insufficient funds".
+
+## AI Tagging not working (when using Ollama)
+
+Check the logs of the container and this will usually tell you what's wrong. Common problems are:
+1. A typo in the env variable `OLLAMA_BASE_URL` name, resulting in logs saying something like "skipping inference as it's not configured".
+2. You forgot to call `docker compose up` after configuring Ollama.
+3. You didn't change the `INFERENCE_TEXT_MODEL` env variable, resulting in Karakeep attempting to use GPT models with Ollama, which won't work.
+4. The Ollama server is not reachable by the Karakeep container. This can be caused by:
+    1. The Ollama server being in a different docker network than the Karakeep container.
+    2. You're using `localhost` as the `OLLAMA_BASE_URL` instead of the actual address of the Ollama server. `localhost` points to the container itself, not the docker host. Check this [stackoverflow answer](https://stackoverflow.com/questions/24319662/from-inside-of-a-docker-container-how-do-i-connect-to-the-localhost-of-the-mach) to find out how to correctly point to the docker host address instead.
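+
+A quick way to test reachability from inside the Karakeep container (a sketch: it assumes a compose service named `web`, that `curl` is available in the image, and the example Ollama address used above):
+
+```
+# Should list your pulled models if Ollama is reachable:
+docker compose exec web curl -s http://ollama.mylab.com:11434/api/tags
+```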
+
+## Crawling not working
+
+Check the logs of the container and this will usually tell you what's wrong. Common problems are:
+1. You changed the name of the chrome container but didn't change the `BROWSER_WEB_URL` env variable.
+
+## Upgrading Meilisearch - Migrating the Meilisearch db version
+
+[Meilisearch](https://www.meilisearch.com/) is the database used by Karakeep for searching in your bookmarks. The version used by Karakeep is `1.13.3` and it is advised not to upgrade it without good reason. If you do, you might see errors like `Your database version (1.11.1) is incompatible with your current engine version (1.13.3). To migrate data between Meilisearch versions, please follow our guide on https://www.meilisearch.com/docs/learn/update_and_migration/updating.`.
+
+Luckily, we can easily work around this:
+1. Stop the Meilisearch container.
+2. Inside the Meilisearch volume bound to `/meili_data`, erase/rename the folder called `data.ms`.
+3. Launch Meilisearch again.
+4. Log in to Karakeep as administrator and go to (as of v0.24.1) `Admin Settings > Background Jobs`, then click on `Reindex All Bookmarks`.
+5. When the reindexing has finished, Meilisearch should be working as usual.
+
+If you run into issues, the official documentation can be found [here](https://www.meilisearch.com/docs/learn/update_and_migration/updating).
diff --git a/docs/docs/06-administration/06-hoarder-to-karakeep-migration.md b/docs/docs/06-administration/06-hoarder-to-karakeep-migration.md
deleted file mode 100644
index 4e309408..00000000
--- a/docs/docs/06-administration/06-hoarder-to-karakeep-migration.md
+++ /dev/null
@@ -1,28 +0,0 @@
-# Hoarder to Karakeep Migration
-
-Hoarder is rebranding to Karakeep. Due to github limitations, the old docker image might not be getting new updates after the rebranding. You might need to update your docker image to point to the new karakeep image instead by applying the following change in the docker compose file.
-
-```diff
-diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
-index cdfc908..6297563 100644
---- a/docker/docker-compose.yml
-+++ b/docker/docker-compose.yml
-@@ -1,7 +1,7 @@
- version: "3.8"
- services:
-   web:
--    image: ghcr.io/hoarder-app/hoarder:${HOARDER_VERSION:-release}
-+    image: ghcr.io/karakeep-app/karakeep:${HOARDER_VERSION:-release}
-```
-
-You can also change the `HOARDER_VERSION` environment variable but if you do so remember to change it in the `.env` file as well.
-
-## Migrating a Baremetal Installation
-
-If you previously used the [Debian/Ubuntu install script](../installation/debuntu) to install Hoarder, there is an option to migrate your installation to Karakeep.
-
-```bash
-bash karakeep-linux.sh migrate
-```
-
-This will migrate your installation with no user input required. After the migration, the script will also check for an update.
diff --git a/docs/docs/06-administration/06-server-migration.md b/docs/docs/06-administration/06-server-migration.md
new file mode 100644
index 00000000..147ae1ec
--- /dev/null
+++ b/docs/docs/06-administration/06-server-migration.md
@@ -0,0 +1,71 @@
+# Migrating Between Servers
+
+This guide explains how to migrate all of your data from one Karakeep server to another using the official CLI.
+
+## What the command does
+
+The migration copies user-owned data from a source server to a destination server in this order:
+
+- User settings
+- Lists (preserving hierarchy and settings)
+- RSS feeds
+- AI prompts (custom prompts and their enabled state)
+- Webhooks (URL and events)
+- Tags (ensures tags by name exist)
+- Rule engine rules (IDs remapped to destination equivalents)
+- Bookmarks (links, text, and assets)
+  - After creation, attaches the correct tags and adds to the correct lists
+
+Notes:
+- Webhook tokens cannot be read via the API, so tokens are not migrated. Re‑add them on the destination if needed.
+- Asset bookmarks are migrated by downloading the original asset and re‑uploading it to the destination. Only images and PDFs are supported for asset bookmarks.
+- Link bookmarks on the destination may be de‑duplicated if the same URL already exists.
+
+## Prerequisites
+
+- Install the CLI:
+  - NPM: `npm install -g @karakeep/cli`
+  - Docker: `docker run --rm ghcr.io/karakeep-app/karakeep-cli:release --help`
+- Collect API keys and base URLs for both servers:
+  - Source: `--server-addr`, `--api-key`
+  - Destination: `--dest-server`, `--dest-api-key`
+
+## Quick start
+
+```
+karakeep --server-addr https://src.example.com --api-key <key> migrate \
+  --dest-server https://dest.example.com \
+  --dest-api-key <key>
+```
+
+The command is long‑running and shows live progress for each phase. You will be prompted for confirmation; pass `--yes` to skip the prompt.
+
+### Options
+
+- `--server-addr <url>`: Source server base URL
+- `--api-key <key>`: API key for the source server
+- `--dest-server <url>`: Destination server base URL
+- `--dest-api-key <key>`: API key for the destination server
+- `--batch-size <size>`: Page size for bookmark migration (default 50, max 100)
+- `-y`, `--yes`: Skip the confirmation prompt
+
+## What to expect
+
+- Lists are recreated parent‑first and retain their hierarchy.
+- Feeds, prompts, webhooks, and tags are recreated by value.
+- Rules are recreated after IDs (tags, lists, feeds) are remapped to their corresponding destination IDs.
+- After each bookmark is created, the command attaches the correct tags and adds it to the correct lists.
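+
+If you installed the CLI via Docker rather than NPM, the same invocation looks roughly like this (a sketch based on the image shown under Prerequisites; the flags are identical to the quick start):
+
+```
+docker run --rm ghcr.io/karakeep-app/karakeep-cli:release \
+  --server-addr https://src.example.com --api-key <key> \
+  migrate --dest-server https://dest.example.com --dest-api-key <key>
+```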
+
+## Caveats and tips
+
+- Webhook auth tokens must be re‑entered on the destination after migration.
+- If your destination already contains data, duplicate links may be de‑duplicated; tags and list membership are still applied to the existing bookmark.
+
+## Troubleshooting
+
+- If the command exits early, you can re‑run it, but note:
+  - Tags and lists that already exist are reused.
+  - Link de‑duplication avoids duplicate link bookmarks. Notes and assets will get re-created.
+  - Rules, webhooks, and RSS feeds will get re-created, and you'll have to manually clean them up afterwards.
+  - The progress log indicates how far it got.
+- Use a smaller `--batch-size` if your source or destination is under heavy load.
diff --git a/docs/docs/06-administration/07-legacy-container-upgrade.md b/docs/docs/06-administration/07-legacy-container-upgrade.md
new file mode 100644
index 00000000..d95c1c1e
--- /dev/null
+++ b/docs/docs/06-administration/07-legacy-container-upgrade.md
@@ -0,0 +1,66 @@
+# Legacy Container Upgrade
+
+Karakeep's 0.16 release consolidated the web and worker containers into a single container and also dropped the need for the redis container. The legacy containers will stop being supported soon. To upgrade to the new container, do the following:
+
+1. Remove the redis container and its volume if it had one.
+2. Move the environment variables that you've set exclusively on the `workers` container to the `web` container.
+3. Delete the `workers` container.
+4. Rename the web container image from `hoarder-app/hoarder-web` to `karakeep-app/karakeep`, as shown in the diff below.
+
+```diff
+diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
+index cdfc908..6297563 100644
+--- a/docker/docker-compose.yml
++++ b/docker/docker-compose.yml
+@@ -1,7 +1,7 @@
+ version: "3.8"
+ services:
+   web:
+-    image: ghcr.io/hoarder-app/hoarder-web:${KARAKEEP_VERSION:-release}
++    image: ghcr.io/karakeep-app/karakeep:${KARAKEEP_VERSION:-release}
+     restart: unless-stopped
+     volumes:
+       - data:/data
+@@ -10,14 +10,10 @@ services:
+     env_file:
+       - .env
+     environment:
+-      REDIS_HOST: redis
+       MEILI_ADDR: http://meilisearch:7700
++      BROWSER_WEB_URL: http://chrome:9222
++      # OPENAI_API_KEY: ...
+       DATA_DIR: /data
+-  redis:
+-    image: redis:7.2-alpine
+-    restart: unless-stopped
+-    volumes:
+-      - redis:/data
+   chrome:
+     image: gcr.io/zenika-hub/alpine-chrome:123
+     restart: unless-stopped
+@@ -37,24 +33,7 @@ services:
+       MEILI_NO_ANALYTICS: "true"
+     volumes:
+       - meilisearch:/meili_data
+-  workers:
+-    image: ghcr.io/hoarder-app/hoarder-workers:${KARAKEEP_VERSION:-release}
+-    restart: unless-stopped
+-    volumes:
+-      - data:/data
+-    env_file:
+-      - .env
+-    environment:
+-      REDIS_HOST: redis
+-      MEILI_ADDR: http://meilisearch:7700
+-      BROWSER_WEB_URL: http://chrome:9222
+-      DATA_DIR: /data
+-      # OPENAI_API_KEY: ...
+-    depends_on:
+-      web:
+-        condition: service_started
+ 
+ volumes:
+-  redis:
+   meilisearch:
+   data:
+```
diff --git a/docs/docs/06-administration/08-hoarder-to-karakeep-migration.md b/docs/docs/06-administration/08-hoarder-to-karakeep-migration.md
new file mode 100644
index 00000000..4e309408
--- /dev/null
+++ b/docs/docs/06-administration/08-hoarder-to-karakeep-migration.md
@@ -0,0 +1,28 @@
+# Hoarder to Karakeep Migration
+
+Hoarder is rebranding to Karakeep. Due to GitHub limitations, the old Docker image might not be getting new updates after the rebranding. You'll need to update your Docker image to point to the new Karakeep image by applying the following change in the docker compose file.
+
+```diff
+diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
+index cdfc908..6297563 100644
+--- a/docker/docker-compose.yml
++++ b/docker/docker-compose.yml
+@@ -1,7 +1,7 @@
+ version: "3.8"
+ services:
+   web:
+-    image: ghcr.io/hoarder-app/hoarder:${HOARDER_VERSION:-release}
++    image: ghcr.io/karakeep-app/karakeep:${HOARDER_VERSION:-release}
+```
+
+You can also change the `HOARDER_VERSION` environment variable, but if you do, remember to change it in the `.env` file as well.
+
+## Migrating a Baremetal Installation
+
+If you previously used the [Debian/Ubuntu install script](../installation/debuntu) to install Hoarder, there is an option to migrate your installation to Karakeep.
+
+```bash
+bash karakeep-linux.sh migrate
+```
+
+This will migrate your installation with no user input required. After the migration, the script will also check for an update.
-- 
cgit v1.2.3-70-g09d2