Hello! I have been struggling through a few tutorials on getting a Lemmy instance to work correctly when set up with Docker.
I have it mostly done, but each attempt runs into issues I do not have the knowledge to properly correct.
I am familiar with Docker, and I already have an Oracle VPS set up with ARM64 Ubuntu.
I already have Portainer and an NGINX proxy set up and working okay, and an existing Lemmy instance that is "running" but not quite working.
My best guess is that I need help getting the docker-compose to work with current updates/settings, as well as the config.hjson.
TIA, and I can't wait to have my own entry into the fediverse working right!
version: "3.3"

networks:
  # communication to web and clients
  lemmyexternalproxy:
  # communication between lemmy services
  lemmyinternal:
    driver: bridge
    internal: true

services:
  lemmy:
    image: dessalines/lemmy
    # this hostname is used in the nginx reverse proxy and also for lemmy-ui to connect to the backend, do not change
    hostname: lemmy
    networks:
      - lemmyinternal
    restart: always
    environment:
      - RUST_LOG="warn,lemmy_server=debug,lemmy_api=debug,lemmy_api_common=debug,lemmy_api_crud=debug,lemmy_apub=debug,lemmy_db_schema=debug,lemmy_db_views=debug,lemmy_db_views_actor=debug,lemmy_db_views_moderator=debug,lemmy_routes=debug,lemmy_utils=debug,lemmy_websocket=debug"
      - RUST_BACKTRACE=full
    volumes:
      - ./lemmy.hjson:/config/config.hjson:Z
    depends_on:
      - postgres
      - pictrs

  lemmy-ui:
    image: dessalines/lemmy-ui
    # use this to build your local lemmy-ui image for development
    # run: docker compose up --build
    # assuming lemmy-ui is cloned beside the lemmy directory
    # build:
    #   context: ../../lemmy-ui
    #   dockerfile: dev.dockerfile
    networks:
      - lemmyinternal
    environment:
      # this needs to match the hostname defined in the lemmy service
      - LEMMY_UI_LEMMY_INTERNAL_HOST=lemmy:8536
      # set the outside hostname here
      - LEMMY_UI_LEMMY_EXTERNAL_HOST=lemmy.bulwarkob.com:1236
      - LEMMY_HTTPS=false
      - LEMMY_UI_DEBUG=true
    depends_on:
      - lemmy
    restart: always

  pictrs:
    image: asonix/pictrs:0.4.0-beta.19
    # this needs to match the pictrs url in lemmy.hjson
    hostname: pictrs
    # we can pass options to pict-rs like this; here we set the max image size and force conversion to webp
    # entrypoint: /sbin/tini -- /usr/local/bin/pict-rs -p /mnt -m 4 --image-format webp
    networks:
      - lemmyinternal
    environment:
      - PICTRS_OPENTELEMETRY_URL=http://otel:4137
      - PICTRS__API_KEY=API_KEY
      - RUST_LOG=debug
      - RUST_BACKTRACE=full
      - PICTRS__MEDIA__VIDEO_CODEC=vp9
      - PICTRS__MEDIA__GIF__MAX_WIDTH=256
      - PICTRS__MEDIA__GIF__MAX_HEIGHT=256
      - PICTRS__MEDIA__GIF__MAX_AREA=65536
      - PICTRS__MEDIA__GIF__MAX_FRAME_COUNT=400
    user: 991:991
    volumes:
      - ./volumes/pictrs:/mnt:Z
    restart: always

  postgres:
    image: postgres:15-alpine
    # this needs to match the database host in lemmy.hjson
    # Tune your settings via
    # https://pgtune.leopard.in.ua/#/
    # You can use this technique to add them here
    # https://stackoverflow.com/a/30850095/1655478
    hostname: postgres
    command:
      [
        "postgres",
        "-c",
        "session_preload_libraries=auto_explain",
        "-c",
        "auto_explain.log_min_duration=5ms",
        "-c",
        "auto_explain.log_analyze=true",
        "-c",
        "track_activity_query_size=1048576",
      ]
    networks:
      - lemmyinternal
      # adding the external facing network to allow direct db access for devs
      - lemmyexternalproxy
    ports:
      # use a different port so it doesn't conflict with a potential postgres db running on the host
      - "5433:5432"
    environment:
      - POSTGRES_USER=noUsrHere
      - POSTGRES_PASSWORD=noPassHere
      - POSTGRES_DB=noDbHere
    volumes:
      - ./volumes/postgres:/var/lib/postgresql/data:Z
    restart: always
The NGINX I am using is not the one that came with the stack, but a separate single container running nginx-proxy-manager. I did not customize the conf it installed with; I only used the UI to set up the proxy host and SSL, both of which are working (the front end, at least). The config seems unrelated to this, but I can share it if the rest of the information below is not enough.
{
  # for more info about the config, check out the documentation
  # https://join-lemmy.org/docs/en/administration/configuration.html
  # only a few config options are covered in this example config

  setup: {
    # username for the admin user
    admin_username: "noUsrHere"
    # password for the admin user
    admin_password: "noPassHere"
    # name of the site (can be changed later)
    site_name: "Bulwark of Boredom"
  }

  # the domain name of your instance (eg "lemmy.ml")
  hostname: "lemmy.bulwarkob.com"
  # address where lemmy should listen for incoming requests
  bind: "0.0.0.0"
  # port where lemmy should listen for incoming requests
  port: 8536
  # Whether the site is available over TLS. Needs to be true for federation to work.
  tls_enabled: true

  # pictrs host
  pictrs: {
    url: "http://pictrs:8080/"
    api_key: "API_KEY"
  }

  # settings related to the postgresql database
  database: {
    # name of the postgres database for lemmy
    database: "noDbHere"
    # username to connect to postgres
    user: "noUsrHere"
    # password to connect to postgres
    password: "noPassHere"
    # host where postgres is running
    host: "postgres"
    # port where postgres can be accessed
    port: 5432
    # maximum number of active sql connections
    pool_size: 5
  }
}
I am not certain whether I am somehow looking at the wrong config location inside the container. There is no volume or link mapping a conf file from host to container, so I am just grabbing it from the default location, /etc/nginx/nginx.conf:
# run nginx in foreground
daemon off;
pid /run/nginx/nginx.pid;
user npm;

# Set number of worker processes automatically based on number of CPU cores.
worker_processes auto;

# Enables the use of JIT for regular expressions to speed-up their processing.
pcre_jit on;

error_log /data/logs/fallback_error.log warn;

# Includes files with directives to load dynamic modules.
include /etc/nginx/modules/*.conf;

events {
    include /data/nginx/custom/events[.]conf;
}

http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;
    sendfile on;
    server_tokens off;
    tcp_nopush on;
    tcp_nodelay on;
    client_body_temp_path /tmp/nginx/body 1 2;
    keepalive_timeout 90s;
    proxy_connect_timeout 90s;
    proxy_send_timeout 90s;
    proxy_read_timeout 90s;
    ssl_prefer_server_ciphers on;
    gzip on;
    proxy_ignore_client_abort off;
    client_max_body_size 2000m;
    server_names_hash_bucket_size 1024;
    proxy_http_version 1.1;
    proxy_set_header X-Forwarded-Scheme $scheme;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    proxy_set_header Accept-Encoding "";
    proxy_cache off;
    proxy_cache_path /var/lib/nginx/cache/public levels=1:2 keys_zone=public-cache:30m max_size=192m;
    proxy_cache_path /var/lib/nginx/cache/private levels=1:2 keys_zone=private-cache:5m max_size=1024m;

    log_format proxy '[$time_local] $upstream_cache_status $upstream_status $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] [Sent-to $server] "$http_user_agent" "$http_referer"';
    log_format standard '[$time_local] $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] "$http_user_agent" "$http_referer"';

    access_log /data/logs/fallback_access.log proxy;

    # Dynamically generated resolvers file
    include /etc/nginx/conf.d/include/resolvers.conf;

    # Default upstream scheme
    map $host $forward_scheme {
        default http;
    }

    # Real IP Determination
    # Local subnets:
    set_real_ip_from 10.0.0.0/8;
    set_real_ip_from 172.16.0.0/12; # Includes Docker subnet
    set_real_ip_from 192.168.0.0/16;
    # NPM generated CDN ip ranges:
    include conf.d/include/ip_ranges.conf;
    # always put the following 2 lines after ip subnets:
    real_ip_header X-Real-IP;
    real_ip_recursive on;

    # Custom
    include /data/nginx/custom/http_top[.]conf;

    # Files generated by NPM
    include /etc/nginx/conf.d/*.conf;
    include /data/nginx/default_host/*.conf;
    include /data/nginx/proxy_host/*.conf;
    include /data/nginx/redirection_host/*.conf;
    include /data/nginx/dead_host/*.conf;
    include /data/nginx/temp/*.conf;

    # Custom
    include /data/nginx/custom/http[.]conf;
}

stream {
    # Files generated by NPM
    include /data/nginx/stream/*.conf;

    # Custom
    include /data/nginx/custom/stream[.]conf;
}

# Custom
include /data/nginx/custom/root[.]conf;
I may be mistaken in how I am proceeding, but as many others are reporting, the docker-compose and general Docker instructions in the install guide don't quite seem to work as expected. I have been piecing this together, and the included lemmy nginx service container was completely excluded (edited out/deleted) once I had the standalone nginx-proxy-manager set up and working for the regular 80/443 -> 1234 proxy requests to the lemmy-ui container.
Does the lemmy nginx have a specific role or tie-in? I am still fairly new to reverse proxying in general.
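My guess, from skimming the example files in the repo, is that the bundled nginx is not just a TLS terminator: it is the piece that splits traffic between the Rust backend (lemmy:8536) and lemmy-ui (1234). Roughly something like the sketch below, which is my paraphrase from memory of the example nginx_internal.conf rather than a copy of it, using the hostnames/ports from my compose file:

# Rough paraphrase of what I understand the bundled lemmy nginx to do.
upstream lemmy {
    # the Rust backend from my docker-compose
    server "lemmy:8536";
}
upstream lemmy-ui {
    # the front end from my docker-compose
    server "lemmy-ui:1234";
}

server {
    listen 80;
    server_name lemmy.bulwarkob.com;

    location / {
        # normal browser page loads go to lemmy-ui by default...
        set $proxpass "http://lemmy-ui";
        # ...but ActivityPub/JSON requests go to the backend...
        if ($http_accept ~ "^application/.*$") {
            set $proxpass "http://lemmy";
        }
        # ...as do the API, pictrs, feeds, nodeinfo and .well-known paths.
        if ($request_uri ~ "^/(api|pictrs|feeds|nodeinfo|.well-known)") {
            set $proxpass "http://lemmy";
        }
        proxy_pass $proxpass;

        # it also sets WebSocket upgrade headers for the API
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header Host $host;
    }
}

If that is roughly right, then after deleting it nothing in my setup is doing that split any more.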
All containers are running. I handle them with Portainer, though I build the stack from the CLI in the lemmy directory, so Portainer can't fully manage them. Restarts, logs, networking, and so on work fine, though.
As for the nginx config: the nginx-proxy-manager I use currently has all of its proxy-host settings configured from the web GUI, where I set up the proxy host and SSL information. I made no manual edits to any configuration files or settings of the container during or after compose; only GUI actions.
When looking at the nginx.conf I replied with above (my current conf), I do not see anything related to the proxy host I created in the GUI. I am not sure whether that is normal, or whether I have included the wrong .conf here.
With that in mind, would you suggest I simply overwrite my existing conf file, and/or add your snippet to it? For reference, these are the include lines from that nginx.conf:
include /etc/nginx/conf.d/*.conf;
include /data/nginx/default_host/*.conf;
include /data/nginx/proxy_host/*.conf;
include /data/nginx/redirection_host/*.conf;
include /data/nginx/dead_host/*.conf;
include /data/nginx/temp/*.conf;
conf.d/* has two configurations that appear to be some form of default: default.conf and production.conf. production.conf is only for the admin GUI.
default.conf:
The container has a volume mapping /lemmy/docker/nginx-proxy-manager/data:/data.
I have those folders and more, and they DO seem to contain the correct custom items.
Specifically, the proxy_host folder has a configuration for the proxy host I set up in the GUI (1.conf):
# ------------------------------------------------------------
# lemmy.bulwarkob.com
# ------------------------------------------------------------

server {
    set $forward_scheme http;
    set $server "172.24.0.5";
    set $port 1234;

    listen 80;
    listen [::]:80;
    listen 443 ssl http2;
    listen [::]:443 ssl http2;

    server_name lemmy.bulwarkob.com;

    # Let's Encrypt SSL
    include conf.d/include/letsencrypt-acme-challenge.conf;
    include conf.d/include/ssl-ciphers.conf;
    ssl_certificate /etc/letsencrypt/live/npm-1/fullchain.pem;
    ssl_certificate_key /etc/letsencrypt/live/npm-1/privkey.pem;

    # Block Exploits
    include conf.d/include/block-exploits.conf;

    # Force SSL
    include conf.d/include/force-ssl.conf;

    access_log /data/logs/proxy-host-1_access.log proxy;
    error_log /data/logs/proxy-host-1_error.log warn;

    location / {
        # Proxy!
        include conf.d/include/proxy.conf;
    }

    # Custom
    include /data/nginx/custom/server_proxy[.]conf;
}
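Reading that back, everything under location / (including /api and the federation endpoints) is forwarded to the lemmy-ui container on port 1234, and nothing is ever sent to the lemmy backend on 8536. If that is the actual problem, I assume the missing piece would be something along these lines added to this server block (via NPM's custom locations or advanced config). This is an untested guess, not something I have applied: the "lemmy" hostname and port come from my compose file, and the NPM container would also need to be on a network where it can actually reach that container.

# Hypothetical addition, not currently in my config: send API, pictrs,
# feeds, nodeinfo and .well-known traffic to the lemmy backend instead
# of lemmy-ui. Assumes the NPM container can resolve and reach "lemmy".
location ~ ^/(api|pictrs|feeds|nodeinfo|\.well-known) {
    proxy_pass http://lemmy:8536;
    proxy_set_header Host $host;
    proxy_set_header X-Real-IP $remote_addr;
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}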
I am able to connect to my Lemmy UI site and navigate it. When I try to view "Communities", I just get a spinning wheel and never get results; the same happens when I try to log in or create an account.
I have checked the logs for all the containers, and so far the only error is on the lemmy_lemmy_1 container, shown below (the error is the last entry, labelled DEBUG):
2023-06-07T12:08:27.532769Z INFO lemmy_db_schema::utils: Running Database migrations (This may take a long time)...
2023-06-07T12:08:27.538649Z INFO lemmy_db_schema::utils: Database migrations complete.
2023-06-07T12:08:27.551889Z INFO lemmy_server::code_migrations: Running user_updates_2020_04_02
2023-06-07T12:08:27.555253Z INFO lemmy_server::code_migrations: 0 person rows updated.
2023-06-07T12:08:27.555611Z INFO lemmy_server::code_migrations: Running community_updates_2020_04_02
2023-06-07T12:08:27.557394Z INFO lemmy_server::code_migrations: 0 community rows updated.
2023-06-07T12:08:27.557720Z INFO lemmy_server::code_migrations: Running post_updates_2020_04_03
2023-06-07T12:08:27.559061Z INFO lemmy_server::code_migrations: 0 post rows updated.
2023-06-07T12:08:27.559241Z INFO lemmy_server::code_migrations: Running comment_updates_2020_04_03
2023-06-07T12:08:27.562723Z INFO lemmy_server::code_migrations: 0 comment rows updated.
2023-06-07T12:08:27.562972Z INFO lemmy_server::code_migrations: Running private_message_updates_2020_05_05
2023-06-07T12:08:27.563976Z INFO lemmy_server::code_migrations: 0 private message rows updated.
2023-06-07T12:08:27.564197Z INFO lemmy_server::code_migrations: Running post_thumbnail_url_updates_2020_07_27
2023-06-07T12:08:27.565019Z INFO lemmy_server::code_migrations: 0 Post thumbnail_url rows updated.
2023-06-07T12:08:27.565194Z INFO lemmy_server::code_migrations: Running apub_columns_2021_02_02
2023-06-07T12:08:27.566016Z INFO lemmy_server::code_migrations: Running instance_actor_2021_09_29
2023-06-07T12:08:27.572498Z INFO lemmy_server::code_migrations: Running regenerate_public_keys_2022_07_05
2023-06-07T12:08:27.573440Z INFO lemmy_server::code_migrations: Running initialize_local_site_2022_10_10
federation enabled, host is lemmy.bulwarkob.com
Starting http server at 0.0.0.0:8536
2023-06-07T12:08:27.605442Z INFO lemmy_server::scheduled_tasks: Updating active site and community aggregates ...
2023-06-07T12:08:27.624775Z INFO lemmy_server::scheduled_tasks: Done.
2023-06-07T12:08:27.624792Z INFO lemmy_server::scheduled_tasks: Updating banned column if it expires ...
2023-06-07T12:08:27.625406Z INFO lemmy_server::scheduled_tasks: Reindexing table concurrently post_aggregates ...
2023-06-07T12:08:27.705125Z INFO lemmy_server::scheduled_tasks: Done.
2023-06-07T12:08:27.705146Z INFO lemmy_server::scheduled_tasks: Reindexing table concurrently comment_aggregates ...
2023-06-07T12:08:27.728006Z INFO lemmy_server::scheduled_tasks: Done.
2023-06-07T12:08:27.728027Z INFO lemmy_server::scheduled_tasks: Reindexing table concurrently community_aggregates ...
2023-06-07T12:08:27.756015Z INFO lemmy_server::scheduled_tasks: Done.
2023-06-07T12:08:27.756146Z INFO lemmy_server::scheduled_tasks: Clearing old activities...
2023-06-07T12:08:27.757461Z INFO lemmy_server::scheduled_tasks: Done.
2023-06-07T12:11:53.210731Z DEBUG HTTP request{http.method=GET http.scheme="http" http.host=lemmy.bulwarkob.com http.target=/api/v3/post/list otel.kind="server" request_id=254f6c39-9146-42e9-9353-06c0e6c1cea4}:perform{self=GetPosts { type_: Some(Local), sort: Some(Active), page: Some(1), limit: Some(40), community_id: None, community_name: None, saved_only: None, auth: None }}: lemmy_db_views::post_view: Post View Query: Query { sql: "SELECT \"post\".\"id\", \"post\".\"name\", \"post\".\"url\", \"post\".\"body\", \"post\".\"creator_id\", \"post\".\"community_id\", \"post\".\"removed\", \"post\".\"locked\", \"post\".\"published\", \"post\".\"updated\", \"post\".\"deleted\", \"post\".\"nsfw\", \"post\".\"embed_title\", \"post\".\"embed_description\", \"post\".\"embed_video_url\", \"post\".\"thumbnail_url\", \"post\".\"ap_id\", \"post\".\"local\", \"post\".\"language_id\", \"post\".\"featured_community\", \"post\".\"featured_local\", \"person\".\"id\", \"person\".\"name\", \"person\".\"display_name\", \"person\".\"avatar\", \"person\".\"banned\", \"person\".\"published\", \"person\".\"updated\", \"person\".\"actor_id\", \"person\".\"bio\", \"person\".\"local\", \"person\".\"banner\", \"person\".\"deleted\", \"person\".\"inbox_url\", \"person\".\"shared_inbox_url\", \"person\".\"matrix_user_id\", \"person\".\"admin\", \"person\".\"bot_account\", \"person\".\"ban_expires\", \"person\".\"instance_id\", \"community\".\"id\", \"community\".\"name\", \"community\".\"title\", \"community\".\"description\", \"community\".\"removed\", \"community\".\"published\", \"community\".\"updated\", \"community\".\"deleted\", \"community\".\"nsfw\", \"community\".\"actor_id\", \"community\".\"local\", \"community\".\"icon\", \"community\".\"banner\", \"community\".\"hidden\", \"community\".\"posting_restricted_to_mods\", \"community\".\"instance_id\", \"community_person_ban\".\"id\", \"community_person_ban\".\"community_id\", \"community_person_ban\".\"person_id\", \"community_person_ban\".\"published\", \"community_person_ban\".\"expires\", \"post_aggregates\".\"id\", \"post_aggregates\".\"post_id\", \"post_aggregates\".\"comments\", \"post_aggregates\".\"score\", \"post_aggregates\".\"upvotes\", \"post_aggregates\".\"downvotes\", \"post_aggregates\".\"published\", \"post_aggregates\".\"newest_comment_time_necro\", \"post_aggregates\".\"newest_comment_time\", \"post_aggregates\".\"featured_community\", \"post_aggregates\".\"featured_local\", \"community_follower\".\"id\", \"community_follower\".\"community_id\", \"community_follower\".\"person_id\", \"community_follower\".\"published\", \"community_follower\".\"pending\", \"post_saved\".\"id\", \"post_saved\".\"post_id\", \"post_saved\".\"person_id\", \"post_saved\".\"published\", \"post_read\".\"id\", \"post_read\".\"post_id\", \"post_read\".\"person_id\", \"post_read\".\"published\", \"person_block\".\"id\", \"person_block\".\"person_id\", \"person_block\".\"target_id\", \"person_block\".\"published\", \"post_like\".\"score\", coalesce((\"post_aggregates\".\"comments\" - \"person_post_aggregates\".\"read_comments\"), \"post_aggregates\".\"comments\") FROM ((((((((((((\"post\" INNER JOIN \"person\" ON (\"post\".\"creator_id\" = \"person\".\"id\")) INNER JOIN \"community\" ON (\"post\".\"community_id\" = \"community\".\"id\")) LEFT OUTER JOIN \"community_person_ban\" ON (((\"post\".\"community_id\" = \"community_person_ban\".\"community_id\") AND (\"community_person_ban\".\"person_id\" = \"post\".\"creator_id\")) AND ((\"community_person_ban\".\"expires\" IS 
NULL) OR (\"community_person_ban\".\"expires\" > CURRENT_TIMESTAMP)))) INNER JOIN \"post_aggregates\" ON (\"post_aggregates\".\"post_id\" = \"post\".\"id\")) LEFT OUTER JOIN \"community_follower\" ON ((\"post\".\"community_id\" = \"community_follower\".\"community_id\") AND (\"community_follower\".\"person_id\" = $1))) LEFT OUTER JOIN \"post_saved\" ON ((\"post\".\"id\" = \"post_saved\".\"post_id\") AND (\"post_saved\".\"person_id\" = $2))) LEFT OUTER JOIN \"post_read\" ON ((\"post\".\"id\" = \"post_read\".\"post_id\") AND (\"post_read\".\"person_id\" = $3))) LEFT OUTER JOIN \"person_block\" ON ((\"post\".\"creator_id\" = \"person_block\".\"target_id\") AND (\"person_block\".\"person_id\" = $4))) LEFT OUTER JOIN \"community_block\" ON ((\"community\".\"id\" = \"community_block\".\"community_id\") AND (\"community_block\".\"person_id\" = $5))) LEFT OUTER JOIN \"post_like\" ON ((\"post\".\"id\" = \"post_like\".\"post_id\") AND (\"post_like\".\"person_id\" = $6))) LEFT OUTER JOIN \"person_post_aggregates\" ON ((\"post\".\"id\" = \"person_post_aggregates\".\"post_id\") AND (\"person_post_aggregates\".\"person_id\" = $7))) LEFT OUTER JOIN \"local_user_language\" ON ((\"post\".\"language_id\" = \"local_user_language\".\"language_id\") AND (\"local_user_language\".\"local_user_id\" = $8))) WHERE ((((((((\"community\".\"local\" = $9) AND ((\"community\".\"hidden\" = $10) OR (\"community_follower\".\"person_id\" = $11))) AND (\"post\".\"nsfw\" = $12)) AND (\"community\".\"nsfw\" = $13)) AND (\"post\".\"removed\" = $14)) AND (\"post\".\"deleted\" = $15)) AND (\"community\".\"removed\" = $16)) AND (\"community\".\"deleted\" = $17)) ORDER BY \"post_aggregates\".\"featured_local\" DESC , hot_rank(\"post_aggregates\".\"score\", \"post_aggregates\".\"newest_comment_time_necro\") DESC , \"post_aggregates\".\"newest_comment_time_necro\" DESC LIMIT $18 OFFSET $19", binds: [-1, -1, -1, -1, -1, -1, -1, -1, true, false, -1, false, false, false, false, false, false, 40, 0] }
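One more thought after staring at that log: the GET /api/v3/post/list request in the DEBUG entry did reach the backend (presumably from lemmy-ui's server-side render), so the containers seem able to talk to each other internally. I believe the lemmy-ui version I am running also connects to the backend from the browser over a WebSocket (the RUST_LOG line in my compose even mentions lemmy_websocket), and I think that endpoint is /api/v3/ws, though I am not certain. If so, the proxy would presumably also need the upgrade headers for that path. Something like this is what I have in mind, again as an untested guess using the hostname/port from my compose file:

# Hypothetical WebSocket location for the API endpoint I believe the UI
# connects to from the browser. Same assumption as above: "lemmy" must be
# reachable from the nginx-proxy-manager container.
location /api/v3/ws {
    proxy_pass http://lemmy:8536;
    proxy_http_version 1.1;
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection "upgrade";
    proxy_set_header Host $host;
}

nginx-proxy-manager also has a "Websockets Support" toggle on the proxy host, which I assume does something similar for the main location, but I do not know whether that alone would be enough here.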