
I have a Django server that uses WebSockets to send real-time updates to web clients. This all runs perfectly fine locally (with manage.py runserver), but in production I am running into the problem that most messages are simply not sent at all. I test this by opening two browsers and making a change in one, which should then be reflected in the other browser. As I said, this works locally, but not in production. In production some WebSocket messages are sent by the server and received by the web client, but maybe 20% or so? The rest are just never sent.

# /websockets/__init__.py

import logging
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from djangorestframework_camel_case.util import camelize

logger = logging.getLogger("django.server.ws.critical-notes")


def ws_send(model_type, action, model_data, user_ids):
    logger.info(f"Called ws_send for model {model_type}, action {action}, user_ids: {user_ids}")

    channel_layer = get_channel_layer()
    for user_id in user_ids:
        group_name = f"user-{user_id}"
        async_to_sync(channel_layer.group_send)(
            group_name,
            {
                "type": "send.data",  # this calls Consumer.send_data
                "data": {"type": model_type, "action": action, "model": camelize(model_data)},
            },
        )
# /websockets/consumers.py

import logging
from channels.generic.websocket import AsyncJsonWebsocketConsumer
from channels.db import database_sync_to_async
from django.db import close_old_connections
from knox.auth import TokenAuthentication

logger = logging.getLogger("django.server.ws.critical-notes")


class Consumer(AsyncJsonWebsocketConsumer):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.group_name = None

    @database_sync_to_async
    def get_user(self, token):
        try:
            auth = TokenAuthentication()
            user, auth_token = auth.authenticate_credentials(token.encode("utf-8"))
            return user
        except Exception as e:
            logger.warning("User not found: %s", e)
            return None

    # User can authenticate by sending a JSON message containing a token
    async def receive_json(self, content, **kwargs):
        token = content.get("token")
        if token is None:
            return

        user = await self.get_user(token)
        close_old_connections()

        if user is not None:
            self.group_name = f"user-{user.pk}"
            logger.info(f"WS connected to {self.group_name}")
            await self.channel_layer.group_add(self.group_name, self.channel_name)

    # This gets called by /websockets/__init__.py sending an event with type "send.data"
    async def send_data(self, event):
        data = event["data"]
        logger.info(f"Sending event to WS user {self.group_name}: {data}")
        await self.send_json(content=data)

    # Clean up on disconnect
    async def disconnect(self, close_code):
        if self.group_name is not None:
            logger.info(f"WS disconnected: {self.group_name}")
            await self.channel_layer.group_discard(self.group_name, self.channel_name)

        await self.close()
# /websockets/routing.py

from django.urls import re_path

from .consumers import Consumer

websocket_urlpatterns = [
    re_path(r"ws", Consumer.as_asgi()),
]

With all this in place, I can do something like this in my Django views and models:

from websockets import ws_send
ws_send("model_name", "create", data, user_ids)

This notifies the clients of these user_ids that a certain model was created, together with its data.

When I look at the server logs, I can see that the "Called ws_send for model ..." line is consistently logged, but the "Sending event to WS user ..." line is missing.

Some server setup info:

# settings.py

ASGI_APPLICATION = "criticalnotes.asgi.application"
WSGI_APPLICATION = "criticalnotes.wsgi.application"
CHANNEL_LAYERS = {"default": {"BACKEND": "channels.layers.InMemoryChannelLayer"}}
# asgi.py

import os
import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "criticalnotes.settings")
django.setup()

from channels.routing import ProtocolTypeRouter, URLRouter
from django.core.asgi import get_asgi_application

import criticalnotes.websockets.routing

application = ProtocolTypeRouter(
    {
        "http": get_asgi_application(),
        "websocket": URLRouter(criticalnotes.websockets.routing.websocket_urlpatterns),
    }
)

I’m using Nginx with uvicorn on the server:

# /etc/systemd/system/api.critical-notes.com.service

[Unit]
Description=api.critical-notes.com

[Service]
User=criticalnotes
Group=criticalnotes
Environment="PYTHONPATH=/opt/api"
ExecStart=/opt/api/env/bin/uvicorn criticalnotes.asgi:application --log-level warning --workers 4 --uds /tmp/uvicorn.sock

[Install]
WantedBy=multi-user.target
# /etc/nginx/sites-enabled/api.critical-notes.com

server {
    server_name api.critical-notes.com;
    root /var/www/api.critical-notes.com;

    location / {
        proxy_set_header Host $http_host;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection $connection_upgrade;
        proxy_redirect off;
        proxy_buffering off;
        proxy_pass http://uvicorn;
    }

    listen 443 ssl; # managed by Certbot
    ssl_certificate /etc/letsencrypt/live/api.critical-notes.com/fullchain.pem; # managed by Certbot
    ssl_certificate_key /etc/letsencrypt/live/api.critical-notes.com/privkey.pem; # managed by Certbot
    include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot
    ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot
}

server {
    if ($host = api.critical-notes.com) {
        return 301 https://$host$request_uri;
    } # managed by Certbot

    server_name api.critical-notes.com;
    listen 80;
    return 404; # managed by Certbot
}

map $http_upgrade $connection_upgrade {
    default upgrade;
    '' close;
}

upstream uvicorn {
    server unix:/tmp/uvicorn.sock;
}

I’m at a loss. I don’t understand why everything works perfectly locally but not in production. It’s a huge problem for my users: real-time updates simply do not work most of the time 🙁

Version info: Channels 3.0.4 and Django 3.2.8.

2 Answers


  1. Chosen as BEST ANSWER

    Turns out I really needed to switch to channels_redis.core.RedisChannelLayer as the channel layer backend, instead of using InMemoryChannelLayer. Uvicorn is running with --workers 4, and each worker process gets its own in-memory channel layer, so a group_send issued from one worker never reaches WebSocket connections held by the other workers. That also explains why everything worked locally: manage.py runserver runs a single process.
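
    For reference, a minimal sketch of the settings change (assuming Redis runs locally on the default port 6379; adjust the host for your setup):

        # settings.py

        CHANNEL_LAYERS = {
            "default": {
                "BACKEND": "channels_redis.core.RedisChannelLayer",
                "CONFIG": {
                    "hosts": [("127.0.0.1", 6379)],
                },
            },
        }

    This requires the channels-redis package (pip install channels-redis) and a running Redis instance, which all uvicorn workers then share.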


  2. You need to add a new location block to your nginx configuration to serve your WebSocket resources. Change your consumer route to something like /ws/updates.

         location /ws/ {
            proxy_http_version 1.1;
            proxy_set_header Upgrade $http_upgrade;
            proxy_set_header Connection "Upgrade";
            proxy_set_header Host $http_host;
            proxy_pass http://uvicorn;
        }
    

    This way you can reach the consumer at /ws/updates. Your API will still be served at /, while your WebSocket traffic is handled under /ws/. A matching change to the consumer route is sketched below.
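
    A sketch of the corresponding routing.py change (the exact path is up to you, as long as it falls under the nginx /ws/ location):

        # /websockets/routing.py

        from django.urls import re_path

        from .consumers import Consumer

        websocket_urlpatterns = [
            re_path(r"^ws/updates$", Consumer.as_asgi()),
        ]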
