skip to Main Content

I’m trying to implement Redis on my endpoint using the aiocache library.
The first test I did with aiocache I used @cache without indicating any other service and everything worked. But when I tried to use Redis I see this error (the endpoint still returns the request)

ERROR:    Couldn't set [<databases.backends.postgres.Record object at 0x7fb9f01d86a0>, <databases.backends.postgres.Record object at 0x7fb9f01d88b0>, <databases.backends.postgres.Record object at 0x7fb9f01d8a60>] in key app.api.authorget_authors()[], unexpected error
Traceback (most recent call last):
  File "/usr/local/lib/python3.8/site-packages/aiocache/decorators.py", line 144, in set_in_cache
    await self.cache.set(key, value, ttl=self.ttl)
  File "/usr/local/lib/python3.8/site-packages/aiocache/base.py", line 61, in _enabled
    return await func(*args, **kwargs)
  File "/usr/local/lib/python3.8/site-packages/aiocache/base.py", line 45, in _timeout
    return await asyncio.wait_for(func(self, *args, **kwargs), timeout)
  File "/usr/local/lib/python3.8/asyncio/tasks.py", line 483, in wait_for
    return fut.result()
  File "/usr/local/lib/python3.8/site-packages/aiocache/base.py", line 75, in _plugins
    ret = await func(self, *args, **kwargs)
  File "/usr/local/lib/python3.8/site-packages/aiocache/base.py", line 265, in set
    ns_key, dumps(value), ttl=self._get_ttl(ttl), _cas_token=_cas_token, _conn=_conn
  File "/usr/local/lib/python3.8/site-packages/aiocache/serializers/serializers.py", line 140, in dumps
    return json.dumps(value)
TypeError: <databases.backends.postgres.Record object at 0x7fb9f01d8a60> is not JSON serializable

The method is:

@authors.get("/", response_model=List[AuthorOut])                                                                                                                                                                                        
@cached(                                                                                                
     ttl=100,                                                                                            
     cache=Cache.REDIS,
     endpoint="X.X.X.X", #my local ip
     serializer=JsonSerializer(),
     port=6379,
     namespace="main",
     #key="key",
 )
async def get_authors():
   return await db_manager.get_all_authors() 

The whole environment is based on docker, 2 containers, 1 FastApi, 1 PostgreSQL and 1 Redis.

It seems evident that there is a problem with the object returned by the endpoint, so I ask you how can I pass such a complex object to Redis?

Following the aiochace documentation I have tried all the serializers present but without success.

My docker-compose

version: '3.7'

services:
  book_service:
    build: ./book-service
    command: uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
    volumes:
      - ./book-service/:/app/
    ports:
      - 8001:8000
    environment:
      - DATABASE_URI=postgresql://book_db_username:book_db_password@book_db/book_db_dev
      - AUTHOR_SERVICE_HOST_URL=http://author_service:8000/api/v1/authors/
    depends_on:
      - book_db

  book_db:
    image: postgres:12.1-alpine
    volumes:
      - postgres_data_book:/var/lib/postgresql/data/
    environment:
      - POSTGRES_USER=book_db_username
      - POSTGRES_PASSWORD=book_db_password
      - POSTGRES_DB=book_db_dev
  
  author_service:
    build: ./author-service
    command: uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
    volumes:
      - ./author-service/:/app/
    ports:
      - 8002:8000
    environment:
      - DATABASE_URI=postgresql://author_db_username:author_db_password@author_db/author_db_dev
    depends_on:
      - author_db

  author_db:
    image: postgres:12.1-alpine
    volumes:
      - postgres_data_author:/var/lib/postgres/data
    environment:
      - POSTGRES_USER=author_db_username
      - POSTGRES_PASSWORD=author_db_password
      - POSTGRES_DB=author_db_dev

  nginx:
    image: nginx:latest
    ports:
      - "8080:8080"
    volumes:
      - ./nginx_config.conf:/etc/nginx/conf.d/default.conf
    depends_on:
      - author_service
      - book_service

  redis:
    image: redis:3.2-alpine
    volumes:
      #      - redis_data:/data
      - ./redis.conf:/usr/local/etc/redis/redis.conf
    command: redis-server /usr/local/etc/redis/redis.conf
    ports:
      - 6379:6379

    networks:
      node_net:
        ipv4_address: 172.28.1.4

networks:
  node_net:
    ipam:
      driver: default
      config:
        - subnet: 172.28.0.0/16

volumes:
  postgres_data_book:
  postgres_data_author:
  redis_data:

2

Answers


  1. The error says it all

    TypeError: <databases.backends.postgres.Record object at 0x7fb9f01d8a60> is not JSON serializable
    

    The error lies in the fact that you are serializing the object in JSON for caching it that is not serializable in JSON.

    I had a similar problem once, but found that fastAPI’s encoder supports the Record object, while others don’t. You can either return an object serialized via such encoder, or set it as encoder in the redis cache parameters. See https://fastapi.tiangolo.com/tutorial/encoder/#using-the-jsonable_encoder.

    I did not test the following code, but it should be enough to give the idea of what I mean.

    from fastapi.encoders import jsonable_encoder
    
    @authors.get("/", response_model=List[AuthorOut])                                                                                                                                                                                        
    @cached(                                                                                                
         ttl=100,                                                                                            
         cache=Cache.REDIS,
         endpoint="X.X.X.X", #my local ip
         serializer=JsonSerializer(),
         port=6379,
         namespace="main",
         #key="key",
     )
    async def get_authors():
       return jsonable_encoder(await db_manager.get_all_authors())
    
    Login or Signup to reply.
  2. You can use redis_cache to access with RedisDB

    connection.py

    from typing import Optional
    
    from aioredis import Redis, create_redis_pool
    
    #Create a RedisCache instance
    class RedisCache:
        
        def __init__(self):
            self.redis_cache: Optional[Redis] = None
            
        async def init_cache(self):
            self.redis_cache = await create_redis_pool("redis://localhost:6379/0?encoding=utf-8") #Connecting to database
    
        async def keys(self, pattern):
            return await self.redis_cache.keys(pattern)
    
        async def set(self, key, value):
            return await self.redis_cache.set(key, value)
        
        async def get(self, key):
            return await self.redis_cache.get(key)
    
        
        async def close(self):
            self.redis_cache.close()
            await self.redis_cache.wait_closed()
    
    
    redis_cache = RedisCache()
    

    main.py

    from fastapi import FastAPI, applications
    from uvicorn import run
    from fastapi import FastAPI, Request, Response
    from connection import redis_cache
    
    
    
    app = FastAPI(title="FastAPI with Redis")
    
    
    async def get_all():
        return await redis_cache.keys('*')
    
    
    @app.on_event('startup')
    async def starup_event():
        await redis_cache.init_cache()
    
    
    @app.on_event('shutdown')
    async def shutdown_event():
        redis_cache.close()
        await redis_cache.wait_closed()
    
    #root
    @app.get("/")
    def read_root():
        return {"Redis": "FastAPI"}
    
    #root > Get all keys from the redis DB
    @app.get('/RedisKeys')
    async def redis_keys():
        return await get_all()
    
    if __name__ == '__main__':
        run("main:app", port=3000, reload=True)
    

    I am using uvicorn to access:

    uvicorn main:app --reload
    
    Login or Signup to reply.
Please signup or login to give your own answer.
Back To Top
Search