skip to Main Content

I tried to up the docker compose and received the following error:

$ docker-compose up 


Creating network "evaluatehumanbalance_default" with the default driver
Pulling redis (redis:6.0.6)...
6.0.6: Pulling from library/redis
bf5952930446: Pull complete
911b8422b695: Pull complete
093b947e0ade: Pull complete
5b1d5f59e382: Pull complete
7a5f59580c0b: Pull complete
f9c63997c980: Pull complete
Digest: sha256:09c33840ec47815dc0351f1eca3befe741d7105b3e95bc8fdb9a7e4985b9e1e5
Status: Downloaded newer image for redis:6.0.6
Pulling zookeeper (confluentinc/cp-zookeeper:5.5.1)...
5.5.1: Pulling from confluentinc/cp-zookeeper
0cd7281e66ed: Pull complete
ee8abe01e201: Pull complete
19bb39092429: Pull complete
e8a27d9d6e72: Pull complete
cadbdfe0e559: Pull complete
184cb34023c9: Pull complete
Digest: sha256:1ef59713eea58401b333827dc44f23556cbc4b6437968a261f0b0a7b105126be
Status: Downloaded newer image for confluentinc/cp-zookeeper:5.5.1
Pulling kafka (confluentinc/cp-kafka:5.5.1)...
5.5.1: Pulling from confluentinc/cp-kafka
0cd7281e66ed: Already exists
ee8abe01e201: Already exists
19bb39092429: Already exists
e8a27d9d6e72: Already exists
8efe498170fa: Pull complete
b5050338516f: Pull complete
Digest: sha256:4de6a6f317991d858fe1bd84636c55dc17d9312db6d4a80be0f85354b9e481fc
Status: Downloaded newer image for confluentinc/cp-kafka:5.5.1
Pulling banking-simulation (gcr.io/simulation-screenshots/banking-simulation:)...
ERROR: Head https://gcr.io/v2/simulation-screenshots/banking-simulation/manifests/latest: unknown: Project 'project:simulation-screenshots' not found or deleted.

This is what happens when I try to run the image interactively:

$ docker run -it gcr.io/simulation-screenshots/banking-simulation
Unable to find image 'gcr.io/simulation-screenshots/banking-simulation:latest' locally
docker: Error response from daemon: Head https://gcr.io/v2/simulation-screenshots/banking-simulation/manifests/latest: unknown: Project 'project:simulation-screenshots' not found or deleted.

The docker-compose.yaml file is provided:

#
# This docker-compose file starts and runs:
# * A redis server
# * A 1-node kafka cluster
# * A 1-zookeeper ensemble
# * Kafka Connect with Redis Source
# * 3 Java applications: Trucking-Simulation, Banking-Simulation, and STEDI
# * A Spark master
# * A Spark worker

version: '3.7'

services:
    redis:
        image: redis:6.0.6
        ports:
            - "6379:6379"

    zookeeper:
        image: confluentinc/cp-zookeeper:5.5.1
        ports:
            - "2181:2181"
        environment:
            ZOOKEEPER_CLIENT_PORT: "2181"

    kafka:
        image: confluentinc/cp-kafka:5.5.1
        ports:
            - "9092:9092"
        environment:
            # Compose environment values must be strings; unquoted numbers are
            # parsed as YAML integers, which some Compose versions reject.
            KAFKA_BROKER_ID: "0"
            KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
            # INTERNAL listener is for containers on the compose network;
            # EXTERNAL is for clients on the host machine.
            KAFKA_ADVERTISED_LISTENERS: "INTERNAL://kafka:19092,EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092"
            KAFKA_INTER_BROKER_LISTENER_NAME: "INTERNAL"
            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT"
            # Single-broker cluster, so internal topics can only have 1 replica.
            KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: "1"
        depends_on:
            - "zookeeper"

    banking-simulation:
        # NOTE(review): no tag is specified, so Compose pulls ":latest"; the
        # gcr.io project must still exist and be accessible for this to work.
        image: gcr.io/simulation-screenshots/banking-simulation
        environment:
            REDIS_HOST: redis
            REDIS_PORT: "6379"
            REDIS_DB: "0"
            KAFKA_BROKER: "kafka:19092"
        depends_on:
            - "kafka"
            - "redis"

    trucking-simulation:
        image: gcr.io/simulation-screenshots/trucking-simulation
        environment:
            REDIS_HOST: redis
            REDIS_PORT: "6379"
            REDIS_DB: "0"
            KAFKA_BROKER: "kafka:19092"
        depends_on:
            - "kafka"
            - "redis"

    stedi:
        image: gcr.io/simulation-screenshots/stedi
        ports:
            - "4567:4567"
        environment:
            REDIS_HOST: redis
            REDIS_PORT: "6379"
            REDIS_DB: "0"
            KAFKA_BROKER: "kafka:19092"
            KAFKA_RISK_TOPIC: risk-topic
        depends_on:
            - "kafka"
            - "redis"

    connect:
        image: gcr.io/simulation-screenshots/kafka-connect-redis-source
        ports:
            # 8083: Kafka Connect REST API; 5005: remote JVM debug port.
            - "8083:8083"
            - "5005:5005"
        environment:
            CONNECT_BOOTSTRAP_SERVERS: "PLAINTEXT://kafka:19092"
            CONNECT_GROUP_ID: "connect"
            CONNECT_REST_ADVERTISED_HOST_NAME: "connect"
            CONNECT_PLUGIN_PATH: "/usr/share/java"
            CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
            CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
            CONNECT_KEY_CONVERTER: "org.apache.kafka.connect.converters.ByteArrayConverter"
            CONNECT_VALUE_CONVERTER: "org.apache.kafka.connect.storage.StringConverter"
            CONNECT_CONFIG_STORAGE_TOPIC: "connect-config"
            CONNECT_OFFSET_STORAGE_TOPIC: "connect-offset"
            CONNECT_STATUS_STORAGE_TOPIC: "connect-status"
            # Single-broker cluster: internal Connect topics get 1 replica.
            CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
            CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
            CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
            CONNECT_DEBUG: "y"
            DEBUG_SUSPEND_FLAG: "y"
            CLASSPATH: "/usr/share/java/kafka-connect-redis-source/*"
        depends_on:
            - "kafka"
            - "redis"

    spark:
        image: docker.io/bitnami/spark:3-debian-10
        environment:
            - SPARK_MODE=master
            - SPARK_RPC_AUTHENTICATION_ENABLED=no
            - SPARK_RPC_ENCRYPTION_ENABLED=no
            - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
            - SPARK_SSL_ENABLED=no
        ports:
            - '8080:8080'
        volumes:
            # Mount the project into the container and cache resolved jars.
            - ./:/home/workspace/
            - ./spark/jars:/opt/bitnami/spark/.ivy2

    spark-worker-1:
        image: docker.io/bitnami/spark:3-debian-10
        environment:
            - SPARK_MODE=worker
            - SPARK_MASTER_URL=spark://spark:7077
            - SPARK_WORKER_MEMORY=1G
            - SPARK_WORKER_CORES=1
            - SPARK_RPC_AUTHENTICATION_ENABLED=no
            - SPARK_RPC_ENCRYPTION_ENABLED=no
            - SPARK_LOCAL_STORAGE_ENCRYPTION_ENABLED=no
            - SPARK_SSL_ENABLED=no
        volumes:
            - ./:/home/workspace/
            - ./spark/jars:/opt/bitnami/spark/.ivy2


 

What’s the issue here and how do I solve it?

2

Answers


  1. The repository (in the form of a Google Cloud Platform project) has been deleted (or made inaccessible). As a result you’re unable to retrieve the image from the repository.

    You may want to contact the author of the documentation that you’re using to ask for an update.

    You can confirm this by browsing the link:

    https://gcr.io/v2/simulation-screenshots/banking-simulation/manifests/latest

    For extant repositories/images, Google Container Registry (GCR and hence gcr.io) will redirect HTTP GETs to a registry browser (https://console.cloud.google.com) so that you may browse the repository. Here’s an (unrelated) example to show how it would usually work:

    https://gcr.io/cadvisor/cadvisor:v0.40.0

    The GCR registry has image:tag URLs of the form:

    [us|eu].gcr.io/${PROJECT}/${IMAGE}:${TAG}
    
    Login or Signup to reply.
  2. I changed the image version to ‘latest’.

     zookeeper:
            image: confluentinc/cp-zookeeper:latest
    

    Found it here: https://github.com/confluentinc/cp-docker-images/issues/582

    Login or Signup to reply.
Please signup or login to give your own answer.
Back To Top
Search