
I have a Docker image that I want to bring up to run tests automatically; the test scripts are located at /opt/robotframework/tests.

An error occurs because Docker cannot read the directory:

$ docker-compose up
Creating network "docker-robot-framework_default" with the default driver
Creating robot-runner ... done
Attaching to robot-runner
robot-runner    | [ ERROR ] Reading directory '/opt/robotframework/tests' failed: PermissionError: [Errno 13] Permission denied: '/opt/robotframework/tests'
robot-runner    | 
robot-runner    | Try --help for usage information.
robot-runner exited with code 252
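
The tests inside the container run as a non-root user (UID/GID 1000, per the USER line in the Dockerfile below), so that user must be able to read the host folder that is mounted to /opt/robotframework/tests. A quick way to check the host-side ownership (./test is the folder mapped in docker-compose.yml):

$ ls -ldn ./test    # numeric UID:GID owning the mounted folder
$ id -u; id -g      # UID/GID of the current host user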

docker-compose.yml

version: '3'
services:
  robot-runner:
    build:
      context: .
      dockerfile: /Dockerfile
    container_name: robot-runner
    image: ppodgorsek/robot-framework:latest
    volumes:
      - ./test:/opt/robotframework/tests
      - ./test-audios:/opt/robotframework/test-audios
      - ./output-local:/opt/robotframework/reports
    environment:
      PYTHONWARNINGS: "ignore:Unverified HTTPS request"

Dockerfile:

FROM fedora:36

MAINTAINER Paul Podgorsek <[email protected]>
LABEL description Robot Framework in Docker.

# Set the reports directory environment variable
ENV ROBOT_REPORTS_DIR /opt/robotframework/reports

# Set the tests directory environment variable
ENV ROBOT_TESTS_DIR /opt/robotframework/tests
# ENV ROBOT_TEST_AUDIOS_DIR /opt/robotframework/test-audios

# Set the working directory environment variable
ENV ROBOT_WORK_DIR /opt/robotframework/temp

# Setup X Window Virtual Framebuffer
ENV SCREEN_COLOUR_DEPTH 24
ENV SCREEN_HEIGHT 1080
ENV SCREEN_WIDTH 1920

# Setup the timezone to use, defaults to UTC
ENV TZ UTC

# Set number of threads for parallel execution
# By default, no parallelisation
ENV ROBOT_THREADS 1

# Define the default user who'll run the tests
ENV ROBOT_UID 1000
ENV ROBOT_GID 1000

# Dependency versions
ENV ALPINE_GLIBC 2.35-r0
ENV AWS_CLI_VERSION 1.22.87
ENV AXE_SELENIUM_LIBRARY_VERSION 2.1.6
ENV BROWSER_LIBRARY_VERSION 12.2.0
ENV CHROMIUM_VERSION 99.0
ENV DATABASE_LIBRARY_VERSION 1.2.4
ENV DATADRIVER_VERSION 1.6.0
ENV DATETIMETZ_VERSION 1.0.6
ENV FAKER_VERSION 5.0.0
ENV FIREFOX_VERSION 98.0
ENV FTP_LIBRARY_VERSION 1.9
ENV GECKO_DRIVER_VERSION v0.30.0
ENV IMAP_LIBRARY_VERSION 0.4.2
ENV PABOT_VERSION 2.5.2
ENV REQUESTS_VERSION 0.9.2
ENV ROBOT_FRAMEWORK_VERSION 5.0
ENV SELENIUM_LIBRARY_VERSION 6.0.0
ENV SSH_LIBRARY_VERSION 3.8.0
ENV XVFB_VERSION 1.20

# By default, no reports are uploaded to AWS S3
ENV AWS_UPLOAD_TO_S3 false

# Prepare binaries to be executed
COPY bin/chromedriver.sh /opt/robotframework/bin/chromedriver
COPY bin/chromium-browser.sh /opt/robotframework/bin/chromium-browser
COPY bin/run-tests-in-virtual-screen.sh /opt/robotframework/bin/
# COPY bin/mml_4_apr_2018_b_session3_2.wav    /opt/robotframework/test-audios
# COPY bin/mml_4_apr_2018_b_session3_2.stm    /opt/robotframework/test-audios

# Install system dependencies
RUN dnf upgrade -y --refresh \
  && dnf install -y \
    chromedriver-${CHROMIUM_VERSION}* \
    chromium-${CHROMIUM_VERSION}* \
    firefox-${FIREFOX_VERSION}* \
    npm \
    nodejs \
    python3-pip \
    tzdata \
    xorg-x11-server-Xvfb-${XVFB_VERSION}* \
  && dnf clean all

# FIXME: below is a workaround, as the path is ignored
RUN mv /usr/lib64/chromium-browser/chromium-browser /usr/lib64/chromium-browser/chromium-browser-original \
  && ln -sfv /opt/robotframework/bin/chromium-browser /usr/lib64/chromium-browser/chromium-browser

# Install Robot Framework and associated libraries
RUN pip3 install \
  --no-cache-dir \
  robotframework==$ROBOT_FRAMEWORK_VERSION \
  robotframework-browser==$BROWSER_LIBRARY_VERSION \
  robotframework-databaselibrary==$DATABASE_LIBRARY_VERSION \
  robotframework-datadriver==$DATADRIVER_VERSION \
  robotframework-datadriver[XLS] \
  robotframework-datetime-tz==$DATETIMETZ_VERSION \
  robotframework-faker==$FAKER_VERSION \
  robotframework-ftplibrary==$FTP_LIBRARY_VERSION \
  robotframework-imaplibrary2==$IMAP_LIBRARY_VERSION \
  robotframework-pabot==$PABOT_VERSION \
  robotframework-requests==$REQUESTS_VERSION \
  robotframework-seleniumlibrary==$SELENIUM_LIBRARY_VERSION \
  robotframework-sshlibrary==$SSH_LIBRARY_VERSION \
  axe-selenium-python==$AXE_SELENIUM_LIBRARY_VERSION \
  PyYAML \
  # Install awscli to be able to upload test reports to AWS S3
  awscli==$AWS_CLI_VERSION

# Gecko drivers
RUN dnf install -y \
    wget \
  \
  # Download Gecko drivers directly from the GitHub repository
  && wget -q "https://github.com/mozilla/geckodriver/releases/download/$GECKO_DRIVER_VERSION/geckodriver-$GECKO_DRIVER_VERSION-linux64.tar.gz" \
  && tar xzf geckodriver-$GECKO_DRIVER_VERSION-linux64.tar.gz \
  && mkdir -p /opt/robotframework/drivers/ \
  && mv geckodriver /opt/robotframework/drivers/geckodriver \
  && rm geckodriver-$GECKO_DRIVER_VERSION-linux64.tar.gz \
  \
  && dnf remove -y \
    wget \
  && dnf clean all

# Install the Node dependencies for the Browser library
# FIXME: Playwright currently doesn't support relying on system browsers, which is why the `--skip-browsers` parameter cannot be used here.
RUN rfbrowser init \
  && ln -sf /usr/lib64/libstdc++.so.6 /usr/local/lib/python3.10/site-packages/Browser/wrapper/node_modules/playwright-core/.local-browsers/firefox-1316/firefox/libstdc++.so.6

# Create the default report and work folders with the default user to avoid runtime issues
# These folders are writeable by anyone, to ensure the user can be changed on the command line.
RUN mkdir -p ${ROBOT_REPORTS_DIR} \
  && mkdir -p ${ROBOT_WORK_DIR} \
  && chown ${ROBOT_UID}:${ROBOT_GID} ${ROBOT_REPORTS_DIR} \
  && chown ${ROBOT_UID}:${ROBOT_GID} ${ROBOT_WORK_DIR} \
  && chmod ugo+w ${ROBOT_REPORTS_DIR} ${ROBOT_WORK_DIR}

# Allow any user to write logs
RUN chmod ugo+w /var/log \
  && chown ${ROBOT_UID}:${ROBOT_GID} /var/log

# Update system path
ENV PATH=/opt/robotframework/bin:/opt/robotframework/drivers:$PATH

# Set up a volume for the generated reports
VOLUME ${ROBOT_REPORTS_DIR}

USER ${ROBOT_UID}:${ROBOT_GID}

# A dedicated work folder to allow for the creation of temporary files
WORKDIR ${ROBOT_WORK_DIR}

# Execute all robot tests
CMD ["run-tests-in-virtual-screen.sh"]

Local directories:
(screenshot of the local directory layout)

2 Answers


  1. Chosen as BEST ANSWER

    Fixed by adding user: root in docker-compose.yml; the root user is granted full access rights to the path.

    version: '3'
    services:
      robot-runner:
        build:
          context: .
          dockerfile: /Dockerfile
        container_name: robot-runner
    #    image: ppodgorsek/robot-framework:latest
        image: robot-runner:latest
        user: root
        volumes:
          - ./BrowserTests:/opt/robotframework/tests
          - ./output-local:/opt/robotframework/reports
        environment:
          PYTHONWARNINGS: "ignore:Unverified HTTPS request"
        extra_hosts:
          - "speech.sts:172.17.0.1"
          - "speech.srs:172.17.0.1"
        networks:
          - sts_sts_network
    networks:
      sts_sts_network:
        external: true
    

  2. Basically, the USER specified in the Dockerfile (USER ${ROBOT_UID}:${ROBOT_GID}) is used inside the container and has no access rights to the folder on your host. While you could use root in the container to "solve" the problem, your container may then get root access on the host. You should NEVER use root in a Docker container.
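
    You can see which UID/GID the tests actually run as by overriding the image command (possible here because the Dockerfile defines only a CMD, no ENTRYPOINT):

    docker-compose run --rm robot-runner id
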
    To avoid the problem, give that user (in your case 1000:1000) appropriate rights on the host folder (./test) with setfacl. If the user is not present on the host, just add one with the same UID/GID:

    sudo addgroup robot --gid 1000
    sudo adduser robot --ingroup robot --uid 1000
    setfacl -R -m u:robot:rwx test
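
    If setfacl is not available on your filesystem, changing the ownership of the mounted host folders to the container's UID/GID is an alternative; the reports folder from the question's docker-compose.yml (./output-local) must be writable by that user as well:

    sudo chown -R 1000:1000 test output-local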
    