Skip to content

Installation on the server via Portainer #3837

@osiriso1

Description

@osiriso1

docker-compose.yml

services:
  # PostgreSQL database backing Skyvern. The skyvern service below connects to it
  # via DATABASE_STRING using the credentials defined here.
  postgres:
    image: postgres:14-alpine
    restart: always
    # comment out if you want to externally connect DB
    # ports:
    #   - 5432:5432
    volumes:
      # Persist database files on the host so data survives container recreation.
      - ./postgres-data:/var/lib/postgresql/data
    environment:
      - PGDATA=/var/lib/postgresql/data/pgdata
      - POSTGRES_USER=skyvern
      - POSTGRES_PASSWORD=skyvern
      - POSTGRES_DB=skyvern
    healthcheck:
      # Marks the container healthy once Postgres accepts connections, so services
      # using "condition: service_healthy" on this container wait for readiness.
      test: ["CMD-SHELL", "pg_isready -U skyvern"]
      interval: 5s
      timeout: 5s
      retries: 5

  # Skyvern API/agent container. Waits for Postgres to be healthy before starting.
  skyvern:
    image: public.ecr.aws/skyvern/skyvern:latest
    restart: on-failure
    # comment out if you want to externally call skyvern API
    ports:
      # Port mappings are quoted: unquoted colon-separated digits can be
      # misparsed by YAML 1.1 loaders (sexagesimal integers).
      - "8008:8000"
      - "9222:9222" # for cdp browser forwarding
    volumes:
      - ./artifacts:/data/artifacts
      - ./videos:/data/videos
      - ./har:/data/har
      - ./log:/data/log
      - ./.streamlit:/app/.streamlit
      # Uncomment the following two lines ONLY if you have a local checkout of the
      # Skyvern source in these host directories. Leaving them active with empty or
      # missing host dirs shadows the application code inside the image and is a
      # common cause of the container never becoming healthy.
      # - ./skyvern:/app/skyvern
      # - ./alembic:/app/alembic
    environment:
      - DATABASE_STRING=postgresql+psycopg://skyvern:skyvern@postgres:5432/skyvern
      - BROWSER_TYPE=chromium-headful
      # - BROWSER_TYPE=cdp-connect
      # Use this command to start Chrome with remote debugging:
      # "C:\Program Files\Google\Chrome\Application\chrome.exe" --remote-debugging-port=9222 --user-data-dir="C:\chrome-cdp-profile" --no-first-run --no-default-browser-check
      # /Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome --remote-debugging-port=9222 --user-data-dir="/Users/yourusername/chrome-cdp-profile" --no-first-run --no-default-browser-check
      # - BROWSER_REMOTE_DEBUGGING_URL=http://host.docker.internal:9222/

      # =========================
      #       LLM Settings
      # =========================
      # OpenAI Support:
      # If you want to use OpenAI as your LLM provider, uncomment the following lines and fill in your OpenAI API key.
      # - ENABLE_OPENAI=true
      # - LLM_KEY=OPENAI_GPT4O
      # - OPENAI_API_KEY=<your_openai_key>
      # Gemini Support:
      # Gemini is a new LLM provider that is currently in beta. You can use it by uncommenting the following lines and filling in your Gemini API key.
      # - LLM_KEY=GEMINI_FLASH
      # - ENABLE_GEMINI=true
      # - GEMINI_API_KEY=<your_gemini_key>
      # If you want to use another LLM provider, like Azure or Anthropic:
      # - ENABLE_ANTHROPIC=true
      # - LLM_KEY=ANTHROPIC_CLAUDE3.5_SONNET
      # - ANTHROPIC_API_KEY=<your_anthropic_key>

      # Ollama Support:
      # Ollama is a local LLM provider that can be used to run models locally on your machine.
      # LLM_KEY and ENABLE_OLLAMA are enabled here: the OLLAMA_* settings below have
      # no effect unless Ollama is selected as the active provider.
      - LLM_KEY=OLLAMA
      - ENABLE_OLLAMA=true
      - OLLAMA_MODEL=qwen3:14b-max_context # alternatives: qwen2.5-coder:14b-instruct-q4_1, qwen2.5:32b-instruct-q4_K_M
      - OLLAMA_SERVER_URL=http://host.docker.internal:11434

      # Maximum tokens to use: (only set for OpenRouter and Ollama)
      # - LLM_CONFIG_MAX_TOKENS=128000

      # Bitwarden Settings
      # If you are looking to integrate Skyvern with a password manager (eg Bitwarden), you can use the following environment variables.
      # - BITWARDEN_SERVER=http://localhost  # OPTIONAL IF YOU ARE SELF HOSTING BITWARDEN
      # - BITWARDEN_SERVER_PORT=8002 # OPTIONAL IF YOU ARE SELF HOSTING BITWARDEN
      # - BITWARDEN_CLIENT_ID=FILL_ME_IN_PLEASE
      # - BITWARDEN_CLIENT_SECRET=FILL_ME_IN_PLEASE
      # - BITWARDEN_MASTER_PASSWORD=FILL_ME_IN_PLEASE

    depends_on:
      postgres:
        condition: service_healthy
    healthcheck:
      # Healthy once /app/.streamlit/secrets.toml exists — presumably created by the
      # app on successful startup (TODO confirm against the image's entrypoint).
      # If this container stays unhealthy, inspect `docker logs` for the real failure
      # rather than removing the dependent services' health conditions.
      test: ["CMD", "test", "-f", "/app/.streamlit/secrets.toml"]
      interval: 5s
      timeout: 5s
      retries: 5

  # Skyvern web UI. Serves the frontend on 8080 and the artifact API on 9090.
  skyvern-ui:
    image: public.ecr.aws/skyvern/skyvern-ui:latest
    restart: on-failure
    ports:
      # Quoted to avoid YAML 1.1 misparsing of colon-separated digit pairs.
      - "8080:8080"
      - "9090:9090"
    volumes:
      - ./artifacts:/data/artifacts
      - ./videos:/data/videos
      - ./har:/data/har
      - ./.streamlit:/app/.streamlit
    environment:
      # If you want to run skyvern on a remote server, change the host in
      # VITE_WSS_BASE_URL and VITE_API_BASE_URL to match your server IP
      # (hardcoded to 10.99.5.18 below — adjust for your deployment).
      # If you're self-hosting this behind a DNS, you'll want to set:
      #   A route for the API: api.yourdomain.com -> localhost:8000
      #   A route for the UI: yourdomain.com -> localhost:8080
      #   A route for the artifact API: artifact.yourdomain.com -> localhost:9090 (maybe not needed)
      - VITE_WSS_BASE_URL=ws://10.99.5.18:8008/api/v1
      - VITE_ARTIFACT_API_BASE_URL=http://10.99.5.18:9090
      - VITE_API_BASE_URL=http://10.99.5.18:8008/api/v1
      # NOTE(review): value below looks redacted — replace with your real API key.
      - VITE_SKYVERN_API_KEY=eyJhbGciOiJIUzI1NiIsInR5c--------nope
    depends_on:
      skyvern:
        # Waits for the skyvern container's healthcheck to pass. Removing this
        # condition hides startup failures instead of fixing them — make the
        # skyvern service itself healthy first.
        condition: service_healthy

but I get an error. DeepSeek suggested that I remove the `service_healthy` condition, and that worked (though it only hides the underlying failure of the `skyvern` container's healthcheck).

Failed to deploy a stack: compose up operation failed: dependency failed to start: container skyvern-skyvern-1 is unhealthy

Metadata

Metadata

Assignees

No one assigned

    Labels

    No labels
    No labels

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions