Self Hosting Baserow with S3

Hi,
I want to ask about Baserow with an S3 bucket.
I have a problem after adding the AWS_S3 environment variables: when I upload a file, it is successfully added to the bucket, but I can't preview it in the dashboard.

This is my docker-compose.yml right now:

version: "3.4"

x-backend-variables: &backend-variables
  # Most users should only need to set these first few variables.
  SECRET_KEY: ${SECRET_KEY:?}
  BASEROW_JWT_SIGNING_KEY: ${BASEROW_JWT_SIGNING_KEY:-}
  DATABASE_PASSWORD: ${DATABASE_PASSWORD:?}
  REDIS_PASSWORD: ${REDIS_PASSWORD:?}
  PUBLIC_BACKEND_URL: ${PUBLIC_BACKEND_URL-http://localhost:8000}
  PUBLIC_WEB_FRONTEND_URL: ${PUBLIC_WEB_FRONTEND_URL-http://localhost:3000}

  # Set these if you want to use an external postgres instead of the db service below.
  DATABASE_USER: ${DATABASE_USER:-baserow}
  DATABASE_NAME: ${DATABASE_NAME:-baserow}
  DATABASE_HOST:
  DATABASE_PORT:
  DATABASE_URL:

  # Set these if you want to use an external redis instead of the redis service below.
  REDIS_HOST:
  REDIS_PORT:
  REDIS_PROTOCOL:
  REDIS_URL:
  REDIS_USER:

  # Set these to enable Baserow to send emails.
  EMAIL_SMTP:
  EMAIL_SMTP_HOST:
  EMAIL_SMTP_PORT:
  EMAIL_SMTP_USE_TLS:
  EMAIL_SMTP_USE_SSL:
  EMAIL_SMTP_USER:
  EMAIL_SMTP_PASSWORD:
  EMAIL_SMTP_SSL_CERTFILE_PATH:
  EMAIL_SMTP_SSL_KEYFILE_PATH:
  FROM_EMAIL:

  # Set these to use AWS S3 bucket to store user files.
  DOWNLOAD_FILE_VIA_XHR: 1
  AWS_ACCESS_KEY_ID: xxxxxxxxx
  AWS_SECRET_ACCESS_KEY: xxxxxx
  AWS_STORAGE_BUCKET_NAME: lolrandom
  AWS_S3_REGION_NAME: ewr1
  AWS_S3_ENDPOINT_URL: https://ewr1.vultrobjects.com
  AWS_S3_CUSTOM_DOMAIN: https://ewr1.vultrobjects.com

  # Misc settings see https://baserow.io/docs/installation%2Fconfiguration for info
  BASEROW_AMOUNT_OF_WORKERS:
  BASEROW_ROW_PAGE_SIZE_LIMIT:
  BATCH_ROWS_SIZE_LIMIT:
  INITIAL_TABLE_DATA_LIMIT:
  BASEROW_FILE_UPLOAD_SIZE_LIMIT_MB:

  BASEROW_EXTRA_ALLOWED_HOSTS:
  ADDITIONAL_APPS:
  BASEROW_PLUGIN_GIT_REPOS:
  BASEROW_PLUGIN_URLS:

  BASEROW_ENABLE_SECURE_PROXY_SSL_HEADER:
  MIGRATE_ON_STARTUP: ${MIGRATE_ON_STARTUP:-true}
  SYNC_TEMPLATES_ON_STARTUP: ${SYNC_TEMPLATES_ON_STARTUP:-true}
  DONT_UPDATE_FORMULAS_AFTER_MIGRATION:
  BASEROW_TRIGGER_SYNC_TEMPLATES_AFTER_MIGRATION:
  BASEROW_SYNC_TEMPLATES_TIME_LIMIT:

  BASEROW_BACKEND_DEBUG:
  BASEROW_BACKEND_LOG_LEVEL:
  FEATURE_FLAGS:
  BASEROW_ENABLE_OTEL:
  BASEROW_DEPLOYMENT_ENV:
  OTEL_EXPORTER_OTLP_ENDPOINT:
  OTEL_RESOURCE_ATTRIBUTES:

  PRIVATE_BACKEND_URL: http://backend:8000
  BASEROW_PUBLIC_URL: http://localhost
  MEDIA_URL: http://localhost/media/
  MEDIA_ROOT: /baserow/media 

  BASEROW_AIRTABLE_IMPORT_SOFT_TIME_LIMIT:
  HOURS_UNTIL_TRASH_PERMANENTLY_DELETED:
  OLD_ACTION_CLEANUP_INTERVAL_MINUTES:
  MINUTES_UNTIL_ACTION_CLEANED_UP:
  BASEROW_GROUP_STORAGE_USAGE_QUEUE:
  DISABLE_ANONYMOUS_PUBLIC_VIEW_WS_CONNECTIONS:
  BASEROW_WAIT_INSTEAD_OF_409_CONFLICT_ERROR:
  BASEROW_DISABLE_MODEL_CACHE:
  BASEROW_PLUGIN_DIR:
  BASEROW_JOB_EXPIRATION_TIME_LIMIT:
  BASEROW_JOB_CLEANUP_INTERVAL_MINUTES:
  BASEROW_MAX_ROW_REPORT_ERROR_COUNT:
  BASEROW_JOB_SOFT_TIME_LIMIT:
  BASEROW_FRONTEND_JOBS_POLLING_TIMEOUT_MS:
  BASEROW_INITIAL_CREATE_SYNC_TABLE_DATA_LIMIT:
  BASEROW_WEBHOOKS_ALLOW_PRIVATE_ADDRESS:
  BASEROW_WEBHOOKS_IP_BLACKLIST:
  BASEROW_WEBHOOKS_IP_WHITELIST:
  BASEROW_WEBHOOKS_URL_REGEX_BLACKLIST:
  BASEROW_WEBHOOKS_URL_CHECK_TIMEOUT_SECS:
  BASEROW_WEBHOOKS_MAX_CONSECUTIVE_TRIGGER_FAILURES:
  BASEROW_WEBHOOKS_MAX_RETRIES_PER_CALL:
  BASEROW_WEBHOOKS_MAX_PER_TABLE:
  BASEROW_WEBHOOKS_MAX_CALL_LOG_ENTRIES:
  BASEROW_WEBHOOKS_REQUEST_TIMEOUT_SECONDS:
  BASEROW_ENTERPRISE_AUDIT_LOG_CLEANUP_INTERVAL_MINUTES:
  BASEROW_ENTERPRISE_AUDIT_LOG_RETENTION_DAYS:
  BASEROW_ALLOW_MULTIPLE_SSO_PROVIDERS_FOR_SAME_ACCOUNT:
  BASEROW_ROW_COUNT_JOB_CRONTAB:
  BASEROW_STORAGE_USAGE_JOB_CRONTAB:
  BASEROW_SEAT_USAGE_JOB_CRONTAB:
  BASEROW_PERIODIC_FIELD_UPDATE_CRONTAB:
  BASEROW_PERIODIC_FIELD_UPDATE_TIMEOUT_MINUTES:
  BASEROW_PERIODIC_FIELD_UPDATE_QUEUE_NAME:
  BASEROW_MAX_CONCURRENT_USER_REQUESTS:
  BASEROW_CONCURRENT_USER_REQUESTS_THROTTLE_TIMEOUT:
  BASEROW_OSS_ONLY:
  OTEL_TRACES_SAMPLER:
  OTEL_TRACES_SAMPLER_ARG:
  OTEL_PER_MODULE_SAMPLER_OVERRIDES:
  BASEROW_CACHALOT_ENABLED:
  BASEROW_CACHALOT_MODE:
  BASEROW_CACHALOT_ONLY_CACHABLE_TABLES:
  BASEROW_CACHALOT_UNCACHABLE_TABLES:
  BASEROW_CACHALOT_TIMEOUT:
  BASEROW_AUTO_INDEX_VIEW_ENABLED:
  BASEROW_DISABLE_LOCKED_MIGRATIONS:

services:

  caddy:
    image: caddy:2
    restart: unless-stopped
    environment:
      # Controls what port the Caddy server binds to inside its container.
      - BASEROW_CADDY_ADDRESSES=${BASEROW_CADDY_ADDRESSES:-:80}
      - PRIVATE_WEB_FRONTEND_URL=${PRIVATE_WEB_FRONTEND_URL:-http://web-frontend:3000}
      - PRIVATE_BACKEND_URL=${PRIVATE_BACKEND_URL:-http://backend:8000}
    ports:
      - "${HOST_PUBLISH_IP:-0.0.0.0}:${WEB_FRONTEND_PORT:-80}:80"
      - "${HOST_PUBLISH_IP:-0.0.0.0}:${WEB_FRONTEND_SSL_PORT:-443}:443"
    volumes:
      - $PWD/Caddyfile:/etc/caddy/Caddyfile
      - ./baserow/media:/baserow/media
      - ./baserow/caddy_config:/config
      - ./baserow/caddy_data:/data
    networks:
      local:

  db:
    image: postgres:11
    restart: unless-stopped
    environment:
      - POSTGRES_USER=${DATABASE_USER:-baserow}
      - POSTGRES_PASSWORD=${DATABASE_PASSWORD:?}
      - POSTGRES_DB=${DATABASE_NAME:-baserow}
    healthcheck:
      test: [ "CMD-SHELL", "su postgres -c \"pg_isready -U ${DATABASE_USER:-baserow}\"" ]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      local:
    volumes:
      - ./baserow/postgres:/var/lib/postgresql/data

  backend:
    image: baserow/backend:1.18.0
    restart: unless-stopped
    ports:
      - "${HOST_PUBLISH_IP:-127.0.0.1}:8000:8000"
    environment:
      <<: *backend-variables
    depends_on:
      - db
      - redis
    volumes:
      - ./baserow/media:/baserow/media
    networks:
      local:

  web-frontend:
    image: baserow/web-frontend:1.18.0
    restart: unless-stopped
    ports:
      - "${HOST_PUBLISH_IP:-127.0.0.1}:3000:3000"
    environment:
      DOWNLOAD_FILE_VIA_XHR: 1
      BASEROW_PUBLIC_URL: ${BASEROW_PUBLIC_URL}
      PRIVATE_BACKEND_URL: ${PRIVATE_BACKEND_URL:-http://backend:8000}
      PUBLIC_BACKEND_URL: 
      PUBLIC_WEB_FRONTEND_URL: 
      BASEROW_DISABLE_PUBLIC_URL_CHECK:
      INITIAL_TABLE_DATA_LIMIT:
      BASEROW_DISABLE_GOOGLE_DOCS_FILE_PREVIEW:
      HOURS_UNTIL_TRASH_PERMANENTLY_DELETED:
      DISABLE_ANONYMOUS_PUBLIC_VIEW_WS_CONNECTIONS:
      FEATURE_FLAGS:
      ADDITIONAL_MODULES:
      BASEROW_MAX_IMPORT_FILE_SIZE_MB:
      BASEROW_MAX_SNAPSHOTS_PER_GROUP:
      BASEROW_ENABLE_OTEL:
      BASEROW_DEPLOYMENT_ENV:
      BASEROW_OSS_ONLY:
    depends_on:
      - backend
    networks:
      local:

  celery:
    image: baserow/backend:1.18.0
    restart: unless-stopped
    environment:
      <<: *backend-variables
    command: celery-worker
    # The backend image's baked-in healthcheck defaults to the Django healthcheck;
    # override it to the Celery one here.
    healthcheck:
      test: [ "CMD-SHELL", "/baserow/backend/docker/docker-entrypoint.sh celery-worker-healthcheck" ]
    depends_on:
      - backend
    volumes:
      - ./baserow/media:/baserow/media
    networks:
      local:

  celery-export-worker:
    image: baserow/backend:1.18.0
    restart: unless-stopped
    command: celery-exportworker
    environment:
      <<: *backend-variables
    # The backend image's baked-in healthcheck defaults to the Django healthcheck;
    # override it to the Celery one here.
    healthcheck:
      test: [ "CMD-SHELL", "/baserow/backend/docker/docker-entrypoint.sh celery-exportworker-healthcheck" ]
    depends_on:
      - backend
    volumes:
      - ./baserow/media:/baserow/media
    networks:
      local:

  celery-beat-worker:
    image: baserow/backend:1.18.0
    restart: unless-stopped
    command: celery-beat
    environment:
      <<: *backend-variables
    # See https://github.com/sibson/redbeat/issues/129#issuecomment-1057478237
    stop_signal: SIGQUIT
    depends_on:
      - backend
    volumes:
      - ./baserow/media:/baserow/media
    networks:
      local:

  redis:
    image: redis:6
    command: redis-server --requirepass ${REDIS_PASSWORD:?}
    healthcheck:
      test: [ "CMD", "redis-cli", "ping" ]
    volumes:
      - ./baserow/redis-data:/redis/data
    networks:
      local:

  # By default, the media volume will be owned by root on startup. Ensure it is owned by
  # the same user that Django is running as, so it can write user files.
  volume-permissions-fixer:
    image: bash:4.4
    command: chown 9999:9999 -R /baserow/media
    volumes:
      - ./baserow/media:/baserow/media
    networks:
      local:
networks:
  local:
    driver: bridge

Did I miss something?

Hi @Tomsykesss,

Your config looks OK and sufficient, with the exception of AWS_S3_CUSTOM_DOMAIN, which most likely needs to include your bucket name. Basically, this variable needs to point to the place users will download the files from.

See also the Amazon S3 — django-storages 1.13.2 documentation for more info on configuring S3 to work with Baserow.
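
For example, with the bucket name from your compose file, it would look something like this (a sketch, assuming Vultr serves buckets on a subdomain of the regional endpoint, and without the https:// scheme, per the django-storages docs):

  AWS_S3_CUSTOM_DOMAIN: lolrandom.ewr1.vultrobjects.com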

Hi @petrs,

Fantastic, my dashboard can now preview the file attachment. Thanks for the fast response!
However, I have one more issue: I still can't download the uploaded file.

These are my environment variables now:

  # Set these to use AWS S3 bucket to store user files.
  DOWNLOAD_FILE_VIA_XHR: 1
  AWS_ACCESS_KEY_ID: xxxxxxxxx
  AWS_SECRET_ACCESS_KEY: xxxxxx
  AWS_STORAGE_BUCKET_NAME: lolrandom
  AWS_S3_REGION_NAME: ewr1
  AWS_S3_ENDPOINT_URL: ewr1.vultrobjects.com
  AWS_S3_CUSTOM_DOMAIN: lolrandom.ewr1.vultrobjects.com

With DOWNLOAD_FILE_VIA_XHR you need to make sure the CORS policy is set up correctly on your S3 bucket. Here's an example (Terraform):

cors_rule {
  allowed_headers = ["*"]
  allowed_methods = ["GET"]
  allowed_origins = ["REPLACE_WITH_YOUR_BASEROW_PUBLIC_URL"]
  expose_headers  = ["ETag"]
  max_age_seconds = 3000
}
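
If you're not managing the bucket with Terraform, the same rule can be applied with the AWS CLI, since Vultr object storage is S3-compatible. A minimal sketch, assuming the bucket name and endpoint from earlier in the thread; save this as cors.json, replacing the origin placeholder with your real Baserow public URL:

{
  "CORSRules": [
    {
      "AllowedHeaders": ["*"],
      "AllowedMethods": ["GET"],
      "AllowedOrigins": ["REPLACE_WITH_YOUR_BASEROW_PUBLIC_URL"],
      "ExposeHeaders": ["ETag"],
      "MaxAgeSeconds": 3000
    }
  ]
}

Then apply it against the Vultr endpoint:

aws s3api put-bucket-cors \
  --bucket lolrandom \
  --endpoint-url https://ewr1.vultrobjects.com \
  --cors-configuration file://cors.json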