# heko_webcrawler/docker-compose.yml
# Exported 2026-03-10 11:33:18 +01:00
#
# 119 lines
# 2.7 KiB
# YAML
version: '3.8'

# Shared configuration for every gmaps-scraper replica. This is a Compose
# extension field ("fragment"): it is ignored as a service and merged into
# each replica below via the YAML anchor/alias + merge key.
x-scraper-defaults: &scraper-defaults
  image: gosom/google-maps-scraper:latest
  environment:
    - PLAYWRIGHT_BROWSERS_PATH=/ms-playwright
  command:
    - "-web"
    - "-data-folder=/gmapsdata"
  restart: always
  healthcheck:
    # The scraper's web API answers on 8080 inside the container; a failing
    # wget marks the container unhealthy after 3 retries.
    test: ["CMD-SHELL", "wget -qO- http://localhost:8080/api/v1/jobs || exit 1"]
    interval: 30s
    timeout: 10s
    retries: 3
    start_period: 15s
  networks:
    - scraper-net

services:
  # Flask front-end; talks to the four scraper replicas on the private network.
  web:
    build: .
    ports:
      - "5000:5000"
    environment:
      - FLASK_APP=app
      - FLASK_ENV=production
      - PYTHONUNBUFFERED=1
    volumes:
      - ./app:/app/app
      - ./uploads:/app/uploads
      - ./results:/app/results
      - ./instance:/app/instance
      # NOTE(review): mounting the Docker socket gives this container
      # root-equivalent control of the host — confirm the web app truly
      # needs to drive Docker, and restrict it if not.
      - /var/run/docker.sock:/var/run/docker.sock
    depends_on:
      - gmaps-scraper-1
      - gmaps-scraper-2
      - gmaps-scraper-3
      - gmaps-scraper-4
    restart: always
    networks:
      - scraper-net

  # Four identical scraper replicas; only the host port, container name,
  # and data volume differ. Everything else comes from &scraper-defaults.
  gmaps-scraper-1:
    <<: *scraper-defaults
    container_name: gmaps-scraper-1
    ports:
      - "5001:8080"
    volumes:
      - ./scraper-data-1:/gmapsdata

  gmaps-scraper-2:
    <<: *scraper-defaults
    container_name: gmaps-scraper-2
    ports:
      - "5002:8080"
    volumes:
      - ./scraper-data-2:/gmapsdata

  gmaps-scraper-3:
    <<: *scraper-defaults
    container_name: gmaps-scraper-3
    ports:
      - "5003:8080"
    volumes:
      - ./scraper-data-3:/gmapsdata

  gmaps-scraper-4:
    <<: *scraper-defaults
    container_name: gmaps-scraper-4
    ports:
      - "5004:8080"
    volumes:
      - ./scraper-data-4:/gmapsdata

networks:
  # Private bridge network shared by the web front-end and all scrapers.
  scraper-net:
    driver: bridge