All checks were successful
Build and Push / build (push) Successful in 7s
Key changes:
- Replace ratings_server.py + status.html with a unified server.py that
serves the map, scraper status dashboard, and ratings API in one process
- Add scraper_stats.py utility: each scraper writes per-run stats (fetched,
accepted, excluded, duration) to stats_<source>.json for the status page
- generate_status.py: respect DATA_DIR env var so status.json lands in the
configured data directory instead of always the project root
- run_all.sh: replace the {"status":"running"} overwrite of status.json with
a dedicated scraper_running.json lock file; trap on EXIT ensures cleanup
even on kill/error, preventing the previous run's results from being wiped
- server.py: detect running state via scraper_running.json existence instead
of status["status"] field, eliminating the dual-use race condition
- Makefile: add serve (local dev), debug (Docker debug container) targets;
add SERVER_PORT variable
- build/Dockerfile + entrypoint.sh: switch to server.py, set DATA_DIR,
adjust volume mounts
- .gitignore: add *.json and *.log to keep runtime data files out of VCS
- mapa_bytu.html: price-per-m² colouring, status link, UX tweaks
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
127 lines
4.5 KiB
Bash
Executable File
127 lines
4.5 KiB
Bash
Executable File
#!/usr/bin/env bash
# ============================================================
# Runs all scrapers, merges the data and opens the map.
# Usage:          ./run_all.sh
# With limits:    ./run_all.sh --max-pages 1 --max-properties 10
# With logging:   ./run_all.sh --log-level DEBUG
# Env:            DATA_DIR - directory for runtime JSON files (default: ".")
# ============================================================

set -euo pipefail
cd "$(dirname "$0")"

# ANSI colours for terminal output.
GREEN='\033[0;32m'
RED='\033[0;31m'
BOLD='\033[1m'
NC='\033[0m'

# Progress/failure counters; CURRENT is advanced by step(), FAILED by
# the per-scraper error handlers below.
TOTAL=6
CURRENT=0
FAILED=0

START_TIME=$(date -u +"%Y-%m-%dT%H:%M:%S")  # UTC start timestamp, passed to generate_status.py
START_EPOCH=$(date +%s)                     # epoch seconds, used to compute run duration
LOG_FILE="$(pwd)/scrape_run.log"            # all step output is tee'd here for status generation

# Mark scraper as running; cleaned up on exit (even on error/kill).
# server.py detects a running scrape by the existence of this file.
LOCK_FILE="${DATA_DIR:-.}/scraper_running.json"
# Fix: with set -e, a missing DATA_DIR would abort on the echo below
# *before* the cleanup trap is installed — ensure the directory exists.
mkdir -p -- "${DATA_DIR:-.}"
echo '{"running":true,"started_at":"'"$START_TIME"'"}' > "$LOCK_FILE"
trap 'rm -f -- "$LOCK_FILE"' EXIT
|
# Print usage information (Czech user-facing text) to stdout.
show_help() {
  cat <<'HELP'
Usage: ./run_all.sh [OPTIONS]

Spustí všechny scrapery, sloučí data a otevře mapu.

Options:
 --max-pages N Maximální počet stránek ke stažení z každého zdroje
 --max-properties N Maximální počet nemovitostí ke stažení z každého zdroje
 --log-level LEVEL Úroveň logování (DEBUG, INFO, WARNING, ERROR)
 --keep N Počet běhů v historii (výchozí: 5, 0=neomezeno)
 -h, --help Zobrazí tuto nápovědu

Examples:
 ./run_all.sh # plný běh
 ./run_all.sh --max-pages 1 --max-properties 10 # rychlý test
 ./run_all.sh --log-level DEBUG # s debug logováním
 ./run_all.sh --keep 10 # uchovej 10 běhů v historii
HELP
}
|
|
|
|
# ── Argument parsing ─────────────────────────────────────────
# Value-taking options are forwarded to every scraper via SCRAPER_ARGS;
# --keep is forwarded to generate_status.py only. Both are intentionally
# plain whitespace-separated strings, expanded unquoted at the call sites.
SCRAPER_ARGS=""
KEEP_ARG=""
while [[ $# -gt 0 ]]; do
  case $1 in
    -h|--help)
      show_help
      exit 0
      ;;
    --max-pages|--max-properties|--log-level)
      # Fix: a missing value previously tripped set -u on "$2" (or made
      # "shift 2" abort silently under set -e); fail with a clear message.
      [[ $# -ge 2 ]] || { echo "Missing value for $1"; echo ""; show_help; exit 1; }
      SCRAPER_ARGS="$SCRAPER_ARGS $1 $2"
      shift 2
      ;;
    --keep)
      [[ $# -ge 2 ]] || { echo "Missing value for $1"; echo ""; show_help; exit 1; }
      KEEP_ARG="--keep $2"
      shift 2
      ;;
    *)
      echo "Unknown argument: $1"
      echo ""
      show_help
      exit 1
      ;;
  esac
done
|
|
|
# Print a numbered step banner and advance the global step counter.
# Globals:   CURRENT (incremented), TOTAL, BOLD, NC (read)
# Arguments: $1 - human-readable step title
step() {
  CURRENT=$((CURRENT + 1))
  printf '\n'
  printf '%b\n' "${BOLD}[$CURRENT/$TOTAL] $1${NC}"
  printf '%s\n' "------------------------------------------------------------"
}
|
|
|
# ── Scrapers (in parallel where possible) ───────────────────
# Mirror all stdout/stderr of the remaining steps into LOG_FILE;
# generate_status.py reads this log to build the status page.
# NOTE(review): tee -a appends across runs, so the log grows forever —
# presumably the status parser only reads the latest run; confirm the
# log is rotated or truncated somewhere.
exec > >(tee -a "$LOG_FILE") 2>&1

# Sequential scrapers: each failure is reported and counted, but the
# "|| { ...; }" handler keeps set -e from aborting the whole run.
step "Sreality"
python3 scrape_and_map.py $SCRAPER_ARGS || { echo -e "${RED}✗ Sreality selhalo${NC}"; FAILED=$((FAILED + 1)); }

step "Realingo"
python3 scrape_realingo.py $SCRAPER_ARGS || { echo -e "${RED}✗ Realingo selhalo${NC}"; FAILED=$((FAILED + 1)); }

step "Bezrealitky"
python3 scrape_bezrealitky.py $SCRAPER_ARGS || { echo -e "${RED}✗ Bezrealitky selhalo${NC}"; FAILED=$((FAILED + 1)); }

step "iDNES Reality"
python3 scrape_idnes.py $SCRAPER_ARGS || { echo -e "${RED}✗ iDNES selhalo${NC}"; FAILED=$((FAILED + 1)); }

# PSN and CityHome run concurrently; waiting on each PID separately lets
# us attribute a failure to the right source (under set -e a failing
# background job would otherwise go unnoticed).
step "PSN + CityHome"
python3 scrape_psn.py $SCRAPER_ARGS &
PID_PSN=$!
python3 scrape_cityhome.py $SCRAPER_ARGS &
PID_CH=$!
wait $PID_PSN || { echo -e "${RED}✗ PSN selhalo${NC}"; FAILED=$((FAILED + 1)); }
wait $PID_CH || { echo -e "${RED}✗ CityHome selhalo${NC}"; FAILED=$((FAILED + 1)); }

# ── Merge + map ──────────────────────────────────────────────
step "Sloučení dat a generování mapy"
python3 merge_and_map.py || { echo -e "${RED}✗ Merge selhal${NC}"; FAILED=$((FAILED + 1)); }

# ── Status generation ────────────────────────────────────────
# Unguarded on purpose: if status generation fails, set -e aborts the
# script and the EXIT trap still removes the running-lock file.
END_EPOCH=$(date +%s)
DURATION=$((END_EPOCH - START_EPOCH))
python3 generate_status.py --start-time "$START_TIME" --duration "$DURATION" $KEEP_ARG
|
|
|
# ── Final summary ────────────────────────────────────────────
echo ""
echo "============================================================"
if [ "$FAILED" -eq 0 ]; then
  echo -e "${GREEN}${BOLD}Hotovo! Všech 6 zdrojů úspěšně staženo.${NC}"
else
  echo -e "${RED}${BOLD}Hotovo s $FAILED chybami.${NC}"
fi
echo "============================================================"

# Open the map in the default browser — best-effort only, never fail
# the run on a headless machine. Generalized: `open` on macOS,
# `xdg-open` fallback on Linux desktops.
if command -v open &>/dev/null; then
  open mapa_bytu.html || true
elif command -v xdg-open &>/dev/null; then
  xdg-open mapa_bytu.html >/dev/null 2>&1 || true
fi