
Crawl SCP Wiki #610

Workflow file for this run

name: Crawl SCP Wiki
on:
  schedule:
    - cron: "0 0 * * *"
  push:
    branches:
      - main
    paths:
      - .github/workflows/scp-items.yml
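
# "contents: write" gives the workflow's GITHUB_TOKEN permission to push commits,
# which the final "Push" step needs in order to publish the regenerated data.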
permissions:
  contents: write
jobs:
  update-main-scp:
    runs-on: ubuntu-latest
    steps:
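      # Check out this repository (as scp-api) and the crawler (as scp-crawler)
      # side by side in the job workspace.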
- name: "Clone this Repository"
uses: actions/checkout@v4
with:
path: scp-api
- name: "Clone the Crawler"
uses: actions/checkout@v4
with:
repository: scp-data/scp_crawler
ref: "main"
path: scp-crawler
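
      # Install a Python toolchain, then the crawler itself. "make install" is a
      # target in scp-data/scp_crawler and presumably installs the crawler package
      # and its dependencies.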
- name: "Setup Python"
uses: actions/setup-python@v5
- name: "Install Crawler"
working-directory: ./scp-crawler
run: make install
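
      # The crawl targets below presumably scrape the wiki and write raw JSON into
      # data/, and each "Process" target presumably reshapes that JSON into the
      # per-entry files under data/processed/; the exact behavior is defined by the
      # crawler's Makefile.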
- name: "Crawl Titles"
working-directory: ./scp-crawler
run: make data/scp_titles.json
- name: "Crawl Hubs"
working-directory: ./scp-crawler
run: make data/scp_hubs.json
- name: "Crawl Items"
working-directory: ./scp-crawler
run: make data/scp_items.json
- name: "Process Items"
working-directory: ./scp-crawler
run: make data/processed/items
- name: "Crawl Tales"
working-directory: ./scp-crawler
run: make data/scp_tales.json
- name: "Process Tales"
working-directory: ./scp-crawler
run: make data/processed/tales
- name: "Crawl GOI"
working-directory: ./scp-crawler
run: make data/goi.json
- name: "Process GOI"
working-directory: ./scp-crawler
run: make data/processed/goi
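
      # Copy the processed output into this repository's docs tree, which appears
      # to be where the static API data is served from.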
- name: "Move Files into API"
run: cp -Rf ./scp-crawler/data/processed/* ./scp-api/docs/data/scp/
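
      # bin/push.sh belongs to this repository; it presumably commits any changed
      # data files and pushes them using the Git identity configured below.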
- name: "Push"
shell: bash
run: >
cd scp-api;
./bin/push.sh;
env:
GIT_USER: "SCP Bot"
GIT_EMAIL: "[email protected]"
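
The push step defers to ./bin/push.sh, which is not shown on this page. A minimal sketch of what such a script could look like, assuming it only needs to commit the regenerated data under docs/data/scp/ and push it back to main (the real script in this repository may differ):

#!/usr/bin/env bash
# Hypothetical sketch only; see bin/push.sh in the scp-api repository for the real script.
set -euo pipefail

# Use the identity exported by the workflow's env block.
git config user.name "${GIT_USER}"
git config user.email "${GIT_EMAIL}"

git add docs/data/scp

# Skip the commit entirely when the crawl produced no changes.
if git diff --cached --quiet; then
  echo "No data changes to publish."
  exit 0
fi

git commit -m "Update crawled SCP data"
git push origin main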