# Workflow run for PR #100: "Add basic support for robots.txt disallow rules"
# CI pipeline: validates composer files, installs dependencies (with caching),
# enforces 100% test coverage, and runs static analysis across a PHP matrix.
name: PHP-Spider

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:
    name: PHP Spider (PHP ${{ matrix.php-versions }} on ${{ matrix.operating-system }})
    runs-on: ${{ matrix.operating-system }}
    strategy:
      # Run every matrix cell to completion even if one PHP version fails.
      fail-fast: false
      matrix:
        operating-system: [ ubuntu-latest ]
        # Quoted so '8.0' is a string, not the float 8.0.
        php-versions: [ '8.0', '8.1', '8.2' ]
    steps:
      - uses: actions/checkout@v3

      - name: Setup PHP, with composer and extensions
        uses: shivammathur/setup-php@v2  # https://github.com/shivammathur/setup-php
        with:
          php-version: ${{ matrix.php-versions }}
          extensions: ast
          coverage: xdebug

      - name: Get Composer Cache Directory
        id: composer-cache
        run: |
          echo "dir=$(composer config cache-files-dir)" >> "$GITHUB_OUTPUT"

      - name: Cache composer dependencies
        uses: actions/cache@v3
        with:
          path: ${{ steps.composer-cache.outputs.dir }}
          # Use composer.json for key, if composer.lock is not committed.
          key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.json') }}
          restore-keys: ${{ runner.os }}-composer-

      - name: Validate composer.json and composer.lock
        run: composer validate

      - name: Install Composer dependencies
        # --no-suggest was dropped: it is a deprecated no-op under Composer 2
        # and only produces a warning.
        run: composer install --no-progress --prefer-dist --optimize-autoloader --no-interaction

      - name: Run Tests
        id: tests
        run: bin/coverage-enforce 100

      - name: Run Static Analysis
        run: bin/static-analysis

      - name: Display Text Code Coverage
        if: ${{ failure() && steps.tests.conclusion == 'failure' }}  # only when the test step itself failed
        run: cat build/coverage/coverage.txt