Rename the "À propos" page to "Crédits techniques" #1920


Workflow file for this run

name: CI
on:
  push:
  pull_request:
  schedule:
    - cron: "30 3 * * WED" # every Wednesday at 3:30 AM, only on the default branch
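    # cron fields: minute, hour, day of month, month, day of week (times are UTC)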
env:
  NODE_VERSION: "16" # also needs to be updated in .nvmrc
  PYTHON_VERSION: "3.7"
  MARIADB_VERSION: "10.4.10"
  COVERALLS_VERSION: "3.3.1" # check if Coverage also needs to be updated in requirements-ci.txt
  GECKODRIVER_VERSION: "0.33.0"
  # As GitHub Actions does not allow environment variables
  # to be used in services definitions, these are only for
  # reference. If you update these versions, you HAVE TO
  # update the versions in the services definitions of the
  # test job.
  ELASTICSEARCH_VERSION: "5.5.2"
  MEMCACHED_VERSION: "1.6"
jobs:
  # Lint all source files by executing pre-commit hooks.
  lint:
    name: Lint
    runs-on: ubuntu-22.04
    # do not execute scheduled jobs on forks:
    if: ${{ github.event_name != 'schedule' || github.repository_owner == 'zestedesavoir' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up Python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v4
        with:
          python-version: "${{ env.PYTHON_VERSION }}"
      - name: Execute pre-commit
        uses: pre-commit/[email protected]
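      # pre-commit runs the hooks declared in .pre-commit-config.yaml;
      # the action typically runs them against all files of the repository.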
  # Build the documentation and upload it as an artifact.
  build-doc:
    name: Build Sphinx documentation
    runs-on: ubuntu-22.04
    # do not execute scheduled jobs on forks:
    if: ${{ github.event_name != 'schedule' || github.repository_owner == 'zestedesavoir' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up Python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v4
        with:
          python-version: "${{ env.PYTHON_VERSION }}"
      - name: Upgrade pip
        run: pip install --upgrade pip
      - name: Retrieve pip cache directory
        id: pip-cache
        run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
      - name: Cache pip packages
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements-dev.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install python dependencies
        run: pip install -r requirements-dev.txt
      - name: Build documentation
        run: make generate-doc
      - name: Upload documentation as a page artifact
        uses: actions/upload-pages-artifact@v1
        with:
          path: doc/build/html
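      # The page artifact uploaded here is what actions/deploy-pages publishes
      # in the push_doc job at the end of this workflow.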
  # Build the website front-end and upload built assets as an artifact.
  build-front:
    name: Lint and build front-end
    runs-on: ubuntu-22.04
    # do not execute scheduled jobs on forks:
    if: ${{ github.event_name != 'schedule' || github.repository_owner == 'zestedesavoir' }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Set up NodeJS ${{ env.NODE_VERSION }}
        uses: actions/setup-node@v3
        with:
          node-version: "${{ env.NODE_VERSION }}"
      - name: Retrieve yarn cache directory
        id: yarn-cache-dir-path
        run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
      - name: Cache Node modules
        uses: actions/cache@v3
        id: yarn-cache
        with:
          path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
          key: ${{ runner.os }}-yarn-${{ hashFiles('yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-yarn-
      - name: Install front-end
        run: make install-front
      - name: Lint front-end
        run: make lint-front
      - name: Build front-end
        run: make build-front
      - name: Upload front-end assets for subsequent tests
        uses: actions/upload-artifact@v3
        with:
          name: assets
          path: dist
          retention-days: 1
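      # retention-days is kept short: the built assets only need to survive
      # long enough for the test job below to download them.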
  # Test the zds-site project.
  # Install the project, using assets created during the previous job,
  # and install elasticsearch & memcached as services. Then, run the tests
  # in a matrix build to parallelize testing of multiple components.
  test:
    name: Install and test zds-site
    needs: build-front
    runs-on: ubuntu-22.04
    strategy:
      matrix:
        module:
          [
            "zds.tutorialv2",
            "zds.member zds.gallery zds.searchv2 zds.middlewares zds.pages",
            "zds.forum zds.featured zds.mp zds.notification zds.utils",
          ]
    services:
      elasticsearch:
        image: "elasticsearch:5.5.2"
        ports:
          - "9200:9200"
        env:
          "http.host": "0.0.0.0"
          "transport.host": "127.0.0.1"
          "xpack.security.enabled": false
          "ES_JAVA_OPTS": "-Xms512m -Xmx512m"
        options: >-
          -e="discovery.type=single-node"
          --health-cmd="curl http://localhost:9200/_cluster/health"
          --health-interval=10s
          --health-timeout=5s
          --health-retries=10
      memcached:
        image: "memcached:1.6"
        ports:
          - "11211:11211"
    steps:
      - name: Shutdown Ubuntu MySQL
        run: sudo service mysql stop
      - name: Set up MariaDB ${{ env.MARIADB_VERSION }}
        uses: getong/[email protected]
        with:
          character set server: "utf8mb4"
          collation server: "utf8mb4_unicode_ci"
          mariadb version: "${{ env.MARIADB_VERSION }}"
          mysql database: "ci_db_name"
          mysql root password: "ci_root_password"
      - name: Install Firefox
        run: sudo apt-get update && sudo apt-get install firefox
      - name: Install Geckodriver
        run: |
          wget https://github.com/mozilla/geckodriver/releases/download/v${{ env.GECKODRIVER_VERSION }}/geckodriver-v${{ env.GECKODRIVER_VERSION }}-linux64.tar.gz
          mkdir geckodriver
          tar -xzf geckodriver-v${{ env.GECKODRIVER_VERSION }}-linux64.tar.gz -C geckodriver
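      # geckodriver is extracted into ./geckodriver and added to PATH in the
      # "Run tests" step below, so the browser-based tests can drive the
      # Firefox installed above.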
      - name: Checkout
        uses: actions/checkout@v3
      - name: Download previously built assets
        uses: actions/download-artifact@v3
        with:
          name: assets
          path: dist
      - name: Upgrade pip
        run: |
          pip install --upgrade pip
      - name: Retrieve pip cache directory
        id: pip-cache
        run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
      - name: Cache pip packages
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements*.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Cache Node modules
        uses: actions/cache@v3
        with:
          path: ~/.npm
          key: ${{ runner.os }}-node-${{ hashFiles('zmd/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-
      - name: Set up Python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v4
        with:
          python-version: "${{ env.PYTHON_VERSION }}"
      - name: Set up NodeJS ${{ env.NODE_VERSION }}
        uses: actions/setup-node@v3
        with:
          node-version: "${{ env.NODE_VERSION }}"
      - name: Install Python dependencies
        run: pip install -r requirements-ci.txt
      - name: Check that no migration is missing
        run: python manage.py makemigrations --check --dry-run
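      # The check above exits with a non-zero status if a model change has no
      # corresponding migration, failing the job early.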
      - name: Build and start zmarkdown
        run: |
          make zmd-install
          make zmd-start
      - name: Run tests for ${{ matrix.module }}
        run: |
          export PATH="$PATH:$PWD/geckodriver"
          coverage run --source='.' manage.py test -v=2 --keepdb --settings zds.settings.ci_test ${{ matrix.module }}
      - name: Analyze coverage
        shell: bash -l {0}
        run: |
          echo $COVERALLS_FLAG_NAME
          python -m pip install coveralls==${{ env.COVERALLS_VERSION }}
          coveralls --service=github # See https://github.com/TheKevJames/coveralls-python/issues/252
        env:
          GITHUB_TOKEN: ${{ secrets.github_token }}
          COVERALLS_PARALLEL: true
          COVERALLS_FLAG_NAME: "${{ matrix.module }}"
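      # Each matrix entry uploads a partial, flagged coverage result to Coveralls;
      # the coverage job below closes the parallel build with `coveralls --finish`.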
  # Push coverage data to Coveralls.
  coverage:
    name: Push coverage to Coveralls
    needs: test
    runs-on: ubuntu-22.04
    steps:
      - name: Set up Python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v4
        with:
          python-version: "${{ env.PYTHON_VERSION }}"
      - name: Upload coverage data
        run: |
          python -m pip install coveralls==${{ env.COVERALLS_VERSION }}
          coveralls --finish --service=github # See https://github.com/TheKevJames/coveralls-python/issues/252
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  # If we're on the dev branch (i.e. for merged pull requests), publish the
  # built documentation to GitHub Pages.
  push_doc:
    name: Push documentation to GitHub Pages
    needs: ["build-doc", "test"]
    runs-on: ubuntu-22.04
    if: "github.ref == 'refs/heads/dev'"
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    permissions:
      contents: read
      pages: write
      id-token: write
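    # pages: write and id-token: write are required by actions/deploy-pages to
    # publish the artifact uploaded by the build-doc job.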
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2