Compare commits

..

1 Commit

240 changed files with 9670 additions and 11966 deletions

View File

@@ -31,8 +31,3 @@ update_configs:
- package_manager: 'javascript'
directory: '/badge-maker'
update_schedule: 'weekly'
# approve-bot package dependencies
- package_manager: 'javascript'
directory: '.github/actions/approve-bot'
update_schedule: 'weekly'

View File

@@ -4,4 +4,3 @@
/__snapshots__
/public
badge-maker/node_modules/
!.github/

View File

@@ -1,6 +1,5 @@
extends:
- standard
- standard-jsx
- standard-react
- plugin:@typescript-eslint/recommended
- prettier

View File

@@ -1,7 +1,4 @@
contact_links:
- name: 🎨 Simple Icons
url: https://github.com/badges/shields/discussions/5369
about: Please read this before posting a question about SimpleIcons
- name: ❓ Support Question
url: https://github.com/badges/shields/discussions
about: Ask a question about Shields.io

View File

@@ -1,12 +0,0 @@
name: 'Auto Approve'
description: 'Automatically approve/close selected pull requests for shields.io'
branding:
icon: 'check-circle'
color: 'green'
inputs:
github-token:
description: 'The GITHUB_TOKEN secret'
required: true
runs:
using: 'node12'
main: 'index.js'

View File

@@ -1,65 +0,0 @@
'use strict'
/**
 * Locate the first line of the changelog body inside a Dependabot PR body.
 *
 * Scans for the `<summary>Changelog</summary>` marker followed (two lines
 * later) by an opening `<blockquote>`, and returns the index of the first
 * line after that blockquote.
 *
 * @param {string[]} lines - PR body split into individual lines.
 * @returns {?number} index of the first changelog line, or null if the
 *   expected markers are not present.
 */
function findChangelogStart(lines) {
  const markerIndex = lines.findIndex(
    (line, i) =>
      line === '<summary>Changelog</summary>' && lines[i + 2] === '<blockquote>'
  )
  return markerIndex === -1 ? null : markerIndex + 3
}
/**
 * Find the closing `</blockquote>` that terminates the changelog section.
 *
 * @param {string[]} lines - PR body split into individual lines.
 * @param {?number} start - index at which to begin scanning (as returned by
 *   findChangelogStart; may be null, in which case the caller is expected to
 *   discard the result).
 * @returns {?number} index of the closing tag, or null when not found.
 */
function findChangelogEnd(lines, start) {
  let index = start
  while (index < lines.length) {
    if (lines[index] === '</blockquote>') {
      return index
    }
    index += 1
  }
  return null
}
/**
 * Report whether every remaining changelog line is a Lerna-style
 * "Version bump only for package …" entry (i.e. the release contains no
 * substantive changes).
 *
 * @param {string[]} changelogLines - changelog body lines, already stripped
 *   of headings and boilerplate by the caller.
 * @returns {boolean} true when there is at least one line and all of them
 *   are version-bump-only entries.
 */
function allChangelogLinesAreVersionBump(changelogLines) {
  // `every()` short-circuits on the first substantive line, unlike the
  // previous filter-and-compare-lengths formulation.
  return (
    changelogLines.length > 0 &&
    changelogLines.every(line =>
      line.includes('Version bump only for package')
    )
  )
}
/**
 * Decide whether a Dependabot PR is a Gatsby monorepo release whose
 * changelog contains nothing but "Version bump only" entries, and can
 * therefore be closed without review.
 *
 * @param {?string} body - the pull request body (may be null/empty; the
 *   GitHub API returns null for PRs without a description).
 * @returns {boolean} true when the PR is a pointless Gatsby version bump.
 */
function isPointlessGatsbyBump(body) {
  // PRs can legitimately have no body; previously this threw on `.split`.
  if (!body) {
    return false
  }
  const lines = body.split(/\r?\n/)
  if (
    !lines[0].includes('https://github.com/gatsbyjs/gatsby') // lgtm [js/incomplete-url-substring-sanitization]
  ) {
    return false
  }
  const start = findChangelogStart(lines)
  // Check `start` before using it: findChangelogEnd would otherwise begin
  // scanning from a null index.
  if (start === null) {
    return false
  }
  const end = findChangelogEnd(lines, start)
  if (end === null) {
    return false
  }
  // Strip headings and release-notes boilerplate before inspecting the
  // remaining entries.
  const changelogLines = lines
    .slice(start, end)
    .filter(line => !line.startsWith('<h'))
    .filter(line => !line.startsWith('<p>All notable changes'))
    .filter(
      line => !line.startsWith('See <a href="https://conventionalcommits.org">')
    )
    .filter(line => !line.startsWith('<!--'))
  return allChangelogLinesAreVersionBump(changelogLines)
}
/**
 * Report whether Dependabot has marked this PR for automatic merging.
 *
 * @param {string} body - the pull request body.
 * @returns {boolean} true when the Dependabot auto-merge marker is present.
 */
function shouldAutoMerge(body) {
  const autoMergeMarker =
    'If all status checks pass Dependabot will automatically merge this pull request'
  return body.includes(autoMergeMarker)
}
module.exports = { isPointlessGatsbyBump, shouldAutoMerge }

View File

@@ -1,56 +0,0 @@
'use strict'
const core = require('@actions/core')
const github = require('@actions/github')
const { isPointlessGatsbyBump, shouldAutoMerge } = require('./helpers')
// Entry point for the approve-bot GitHub Action. Inspects the pull request
// from the triggering event payload and, when it was opened by Dependabot,
// either closes it (pointless Gatsby version bump) or labels + approves it
// so it can be squash-merged once CI passes.
async function run() {
try {
// `github-token` is a required input (declared in action.yml); getInput
// throws if it is missing.
const token = core.getInput('github-token', { required: true })
const { pull_request: pr } = github.context.payload
if (!pr) {
throw new Error('Event payload missing `pull_request`')
}
const client = github.getOctokit(token)
// Only act on PRs opened by Dependabot (either of its bot account names).
if (
['dependabot[bot]', 'dependabot-preview[bot]'].includes(pr.user.login)
) {
if (isPointlessGatsbyBump(pr.body)) {
// Gatsby "version bump only" releases add no value: close the PR.
core.debug(`Closing pull request #${pr.number}`)
await client.pulls.update({
owner: github.context.repo.owner,
repo: github.context.repo.repo,
pull_number: pr.number,
state: 'closed',
})
core.debug(`Done.`)
} else if (shouldAutoMerge(pr.body)) {
// Dependabot will auto-merge once checks pass: add the merge label and
// an approving review.
core.debug(`Adding label to pull request #${pr.number}`)
await client.issues.addLabels({
owner: github.context.repo.owner,
repo: github.context.repo.repo,
issue_number: pr.number,
labels: ['squash when passing'],
})
core.debug(`Creating approving review for pull request #${pr.number}`)
await client.pulls.createReview({
owner: github.context.repo.owner,
repo: github.context.repo.repo,
pull_number: pr.number,
event: 'APPROVE',
})
core.debug(`Done.`)
}
}
} catch (error) {
// Surface any failure as a failed action run rather than an unhandled
// rejection.
core.setFailed(error.message)
}
}
run()

View File

@@ -1,177 +0,0 @@
{
"name": "approve-bot",
"version": "0.0.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@actions/core": {
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.6.tgz",
"integrity": "sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA=="
},
"@actions/github": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/@actions/github/-/github-4.0.0.tgz",
"integrity": "sha512-Ej/Y2E+VV6sR9X7pWL5F3VgEWrABaT292DRqRU6R4hnQjPtC/zD3nagxVdXWiRQvYDh8kHXo7IDmG42eJ/dOMA==",
"requires": {
"@actions/http-client": "^1.0.8",
"@octokit/core": "^3.0.0",
"@octokit/plugin-paginate-rest": "^2.2.3",
"@octokit/plugin-rest-endpoint-methods": "^4.0.0"
}
},
"@actions/http-client": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.9.tgz",
"integrity": "sha512-0O4SsJ7q+MK0ycvXPl2e6bMXV7dxAXOGjrXS1eTF9s2S401Tp6c/P3c3Joz04QefC1J6Gt942Wl2jbm3f4mLcg==",
"requires": {
"tunnel": "0.0.6"
}
},
"@octokit/auth-token": {
"version": "2.4.5",
"resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.4.5.tgz",
"integrity": "sha512-BpGYsPgJt05M7/L/5FoE1PiAbdxXFZkX/3kDYcsvd1v6UhlnE5e96dTDr0ezX/EFwciQxf3cNV0loipsURU+WA==",
"requires": {
"@octokit/types": "^6.0.3"
}
},
"@octokit/core": {
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.2.5.tgz",
"integrity": "sha512-+DCtPykGnvXKWWQI0E1XD+CCeWSBhB6kwItXqfFmNBlIlhczuDPbg+P6BtLnVBaRJDAjv+1mrUJuRsFSjktopg==",
"requires": {
"@octokit/auth-token": "^2.4.4",
"@octokit/graphql": "^4.5.8",
"@octokit/request": "^5.4.12",
"@octokit/types": "^6.0.3",
"before-after-hook": "^2.1.0",
"universal-user-agent": "^6.0.0"
}
},
"@octokit/endpoint": {
"version": "6.0.11",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.11.tgz",
"integrity": "sha512-fUIPpx+pZyoLW4GCs3yMnlj2LfoXTWDUVPTC4V3MUEKZm48W+XYpeWSZCv+vYF1ZABUm2CqnDVf1sFtIYrj7KQ==",
"requires": {
"@octokit/types": "^6.0.3",
"is-plain-object": "^5.0.0",
"universal-user-agent": "^6.0.0"
}
},
"@octokit/graphql": {
"version": "4.5.9",
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.5.9.tgz",
"integrity": "sha512-c+0yofIugUNqo+ktrLaBlWSbjSq/UF8ChAyxQzbD3X74k1vAuyLKdDJmPwVExUFSp6+U1FzWe+3OkeRsIqV0vg==",
"requires": {
"@octokit/request": "^5.3.0",
"@octokit/types": "^6.0.3",
"universal-user-agent": "^6.0.0"
}
},
"@octokit/openapi-types": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-3.3.0.tgz",
"integrity": "sha512-s3dd32gagPmKaSLNJ9aPNok7U+tl69YLESf6DgQz5Ml/iipPZtif3GLvWpNXoA6qspFm1LFUZX+C3SqWX/Y/TQ=="
},
"@octokit/plugin-paginate-rest": {
"version": "2.9.0",
"resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.9.0.tgz",
"integrity": "sha512-XxbOg45r2n/2QpU6hnGDxQNDRrJ7gjYpMXeDbUCigWTHECmjoyFLizkFO2jMEtidMkfiELn7AF8GBAJ/cbPTnA==",
"requires": {
"@octokit/types": "^6.6.0"
}
},
"@octokit/plugin-rest-endpoint-methods": {
"version": "4.9.0",
"resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-4.9.0.tgz",
"integrity": "sha512-EAr2epvY8JfXSi/cdMsyyfBctdKkolDH7xSgu3MKBqPRm0WfQ2QvI050jz61XZXoVK3ZgrhdMCyd1GgOFz7CSw==",
"requires": {
"@octokit/types": "^6.6.0",
"deprecation": "^2.3.1"
}
},
"@octokit/request": {
"version": "5.4.13",
"resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.4.13.tgz",
"integrity": "sha512-WcNRH5XPPtg7i1g9Da5U9dvZ6YbTffw9BN2rVezYiE7couoSyaRsw0e+Tl8uk1fArHE7Dn14U7YqUDy59WaqEw==",
"requires": {
"@octokit/endpoint": "^6.0.1",
"@octokit/request-error": "^2.0.0",
"@octokit/types": "^6.0.3",
"deprecation": "^2.0.0",
"is-plain-object": "^5.0.0",
"node-fetch": "^2.6.1",
"once": "^1.4.0",
"universal-user-agent": "^6.0.0"
}
},
"@octokit/request-error": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.0.5.tgz",
"integrity": "sha512-T/2wcCFyM7SkXzNoyVNWjyVlUwBvW3igM3Btr/eKYiPmucXTtkxt2RBsf6gn3LTzaLSLTQtNmvg+dGsOxQrjZg==",
"requires": {
"@octokit/types": "^6.0.3",
"deprecation": "^2.0.0",
"once": "^1.4.0"
}
},
"@octokit/types": {
"version": "6.6.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.6.0.tgz",
"integrity": "sha512-nmFoU3HCbw1AmnZU/eto2VvzV06+N7oAqXwMmAHGlNDF+KFykksh/VlAl85xc1P5T7Mw8fKYvXNaImNHCCH/rg==",
"requires": {
"@octokit/openapi-types": "^3.3.0",
"@types/node": ">= 8"
}
},
"@types/node": {
"version": "14.14.22",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.22.tgz",
"integrity": "sha512-g+f/qj/cNcqKkc3tFqlXOYjrmZA+jNBiDzbP3kH+B+otKFqAdPgVTGP1IeKRdMml/aE69as5S4FqtxAbl+LaMw=="
},
"before-after-hook": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.1.0.tgz",
"integrity": "sha512-IWIbu7pMqyw3EAJHzzHbWa85b6oud/yfKYg5rqB5hNE8CeMi3nX+2C2sj0HswfblST86hpVEOAb9x34NZd6P7A=="
},
"deprecation": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
"integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ=="
},
"is-plain-object": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
"integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q=="
},
"node-fetch": {
"version": "2.6.1",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz",
"integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw=="
},
"once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
"requires": {
"wrappy": "1"
}
},
"tunnel": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
},
"universal-user-agent": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.0.tgz",
"integrity": "sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w=="
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
}
}
}

View File

@@ -1,16 +0,0 @@
{
"name": "approve-bot",
"version": "0.0.0",
"description": "",
"main": "index.js",
"private": true,
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "chris48s",
"license": "CC0",
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/github": "^4.0.0"
}
}

View File

@@ -1,8 +0,0 @@
FROM node:12-buster
RUN apt-get update
RUN apt-get install -y jq
RUN apt-get install -y uuid-runtime
COPY entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

View File

@@ -1,5 +0,0 @@
name: 'draft-release'
description: 'Generate a changelog and propose a release PR'
runs:
using: 'docker'
image: 'Dockerfile'

View File

@@ -1,62 +0,0 @@
#!/bin/bash
set -euxo pipefail
# Set up a git user
git config user.name "release[bot]"
git config user.email "actions@users.noreply.github.com"
# Find last server-YYYY-MM-DD tag
git fetch --unshallow --tags
LAST_TAG=$(git tag | grep server | tail -n 1)
# Find the marker in CHANGELOG.md
INSERT_POINT=$(grep -n "^\-\-\-$" CHANGELOG.md | cut -f1 -d:)
INSERT_POINT=$((INSERT_POINT+1))
# Generate a release name
RELEASE_NAME="server-$(date --rfc-3339=date)"
# Assemble changelog entry
rm -f temp-changes.txt
touch temp-changes.txt
{
echo "## $RELEASE_NAME"
echo ""
git log "$LAST_TAG"..HEAD --no-merges --oneline --pretty="format:- %s" --perl-regexp --author='^((?!dependabot).*)$'
echo $'\n- Dependency updates\n'
} >> temp-changes.txt
BASE_URL="https:\/\/github.com\/badges\/shields\/issues\/"
# Turn trailing "(#1234)" references into markdown links to the issue.
sed -r -i "s/\((\#)([0-9]+)\)$/\[\1\2\]\($BASE_URL\2\)/g" temp-changes.txt
# Write the changelog
sed -i "${INSERT_POINT} r temp-changes.txt" CHANGELOG.md
# Cleanup
rm temp-changes.txt
# Run prettier (to ensure the markdown file doesn't fail CI)
# Read the pinned prettier version straight from package.json with jq
# (no useless `cat`), and quote the substitution for safety under `set -u`.
npx prettier@"$(jq -r .devDependencies.prettier package.json)" --write "CHANGELOG.md"
# Generate a unique branch name
BRANCH_NAME="$RELEASE_NAME"-$(uuidgen | head -c 8)
git checkout -b "$BRANCH_NAME"
# Commit + push changelog
git add CHANGELOG.md
git commit -m "Update Changelog"
git push origin "$BRANCH_NAME"
# Submit a PR
TITLE="Changelog for Release $RELEASE_NAME"
PR_RESP=$(curl https://api.github.com/repos/"$REPO_NAME"/pulls \
-X POST \
-H "Authorization: token $GITHUB_TOKEN" \
--data '{"title": "'"$TITLE"'", "body": "'"$TITLE"'", "head": "'"$BRANCH_NAME"'", "base": "master"}')
# Add the 'release' label to the PR
PR_API_URL=$(echo "$PR_RESP" | jq -r ._links.issue.href)
curl "$PR_API_URL" \
-X POST \
-H "Authorization: token $GITHUB_TOKEN" \
--data '{"labels":["release"]}'

View File

@@ -1,7 +0,0 @@
<!--
Be sure to review our Contributing guidelines to help streamline the merging of your PR!
* PR title conventions - https://github.com/badges/shields/blob/master/CONTRIBUTING.md#running-service-tests-in-pull-requests
* Code formatting - https://github.com/badges/shields/blob/master/CONTRIBUTING.md#prettier
* Merge processes and reminders - https://github.com/badges/shields/blob/master/CONTRIBUTING.md#pull-requests
-->

View File

@@ -0,0 +1,28 @@
name: Add deployment status
on:
pull_request_target:
types: [closed]
jobs:
add_deployment_status:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Create comment
if: ${{ github.event_name == 'pull_request_target'
&& github.event.action == 'closed'
&& github.event.pull_request.merged
&& !startsWith(github.event.pull_request.head.ref, 'dependabot/')
&& github.event.pull_request.base.ref == 'master' }}
# From a security perspective it's good practice to reference the commit hash
# https://github.com/peter-evans/create-pull-request/blob/master/docs/concepts-guidelines.md#security
uses: peter-evans/create-or-update-comment@41f3207a84f33bd70388036109082784d059dcaa
with:
issue-number: ${{ github.event.pull_request.number }}
edit-mode: replace
body: |
This pull request was merged to [${{ github.event.pull_request.base.ref }}](${{ github.event.repository.html_url }}/tree/${{ github.event.pull_request.base.ref }}) branch. This change is now waiting for deployment, which will usually happen within a few days. Stay tuned by joining our `#ops` channel on [Discord](https://discordapp.com/invite/HjJCwm5)!
After deployment, changes are copied to [gh-pages](${{ github.event.repository.html_url }}/tree/gh-pages) branch: ![](https://img.shields.io/github/commit-status/${{ github.event.repository.full_name }}/gh-pages/${{ github.event.pull_request.merge_commit_sha }}.svg?label=deploy%20status)

View File

@@ -5,12 +5,6 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install action dependencies
run: cd .github/actions/approve-bot && npm ci
- uses: ./.github/actions/approve-bot
- uses: chris48s/approve-bot@2.0.1
with:
github-token: '${{ secrets.GITHUB_TOKEN }}'

View File

@@ -1,26 +0,0 @@
name: Deploy Documentation
on:
push:
branches:
- master
jobs:
build-and-deploy:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2.3.1
with:
persist-credentials: false
- name: Build
run: |
npm ci
npm run build-docs
- name: Deploy
uses: JamesIves/github-pages-deploy-action@3.7.1
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BRANCH: gh-pages
FOLDER: api-docs
CLEAN: true

View File

@@ -1,19 +0,0 @@
name: Draft Release
on:
schedule:
- cron: '0 1 1 * *'
# At 01:00 on the first day of every month
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Draft Release
uses: ./.github/actions/draft-release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REPO_NAME: ${{ github.repository }}

View File

@@ -1,31 +0,0 @@
name: Tag Release
on:
pull_request:
types: [closed]
jobs:
tag-release:
if: |
github.event_name == 'pull_request' &&
github.event.action == 'closed' &&
github.event.pull_request.merged == true &&
contains(github.event.pull_request.labels.*.name, 'release')
runs-on: ubuntu-latest
steps:
- name: Get current date
id: date
run: echo "::set-output name=date::$(date --rfc-3339=date)"
- name: Checkout branch "master"
uses: actions/checkout@v2
with:
ref: 'master'
- name: Tag Release
uses: tvdias/github-tagger@v0.0.2
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
tag: server-${{ steps.date.outputs.date }}

View File

@@ -1,28 +0,0 @@
# Changelog
Note: this changelog is for the shields.io server. The changelog for the badge-maker NPM package is at https://github.com/badges/shields/blob/master/badge-maker/CHANGELOG.md
---
## server-2021-03-01
- ensure redirect target path is correctly encoded [#6229](https://github.com/badges/shields/issues/6229)
- [SecurityHeaders] Added a possibility for no follow redirects [#6212](https://github.com/badges/shields/issues/6212)
- catch URL parse error in [endpoint] badge [#6214](https://github.com/badges/shields/issues/6214)
- [Homebrew] Add homebrew downloads badge [#6209](https://github.com/badges/shields/issues/6209)
- update [pkgreview] url [#6189](https://github.com/badges/shields/issues/6189)
- Make [Twitch] a social badge [#6183](https://github.com/badges/shields/issues/6183)
- update [flathub] error handling [#6185](https://github.com/badges/shields/issues/6185)
- Add [Testspace] badges [#6162](https://github.com/badges/shields/issues/6162)
- Dependency updates
## server-2021-02-01
- Docs.rs badge (#6098)
- Fix feedz service in case the package page gets paginated (#6101)
- [Bitbucket] Server Adding Auth Tokens and Resolving Pull Request api … (#6076)
- Dependency updates
## server-2021-01-18
- Gotta start somewhere

View File

@@ -101,7 +101,7 @@ There are three places to get help:
used by developers or which are widely used by developers.
- The left-hand side of a badge should not advertise. It should be a lowercase _noun_
succinctly describing the meaning of the right-hand side.
- Except for badges using the `social` style, logos and links should be _turned off by
- Except for badges using the `social` style, logos should be _turned off by
default_.
- Badges should not obtain data from undocumented or reverse-engineered API endpoints.
- Badges should not obtain data by scraping web pages - these are likely to break frequently.
@@ -131,7 +131,13 @@ Prettier before a commit by default.
### Tests
When adding or changing a service [please write tests][service-tests], and ensure the [title of your Pull Requests follows the required conventions](#running-service-tests-in-pull-requests) to ensure your tests are executed.
When adding or changing a service [please write tests][service-tests].
When opening a pull request, include your service name in brackets in the pull
request title. That way, those service tests will run in CI.
e.g. **[Travis] Fix timeout issues**
When changing other code, please add unit tests.
To run the integration tests, you must have redis installed and in your PATH.
@@ -147,35 +153,3 @@ There is a [High-level code walkthrough](doc/code-walkthrough.md) describing the
### Logos
We have [documentation for logo usage](doc/logos.md) which includes [contribution guidance](doc/logos.md#contributing-logos)
## Pull Requests
All code changes are incorporated via pull requests, and pull requests are always squashed into a single commit on merging. Therefore there's no requirement to squash commits within your PR, but feel free to squash or restructure the commits on your PR branch if you think it will be helpful. PRs with well structured commits are always easier to review!
Because all changes are pulled into the main branch via squash merges from PRs, we do **not** support overwriting any aspects of the git history once it hits our main branch. Notably this means we do not support amending commit messages, nor adjusting commit author information once merged.
Accordingly, it is the responsibility of contributors to review this type of information and adjust as needed before marking PRs as ready for review and merging.
You can review and modify your local [git configuration][git-config] via `git config`, and also find more information about amending your commit messages [here][amending-commits].
[git-config]: https://git-scm.com/book/en/v2/Customizing-Git-Git-Configuration
[amending-commits]: https://docs.github.com/en/github/committing-changes-to-your-project/changing-a-commit-message#rewriting-the-most-recent-commit-message
### Running service tests in pull requests
The affected service names must be included in square brackets in the pull request title so that the CI engine will run those service tests. When a pull request affects multiple services, they should be separated with spaces. The test runner is case-insensitive, so they should be capitalized for readability.
For example:
- **[Travis] Fix timeout issues**
- **[Travis Sonar] Support user token authentication**
- **Add tests for [CRAN] and [CPAN]**
Note that many services are part of a "family" of related services. Depending on the changes in your PR you may need to run the tests for just a single service, or for _all_ the services within a family.
For example, a PR title of **[GitHubForks] Foo** will only run the service tests specifically for the GitHub Forks badge, whereas a title of **[GitHub] Foo** will run the service tests for all of the GitHub badges.
In the rare case when it's necessary to see the output of a full service-test
run in a PR (all 2,000+ tests), include `[*****]` in the title. Unless all the tests pass, the build
will fail, so likely it will be necessary to remove it and re-run the tests
before merging.

70
Makefile Normal file
View File

@@ -0,0 +1,70 @@
SHELL:=/bin/bash
# Temporary git-worktree locations used while assembling deploy artifacts.
# NOTE(review): assumes TMPDIR ends with a path separator — confirm on the
# deploy host.
SERVER_TMP=${TMPDIR}shields-server-deploy
FRONTEND_TMP=${TMPDIR}shields-frontend-deploy
# This branch is reserved for the deploy process and should not be used for
# development. The deploy script will clobber it. To avoid accidentally
# pushing secrets to GitHub, this branch is configured to reject pushes.
WORKING_BRANCH=server-deploy-working-branch
# Default target: run the test suite.
all: test
# Full deploy: push to the three badge servers, clean up, then publish the
# frontend to gh-pages.
deploy: deploy-s0 deploy-s1 deploy-s2 clean-server-deploy deploy-gh-pages deploy-gh-pages-clean
deploy-s0: prepare-server-deploy push-s0
deploy-s1: prepare-server-deploy push-s1
deploy-s2: prepare-server-deploy push-s2
# Build the frontend and stage it (plus production config) on the working
# branch in a throwaway worktree. The production config commit MUST NOT be
# pushed to GitHub — see WORKING_BRANCH note above.
prepare-server-deploy:
# Ship a copy of the front end to each server for debugging.
# https://github.com/badges/shields/issues/1220
INCLUDE_DEV_PAGES=false \
npm run build
rm -rf ${SERVER_TMP}
git worktree prune
git worktree add -B ${WORKING_BRANCH} ${SERVER_TMP}
cp -r public ${SERVER_TMP}
git -C ${SERVER_TMP} add -f public/
git -C ${SERVER_TMP} commit --no-verify -m '[DEPLOY] Add frontend for debugging'
cp config/local-shields-io-production.yml ${SERVER_TMP}/config/
git -C ${SERVER_TMP} add -f config/local-shields-io-production.yml
git -C ${SERVER_TMP} commit --no-verify -m '[DEPLOY] MUST NOT BE ON GITHUB'
# Remove the staged server worktree.
clean-server-deploy:
rm -rf ${SERVER_TMP}
git worktree prune
# Force-push the staged working branch to each server's master.
push-s0:
git push -f s0 ${WORKING_BRANCH}:master
push-s1:
git push -f s1 ${WORKING_BRANCH}:master
push-s2:
git push -f s2 ${WORKING_BRANCH}:master
# Rebuild the frontend for img.shields.io and force-push it to gh-pages,
# replacing the branch contents wholesale.
deploy-gh-pages:
rm -rf ${FRONTEND_TMP}
git worktree prune
GATSBY_BASE_URL=https://img.shields.io \
INCLUDE_DEV_PAGES=false \
npm run build
git worktree add -B gh-pages ${FRONTEND_TMP}
git -C ${FRONTEND_TMP} ls-files | xargs git -C ${FRONTEND_TMP} rm
git -C ${FRONTEND_TMP} commit --no-verify -m '[DEPLOY] Completely clean the index'
cp -r public/* ${FRONTEND_TMP}
echo shields.io > ${FRONTEND_TMP}/CNAME
touch ${FRONTEND_TMP}/.nojekyll
git -C ${FRONTEND_TMP} add .
git -C ${FRONTEND_TMP} commit --no-verify -m '[DEPLOY] Add built site'
git push -f origin gh-pages
# Remove the gh-pages worktree.
deploy-gh-pages-clean:
rm -rf ${FRONTEND_TMP}
git worktree prune
test:
npm test
.PHONY: all deploy prepare-server-deploy clean-server-deploy deploy-s0 deploy-s1 deploy-s2 push-s0 push-s1 push-s2 deploy-gh-pages deploy-gh-pages-clean deploy-heroku setup redis test

View File

@@ -1,5 +1 @@
web: npm run start:server:prod
scale4: npm run heroku:scale 4
scale5: npm run heroku:scale 5
scale6: npm run heroku:scale 6
scale7: npm run heroku:scale 7

View File

@@ -1,5 +1,5 @@
<p align="center">
<img src="https://raw.githubusercontent.com/badges/shields/master/readme-logo.svg?sanitize=true"
<img src="https://raw.githubusercontent.com/badges/shields/master/frontend/images/logo.svg?sanitize=true"
height="130">
</p>
<p align="center">
@@ -22,6 +22,9 @@
<a href="https://lgtm.com/projects/g/badges/shields/alerts/">
<img src="https://img.shields.io/lgtm/alerts/g/badges/shields"
alt="Total alerts"/></a>
<a href="https://github.com/badges/shields/compare/gh-pages...master">
<img src="https://img.shields.io/github/commits-since/badges/shields/gh-pages?label=commits%20to%20be%20deployed"
alt="commits to be deployed"></a>
<a href="https://discord.gg/HjJCwm5">
<img src="https://img.shields.io/discord/308323056592486420?logo=discord"
alt="chat on Discord"></a>
@@ -88,6 +91,10 @@ maybe you'd like to open a pull request to address one of them.
You can read a [tutorial on how to add a badge][tutorial].
[![GitHub issues by-label](https://img.shields.io/github/issues/badges/shields/good%20first%20issue)](https://github.com/badges/shields/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
[![Hacktoberfest 2020](https://img.shields.io/github/hacktoberfest/2020/badges/shields?label=hacktoberfest%202020)](https://github.com/badges/shields/issues?q=is%3Aopen+is%3Aissue+label%3Ahacktoberfest)
Let's see if we can beat last year!
[![Hacktoberfest 2019](https://img.shields.io/github/hacktoberfest/2019/badges/shields?label=hacktoberfest%202019)](https://github.com/badges/shields/issues?q=is%3Aopen+is%3Aissue+label%3Ahacktoberfest)
[service-tests]: https://github.com/badges/shields/blob/master/doc/service-tests.md
[tutorial]: doc/TUTORIAL.md

View File

@@ -23,6 +23,10 @@ module.exports = function makeBadge({
logoWidth,
links = ['', ''],
}) {
if (!logo && (logoPosition !== undefined || logoWidth !== undefined)) {
throw Error('`logoPosition` and `logoWidth` require `logo`')
}
// String coercion and whitespace removal.
label = `${label}`.trim()
message = `${message}`.trim()

View File

@@ -21,7 +21,7 @@ public:
key: 'HTTPS_KEY'
cert: 'HTTPS_CRT'
redirectUrl: 'REDIRECT_URI'
redirectUri: 'REDIRECT_URI'
rasterUrl: 'RASTER_URL'

View File

@@ -15,4 +15,6 @@ public:
cors:
allowedOrigin: ['http://shields.io', 'https://shields.io']
redirectUrl: 'https://shields.io/'
rasterUrl: 'https://raster.shields.io'

View File

@@ -5,8 +5,11 @@
function escapeFormat(t) {
return (
t
// Single underscore.
.replace(/(^|[^_])((?:__)*)_(?!_)/g, '$1$2 ')
// Inline single underscore.
.replace(/([^_])_([^_])/g, '$1 $2')
// Leading or trailing underscore.
.replace(/([^_])_$/, '$1 ')
.replace(/^_([^_])/, ' $1')
// Double underscore and double dash.
.replace(/__/g, '_')
.replace(/--/g, '-')

View File

@@ -1,21 +0,0 @@
'use strict'
const { test, given } = require('sazerac')
const { escapeFormat } = require('./path-helpers')
// Unit tests for escapeFormat(): single underscores become spaces, doubled
// underscores/dashes collapse to a single literal character.
describe('Badge URL helper functions', function () {
test(escapeFormat, () => {
given('_single leading underscore').expect(' single leading underscore')
given('single trailing underscore_').expect('single trailing underscore ')
given('__double leading underscores').expect('_double leading underscores')
given('double trailing underscores__').expect(
'double trailing underscores_'
)
given('treble___underscores').expect('treble_ underscores')
given('fourfold____underscores').expect('fourfold__underscores')
given('double--dashes').expect('double-dashes')
given('treble---dashes').expect('treble--dashes')
given('fourfold----dashes').expect('fourfold--dashes')
given('once_in_a_blue--moon').expect('once in a blue-moon')
})
})

View File

@@ -0,0 +1,74 @@
'use strict'
const makeBadge = require('../../badge-maker/lib/make-badge')
const BaseService = require('./base')
const { MetricHelper } = require('./metric-helper')
const { setCacheHeaders } = require('./cache-headers')
const { makeSend } = require('./legacy-result-sender')
const coalesceBadge = require('./coalesce-badge')
const { prepareRoute, namedParamsForMatch } = require('./route')
// Badges are subject to two independent types of caching: in-memory and
// downstream.
//
// Services deriving from `NonMemoryCachingBaseService` are not cached in
// memory on the server. This means that each request that hits the server
// triggers another call to the handler. When using badges for server
// diagnostics, that's useful!
//
// In contrast, The `handle()` function of most other `BaseService`
// subclasses is wrapped in onboard, in-memory caching. See `lib /request-
// handler.js` and `BaseService.prototype.register()`.
//
// All services, including those extending NonMemoryCachingBaseServices, may
// be cached _downstream_. This is governed by cache headers, which are
// configured by the service, the user's request, and the server's default
// cache length.
// Base class for services whose responses must never be served from the
// in-memory request cache (see the caching discussion above). Downstream
// (HTTP) caching still applies via the cache headers set below.
module.exports = class NonMemoryCachingBaseService extends BaseService {
// Registers this service's route directly on the camp router, bypassing
// the cached request handler used by BaseService.prototype.register(),
// so every request invokes the service afresh.
static register({ camp, metricInstance }, serviceConfig) {
const { cacheHeaders: cacheHeaderConfig } = serviceConfig
const { _cacheLength: serviceDefaultCacheLengthSeconds } = this
const { regex, captureNames } = prepareRoute(this.route)
const metricHelper = MetricHelper.create({
metricInstance,
ServiceClass: this,
})
camp.route(regex, async (queryParams, match, end, ask) => {
// Track request timing/counts for this service.
const metricHandle = metricHelper.startRequest()
const namedParams = namedParamsForMatch(captureNames, match, this)
// Run the service with an empty context — no per-request caching state.
const serviceData = await this.invoke(
{},
serviceConfig,
namedParams,
queryParams
)
// Merge service output with query-param overrides and service defaults.
const badgeData = coalesceBadge(
queryParams,
serviceData,
this.defaultBadgeData,
this
)
// The final capture group is the extension.
const format = (match.slice(-1)[0] || '.svg').replace(/^\./, '')
badgeData.format = format
const svg = makeBadge(badgeData)
// Apply downstream cache headers before sending the response.
setCacheHeaders({
cacheHeaderConfig,
serviceDefaultCacheLengthSeconds,
queryParams,
res: ask.res,
})
makeSend(format, ask.res, end)(svg)
metricHandle.noteResponseSent()
})
}
}

View File

@@ -57,7 +57,7 @@ class BaseYamlService extends BaseService {
})
let parsed
try {
parsed = yaml.load(buffer.toString(), encoding)
parsed = yaml.safeLoad(buffer.toString(), encoding)
} catch (err) {
logTrace(emojic.dart, 'Response YAML (unparseable)', buffer)
throw new InvalidResponse({

View File

@@ -213,18 +213,7 @@ class BaseService {
async _request({ url, options = {}, errorMessages = {} }) {
const logTrace = (...args) => trace.logTrace('fetch', ...args)
let logUrl = url
const logOptions = Object.assign({}, options)
if ('qs' in options) {
const params = new URLSearchParams(options.qs)
logUrl = `${url}?${params.toString()}`
delete logOptions.qs
}
logTrace(
emojic.bowAndArrow,
'Request',
`${logUrl}\n${JSON.stringify(logOptions, null, 2)}`
)
logTrace(emojic.bowAndArrow, 'Request', url, '\n', options)
const { res, buffer } = await this._requestFetcher(url, options)
await this._meterResponse(res, buffer)
logTrace(emojic.dart, 'Response status code', res.statusCode)

View File

@@ -329,7 +329,7 @@ describe('BaseService', function () {
describe('ScoutCamp integration', function () {
// TODO Strangely, without the useless escape the regexes do not match in Node 12.
// eslint-disable-next-line no-useless-escape
const expectedRouteRegex = /^\/foo(?:\/([^\/#\?]+?))(|\.svg|\.json)$/
const expectedRouteRegex = /^\/foo\/([^\/]+?)(|\.svg|\.json)$/
let mockCamp
let mockHandleRequest
@@ -463,7 +463,9 @@ describe('BaseService', function () {
'fetch',
sinon.match.string,
'Request',
`${url}\n${JSON.stringify(options, null, 2)}`
url,
'\n',
options
)
expect(trace.logTrace).to.be.calledWithMatch(
'fetch',
@@ -517,7 +519,7 @@ describe('BaseService', function () {
await serviceInstance._request({ url })
expect(await register.getSingleMetricAsString('service_response_bytes'))
expect(register.getSingleMetricAsString('service_response_bytes'))
.to.contain(
'service_response_bytes_bucket{le="65536",category="other",family="undefined",service="dummy_service_with_service_response_size_metric_enabled"} 0\n'
)
@@ -543,7 +545,7 @@ describe('BaseService', function () {
await serviceInstance._request({ url })
expect(
await register.getSingleMetricAsString('service_response_bytes')
register.getSingleMetricAsString('service_response_bytes')
).to.not.contain('service_response_bytes_bucket')
})
})

View File

@@ -3,6 +3,7 @@
const BaseService = require('./base')
const BaseJsonService = require('./base-json')
const BaseGraphqlService = require('./base-graphql')
const NonMemoryCachingBaseService = require('./base-non-memory-caching')
const BaseStaticService = require('./base-static')
const BaseSvgScrapingService = require('./base-svg-scraping')
const BaseXmlService = require('./base-xml')
@@ -21,6 +22,7 @@ module.exports = {
BaseService,
BaseJsonService,
BaseGraphqlService,
NonMemoryCachingBaseService,
BaseStaticService,
BaseSvgScrapingService,
BaseXmlService,

View File

@@ -1,6 +1,7 @@
'use strict'
const request = require('request')
const queryString = require('query-string')
const makeBadge = require('../../badge-maker/lib/make-badge')
const { setCacheHeaders } = require('./cache-headers')
const {
@@ -9,10 +10,27 @@ const {
ShieldsRuntimeError,
} = require('./errors')
const { makeSend } = require('./legacy-result-sender')
const LruCache = require('./lru-cache')
const coalesceBadge = require('./coalesce-badge')
const userAgent = 'Shields.io/2003a'
// We avoid calling the vendor's server for computation of the information in a
// number of badges.
const minAccuracy = 0.75
// The quotient of (vendor) data change frequency by badge request frequency
// must be lower than this to trigger sending the cached data *before*
// updating our data from the vendor's server.
// Indeed, the accuracy of our badges are:
// A(Δt) = 1 - min(# data change over Δt, # requests over Δt)
// / (# requests over Δt)
// = 1 - max(1, df) / rf
const freqRatioMax = 1 - minAccuracy
// Request cache size of 5MB (~5000 bytes/image).
const requestCache = new LruCache(1000)
// These query parameters are available to any badge. They are handled by
// `coalesceBadge`.
const globalQueryParams = new Set([
@@ -103,6 +121,8 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
return
}
const reqTime = new Date()
// `defaultCacheLengthSeconds` can be overridden by
// `serviceDefaultCacheLengthSeconds` (either by category or on a badge-
// by-badge basis). Then in turn that can be overridden by
@@ -131,10 +151,49 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
filteredQueryParams[key] = queryParams[key]
})
// Use sindresorhus query-string because it sorts the keys, whereas the
// builtin querystring module relies on the iteration order.
const stringified = queryString.stringify(filteredQueryParams)
const cacheIndex = `${match[0]}?${stringified}`
// Should we return the data right away?
const cached = requestCache.get(cacheIndex)
let cachedVersionSent = false
if (cached !== undefined) {
// A request was made not long ago.
const tooSoon = +reqTime - cached.time < cached.interval
if (tooSoon || cached.dataChange / cached.reqs <= freqRatioMax) {
const svg = makeBadge(cached.data.badgeData)
setCacheHeadersOnResponse(
ask.res,
cached.data.badgeData.cacheLengthSeconds
)
makeSend(cached.data.format, ask.res, end)(svg)
cachedVersionSent = true
// We do not wish to call the vendor servers.
if (tooSoon) {
return
}
}
}
// In case our vendor servers are unresponsive.
let serverUnresponsive = false
const serverResponsive = setTimeout(() => {
serverUnresponsive = true
if (cachedVersionSent) {
return
}
if (requestCache.has(cacheIndex)) {
const cached = requestCache.get(cacheIndex)
const svg = makeBadge(cached.data.badgeData)
setCacheHeadersOnResponse(
ask.res,
cached.data.badgeData.cacheLengthSeconds
)
makeSend(cached.data.format, ask.res, end)(svg)
return
}
ask.res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate')
const badgeData = coalesceBadge(
filteredQueryParams,
@@ -147,6 +206,8 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
makeSend(extension, ask.res, end)(svg)
}, 25000)
// Only call vendor servers when last request is older than…
let cacheInterval = 5000 // milliseconds
function cachingRequest(uri, options, callback) {
if (typeof options === 'function' && !callback) {
callback = options
@@ -162,7 +223,20 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
options.headers['User-Agent'] = userAgent
let bufferLength = 0
const r = request(options, callback)
const r = request(options, (err, res, body) => {
if (res != null && res.headers != null) {
const cacheControl = res.headers['cache-control']
if (cacheControl != null) {
const age = cacheControl.match(/max-age=([0-9]+)/)
// Would like to get some more test coverage on this before changing it.
// eslint-disable-next-line no-self-compare
if (age != null && +age[1] === +age[1]) {
cacheInterval = +age[1] * 1000
}
}
}
callback(err, res, body)
})
r.on('data', chunk => {
bufferLength += chunk.length
if (bufferLength > fetchLimitBytes) {
@@ -190,11 +264,30 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
return
}
clearTimeout(serverResponsive)
// Check for a change in the data.
let dataHasChanged = false
if (
cached !== undefined &&
cached.data.badgeData.message !== badgeData.message
) {
dataHasChanged = true
}
// Add format to badge data.
badgeData.format = format
const svg = makeBadge(badgeData)
setCacheHeadersOnResponse(ask.res, badgeData.cacheLengthSeconds)
makeSend(format, ask.res, end)(svg)
// Update information in the cache.
const updatedCache = {
reqs: cached ? cached.reqs + 1 : 1,
dataChange: cached ? cached.dataChange + (dataHasChanged ? 1 : 0) : 1,
time: +reqTime,
interval: cacheInterval,
data: { format, badgeData },
}
requestCache.set(cacheIndex, updatedCache)
if (!cachedVersionSent) {
const svg = makeBadge(badgeData)
setCacheHeadersOnResponse(ask.res, badgeData.cacheLengthSeconds)
makeSend(format, ask.res, end)(svg)
}
},
cachingRequest
)
@@ -206,8 +299,15 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
}
}
function clearRequestCache() {
requestCache.clear()
}
module.exports = {
handleRequest,
promisify,
clearRequestCache,
// Expose for testing.
_requestCache: requestCache,
userAgent,
}

View File

@@ -6,7 +6,11 @@ const portfinder = require('portfinder')
const Camp = require('@shields_io/camp')
const got = require('../got-test-client')
const coalesceBadge = require('./coalesce-badge')
const { handleRequest } = require('./legacy-request-handler')
const {
handleRequest,
clearRequestCache,
_requestCache,
} = require('./legacy-request-handler')
async function performTwoRequests(baseUrl, first, second) {
expect((await got(`${baseUrl}${first}`)).statusCode).to.equal(200)
@@ -79,6 +83,7 @@ describe('The request handler', function () {
camp.on('listening', () => done())
})
afterEach(function (done) {
clearRequestCache()
if (camp) {
camp.close(() => done())
camp = null
@@ -191,18 +196,57 @@ describe('The request handler', function () {
describe('caching', function () {
describe('standard query parameters', function () {
let handlerCallCount
beforeEach(function () {
handlerCallCount = 0
})
function register({ cacheHeaderConfig }) {
camp.route(
/^\/testing\/([^/]+)\.(svg|png|gif|jpg|json)$/,
handleRequest(
cacheHeaderConfig,
(queryParams, match, sendBadge, request) => {
++handlerCallCount
fakeHandler(queryParams, match, sendBadge, request)
}
)
)
}
context('With standard cache settings', function () {
beforeEach(function () {
register({ cacheHeaderConfig: standardCacheHeaders })
})
it('should cache identical requests', async function () {
await performTwoRequests(
baseUrl,
'/testing/123.svg',
'/testing/123.svg'
)
expect(handlerCallCount).to.equal(1)
})
it('should differentiate known query parameters', async function () {
await performTwoRequests(
baseUrl,
'/testing/123.svg?label=foo',
'/testing/123.svg?label=bar'
)
expect(handlerCallCount).to.equal(2)
})
it('should ignore unknown query parameters', async function () {
await performTwoRequests(
baseUrl,
'/testing/123.svg?foo=1',
'/testing/123.svg?foo=2'
)
expect(handlerCallCount).to.equal(1)
})
})
it('should set the expires header to current time + defaultCacheLengthSeconds', async function () {
register({ cacheHeaderConfig: { defaultCacheLengthSeconds: 900 } })
const { headers } = await got(`${baseUrl}/testing/123.json`)
@@ -233,6 +277,7 @@ describe('The request handler', function () {
handleRequest(
{ defaultCacheLengthSeconds: 300 },
(queryParams, match, sendBadge, request) => {
++handlerCallCount
createFakeHandlerWithCacheLength(400)(
queryParams,
match,
@@ -253,6 +298,7 @@ describe('The request handler', function () {
handleRequest(
{ defaultCacheLengthSeconds: 300 },
(queryParams, match, sendBadge, request) => {
++handlerCallCount
createFakeHandlerWithCacheLength(200)(
queryParams,
match,
@@ -299,6 +345,21 @@ describe('The request handler', function () {
'no-cache, no-store, must-revalidate'
)
})
describe('the cache key', function () {
beforeEach(function () {
register({ cacheHeaderConfig: standardCacheHeaders })
})
const expectedCacheKey = '/testing/123.json?color=123&label=foo'
it('should match expected and use canonical order - 1', async function () {
await got(`${baseUrl}/testing/123.json?color=123&label=foo`)
expect(_requestCache.cache).to.have.keys(expectedCacheKey)
})
it('should match expected and use canonical order - 2', async function () {
await got(`${baseUrl}/testing/123.json?label=foo&color=123`)
expect(_requestCache.cache).to.have.keys(expectedCacheKey)
})
})
})
describe('custom query parameters', function () {

View File

@@ -0,0 +1,136 @@
'use strict'

// In-memory LRU key/value store. Entries live in a Map for O(1) lookup and
// on a doubly linked list (threaded through CacheSlot.older/.newer) that
// tracks recency; when the configured limit is exceeded, the oldest entries
// are evicted.

// Eviction heuristics: 'unit' caps the number of entries at `capacity`;
// 'heap' evicts half the entries when the process heap grows past
// `capacity` bytes.
const typeEnum = {
  unit: 0,
  heap: 1,
}

// Most recently sampled process heap size, in bytes.
let heapSize
function computeHeapSize() {
  return (heapSize = process.memoryUsage().heapTotal)
}

let heapSizeTimeout
// Return the (cached) heap size in bytes. The first call takes a fresh
// sample and starts a 60-second refresh interval; subsequent calls return
// the cached sample.
function getHeapSize() {
  if (heapSizeTimeout == null) {
    // Compute the heap size every 60 seconds.
    heapSizeTimeout = setInterval(computeHeapSize, 60 * 1000)
    if (typeof heapSizeTimeout.unref === 'function') {
      // Don't let this background sampling timer keep the process alive
      // on its own (previously it blocked clean process exit).
      heapSizeTimeout.unref()
    }
    return computeHeapSize()
  } else {
    return heapSize
  }
}

// One linked-list node: a cache key, its stored value, and its neighbors
// in recency order.
function CacheSlot(key, value) {
  this.key = key
  this.value = value
  this.older = null // Newest slot that is older than this slot.
  this.newer = null // Oldest slot that is newer than this slot.
}

// LRU cache with the given capacity and eviction heuristic (see typeEnum;
// defaults to 'unit').
function Cache(capacity, type) {
  type = type || 'unit'
  this.capacity = capacity
  this.type = typeEnum[type]
  this.cache = new Map() // Maps cache keys to CacheSlots.
  this.newest = null // Newest slot in the cache; null when empty.
  this.oldest = null // Oldest slot in the cache; null when empty.
}

Cache.prototype = {
  // Insert (or refresh) `cacheKey`, making it the newest entry, then evict
  // as many old entries as the heuristic demands.
  set: function addToCache(cacheKey, cached) {
    let slot = this.cache.get(cacheKey)
    if (slot === undefined) {
      slot = new CacheSlot(cacheKey, cached)
      this.cache.set(cacheKey, slot)
    }
    this.makeNewest(slot)
    const numItemsToRemove = this.limitReached()
    for (let i = 0; i < numItemsToRemove; i++) {
      this.removeOldest()
    }
  },

  // Return the value for `cacheKey` (marking it newest), or undefined when
  // the key is absent.
  get: function getFromCache(cacheKey) {
    const slot = this.cache.get(cacheKey)
    if (slot !== undefined) {
      this.makeNewest(slot)
      return slot.value
    }
  },

  // Membership test; does not affect recency order.
  has: function hasInCache(cacheKey) {
    return this.cache.has(cacheKey)
  },

  // Move `slot` to the newest end of the recency list.
  makeNewest: function makeNewestSlot(slot) {
    const previousNewest = this.newest
    if (previousNewest === slot) {
      return
    }
    // Unlink the slot from its current position.
    const older = slot.older
    const newer = slot.newer
    if (older !== null) {
      older.newer = newer
    } else if (newer !== null) {
      // The slot was the oldest; its successor becomes the oldest.
      this.oldest = newer
    }
    if (newer !== null) {
      newer.older = older
    }
    // Relink it at the newest end.
    this.newest = slot
    if (previousNewest !== null) {
      slot.older = previousNewest
      slot.newer = null
      previousNewest.newer = slot
    } else {
      // If previousNewest is null, the cache used to be empty.
      this.oldest = slot
    }
  },

  // Evict the oldest entry. Safe to call on an empty cache.
  removeOldest: function removeOldest() {
    const oldest = this.oldest
    if (oldest === null) {
      // Previously `this.oldest.key` was dereferenced before the null
      // check, which threw on an empty cache (e.g. repeated sets on a
      // zero-capacity cache).
      return
    }
    this.oldest = oldest.newer
    if (this.oldest !== null) {
      this.oldest.older = null
    } else {
      // The cache is now empty: also drop the stale `newest` pointer so
      // later insertions don't link against a removed slot.
      this.newest = null
    }
    this.cache.delete(oldest.key)
  },

  // Returns the number of elements to remove if we're past the limit.
  limitReached: function heuristic() {
    if (this.type === typeEnum.unit) {
      // Remove the excess.
      return Math.max(0, this.cache.size - this.capacity)
    } else if (this.type === typeEnum.heap) {
      if (getHeapSize() >= this.capacity) {
        console.log('LRU HEURISTIC heap:', getHeapSize())
        // Remove half of them.
        return this.cache.size >> 1
      } else {
        return 0
      }
    } else {
      console.error(`Unknown heuristic '${this.type}' for LRU cache.`)
      return 1
    }
  },

  // Drop all entries and reset the recency list.
  clear: function () {
    this.cache.clear()
    this.newest = null
    this.oldest = null
  },
}
module.exports = Cache

View File

@@ -0,0 +1,134 @@
'use strict'
const { expect } = require('chai')
const LRU = require('./lru-cache')
// Assert that `cache` contains exactly `keys` (oldest first) and that the
// slots' older/newer pointers form a consistent doubly linked list in that
// order, with `cache.oldest`/`cache.newest` at the ends.
function expectCacheSlots(cache, keys) {
  expect(cache.cache.size).to.equal(keys.length)

  const slots = keys.map(key => cache.cache.get(key))
  const oldestSlot = slots[0]
  const newestSlot = slots[slots.length - 1]

  expect(cache.oldest).to.equal(oldestSlot)
  expect(cache.newest).to.equal(newestSlot)
  expect(oldestSlot.older).to.be.null
  expect(newestSlot.newer).to.be.null

  slots.forEach((slot, i) => {
    if (i + 1 === slots.length) {
      return
    }
    const successor = slots[i + 1]
    expect(slot.newer).to.equal(successor)
    expect(successor.older).to.equal(slot)
  })
}
// Unit tests for the doubly-linked-list LRU cache. `expectCacheSlots`
// (defined above) checks both the Map contents and the recency links.
describe('The LRU cache', function () {
  it('should support a zero capacity', function () {
    const cache = new LRU(0)
    cache.set('key', 'value')
    // With zero capacity, the entry is evicted immediately on insert.
    expect(cache.cache.size).to.equal(0)
  })
  it('should support a one capacity', function () {
    const cache = new LRU(1)
    cache.set('key1', 'value1')
    expectCacheSlots(cache, ['key1'])
    // Inserting a second key evicts the first.
    cache.set('key2', 'value2')
    expectCacheSlots(cache, ['key2'])
    expect(cache.get('key1')).to.be.undefined
    expect(cache.get('key2')).to.equal('value2')
  })
  it('should remove the oldest element when reaching capacity', function () {
    const cache = new LRU(2)
    cache.set('key1', 'value1')
    cache.set('key2', 'value2')
    cache.set('key3', 'value3')
    // Direct Map access does not affect recency order.
    cache.cache.get('key1')
    expectCacheSlots(cache, ['key2', 'key3'])
    expect(cache.cache.get('key1')).to.be.undefined
    expect(cache.get('key1')).to.be.undefined
    expect(cache.get('key2')).to.equal('value2')
    expect(cache.get('key3')).to.equal('value3')
  })
  it('should make sure that resetting a key in cache makes it newest', function () {
    const cache = new LRU(2)
    cache.set('key', 'value')
    cache.set('key2', 'value2')
    expectCacheSlots(cache, ['key', 'key2'])
    // Re-setting an existing key moves it to the newest end.
    cache.set('key', 'value')
    expectCacheSlots(cache, ['key2', 'key'])
  })
  describe('getting a key in the cache', function () {
    context('when the requested key is oldest', function () {
      it('should leave the keys in the expected order', function () {
        const cache = new LRU(2)
        cache.set('key1', 'value1')
        cache.set('key2', 'value2')
        expectCacheSlots(cache, ['key1', 'key2'])
        // get() promotes the oldest key to newest.
        expect(cache.get('key1')).to.equal('value1')
        expectCacheSlots(cache, ['key2', 'key1'])
      })
    })
    context('when the requested key is newest', function () {
      it('should leave the keys in the expected order', function () {
        const cache = new LRU(2)
        cache.set('key1', 'value1')
        cache.set('key2', 'value2')
        // Getting the newest key leaves the order unchanged.
        expect(cache.get('key2')).to.equal('value2')
        expectCacheSlots(cache, ['key1', 'key2'])
      })
    })
    context('when the requested key is in the middle', function () {
      it('should leave the keys in the expected order', function () {
        const cache = new LRU(3)
        cache.set('key1', 'value1')
        cache.set('key2', 'value2')
        cache.set('key3', 'value3')
        expectCacheSlots(cache, ['key1', 'key2', 'key3'])
        // Getting a middle key relinks it at the newest end.
        expect(cache.get('key2')).to.equal('value2')
        expectCacheSlots(cache, ['key1', 'key3', 'key2'])
      })
    })
  })
  it('should clear', function () {
    // Set up.
    const cache = new LRU(2)
    cache.set('key1', 'value1')
    cache.set('key2', 'value2')
    // Confidence check.
    expect(cache.get('key1')).to.equal('value1')
    expect(cache.get('key2')).to.equal('value2')
    // Run.
    cache.clear()
    // Test.
    expect(cache.get('key1')).to.be.undefined
    expect(cache.get('key2')).to.be.undefined
    expect(cache.cache.size).to.equal(0)
  })
})

View File

@@ -82,7 +82,7 @@ module.exports = function redirector(attrs) {
trace.logTrace('inbound', emojic.ticket, 'Named params', namedParams)
trace.logTrace('inbound', emojic.crayon, 'Query params', queryParams)
const targetPath = encodeURI(transformPath(namedParams))
const targetPath = transformPath(namedParams)
trace.logTrace('validate', emojic.dart, 'Target', targetPath)
let urlSuffix = ask.uri.search || ''

View File

@@ -121,20 +121,6 @@ describe('Redirector', function () {
)
})
it('should correctly encode the redirect URL', async function () {
const { statusCode, headers } = await got(
`${baseUrl}/very/old/service/hello%0Dworld.svg?foobar=a%0Db`,
{
followRedirect: false,
}
)
expect(statusCode).to.equal(301)
expect(headers.location).to.equal(
'/new/service/hello%0Dworld.svg?foobar=a%0Db'
)
})
describe('transformQueryParams', function () {
const route = {
base: 'another/old/service',

View File

@@ -22,7 +22,7 @@ module.exports = class InfluxMetrics {
const request = {
uri: this._config.url,
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
body: await this.metrics(),
body: this.metrics(),
timeout: this._config.timeoutMillseconds,
auth,
}
@@ -51,8 +51,8 @@ module.exports = class InfluxMetrics {
)
}
async metrics() {
return promClientJsonToInfluxV2(await this._metricInstance.metrics(), {
metrics() {
return promClientJsonToInfluxV2(this._metricInstance.metrics(), {
env: this._config.envLabel,
application: 'shields',
instance: this._instanceId,

View File

@@ -36,7 +36,7 @@ describe('Influx metrics', function () {
instanceIdEnvVarName: 'INSTANCE_ID',
})
expect(await influxMetrics.metrics()).to.contain('instance=instance3')
expect(influxMetrics.metrics()).to.contain('instance=instance3')
})
it('should use a hostname as an instance label', async function () {
@@ -46,9 +46,7 @@ describe('Influx metrics', function () {
}
const influxMetrics = new InfluxMetrics(metricInstance, customConfig)
expect(await influxMetrics.metrics()).to.be.contain(
'instance=test-hostname'
)
expect(influxMetrics.metrics()).to.be.contain('instance=test-hostname')
})
it('should use a random string as an instance label', async function () {
@@ -57,7 +55,7 @@ describe('Influx metrics', function () {
}
const influxMetrics = new InfluxMetrics(metricInstance, customConfig)
expect(await influxMetrics.metrics()).to.be.match(/instance=\w+ /)
expect(influxMetrics.metrics()).to.be.match(/instance=\w+ /)
})
it('should use a hostname alias as an instance label', async function () {
@@ -68,7 +66,7 @@ describe('Influx metrics', function () {
}
const influxMetrics = new InfluxMetrics(metricInstance, customConfig)
expect(await influxMetrics.metrics()).to.be.contain(
expect(influxMetrics.metrics()).to.be.contain(
'instance=test-hostname-alias'
)
})

View File

@@ -2,26 +2,26 @@
const groupBy = require('lodash.groupby')
function promClientJsonToInfluxV2(metrics, extraLabels = {}) {
return metrics
.flatMap(metric => {
const valuesByLabels = groupBy(metric.values, value =>
JSON.stringify(Object.entries(value.labels).sort())
)
return Object.values(valuesByLabels).map(metricsWithSameLabel => {
const labels = Object.entries(metricsWithSameLabel[0].labels)
.concat(Object.entries(extraLabels))
.sort((a, b) => a[0].localeCompare(b[0]))
.map(labelEntry => `${labelEntry[0]}=${labelEntry[1]}`)
.join(',')
const labelsFormatted = labels ? `,${labels}` : ''
const values = metricsWithSameLabel
.sort((a, b) => a.metricName.localeCompare(b.metricName))
.map(value => `${value.metricName || metric.name}=${value.value}`)
.join(',')
return `prometheus${labelsFormatted} ${values}`
})
}, metrics)
.join('\n')
// TODO Replace with Array.prototype.flatMap() after migrating to Node.js >= 11
const flatMap = (f, arr) => arr.reduce((acc, x) => acc.concat(f(x)), [])
return flatMap(metric => {
const valuesByLabels = groupBy(metric.values, value =>
JSON.stringify(Object.entries(value.labels).sort())
)
return Object.values(valuesByLabels).map(metricsWithSameLabel => {
const labels = Object.entries(metricsWithSameLabel[0].labels)
.concat(Object.entries(extraLabels))
.sort((a, b) => a[0].localeCompare(b[0]))
.map(labelEntry => `${labelEntry[0]}=${labelEntry[1]}`)
.join(',')
const labelsFormatted = labels ? `,${labels}` : ''
const values = metricsWithSameLabel
.sort((a, b) => a.metricName.localeCompare(b.metricName))
.map(value => `${value.metricName || metric.name}=${value.value}`)
.join(',')
return `prometheus${labelsFormatted} ${values}`
})
}, metrics).join('\n')
}
module.exports = { promClientJsonToInfluxV2 }

View File

@@ -22,7 +22,7 @@ describe('Metric format converters', function () {
expect(influx).to.be.equal('prometheus counter1=11')
})
it('converts a counter (from prometheus registry)', async function () {
it('converts a counter (from prometheus registry)', function () {
const register = new prometheus.Registry()
const counter = new prometheus.Counter({
name: 'counter1',
@@ -31,7 +31,7 @@ describe('Metric format converters', function () {
})
counter.inc(11)
const influx = promClientJsonToInfluxV2(await register.getMetricsAsJSON())
const influx = promClientJsonToInfluxV2(register.getMetricsAsJSON())
expect(influx).to.be.equal('prometheus counter1=11')
})
@@ -52,7 +52,7 @@ describe('Metric format converters', function () {
expect(influx).to.be.equal('prometheus gauge1=20')
})
it('converts a gauge (from prometheus registry)', async function () {
it('converts a gauge (from prometheus registry)', function () {
const register = new prometheus.Registry()
const gauge = new prometheus.Gauge({
name: 'gauge1',
@@ -61,7 +61,7 @@ describe('Metric format converters', function () {
})
gauge.inc(20)
const influx = promClientJsonToInfluxV2(await register.getMetricsAsJSON())
const influx = promClientJsonToInfluxV2(register.getMetricsAsJSON())
expect(influx).to.be.equal('prometheus gauge1=20')
})
@@ -101,7 +101,7 @@ prometheus histogram1_count=3,histogram1_sum=111`)
)
})
it('converts a histogram (from prometheus registry)', async function () {
it('converts a histogram (from prometheus registry)', function () {
const register = new prometheus.Registry()
const histogram = new prometheus.Histogram({
name: 'histogram1',
@@ -113,7 +113,7 @@ prometheus histogram1_count=3,histogram1_sum=111`)
histogram.observe(10)
histogram.observe(1)
const influx = promClientJsonToInfluxV2(await register.getMetricsAsJSON())
const influx = promClientJsonToInfluxV2(register.getMetricsAsJSON())
expect(sortLines(influx)).to.be.equal(
sortLines(`prometheus,le=+Inf histogram1_bucket=3
@@ -151,7 +151,7 @@ prometheus summary1_count=3,summary1_sum=111`)
)
})
it('converts a summary (from prometheus registry)', async function () {
it('converts a summary (from prometheus registry)', function () {
const register = new prometheus.Registry()
const summary = new prometheus.Summary({
name: 'summary1',
@@ -163,7 +163,7 @@ prometheus summary1_count=3,summary1_sum=111`)
summary.observe(10)
summary.observe(1)
const influx = promClientJsonToInfluxV2(await register.getMetricsAsJSON())
const influx = promClientJsonToInfluxV2(register.getMetricsAsJSON())
expect(sortLines(influx)).to.be.equal(
sortLines(`prometheus,quantile=0.99 summary1=100

View File

@@ -76,9 +76,9 @@ module.exports = class PrometheusMetrics {
async registerMetricsEndpoint(server) {
const { register } = this
server.route(/^\/metrics$/, async (data, match, end, ask) => {
server.route(/^\/metrics$/, (data, match, end, ask) => {
ask.res.setHeader('Content-Type', register.contentType)
ask.res.end(await register.metrics())
ask.res.end(register.metrics())
})
}
@@ -90,8 +90,8 @@ module.exports = class PrometheusMetrics {
}
}
async metrics() {
return await this.register.getMetricsAsJSON()
metrics() {
return this.register.getMetricsAsJSON()
}
/**

View File

@@ -15,7 +15,10 @@ const GithubConstellation = require('../../services/github/github-constellation'
const suggest = require('../../services/suggest')
const { loadServiceClasses } = require('../base-service/loader')
const { makeSend } = require('../base-service/legacy-result-sender')
const { handleRequest } = require('../base-service/legacy-request-handler')
const {
handleRequest,
clearRequestCache,
} = require('../base-service/legacy-request-handler')
const { clearRegularUpdateCache } = require('../legacy/regular-update')
const { rasterRedirectUrl } = require('../badge-urls/make-badge-url')
const log = require('./log')
@@ -87,10 +90,10 @@ const publicConfigSchema = Joi.object({
.integer()
.min(1)
.when('enabled', { is: true, then: Joi.required() }),
intervalSeconds: Joi.number().integer().min(1).when('enabled', {
is: true,
then: Joi.required(),
}),
intervalSeconds: Joi.number()
.integer()
.min(1)
.when('enabled', { is: true, then: Joi.required() }),
instanceIdFrom: Joi.string()
.equal('hostname', 'env-var', 'random')
.when('enabled', { is: true, then: Joi.required() }),
@@ -143,9 +146,6 @@ const publicConfigSchema = Joi.object({
rateLimit: Joi.boolean().required(),
handleInternalErrors: Joi.boolean().required(),
fetchLimit: Joi.string().regex(/^[0-9]+(b|kb|mb|gb|tb)$/i),
documentRoot: Joi.string().default(
path.resolve(__dirname, '..', '..', 'public')
),
requireCloudflare: Joi.boolean().required(),
}).required()
@@ -162,8 +162,6 @@ const privateConfigSchema = Joi.object({
jenkins_pass: Joi.string(),
jira_user: Joi.string(),
jira_pass: Joi.string(),
bitbucket_server_username: Joi.string(),
bitbucket_server_password: Joi.string(),
nexus_user: Joi.string(),
nexus_pass: Joi.string(),
npm_token: Joi.string(),
@@ -439,11 +437,10 @@ class Server {
log(`Server is starting up: ${this.baseUrl}`)
const camp = (this.camp = Camp.create({
documentRoot: this.config.public.documentRoot,
documentRoot: path.resolve(__dirname, '..', '..', 'public'),
port,
hostname,
secure,
staticMaxAge: 300,
cert,
key,
}))
@@ -484,6 +481,7 @@ class Server {
static resetGlobalState() {
// This state should be migrated to instance state. When possible, do not add new
// global state.
clearRequestCache()
clearRegularUpdateCache()
}

View File

@@ -1,11 +1,8 @@
'use strict'
const path = require('path')
const { expect } = require('chai')
const isSvg = require('is-svg')
const config = require('config')
const nock = require('nock')
const sinon = require('sinon')
const got = require('../got-test-client')
const Server = require('./server')
const { createTestServer } = require('./in-process-server-test-helpers')
@@ -16,11 +13,7 @@ describe('The server', function () {
before('Start the server', async function () {
// Fixes https://github.com/badges/shields/issues/2611
this.timeout(10000)
server = await createTestServer({
public: {
documentRoot: path.resolve(__dirname, 'test-public'),
},
})
server = await createTestServer()
baseUrl = server.baseUrl
await server.start()
})
@@ -52,16 +45,6 @@ describe('The server', function () {
.and.to.include('apple')
})
it('should serve front-end with default maxAge', async function () {
const { headers } = await got(`${baseUrl}/`)
expect(headers['cache-control']).to.equal('max-age=300, s-maxage=300')
})
it('should serve badges with custom maxAge', async function () {
const { headers } = await got(`${baseUrl}npm/l/express`)
expect(headers['cache-control']).to.equal('max-age=3600, s-maxage=3600')
})
it('should redirect colorscheme PNG badges as configured', async function () {
const { statusCode, headers } = await got(
`${baseUrl}:fruit-apple-green.png`,
@@ -368,67 +351,4 @@ describe('The server', function () {
})
})
})
describe('running with metrics enabled', function () {
let server, baseUrl, scope, clock
const metricsPushIntervalSeconds = 1
before('Start the server', async function () {
// Fixes https://github.com/badges/shields/issues/2611
this.timeout(10000)
process.env.INSTANCE_ID = 'test-instance'
server = await createTestServer({
public: {
metrics: {
prometheus: { enabled: true },
influx: {
enabled: true,
url: 'http://localhost:1112/metrics',
instanceIdFrom: 'env-var',
instanceIdEnvVarName: 'INSTANCE_ID',
envLabel: 'localhost-env',
intervalSeconds: metricsPushIntervalSeconds,
},
},
},
private: {
influx_username: 'influx-username',
influx_password: 'influx-password',
},
})
clock = sinon.useFakeTimers()
baseUrl = server.baseUrl
await server.start()
})
after('Shut down the server', async function () {
if (server) {
await server.stop()
}
server = undefined
nock.cleanAll()
delete process.env.INSTANCE_ID
clock.restore()
})
it('should push custom metrics', async function () {
scope = nock('http://localhost:1112', {
reqheaders: {
'Content-Type': 'application/x-www-form-urlencoded',
},
})
.post(
'/metrics',
/prometheus,application=shields,category=static,env=localhost-env,family=static-badge,instance=test-instance,service=static_badge service_requests_total=1\n/
)
.basicAuth({ user: 'influx-username', pass: 'influx-password' })
.reply(200)
await got(`${baseUrl}badge/fruit-apple-green.svg`)
await clock.tickAsync(1000 * metricsPushIntervalSeconds + 500)
expect(scope.isDone()).to.be.equal(
true,
`pending mocks: ${scope.pendingMocks()}`
)
})
})
})

View File

@@ -1,10 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>shields.io</title>
</head>
<body>
concise, consistent, legible
</body>
</html>

View File

@@ -226,7 +226,7 @@ Description of the code:
- [text-formatters.js](https://github.com/badges/shields/blob/master/services/text-formatters.js)
- [version.js](https://github.com/badges/shields/blob/master/services/version.js)
3. Our badge will query a JSON API so we will extend `BaseJsonService` instead of `BaseService`. This contains some helpers to reduce the need for boilerplate when calling a JSON API.
4. We perform input validation by defining a schema which we expect the JSON we receive to conform to. This is done using [Joi](https://github.com/hapijs/joi). Defining a schema means we can ensure the JSON we receive meets our expectations and throw an error if we receive unexpected input without having to explicitly code validation checks. The schema also acts as a filter on the JSON object. Any properties we're going to reference need to be validated, otherwise they will be filtered out. In this case our schema declares that we expect to receive an object which must have a property called 'version', which is a string. There is further documentation on [input validation](input-validation.md).
4. We perform input validation by defining a schema which we expect the JSON we receive to conform to. This is done using [Joi](https://github.com/hapijs/joi). Defining a schema means we can ensure the JSON we receive meets our expectations and throw an error if we receive unexpected input without having to explicitly code validation checks. The schema also acts as a filter on the JSON object. Any properties we're going to reference need to be validated, otherwise they will be filtered out. In this case our schema declares that we expect to receive an object which must have a property called 'version', which is a string.
5. Our module exports a class which extends `BaseJsonService`
6. Returns the name of the category to sort this badge into (eg. "build"). Used to sort the examples on the main [shields.io](https://shields.io) website. [Here](https://github.com/badges/shields/blob/master/services/categories.js) is the list of the valid categories. See [section 4.4](#44-adding-an-example-to-the-front-page) for more details on examples.
7. As with our previous badge, we need to declare a route. This time we will capture a variable called `gem`.

View File

@@ -125,9 +125,10 @@ test this kind of logic through unit tests (e.g. of `render()` and
registered.)
2. Scoutcamp invokes a callback with the four parameters:
`( queryParams, match, end, ask )`. This callback is defined in
[`legacy-request-handler`][legacy-request-handler]. A timeout is set to
handle unresponsive service code and the next callback is invoked: the
legacy handler function.
[`legacy-request-handler`][legacy-request-handler]. If the badge result
is found in a relatively small in-memory cache, the response is sent
immediately. Otherwise a timeout is set to handle unresponsive service
code and the next callback is invoked: the legacy handler function.
3. The legacy handler function receives
`( queryParams, match, sendBadge, request )`. Its job is to extract data
from the regex `match` and `queryParams`, invoke `request` to fetch
@@ -161,8 +162,8 @@ test this kind of logic through unit tests (e.g. of `render()` and
services defaults to produce an object that fully describes the badge to
be rendered.
9. `sendBadge` is invoked with that object. It does some housekeeping on the
timeout. Then it renders the badge to svg or raster and pushes out the
result over the HTTPS connection.
timeout and caches the result. Then it renders the badge to svg or raster
and pushes out the result over the HTTPS connection.
[error reporting]: https://github.com/badges/shields/blob/master/doc/production-hosting.md#error-reporting
[coalescebadge]: https://github.com/badges/shields/blob/master/core/base-service/coalesce-badge.js

View File

@@ -1,50 +0,0 @@
# Input Data Validation
When we receive input data from an upstream API, we perform input validation to:
- Ensure we won't throw a runtime error trying to render a badge
- Ensure we won't render badges with spurious or unexpected output e.g: ![](https://img.shields.io/badge/version-null-blue) ![](https://img.shields.io/badge/coverage-NaN%25-red) ![](https://img.shields.io/badge/build-undefined-red) ![](https://img.shields.io/badge/coverage---10%25-critical) etc
- Express and document our understanding of the input data
## Writing schemas and validation
- The default validation mechanism should be to use [Joi](https://github.com/sideway/joi) to define a schema for the input data. Validation against Joi schemas is implemented in the base classes and inherited by every service class that extends them. Sometimes additional manual validation is needed which can't be covered by Joi and plugins in which case we implement it by hand.
- If validation is implemented manually (because we need to enforce a constraint that can't be expressed with Joi), invalid data should throw an [InvalidResponse](https://contributing.shields.io/module-core_base-service_errors-InvalidResponse.html) exception.
- Our definition of "valid" should not be stricter than the upstream API's definition of "valid".
- The schema/validation we choose is informed by the assumptions we're making about the data. e.g:
- If we're going to use a value, make sure it exists.
- If we need to multiply it by something, we check it's a number.
- If we're going to call `.split()` on it, we make sure it's a string.
- If we're going to address `foo[0]`, `foo` must be an array.
- If we're going to sort a version on the assumption it is a semver, check it's a semver
- We don't need to validate characteristics we don't rely on. For example, if we're just going to render a version on a badge with the same exact value from the API response and do not need to sort or transform the value, then it doesn't matter what format the version number is in. We can use a very relaxed schema to validate in this case, e.g. `Joi.string().required()`
- If theory (docs) and practice (real-world API responses) conflict, real-world outputs take precedence over documented behaviour. e.g: if the docs say version is a semver but we learn that there are real-world packages where the version number is `0.3b` or `1.2.1.27` then we should accept those values in preference to enforcing the documented API behaviour.
- Shields is descriptive rather than prescriptive. We reflect the established norms of the communities we serve.
- It is fine to define a single schema which is applied to multiple badges. For example, we could define a schema that says:
```js
const schema = Joi.object({
license: Joi.string().required(),
version: Joi.string().required(),
}).required()
```
and have both the license and version badges validate the response against that schema.
- For build status badges there is a shared [isBuildStatus](https://github.com/badges/shields/blob/master/services/build-status.js) validator. In most cases build status badges should use `isBuildStatus` or input validation and `renderBuildStatusBadge` should be used for rendering. Any additional status values can be added to the relevant color arrays.
## Identifying problems
- If we know of a real-world example of a package/repo/etc that causes us to render an invalid value on a badge (e.g: ![](https://img.shields.io/badge/version-null-blue) ![](https://img.shields.io/badge/coverage-NaN%25-red) ![](https://img.shields.io/badge/build-undefined-red) ) our input validation is broken and we should fix it.
- If we know of a real-world example of a package/repo/etc that causes us to throw an unhandled runtime exception, our input validation is broken and we should fix it.
- We should not fail to render a badge because of a validation failure on a field that isn't necessary to render the badge. In the above example of a shared license/version schema: If we become aware of a real-world example of a package/repo/etc that has a `version` key but not a `license` key then we should split the schema (or make `version` optional and handle the error in code).

View File

@@ -14,43 +14,55 @@ Production hosting is managed by the Shields ops team:
[operations issues]: https://github.com/badges/shields/issues?q=is%3Aissue+is%3Aopen+label%3Aoperations
[ops discord]: https://discordapp.com/channels/308323056592486420/480747695879749633
| Component | Subcomponent | People with access |
| ----------------------------- | ------------------------------- | --------------------------------------------------------------- |
| shields-production-us | Account owner | @paulmelnikow |
| shields-production-us | Full access | @calebcartwright, @chris48s, @paulmelnikow, @pyvesb |
| shields-production-us | Access management | @calebcartwright, @chris48s, @paulmelnikow, @pyvesb |
| Compose.io Redis | Account owner | @paulmelnikow |
| Compose.io Redis | Account access | @paulmelnikow |
| Compose.io Redis | Database connection credentials | @calebcartwright, @chris48s, @paulmelnikow, @pyvesb |
| Zeit Now | Team owner | @paulmelnikow |
| Zeit Now | Team members | @paulmelnikow, @chris48s, @calebcartwright, @platan |
| Raster server | Full access as team members | @paulmelnikow, @chris48s, @calebcartwright, @platan |
| shields-server.com redirector | Full access as team members | @paulmelnikow, @chris48s, @calebcartwright, @platan |
| Cloudflare (CDN) | Account owner | @espadrine |
| Cloudflare (CDN) | Access management | @espadrine |
| Cloudflare (CDN) | Admin access | @calebcartwright, @chris48s, @espadrine, @paulmelnikow, @PyvesB |
| Twitch | OAuth app | @PyvesB |
| Discord | OAuth app | @PyvesB |
| YouTube | Account owner | @PyvesB |
| OpenStreetMap (for Wheelmap) | Account owner | @paulmelnikow |
| DNS | Account owner | @olivierlacan |
| DNS | Read-only account access | @espadrine, @paulmelnikow, @chris48s |
| Sentry | Error reports | @espadrine, @paulmelnikow |
| Metrics server | Owner | @platan |
| UptimeRobot | Account owner | @paulmelnikow |
| More metrics | Owner | @RedSparr0w |
| Component | Subcomponent | People with access |
| ----------------------------- | ------------------------------- | ------------------------------------------------------------------------------------------ |
| shields-production-us | Account owner | @paulmelnikow |
| shields-production-us | Full access | @calebcartwright, @chris48s, @paulmelnikow, @pyvesb |
| shields-production-us | Access management | @calebcartwright, @chris48s, @paulmelnikow, @pyvesb |
| Compose.io Redis | Account owner | @paulmelnikow |
| Compose.io Redis | Account access | @paulmelnikow |
| Compose.io Redis | Database connection credentials | @calebcartwright, @chris48s, @paulmelnikow, @pyvesb |
| Zeit Now | Team owner | @paulmelnikow |
| Zeit Now | Team members | @paulmelnikow, @chris48s, @calebcartwright, @platan |
| Raster server | Full access as team members | @paulmelnikow, @chris48s, @calebcartwright, @platan |
| shields-server.com redirector | Full access as team members | @paulmelnikow, @chris48s, @calebcartwright, @platan |
| Legacy badge servers | Account owner | @espadrine |
| Legacy badge servers | ssh, logs | @espadrine |
| Legacy badge servers | Deployment | @espadrine, @paulmelnikow |
| Legacy badge servers | Admin endpoints | @espadrine, @paulmelnikow |
| Cloudflare (CDN) | Account owner | @espadrine |
| Cloudflare (CDN) | Access management | @espadrine |
| Cloudflare (CDN) | Admin access | @calebcartwright, @chris48s, @espadrine, @paulmelnikow, @PyvesB |
| Twitch | OAuth app | @PyvesB |
| Discord | OAuth app | @PyvesB |
| YouTube | Account owner | @PyvesB |
| OpenStreetMap (for Wheelmap) | Account owner | @paulmelnikow |
| DNS | Account owner | @olivierlacan |
| DNS | Read-only account access | @espadrine, @paulmelnikow, @chris48s |
| Sentry | Error reports | @espadrine, @paulmelnikow |
| Frontend | Deployment | Technically anyone with push access but in practice must be deployed with the badge server |
| Metrics server | Owner | @platan |
| UptimeRobot | Account owner | @paulmelnikow |
| More metrics | Owner | @RedSparr0w |
| Netlify (documentation site) | Owner | @chris48s |
There are [too many bottlenecks][issue 2577]!
[issue 2577]: https://github.com/badges/shields/issues/2577
## Attached state
Shields has mercifully little persistent state:
1. The GitHub tokens we collect are saved on each server in a cloud Redis
database. They can also be fetched from the [GitHub auth admin endpoint][]
for debugging.
2. The server keeps the [regular-update cache][] in memory. It is neither
persisted nor inspectable.
1. The GitHub tokens we collect are saved on each server in a cloud Redis database.
They can also be fetched from the [GitHub auth admin endpoint][] for debugging.
2. The server keeps a few caches in memory. These are neither persisted nor
inspectable.
- The [request cache][]
- The [regular-update cache][]
[github auth admin endpoint]: https://github.com/badges/shields/blob/master/services/github/auth/admin.js
[request cache]: https://github.com/badges/shields/blob/master/core/base-service/legacy-request-handler.js#L29-L30
[regular-update cache]: https://github.com/badges/shields/blob/master/core/legacy/regular-update.js
## Configuration
@@ -78,17 +90,32 @@ files:
[shields-io-production.yml]: ../config/shields-io-production.yml
[default.yml]: ../config/default.yml
The project ships with `dotenv`, however there is no `.env` in production.
## Badge CDN
Sitting in front of the three servers is a Cloudflare Free account which
provides several services:
- Global CDN, caching, and SSL gateway for `img.shields.io` and `shields.io`
- Global CDN, caching, and SSL gateway for `img.shields.io`
- Analytics through the Cloudflare dashboard
- DNS resolution for `shields.io` (and subdomains)
- DNS hosting for `shields.io`
Cloudflare is configured to respect the servers' cache headers.
## Frontend
The frontend is served by [GitHub Pages][] via the [gh-pages branch][gh-pages]. SSL is enforced.
`shields.io` resolves to the GitHub Pages hosts. It is not proxied through
Cloudflare.
Technically any maintainer can push to `gh-pages`, but in practice the frontend must be deployed
with the badge server via the deployment process described below.
[github pages]: https://pages.github.com/
[gh-pages]: https://github.com/badges/shields/tree/gh-pages
## Raster server
The raster server `raster.shields.io` (a.k.a. the rasterizing proxy) is
@@ -98,14 +125,28 @@ hosted on [Zeit Now][]. It's managed in the
[zeit now]: https://zeit.co/now
[svg-to-image-proxy]: https://github.com/badges/svg-to-image-proxy
### Heroku Deployment
## Deployment
Both the badge server and frontend are served from Heroku.
The deployment is done in two stages: the badge server (heroku) and the front-end (gh-pages).
### Heroku
After merging a commit to master, heroku should create a staging deploy. Check this has deployed correctly in the `shields-staging` pipeline and review http://shields-staging.herokuapp.com/
If we're happy with it, "promote to production". This will deploy what's on staging to the `shields-production-eu` and `shields-production-us` pipelines.
### Frontend
To deploy the front-end to GH pages, use a clean clone of the shields repo.
```sh
$ git pull # update the working copy
$ npm ci # install dependencies (devDependencies are needed to build the frontend)
$ make deploy-gh-pages # build the frontend and push it to the gh-pages branch
```
No secrets are required to build or deploy the frontend.
## DNS
DNS is registered with [DNSimple][].
@@ -128,13 +169,6 @@ the server. It's generously donated by [Sentry][sentry home]. We bundle
[sentry home]: https://sentry.io/shields/
[sentry configuration]: https://github.com/badges/shields/blob/master/doc/self-hosting.md#sentry
## URLs
The canonical and only recommended domain for badge URLs is `img.shields.io`. Currently it is possible to request badges on both `img.shields.io` and `shields.io` i.e: https://img.shields.io/badge/build-passing-brightgreen and https://shields.io/badge/build-passing-brightgreen will both work. However:
- We never show or generate the `img.`-less URL format on https://shields.io/
- We make no guarantees about the `img.`-less URL format. At some future point we may remove the ability to serve badges on `shields.io` (without `img.`) without any warning. `img.shields.io` should always be used for badge urls.
## Monitoring
Overall server performance and requests by service are monitored using
@@ -154,3 +188,19 @@ Request performance is monitored in two places:
[monitor]: https://shields.redsparr0w.com/1568/
[notifications]: http://shields.redsparr0w.com/discord_notification
[monitor discord]: https://discordapp.com/channels/308323056592486420/470700909182320646
## Legacy servers
There are three legacy servers on OVH VPSs which are currently used for proxying.
| Cname | Hostname | Type | IP | Location |
| --------------------------- | -------------------- | ---- | -------------- | ------------------ |
| [s0.servers.shields.io][s0] | vps71670.vps.ovh.ca | VPS | 192.99.59.72 | Quebec, Canada |
| [s1.servers.shields.io][s1] | vps244529.ovh.net | VPS | 51.254.114.150 | Gravelines, France |
| [s2.servers.shields.io][s2] | vps117870.vps.ovh.ca | VPS | 149.56.96.133 | Quebec, Canada |
[s0]: https://s0.servers.shields.io/index.html
[s1]: https://s1.servers.shields.io/index.html
[s2]: https://s2.servers.shields.io/index.html
The only way to inspect the commit on the server is with `git ls-remote`.

View File

@@ -198,10 +198,10 @@ sudo node server
### Prometheus
Shields uses [prom-client](https://github.com/siimon/prom-client) to provide [default metrics](https://prometheus.io/docs/instrumenting/writing_clientlibs/#standard-and-runtime-collectors). These metrics are disabled by default.
You can enable them by `METRICS_PROMETHEUS_ENABLED` and `METRICS_PROMETHEUS_ENDPOINT_ENABLED` environment variables.
You can enable them by setting the `METRICS_PROMETHEUS_ENABLED` environment variable.
```bash
METRICS_PROMETHEUS_ENABLED=true METRICS_PROMETHEUS_ENDPOINT_ENABLED=true npm start
METRICS_PROMETHEUS_ENABLED=true npm start
```
Metrics are available at `/metrics` resource.

View File

@@ -260,9 +260,21 @@ npm run coverage:report:open
## Pull requests
Pull requests must follow the [documented conventions][pr-conventions] in order to execute the correct set of service tests.
The affected service ids should be included in square brackets in the pull request
title. That way, Circle CI will run those service tests. When a pull request
affects multiple services, they should be separated with spaces. The test
runner is case-insensitive, so they should be capitalized for readability.
[pr-conventions]: https://github.com/badges/shields/blob/master/CONTRIBUTING.md#running-service-tests-in-pull-requests
For example:
- [Travis] Fix timeout issues
- [Travis Sonar] Support user token authentication
- Add tests for [CRAN] and [CPAN]
In the rare case when it's necessary to see the output of a full service-test
run in a PR, include `[*****]` in the title. Unless all the tests pass, the build
will fail, so likely it will be necessary to remove it and re-run the tests
before merging.
## Getting help

View File

@@ -11,6 +11,13 @@ import {
CopiedContentIndicatorHandle,
} from './copied-content-indicator'
function getBaseUrlFromWindowLocation(): string {
// Default to the current hostname for when there is no `BASE_URL` set
// at build time (as in most PaaS deploys).
const { protocol, hostname } = window.location
return `${protocol}//${hostname}`
}
export default function Customizer({
baseUrl,
title,
@@ -32,7 +39,9 @@ export default function Customizer({
}): JSX.Element {
// https://github.com/DefinitelyTyped/DefinitelyTyped/issues/35572
// https://github.com/DefinitelyTyped/DefinitelyTyped/issues/28884#issuecomment-471341041
const indicatorRef = useRef<CopiedContentIndicatorHandle>() as React.MutableRefObject<CopiedContentIndicatorHandle>
const indicatorRef = useRef<
CopiedContentIndicatorHandle
>() as React.MutableRefObject<CopiedContentIndicatorHandle>
const [path, setPath] = useState('')
const [queryString, setQueryString] = useState<string>()
const [pathIsComplete, setPathIsComplete] = useState<boolean>()
@@ -41,7 +50,7 @@ export default function Customizer({
function generateBuiltBadgeUrl(): string {
const suffix = queryString ? `?${queryString}` : ''
return `${baseUrl}${path}${suffix}`
return `${baseUrl || getBaseUrlFromWindowLocation()}${path}${suffix}`
}
function renderLivePreview(): JSX.Element {

View File

@@ -91,15 +91,15 @@ export function constructPath({
if (typeof token === 'string') {
return token.trim()
} else {
const { prefix, name, modifier } = token
const { delimiter, name, optional } = token
const value = namedParams[name]
if (value) {
return `${prefix}${value.trim()}`
} else if (modifier === '?' || modifier === '*') {
return `${delimiter}${value.trim()}`
} else if (optional) {
return ''
} else {
isComplete = false
return `${prefix}:${name}`
return `${delimiter}:${name}`
}
}
})
@@ -221,15 +221,14 @@ export default function PathBuilder({
tokenIndex: number,
namedParamIndex: number
): JSX.Element {
const { prefix, modifier } = token
const optional = modifier === '?' || modifier === '*'
const { delimiter, optional } = token
const name = `${token.name}`
const exampleValue = exampleParams[name] || '(not set)'
return (
<React.Fragment key={token.name}>
{renderLiteral(prefix, tokenIndex, false)}
{renderLiteral(delimiter, tokenIndex, false)}
<PathBuilderColumn pathContainsOnlyLiterals={false} withHorizPadding>
<NamedParamLabelContainer>
<BuilderLabel htmlFor={name}>{humanizeString(name)}</BuilderLabel>

View File

@@ -1,7 +1,7 @@
import React from 'react'
import styled from 'styled-components'
import { staticBadgeUrl } from '../../../core/badge-urls/make-badge-url'
import { getBaseUrl } from '../../constants'
import { baseUrl } from '../../constants'
import { shieldsLogos, simpleIcons } from '../../lib/supported-features'
import Meta from '../meta'
import Header from '../header'
@@ -19,7 +19,6 @@ const StyledTable = styled.table`
`
function NamedLogoTable({ logoNames }: { logoNames: string[] }): JSX.Element {
const baseUrl = getBaseUrl()
return (
<StyledTable>
<thead>

View File

@@ -2,7 +2,7 @@ import React, { Fragment } from 'react'
import styled from 'styled-components'
// @ts-ignore
import { staticBadgeUrl } from '../../../core/badge-urls/make-badge-url'
import { getBaseUrl } from '../../constants'
import { baseUrl } from '../../constants'
import Meta from '../meta'
// @ts-ignore
import Header from '../header'
@@ -123,7 +123,6 @@ const examples = [
]
function StyleTable({ style }: { style: string }): JSX.Element {
const baseUrl = getBaseUrl()
return (
<StyledTable>
<thead>

View File

@@ -11,7 +11,7 @@ import {
RenderableExample,
} from '../lib/service-definitions'
import ServiceDefinitionSetHelper from '../lib/service-definitions/service-definition-set-helper'
import { getBaseUrl } from '../constants'
import { baseUrl } from '../constants'
import Meta from './meta'
import Header from './header'
import SuggestionAndSearch from './suggestion-and-search'
@@ -54,7 +54,6 @@ export default function Main({
setSelectedExampleIsSuggestion,
] = useState(false)
const searchTimeout = useRef(0)
const baseUrl = getBaseUrl()
function performSearch(query: string): void {
setSearchIsInProgress(false)

View File

@@ -378,10 +378,8 @@ export default function Usage({ baseUrl }: { baseUrl: string }): JSX.Element {
<QueryParam
documentation={
<span>
Specify what clicking on the left/right of a badge should do.
Note that this only works when integrating your badge in an
<StyledCode>&lt;object&gt;</StyledCode> HTML tag, but not an
<StyledCode>&lt;img&gt;</StyledCode> tag or a markup language.
Specify what clicking on the left/right of a badge should do
(esp. for social badge style)
</span>
}
key="link"

View File

@@ -1,33 +1 @@
const baseUrl = process.env.GATSBY_BASE_URL
export function getBaseUrl(): string {
if (baseUrl) {
return baseUrl
}
/*
This is a special case for production.
We want to be able to build the front end with no value set for
`GATSBY_BASE_URL` so that we can deploy a build to staging
and then promote the exact same build to production.
When deployed to staging, we want the frontend on
https://staging.shields.io/ to generate badges with the base
https://staging.shields.io/
When we promote to production we want https://shields.io/ and
https://www.shields.io/ to both generate badges with the base
https://img.shields.io/
*/
try {
const { protocol, hostname } = window.location
if (['shields.io', 'www.shields.io'].includes(hostname)) {
return 'https://img.shields.io'
}
return `${protocol}//${hostname}`
} catch (e) {
// server-side rendering
return ''
}
}
export const baseUrl = process.env.GATSBY_BASE_URL || ''

View File

@@ -21,11 +21,19 @@ export function removeRegexpFromPattern(pattern: string): string {
if (typeof token === 'string') {
return token
} else {
const { prefix, modifier, name, pattern } = token
const { delimiter, optional, repeat, name, pattern } = token
if (typeof name === 'number') {
return `${prefix}(${pattern})`
return `${delimiter}(${pattern})`
} else {
return `${prefix}:${name}${modifier}`
let modifier = ''
if (optional && !repeat) {
modifier = '?'
} else if (!optional && repeat) {
modifier = '+'
} else if (optional && repeat) {
modifier = '*'
}
return `${delimiter}:${name}${modifier}`
}
}
})

View File

@@ -1,6 +1,6 @@
import React from 'react'
import styled from 'styled-components'
import { getBaseUrl } from '../constants'
import { baseUrl } from '../constants'
import Meta from '../components/meta'
import Header from '../components/header'
import Footer from '../components/footer'
@@ -19,7 +19,6 @@ const SponsorContainer = styled.div`
`
export default function SponsorsPage(): JSX.Element {
const baseUrl = getBaseUrl()
return (
<MainContainer>
<GlobalStyle />
@@ -115,6 +114,9 @@ export default function SponsorsPage(): JSX.Element {
<li>
<a href="https://lgtm.com/">LGTM</a>
</li>
<li>
<a href="https://www.netlify.com/">Netlify</a>
</li>
<li>
<a href="https://uptimerobot.com/">Uptime Robot</a>
</li>

View File

@@ -1,7 +1,7 @@
import React from 'react'
import styled, { css } from 'styled-components'
import { staticBadgeUrl } from '../../core/badge-urls/make-badge-url'
import { getBaseUrl } from '../constants'
import { baseUrl } from '../constants'
import Meta from '../components/meta'
import Header from '../components/header'
import Footer from '../components/footer'
@@ -89,7 +89,6 @@ const Schema = styled.dl`
`
export default function EndpointPage(): JSX.Element {
const baseUrl = getBaseUrl()
return (
<MainContainer>
<GlobalStyle />

View File

@@ -1,7 +1,5 @@
'use strict'
const path = require('path')
module.exports = {
siteMetadata: {
title: 'Shields.io: Quality metadata badges for open source projects',
@@ -13,7 +11,7 @@ module.exports = {
{
resolve: 'gatsby-plugin-page-creator',
options: {
path: path.join(__dirname, 'frontend', 'pages'),
path: `${__dirname}/frontend/pages`,
},
},
'gatsby-plugin-react-helmet',

View File

@@ -12,7 +12,7 @@ const envFlag = require('node-env-flag')
const includeDevPages = envFlag(process.env.INCLUDE_DEV_PAGES, true)
const { categories } = yaml.load(
const { categories } = yaml.safeLoad(
fs.readFileSync('./service-definitions.yml', 'utf8')
)

15153
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -22,45 +22,45 @@
"url": "https://github.com/badges/shields"
},
"dependencies": {
"@sentry/node": "^6.2.0",
"@shields_io/camp": "^18.1.1",
"@sentry/node": "^5.27.1",
"@shields_io/camp": "^18.0.0",
"badge-maker": "file:badge-maker",
"bytes": "^3.1.0",
"camelcase": "^6.2.0",
"camelcase": "^6.1.0",
"chalk": "^4.1.0",
"check-node-version": "^4.1.0",
"check-node-version": "^4.0.3",
"cloudflare-middleware": "^1.0.4",
"config": "^3.3.3",
"cross-env": "^7.0.3",
"decamelize": "^5.0.0",
"config": "^3.3.2",
"cross-env": "^7.0.2",
"decamelize": "^3.2.0",
"dotenv": "^8.2.0",
"emojic": "^1.1.16",
"escape-string-regexp": "^4.0.0",
"fast-xml-parser": "^3.17.6",
"fast-xml-parser": "^3.17.4",
"glob": "^7.1.6",
"graphql": "^14.7.0",
"graphql-tag": "^2.11.0",
"heroku-client": "^3.1.0",
"ioredis": "4.23.0",
"joi": "17.4.0",
"ioredis": "4.17.3",
"joi": "17.2.1",
"joi-extension-semver": "5.0.0",
"js-yaml": "^4.0.0",
"jsonpath": "~1.1.0",
"js-yaml": "^3.14.0",
"jsonpath": "~1.0.2",
"lodash.countby": "^4.6.0",
"lodash.groupby": "^4.6.0",
"lodash.times": "^4.3.2",
"moment": "^2.29.1",
"node-env-flag": "^0.1.0",
"parse-link-header": "^1.0.1",
"path-to-regexp": "^6.2.0",
"pretty-bytes": "^5.5.0",
"path-to-regexp": "^5.0.0",
"pretty-bytes": "^5.4.1",
"priorityqueuejs": "^2.0.0",
"prom-client": "^13.1.0",
"query-string": "^6.14.1",
"prom-client": "^11.5.3",
"query-string": "^6.13.6",
"request": "~2.88.2",
"semver": "~7.3.4",
"simple-icons": "4.12.0",
"webextension-store-meta": "^1.0.3",
"xmldom": "~0.4.0",
"semver": "~7.3.2",
"simple-icons": "3.12.1",
"webextension-store-meta": "^1.0.2",
"xmldom": "~0.2.1",
"xpath": "~0.0.32"
},
"scripts": {
@@ -96,12 +96,13 @@
"check-types:package": "tsd badge-maker",
"check-types:frontend": "tsc --noEmit --project .",
"depcheck": "check-node-version --node \">= 12.0\"",
"fix-issue-5294": "rimraf node_modules/@types/react-native",
"postinstall": "run-s --silent depcheck fix-issue-5294",
"prebuild": "run-s --silent depcheck",
"features": "node scripts/export-supported-features-cli.js > supported-features.json",
"defs": "node scripts/export-service-definitions-cli.js > service-definitions.yml",
"build": "run-s defs features && gatsby build",
"heroku-postbuild": "run-s --silent build",
"heroku:scale": "node scripts/heroku-scale.js",
"start:server:prod": "node server",
"now-start": "npm run start:server:prod",
"start:server:e2e-on-build": "node server 8080",
@@ -114,10 +115,10 @@
"e2e": "start-server-and-test start http://localhost:3000 test:e2e",
"e2e-on-build": "cross-env CYPRESS_baseUrl=http://localhost:8080 start-server-and-test start:server:e2e-on-build http://localhost:8080 test:e2e",
"badge": "cross-env NODE_CONFIG_ENV=test TRACE_SERVICES=true node scripts/badge-cli.js",
"build-docs": "rimraf api-docs/ && jsdoc --pedantic -c ./jsdoc.json . && echo 'contributing.shields.io' > api-docs/CNAME"
"build-docs": "rimraf api-docs/ && jsdoc --pedantic -c ./jsdoc.json ."
},
"lint-staged": {
"**/*.@(js|ts|tsx)": [
"**/*.js": [
"eslint --fix",
"prettier --write"
],
@@ -140,105 +141,109 @@
]
},
"devDependencies": {
"@babel/core": "^7.12.17",
"@babel/core": "^7.12.3",
"@babel/polyfill": "^7.12.1",
"@babel/register": "7.13.0",
"@babel/register": "7.12.1",
"@mapbox/react-click-to-select": "^2.2.0",
"@types/chai": "^4.2.15",
"@types/chai": "^4.2.14",
"@types/lodash.debounce": "^4.0.6",
"@types/lodash.groupby": "^4.6.6",
"@types/mocha": "^8.2.1",
"@types/node": "^14.14.30",
"@types/mocha": "^8.0.3",
"@types/node": "^14.11.8",
"@types/react-helmet": "^6.1.0",
"@types/react-modal": "^3.12.0",
"@types/react-select": "^4.0.13",
"@types/styled-components": "5.1.7",
"@types/react-modal": "^3.10.6",
"@types/react-select": "^3.0.22",
"@types/styled-components": "5.1.3",
"@typescript-eslint/eslint-plugin": "^2.34.0",
"@typescript-eslint/parser": "^2.34.0",
"babel-plugin-inline-react-svg": "^1.1.2",
"babel-plugin-inline-react-svg": "^1.1.1",
"babel-plugin-istanbul": "^6.0.0",
"babel-preset-gatsby": "^0.5.1",
"caller": "^1.0.1",
"chai": "^4.3.0",
"chai": "^4.1.2",
"chai-as-promised": "^7.1.1",
"chai-datetime": "^1.8.0",
"chai-datetime": "^1.7.0",
"chai-string": "^1.4.0",
"cheerio": "^1.0.0-rc.3",
"child-process-promise": "^2.2.1",
"clipboard-copy": "^4.0.1",
"concurrently": "^6.0.0",
"cypress": "^6.5.0",
"danger": "^10.6.3",
"clipboard-copy": "^3.1.0",
"concurrently": "^5.3.0",
"cypress": "^5.3.0",
"danger": "^10.5.0",
"danger-plugin-no-test-shortcuts": "^2.0.0",
"deepmerge": "^4.2.2",
"eslint": "^7.20.0",
"eslint-config-prettier": "^7.2.0",
"eslint-config-standard": "^16.0.2",
"eslint-config-standard-jsx": "^10.0.0",
"eslint-config-standard-react": "^11.0.1",
"eslint": "^6.8.0",
"eslint-config-prettier": "^6.13.0",
"eslint-config-standard": "^14.1.1",
"eslint-config-standard-react": "^9.2.0",
"eslint-plugin-chai-friendly": "^0.6.0",
"eslint-plugin-cypress": "^2.11.2",
"eslint-plugin-import": "^2.22.1",
"eslint-plugin-jsdoc": "^32.2.0",
"eslint-plugin-mocha": "^8.0.0",
"eslint-plugin-jsdoc": "^30.7.3",
"eslint-plugin-mocha": "^6.3.0",
"eslint-plugin-no-extension-in-require": "^0.2.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^4.3.1",
"eslint-plugin-react": "^7.22.0",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-react": "^7.21.5",
"eslint-plugin-react-hooks": "^2.5.1",
"eslint-plugin-sort-class-members": "^1.9.0",
"fetch-ponyfill": "^7.1.0",
"form-data": "^4.0.0",
"gatsby": "2.32.8",
"eslint-plugin-sort-class-members": "^1.8.0",
"eslint-plugin-standard": "^4.0.2",
"fetch-ponyfill": "^6.1.1",
"form-data": "^3.0.0",
"fs-readfile-promise": "^3.0.1",
"gatsby": "2.24.73",
"gatsby-plugin-catch-links": "^2.3.10",
"gatsby-plugin-page-creator": "^2.10.0",
"gatsby-plugin-page-creator": "^2.3.33",
"gatsby-plugin-react-helmet": "^3.3.9",
"gatsby-plugin-remove-trailing-slashes": "^2.3.10",
"gatsby-plugin-styled-components": "^3.10.0",
"gatsby-plugin-typescript": "^2.12.0",
"got": "11.8.2",
"gatsby-plugin-styled-components": "^3.3.9",
"gatsby-plugin-typescript": "^2.4.24",
"got": "11.7.0",
"humanize-string": "^2.1.0",
"husky": "^4.3.8",
"husky": "^4.3.0",
"icedfrisby": "4.0.0",
"icedfrisby-nock": "^2.0.0",
"is-png": "^2.0.0",
"is-svg": "^4.2.1",
"js-yaml-loader": "^1.2.2",
"jsdoc": "^3.6.6",
"lint-staged": "^10.5.4",
"lint-staged": "^10.4.2",
"lodash.debounce": "^4.0.8",
"lodash.difference": "^4.5.0",
"minimist": "^1.2.5",
"mocha": "^8.3.0",
"mocha": "^7.2.0",
"mocha-env-reporter": "^4.0.0",
"mocha-junit-reporter": "^2.0.0",
"mocha-yaml-loader": "^1.0.3",
"nock": "13.0.7",
"node-mocks-http": "^1.10.1",
"nodemon": "^2.0.7",
"nock": "13.0.4",
"node-mocks-http": "^1.9.0",
"nodemon": "^2.0.6",
"npm-run-all": "^4.1.5",
"nyc": "^15.1.0",
"opn-cli": "^5.0.0",
"portfinder": "^1.0.28",
"prettier": "2.2.1",
"prettier": "2.1.2",
"react": "^16.14.0",
"react-dom": "^16.14.0",
"react-error-overlay": "^6.0.9",
"react-dom": "^16.13.1",
"react-error-overlay": "^6.0.7",
"react-helmet": "^6.1.0",
"react-modal": "^3.12.1",
"react-modal": "^3.11.2",
"react-pose": "^4.0.10",
"react-select": "^4.1.0",
"react-select": "^3.1.0",
"read-all-stdin-sync": "^1.0.5",
"redis-server": "^1.2.2",
"require-hacker": "^3.0.1",
"rimraf": "^3.0.2",
"sazerac": "^2.0.0",
"sinon": "^9.2.4",
"sinon": "^9.2.0",
"sinon-chai": "^3.5.0",
"snap-shot-it": "^7.9.6",
"start-server-and-test": "1.12.0",
"styled-components": "^5.2.1",
"snap-shot-it": "^7.9.3",
"start-server-and-test": "1.11.5",
"styled-components": "^5.2.0",
"tmp": "0.2.1",
"tsd": "^0.13.1",
"ts-mocha": "^8.0.0",
"tsd": "^0.14.0",
"typescript": "^4.1.5"
"typescript": "^4.0.3"
},
"engines": {
"node": "^12.18.3",

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="198" height="58"><rect rx="8" x="140" width="55" height="58" fill="#555" /><g stroke="#555" stroke-width="8"><path d="M135.5 54a8 8 0 0 0 8.5 -8.5"/><rect x="4" y="4" rx="8" width="190" height="50" fill="none"/></g><g fill="#555"><path d="m23.906 33.641c.953-.083 1.906-.167 2.859-.25.108 2.099 1.511 4.139 3.578 4.722 2.438.895 5.357.799 7.559-.658 1.49-1.129 1.861-3.674.324-4.925-1.557-1.322-3.685-1.504-5.576-2.057-2.343-.565-4.912-1.133-6.611-2.979-1.805-2.088-1.627-5.485.292-7.443 2.041-2.113 5.222-2.55 8.02-2.274 2.46.244 5.058 1.343 6.252 3.635.426.908 1.095 2.241.656 3.108-.888.173-1.81.148-2.715.245-.077-2.084-1.727-4.073-3.863-4.234-1.902-.317-4.02-.252-5.691.802-1.398.989-1.849 3.363-.381 4.494 1.281 1.01 2.962 1.199 4.482 1.642 2.66.627 5.602 1.118 7.596 3.158 2 2.188 1.893 5.84-.088 8.01-2.01 2.32-5.304 2.972-8.237 2.713-2.585-.147-5.319-1.024-6.916-3.184-.987-1.288-1.517-2.905-1.542-4.523"/><path d="m45.953 41c0-7.635 0-15.271 0-22.906.938 0 1.875 0 2.813 0 0 2.74 0 5.479 0 8.219 1.391-1.721 3.69-2.523 5.86-2.236 1.975.154 4.03 1.371 4.513 3.402.504 1.973.278 4.02.33 6.04 0 2.495 0 4.989 0 7.484-.938 0-1.875 0-2.813 0-.009-3.675.018-7.351-.014-11.03-.026-1.342-.627-2.835-2-3.282-2.187-.802-5.077.393-5.609 2.773-.417 1.764-.216 3.586-.264 5.381 0 2.051 0 4.102 0 6.153-.938 0-1.875 0-2.813 0"/><path d="m63.781 21.328v-3.234h2.813v3.234zm0 19.672v-16.594h2.813v16.594z"/><path d="m82.25 35.656c.969.12 1.938.24 2.906.359-.702 3.464-4.348 5.767-7.781 5.386-3.235-.066-6.43-2.328-7.06-5.598-.843-3.307-.404-7.285 2.101-9.784 3.082-3 8.699-2.618 11.235.892 1.374 1.85 1.676 4.267 1.578 6.51-4.125 0-8.25 0-12.375 0-.142 2.889 2.267 6 5.346 5.658 1.881-.162 3.613-1.566 4.045-3.423m-9.234-4.547c3.089 0 6.177 0 9.266 0 .129-2.774-2.616-5.422-5.419-4.713-2.174.427-3.912 2.474-3.846 4.713"/><path d="m88.64 41v-22.906h2.813v22.906z"/><path d="m106.59 41c0-.698 0-1.396 0-2.094-1.412 2.442-4.776 3.067-7.233 
1.949-2.378-1.02-3.971-3.403-4.345-5.924-.507-2.761-.123-5.768 1.389-8.167 1.863-2.705 5.968-3.642 8.711-1.741.422.228 1.028 1.144 1.294 1.018-.006-2.649-.0001-5.298-.003-7.948.932 0 1.865 0 2.797 0 0 7.635 0 15.271 0 22.906-.87 0-1.74 0-2.61 0m-8.89-8.281c-.075 2.246.637 4.861 2.79 5.952 2 1.023 4.682-.047 5.488-2.134.897-1.996.746-4.278.388-6.382-.425-1.95-2.046-3.804-4.158-3.805-1.903-.065-3.633 1.363-4.099 3.181-.327 1.028-.394 2.116-.408 3.188"/><path d="m112.52 36.05c.927-.146 1.854-.292 2.781-.438.126 1.69 1.513 3.244 3.239 3.365 1.398.212 3.01.12 4.12-.851.807-.749 1.1-2.243.159-3.01-.908-.723-2.115-.812-3.182-1.172-1.797-.485-3.713-.848-5.243-1.97-1.83-1.551-1.868-4.679-.099-6.293 1.577-1.507 3.918-1.784 6-1.594 1.685.176 3.54.749 4.535 2.217.464.715.708 1.549.844 2.384-.917.125-1.833.25-2.75.375-.121-1.569-1.653-2.762-3.19-2.695-1.246-.082-2.702.012-3.608.982-.624.724-.543 1.971.314 2.481.998.706 2.269.757 3.389 1.173 1.754.512 3.647.848 5.141 1.965 1.686 1.476 1.728 4.244.396 5.966-1.298 1.788-3.597 2.417-5.709 2.448-1.466-.007-2.984-.214-4.299-.893-1.599-.909-2.585-2.655-2.84-4.444"/></g><g fill="#fff"><path d="m151.11 41v-22.906h3.03v22.906z"/><path d="m158.55 29.844c-.277-4.765 2.335-9.977 7.05-11.551 4.902-1.757 11.226.197 13.477 5.098 2.266 4.706 1.89 10.92-1.767 14.833-4.554 4.948-13.81 3.976-17.08-1.954-1.111-1.946-1.679-4.188-1.68-6.426m3.125.047c-.377 4.273 2.892 8.844 7.375 8.951 3.791.221 7.557-2.653 7.997-6.497.794-3.731.139-8.292-3.107-10.696-3.788-2.814-10.05-1.104-11.591 3.444-.54 1.539-.642 3.181-.675 4.798"/></g></svg>

Before

Width:  |  Height:  |  Size: 3.5 KiB

View File

@@ -9,4 +9,4 @@ const definitions = collectDefinitions()
// https://github.com/nodeca/js-yaml/issues/356#issuecomment-312430599
const cleaned = JSON.parse(JSON.stringify(definitions))
process.stdout.write(yaml.dump(cleaned, { flowLevel: 5 }))
process.stdout.write(yaml.safeDump(cleaned, { flowLevel: 5 }))

View File

@@ -1,38 +0,0 @@
'use strict'
if (process.argv.length < 3 || !/^\d+$/.test(process.argv[2])) {
console.log('Usage: npm run heroku:scale [num-dynos]')
process.exit(0)
}
if (!('HEROKU_API_TOKEN' in process.env)) {
throw new Error("'HEROKU_API_TOKEN' env var must be set")
}
if (!('HEROKU_APP_ID' in process.env)) {
throw new Error("'HEROKU_APP_ID' env var must be set")
}
const Heroku = require('heroku-client')
const HEROKU_API_TOKEN = process.env.HEROKU_API_TOKEN
const HEROKU_APP_ID = process.env.HEROKU_APP_ID
const numDynos = parseInt(process.argv[2])
const heroku = new Heroku({ token: HEROKU_API_TOKEN })
;(async () => {
const currentConfig = await heroku.get(`/apps/${HEROKU_APP_ID}/formation/web`)
if (currentConfig.quantity === numDynos) {
console.log(
`Already running the desired number of dynos (${numDynos}). No changes necessary.`
)
process.exit(0)
}
console.log(`Scaling to ${numDynos} dynos...`)
const newConfig = await heroku.patch(`/apps/${HEROKU_APP_ID}/formation/web`, {
body: {
quantity: numDynos,
},
})
console.log(`..done!`)
console.log(newConfig)
})()

View File

@@ -0,0 +1,50 @@
'use strict'
const { promises: fs } = require('fs')
const Redis = require('ioredis')
const key = 'githubUserTokens'
async function loadTokens() {
const contents = await fs.readFile('all_tokens_uniq.json', 'utf8')
const tokens = JSON.parse(contents)
console.log(`${tokens.length} tokens loaded`)
return tokens
}
function createClient() {
const redis = new Redis(process.env.REDIS_URL, {
tls: { servername: new URL(process.env.REDIS_URL).hostname },
})
redis.on('error', err => {
console.error(err)
})
return redis
}
async function load() {
const redis = createClient()
const tokens = await loadTokens()
await redis.sadd(key, tokens)
await redis.quit()
}
async function list() {
const redis = createClient()
const tokens = await redis.smembers(key)
console.log(`${tokens.length} tokens loaded`)
await redis.quit()
}
;(async () => {
try {
// await load()
await list()
} catch (e) {
console.error(e)
process.exit(1)
}
})()
// Appease the linter.
module.exports = { load, list }

View File

@@ -4,6 +4,8 @@
const fs = require('fs')
const path = require('path')
require('dotenv').config()
// Set up Sentry reporting as early in the process as possible.
const config = require('config').util.toObject()
const Sentry = require('@sentry/node')
@@ -35,13 +37,6 @@ if (process.argv[3]) {
console.log('Configuration:')
console.dir(config.public, { depth: null })
if (fs.existsSync('.env')) {
console.error(
'Legacy .env file found. It should be deleted and replaced with environment variables or config/local.yml'
)
process.exit(1)
}
const legacySecretsPath = path.join(__dirname, 'private', 'secret.json')
if (fs.existsSync(legacySecretsPath)) {
console.error(

View File

@@ -18,9 +18,7 @@ class AnsibleGalaxyContent extends BaseJsonService {
}
}
module.exports = class AnsibleGalaxyContentQualityScore extends (
AnsibleGalaxyContent
) {
module.exports = class AnsibleGalaxyContentQualityScore extends AnsibleGalaxyContent {
static category = 'analysis'
static route = { base: 'ansible/quality', pattern: ':projectId' }

View File

@@ -100,8 +100,10 @@ module.exports = class AppVeyorTests extends AppVeyorBase {
},
]
static defaultBadgeData = {
label: 'tests',
static get defaultBadgeData() {
return {
label: 'tests',
}
}
static render({

View File

@@ -168,22 +168,30 @@ class AurMaintainer extends BaseAurService {
}
class AurLastModified extends BaseAurService {
static category = 'activity'
static route = {
base: 'aur/last-modified',
pattern: ':packageName',
static get category() {
return 'activity'
}
static examples = [
{
title: 'AUR last modified',
namedParams: { packageName: 'google-chrome' },
staticPreview: this.render({ date: new Date().getTime() }),
},
]
static get route() {
return {
base: 'aur/last-modified',
pattern: ':packageName',
}
}
static defaultBadgeData = { label: 'last modified' }
static get examples() {
return [
{
title: 'AUR last modified',
namedParams: { packageName: 'google-chrome' },
staticPreview: this.render({ date: new Date().getTime() }),
},
]
}
static get defaultBadgeData() {
return { label: 'last modified' }
}
static render({ date }) {
const color = ageColor(date)

View File

@@ -15,10 +15,12 @@ const latestBuildSchema = Joi.object({
}).required()
module.exports = class AzureDevOpsBase extends BaseJsonService {
static auth = {
passKey: 'azure_devops_token',
authorizedOrigins: ['https://dev.azure.com'],
defaultToEmptyStringForUser: true,
static get auth() {
return {
passKey: 'azure_devops_token',
authorizedOrigins: ['https://dev.azure.com'],
defaultToEmptyStringForUser: true,
}
}
async fetch({ url, options, schema, errorMessages }) {

View File

@@ -1,13 +1,74 @@
'use strict'
const { deprecatedService } = require('..')
const Joi = require('joi')
const { isBuildStatus, renderBuildStatusBadge } = require('../build-status')
const { BaseJsonService } = require('..')
module.exports = deprecatedService({
category: 'build',
route: {
const schema = Joi.object({
status: Joi.alternatives().try(isBuildStatus, Joi.equal('unknown')),
}).required()
const statusMap = {
unstable: 'yellow',
running: 'blue',
}
module.exports = class ContinuousPhp extends BaseJsonService {
static category = 'build'
static route = {
base: 'continuousphp',
pattern: ':various+',
},
label: 'continuousphp',
dateAdded: new Date('2020-12-12'),
})
pattern: ':provider/:user/:repo/:branch*',
}
static examples = [
{
title: 'continuousphp',
pattern: ':provider/:user/:repo',
namedParams: {
provider: 'git-hub',
user: 'doctrine',
repo: 'dbal',
},
staticPreview: renderBuildStatusBadge({ status: 'passing' }),
},
{
title: 'continuousphp',
pattern: ':provider/:user/:repo/:branch',
namedParams: {
provider: 'git-hub',
user: 'doctrine',
repo: 'dbal',
branch: 'master',
},
staticPreview: renderBuildStatusBadge({ status: 'passing' }),
},
]
static defaultBadgeData = { label: 'continuousphp' }
static render({ status }) {
const badge = renderBuildStatusBadge({ label: 'build', status })
const customColor = statusMap[status]
if (customColor) {
badge.color = customColor
}
return badge
}
async fetch({ provider, user, repo, branch }) {
const url = `https://status.continuousphp.com/${provider}/${user}/${repo}/status-info`
return this._requestJson({
schema,
url,
options: { qs: { branch } },
errorMessages: {
404: 'project not found',
},
})
}
async handle({ provider, user, repo, branch }) {
const json = await this.fetch({ provider, user, repo, branch })
return this.constructor.render({ status: json.status })
}
}

View File

@@ -0,0 +1,19 @@
'use strict'
const { test, given } = require('sazerac')
const ContinuousPhp = require('./continuousphp.service')
describe('ContinuousPhp', function () {
test(ContinuousPhp.render, () => {
given({ status: 'unstable' }).expect({
label: 'build',
message: 'unstable',
color: 'yellow',
})
given({ status: 'running' }).expect({
label: 'build',
message: 'running',
color: 'blue',
})
})
})

View File

@@ -1,22 +1,23 @@
'use strict'
const { ServiceTester } = require('../tester')
const Joi = require('joi')
const { isBuildStatus } = require('../build-status')
const t = (module.exports = require('../tester').createServiceTester())
const t = (module.exports = new ServiceTester({
id: 'continuousphp',
title: 'Continuousphp',
}))
t.create('no longer available (previously build status on default branch)')
t.create('build status on default branch')
.get('/git-hub/doctrine/dbal.json')
.expectBadge({
label: 'continuousphp',
message: 'no longer available',
label: 'build',
message: Joi.alternatives().try(isBuildStatus, Joi.equal('unknown')),
})
t.create('no longer available (previously build status on named branch)')
t.create('build status on named branch')
.get('/git-hub/doctrine/dbal/develop.json')
.expectBadge({
label: 'continuousphp',
message: 'no longer available',
label: 'build',
message: Joi.alternatives().try(isBuildStatus, Joi.equal('unknown')),
})
t.create('unknown repo')
.get('/git-hub/this-repo/does-not-exist.json')
.expectBadge({ label: 'continuousphp', message: 'project not found' })

View File

@@ -1,17 +0,0 @@
'use strict'
const { redirector } = require('..')
module.exports = [
redirector({
name: 'CoverallsGitHubRedirect',
category: 'coverage',
route: {
base: 'coveralls',
pattern: ':user((?!github|bitbucket).*)/:repo/:branch*',
},
transformPath: ({ user, repo, branch }) =>
`/coveralls/github/${user}/${repo}${branch ? `/${branch}` : ''}`,
dateAdded: new Date('2021-02-23'),
}),
]

View File

@@ -1,13 +0,0 @@
'use strict'
const { ServiceTester } = require('../tester')
const t = (module.exports = new ServiceTester({
id: 'CoverallsGitHubRedirect',
title: 'Coveralls GitHub Redirector',
pathPrefix: '/coveralls',
}))
t.create('Coveralls VCS type missing')
.get('/lemurheavy/coveralls-ruby.svg')
.expectRedirect('/coveralls/github/lemurheavy/coveralls-ruby.svg')

View File

@@ -12,25 +12,42 @@ module.exports = class Coveralls extends BaseJsonService {
static category = 'coverage'
static route = {
base: 'coveralls',
pattern: ':vcsType(github|bitbucket)/:user/:repo/:branch*',
pattern: ':vcsType(github|bitbucket)?/:user/:repo/:branch*',
}
static examples = [
{
title: 'Coveralls',
title: 'Coveralls github',
pattern: ':vcsType/:user/:repo',
namedParams: { vcsType: 'github', user: 'jekyll', repo: 'jekyll' },
pattern: ':vcsType(github|bitbucket)/:user/:repo',
staticPreview: this.render({ coverage: 86 }),
},
{
title: 'Coveralls branch',
title: 'Coveralls github branch',
pattern: ':vcsType/:user/:repo/:branch',
namedParams: {
vcsType: 'github',
user: 'lemurheavy',
repo: 'coveralls-ruby',
branch: 'master',
},
staticPreview: this.render({ coverage: 91.81 }),
},
{
title: 'Coveralls bitbucket',
pattern: ':vcsType/:user/:repo',
namedParams: { vcsType: 'bitbucket', user: 'pyKLIP', repo: 'pyklip' },
staticPreview: this.render({ coverage: 86 }),
},
{
title: 'Coveralls bitbucket branch',
pattern: ':vcsType/:user/:repo/:branch',
namedParams: {
vcsType: 'bitbucket',
user: 'pyKLIP',
repo: 'pyklip',
branch: 'master',
},
pattern: ':vcsType(github|bitbucket)/:user/:repo/:branch',
staticPreview: this.render({ coverage: 96 }),
},
]

View File

@@ -15,6 +15,10 @@ t.create('github branch coverage')
.get('/github/lemurheavy/coveralls-ruby/master.json')
.expectBadge({ label: 'coverage', message: isIntegerPercentage })
t.create('github coverage for legacy link')
.get('/jekyll/jekyll.json')
.expectBadge({ label: 'coverage', message: isIntegerPercentage })
t.create('bitbucket coverage')
.get('/bitbucket/pyKLIP/pyklip.json')
.expectBadge({ label: 'coverage', message: isIntegerPercentage })

View File

@@ -1,11 +1,11 @@
'use strict'
const { BaseService } = require('..')
const { NonMemoryCachingBaseService } = require('..')
const serverStartTime = new Date(new Date().toGMTString())
let bitFlip = false
module.exports = class Debug extends BaseService {
module.exports = class Debug extends NonMemoryCachingBaseService {
static category = 'debug'
static route = { base: 'debug', pattern: ':variant(time|starttime|flip)' }

View File

@@ -1,5 +1,6 @@
'use strict'
const url = require('url')
const Joi = require('joi')
const { BaseJsonService } = require('..')
@@ -45,11 +46,19 @@ module.exports = class DependabotSemverCompatibility extends BaseJsonService {
})
}
_getLink({ packageManager, dependencyName }) {
const qs = new url.URLSearchParams(
this._getQuery({ packageManager, dependencyName })
)
return `https://dependabot.com/compatibility-score.html?${qs.toString()}`
}
async handle({ packageManager, dependencyName }) {
const json = await this.fetch({ packageManager, dependencyName })
return {
color: json.colour,
message: json.status,
link: this._getLink({ packageManager, dependencyName }),
}
}
}

View File

@@ -3,10 +3,15 @@
const { isIntegerPercentage } = require('../test-validators')
const t = (module.exports = require('../tester').createServiceTester())
t.create('semver stability (valid)').get('/bundler/puma.json').expectBadge({
label: 'semver stability',
message: isIntegerPercentage,
})
t.create('semver stability (valid)')
.get('/bundler/puma.json')
.expectBadge({
label: 'semver stability',
message: isIntegerPercentage,
link: [
'https://dependabot.com/compatibility-score.html?package-manager=bundler&dependency-name=puma&version-scheme=semver',
],
})
t.create('semver stability (invalid error)')
.get('/invalid-manager/puma.json')

View File

@@ -2935,111 +2935,10 @@ const versionDataWithTag = [
{ name: '2.6', images: [] },
]
const versionDataWithVaryingArchitectures = [
{
name: '3.9.5',
images: [
{
digest:
'sha256:fa5361fbf636d3ac50cb529dab20d810eb466de2079f5710cef4cbada29cf499',
architecture: 'ppc64le',
},
{
digest:
'sha256:97e9e9a15ef94526018e2fabfdfff14781e58b87f989d2e70543f296dcad26c6',
architecture: 's390x',
},
{
digest:
'sha256:cae6522b6a351615e547ae9222c9a05d172bc5c3240eec03072d4e1d0429a17a',
architecture: 'arm64',
},
],
},
{
name: '3.9',
images: [
{
digest:
'sha256:c7b3e8392e08c971e98627e2bddd10c7fa9d2eae797a16bc94de9709bb9300d0',
architecture: '386',
},
{
digest:
'sha256:5292cebaf695db860087c5582d340a406613891b2819092747b0388da47936c8',
architecture: 'arm',
},
],
},
{
name: '3.8.5',
images: [
{
digest:
'sha256:dabea2944dcc2b86482b4f0b0fb62da80e0673e900c46c0e03b45919881a5d84',
architecture: 'arm',
},
{
digest:
'sha256:514ec80ffbe1a2ab1d9a3d5e6082296296a1d8b6870246edf897228e5df2367d',
architecture: 's390x',
},
{
digest:
'sha256:ab3fe83c0696e3f565c9b4a734ec309ae9bd0d74c192de4590fd6dc2ef717815',
architecture: 'amd64',
},
],
},
{
name: '3.8',
images: [
{
digest:
'sha256:cf35b4fa14e23492df67af08ced54a15e68ad00cac545b437b1994340f20648c',
architecture: '386',
},
{
digest:
'sha256:514ec80ffbe1a2ab1d9a3d5e6082296296a1d8b6870246edf897228e5df2367d',
architecture: 's390x',
},
{
digest:
'sha256:402d21757a03a114d273bbe372fa4b9eca567e8b6c332fa7ebf982b902207242',
architecture: 'ppc64le',
},
],
},
{
name: '3.10.4',
images: [
{
digest:
'sha256:2632d6288d34d7175021683f6e363fa7c0fa8866a565eb285e36e3b856545e82',
architecture: 'arm',
},
{
digest:
'sha256:9afbfccb806687f6979661622f0c04dc534769e742465b107f84a830cbb8e77a',
architecture: 'arm',
},
{
digest:
'sha256:33158d51a7a549207e508a42bf46493f23e1e99fbc011eb3f3742e8b349a2be9',
architecture: 'ppc64le',
},
],
},
{ name: '2.7', images: [] },
{ name: '2.6', images: [] },
]
module.exports = {
sizeDataNoTagSemVerSort,
versionDataNoTagDateSort,
versionPagedDataNoTagDateSort,
versionDataNoTagSemVerSort,
versionDataWithTag,
versionDataWithVaryingArchitectures,
}

View File

@@ -28,23 +28,6 @@ const buildSchema = Joi.object({
const queryParamSchema = Joi.object({
sort: Joi.string().valid('date', 'semver').default('date'),
arch: Joi.string()
// Valid architecture values: https://golang.org/doc/install/source#environment (GOARCH)
.valid(
'amd64',
'arm',
'arm64',
's390x',
'386',
'ppc64',
'ppc64le',
'wasm',
'mips',
'mipsle',
'mips64',
'mips64le'
)
.default('amd64'),
}).required()
module.exports = class DockerVersion extends BaseJsonService {
@@ -55,7 +38,7 @@ module.exports = class DockerVersion extends BaseJsonService {
title: 'Docker Image Version (latest by date)',
pattern: ':user/:repo',
namedParams: { user: '_', repo: 'alpine' },
queryParams: { sort: 'date', arch: 'amd64' },
queryParams: { sort: 'date' },
staticPreview: this.render({ version: '3.9.5' }),
},
{
@@ -90,7 +73,7 @@ module.exports = class DockerVersion extends BaseJsonService {
})
}
transform({ tag, sort, data, pagedData, arch = 'amd64' }) {
transform({ tag, sort, data, pagedData }) {
let version
if (!tag && sort === 'date') {
@@ -98,7 +81,9 @@ module.exports = class DockerVersion extends BaseJsonService {
if (version !== 'latest') {
return { version }
}
const imageTag = data.results[0].images.find(i => i.architecture === arch) // Digest is the unique field that we utilise to match images
const imageTag = data.results[0].images.find(
i => i.architecture === 'amd64'
) // Digest is the unique field that we utilise to match images
if (!imageTag) {
throw new InvalidResponse({
prettyMessage: 'digest not found for latest tag',
@@ -117,7 +102,7 @@ module.exports = class DockerVersion extends BaseJsonService {
if (Object.keys(version.images).length === 0) {
return { version: version.name }
}
const image = version.images.find(i => i.architecture === arch)
const image = version.images.find(i => i.architecture === 'amd64')
if (!image) {
throw new InvalidResponse({
prettyMessage: 'digest not found for given tag',
@@ -128,7 +113,7 @@ module.exports = class DockerVersion extends BaseJsonService {
}
}
async handle({ user, repo, tag }, { sort, arch }) {
async handle({ user, repo, tag }, { sort }) {
let data, pagedData
if (!tag && sort === 'date') {
@@ -151,13 +136,7 @@ module.exports = class DockerVersion extends BaseJsonService {
})
}
const { version } = await this.transform({
tag,
sort,
data,
pagedData,
arch,
})
const { version } = await this.transform({ tag, sort, data, pagedData })
return this.constructor.render({ version })
}
}

View File

@@ -9,7 +9,6 @@ const {
versionPagedDataNoTagDateSort,
versionDataNoTagSemVerSort,
versionDataWithTag,
versionDataWithVaryingArchitectures,
} = require('./docker-fixtures')
describe('DockerVersion', function () {
@@ -49,24 +48,6 @@ describe('DockerVersion', function () {
}).expect({
version: '3.10.4',
})
// tag & custom architecture
given({
tag: '3.9',
data: versionDataWithVaryingArchitectures,
arch: 'arm',
}).expect({
version: '3.9',
})
// sort & custom architecture
given({
data: versionDataWithVaryingArchitectures,
sort: 'semver',
arch: 'ppc64le',
}).expect({
version: '3.10.4',
})
})
it('throws InvalidResponse error with latest tag and no amd64 architecture digests', function () {
@@ -125,52 +106,4 @@ describe('DockerVersion', function () {
.to.throw(InvalidResponse)
.with.property('prettyMessage', 'digest not found for given tag')
})
it('throws InvalidResponse error with custom tag and no matching architecture digests', function () {
expect(() => {
DockerVersion.prototype.transform({
tag: '3.10',
arch: '386',
data: [
{
name: '3.9',
images: [
{
digest:
'sha256:ab3fe83c0696e3f565c9b4a734ec309ae9bd0d74c192de4590fd6dc2ef717815',
architecture: 'amd64',
},
{
digest:
'sha256:c7b3e8392e08c971e98627e2bddd10c7fa9d2eae797a16bc94de9709bb9300d0',
architecture: '386',
},
{
digest:
'sha256:5292cebaf695db860087c5582d340a406613891b2819092747b0388da47936c8',
architecture: 'arm',
},
],
},
{
name: '3.10',
images: [
{
architecture: 'arm',
digest:
'sha256:c5ea49127cd44d0f50eafda229a056bb83b6e691883c56fd863d42675fae3909',
},
{
architecture: 'arm64',
digest:
'sha256:597bd5c319cc09d6bb295b4ef23cac50ec7c373fff5fe923cfd246ec09967b31',
},
],
},
],
})
})
.to.throw(InvalidResponse)
.with.property('prettyMessage', 'digest not found for given tag')
})
})

View File

@@ -1,62 +0,0 @@
'use strict'
const Joi = require('joi')
const { BaseJsonService } = require('..')
const schema = Joi.array()
.items(
Joi.object({
build_status: Joi.boolean().required(),
})
)
.min(1)
.required()
module.exports = class DocsRs extends BaseJsonService {
static category = 'build'
static route = { base: 'docsrs', pattern: ':crate/:version?' }
static examples = [
{
title: 'docs.rs',
namedParams: { crate: 'regex', version: 'latest' },
staticPreview: this.render({ version: 'latest', buildStatus: true }),
keywords: ['rust'],
},
]
static defaultBadgeData = { label: 'docs' }
static render({ buildStatus, version }) {
let label = `docs@${version}`
if (version === 'latest') {
label = 'docs'
}
if (buildStatus) {
return {
label,
message: 'passing',
color: 'success',
}
} else {
return {
label,
message: 'failing',
color: 'critical',
}
}
}
async fetch({ crate, version }) {
return await this._requestJson({
schema,
url: `https://docs.rs/crate/${crate}/${version}/builds.json`,
})
}
async handle({ crate, version = 'latest' }) {
const { build_status: buildStatus } = (
await this.fetch({ crate, version })
).pop()
return this.constructor.render({ version, buildStatus })
}
}

View File

@@ -1,19 +0,0 @@
'use strict'
const Joi = require('joi')
const t = (module.exports = require('../tester').createServiceTester())
t.create('Passing docs')
.get('/tokio/0.3.0.json')
.expectBadge({ label: 'docs@0.3.0', message: 'passing' })
t.create('Failing docs')
.get('/tensorflow/0.16.1.json')
.expectBadge({ label: 'docs@0.16.1', message: 'failing' })
t.create('Getting latest version works')
.get('/rand/latest.json')
.expectBadge({
label: 'docs',
messsage: Joi.allow('passing', 'failing'),
})

View File

@@ -12,9 +12,7 @@ const favoritesResponseSchema = Joi.object({
}),
}).required()
module.exports = class EclipseMarketplaceFavorites extends (
EclipseMarketplaceBase
) {
module.exports = class EclipseMarketplaceFavorites extends EclipseMarketplaceBase {
static category = 'other'
static route = this.buildRoute('eclipse-marketplace/favorites')
static examples = [

View File

@@ -11,9 +11,7 @@ const licenseResponseSchema = Joi.object({
}),
}).required()
module.exports = class EclipseMarketplaceLicense extends (
EclipseMarketplaceBase
) {
module.exports = class EclipseMarketplaceLicense extends EclipseMarketplaceBase {
static category = 'license'
static route = this.buildRoute('eclipse-marketplace/l')
static examples = [

View File

@@ -12,9 +12,7 @@ const versionResponseSchema = Joi.object({
}),
}).required()
module.exports = class EclipseMarketplaceVersion extends (
EclipseMarketplaceBase
) {
module.exports = class EclipseMarketplaceVersion extends EclipseMarketplaceBase {
static category = 'version'
static route = this.buildRoute('eclipse-marketplace/v')
static examples = [

View File

@@ -66,7 +66,6 @@ async function fetchEndpointData(
schema: anySchema,
url,
errorMessages,
options: { gzip: true },
})
return validateEndpointData(json, {
prettyErrorMessage: validationPrettyErrorMessage,

View File

@@ -55,14 +55,7 @@ module.exports = class Endpoint extends BaseJsonService {
}
async handle(namedParams, { url }) {
let protocol, hostname
try {
const parsedUrl = new URL(url)
protocol = parsedUrl.protocol
hostname = parsedUrl.hostname
} catch (e) {
throw new InvalidParameter({ prettyMessage: 'invalid url' })
}
const { protocol, hostname } = new URL(url)
if (protocol !== 'https:') {
throw new InvalidParameter({ prettyMessage: 'please use https' })
}

View File

@@ -1,6 +1,5 @@
'use strict'
const zlib = require('zlib')
const { expect } = require('chai')
const { getShieldsIcon } = require('../../lib/logos')
const t = (module.exports = require('../tester').createServiceTester())
@@ -120,7 +119,7 @@ t.create('logoWidth')
logoWidth: 30,
})
t.create('Invalid schema')
t.create('Invalid schema)')
.get('.json?url=https://example.com/badge')
.intercept(nock =>
nock('https://example.com/').get('/badge').reply(200, {
@@ -132,7 +131,7 @@ t.create('Invalid schema')
message: 'invalid properties: schemaVersion, label, message',
})
t.create('Invalid schema')
t.create('Invalid schema)')
.get('.json?url=https://example.com/badge')
.intercept(nock =>
nock('https://example.com/').get('/badge').reply(200, {
@@ -255,30 +254,7 @@ t.create('Blocked domain')
.expectBadge({ label: 'custom badge', message: 'domain is blocked' })
// https://github.com/badges/shields/issues/3780
t.create('Invalid url (1)').get('.json?url=https:/').expectBadge({
t.create('Invalid url').get('.json?url=https:/').expectBadge({
label: 'custom badge',
message: 'invalid query parameter: url',
})
t.create('Invalid url (2)')
.get('.json?url=https%3A//shields.io%foo')
.expectBadge({
label: 'custom badge',
message: 'invalid url',
})
// https://github.com/badges/shields/issues/5868
t.create('gzipped endpoint')
.get('.json?url=https://example.com/badge')
.intercept(nock =>
nock('https://example.com/')
.get('/badge')
.reply(
200,
zlib.gzipSync(
JSON.stringify({ schemaVersion: 1, label: '', message: 'yo' })
),
{ 'Content-Encoding': 'gzip' }
)
)
.expectBadge({ label: '', message: 'yo' })

View File

@@ -1,110 +0,0 @@
'use strict'
const Joi = require('joi')
const { BaseJsonService, NotFound } = require('..')
const {
renderVersionBadge,
searchServiceUrl,
stripBuildMetadata,
selectVersion,
} = require('../nuget/nuget-helpers')
const schema = Joi.object({
items: Joi.array()
.items(
Joi.object({
items: Joi.array().items(
Joi.object({
catalogEntry: Joi.object({
version: Joi.string().required(),
}).required(),
})
),
}).required()
)
.default([]),
}).required()
class FeedzVersionService extends BaseJsonService {
static category = 'version'
static route = {
base: 'feedz',
pattern: ':which(v|vpre)/:organization/:repository/:packageName',
}
static examples = [
{
title: 'Feedz',
pattern: 'v/:organization/:repository/:packageName',
namedParams: {
organization: 'shieldstests',
repository: 'mongodb',
packageName: 'MongoDB.Driver.Core',
},
staticPreview: this.render({ version: '2.10.4' }),
},
{
title: 'Feedz (with prereleases)',
pattern: 'vpre/:organization/:repository/:packageName',
namedParams: {
organization: 'shieldstests',
repository: 'mongodb',
packageName: 'MongoDB.Driver.Core',
},
staticPreview: this.render({ version: '2.11.0-beta2' }),
},
]
static defaultBadgeData = {
label: 'feedz',
}
static render(props) {
return renderVersionBadge(props)
}
apiUrl({ organization, repository }) {
return `https://f.feedz.io/${organization}/${repository}/nuget`
}
async fetch({ baseUrl, packageName }) {
const registrationsBaseUrl = await searchServiceUrl(
baseUrl,
'RegistrationsBaseUrl'
)
return await this._requestJson({
schema,
url: `${registrationsBaseUrl}${packageName}/index.json`,
errorMessages: {
404: 'repository or package not found',
},
})
}
transform({ json, includePrereleases }) {
const versions = json.items.flatMap(tl =>
tl.items.map(i => stripBuildMetadata(i.catalogEntry.version))
)
if (versions.length >= 1) {
return selectVersion(versions, includePrereleases)
} else {
throw new NotFound({ prettyMessage: 'package not found' })
}
}
async handle({ which, organization, repository, packageName }) {
const includePrereleases = which === 'vpre'
const baseUrl = this.apiUrl({ organization, repository })
const json = await this.fetch({ baseUrl, packageName })
const version = this.transform({ json, includePrereleases })
return this.constructor.render({
version,
feed: FeedzVersionService.defaultBadgeData.label,
})
}
}
module.exports = {
FeedzVersionService,
}

View File

@@ -1,98 +0,0 @@
'use strict'
const { test, given } = require('sazerac')
const { FeedzVersionService } = require('./feedz.service')
function json(versions) {
return {
items: versions.map(topLevel => ({
items: topLevel.map(v => ({
catalogEntry: {
version: v,
},
})),
})),
}
}
function noItemsJson() {
return {
items: [],
}
}
describe('Feedz service', function () {
test(FeedzVersionService.prototype.apiUrl, () => {
given({ organization: 'shieldstests', repository: 'public' }).expect(
'https://f.feedz.io/shieldstests/public/nuget'
)
})
test(FeedzVersionService.prototype.transform, () => {
given({ json: json([['1.0.0']]), includePrereleases: false }).expect(
'1.0.0'
)
given({
json: json([['1.0.0', '1.0.1']]),
includePrereleases: false,
}).expect('1.0.1')
given({
json: json([['1.0.0', '1.0.1-beta1']]),
includePrereleases: false,
}).expect('1.0.0')
given({
json: json([['1.0.0', '1.0.1-beta1']]),
includePrereleases: true,
}).expect('1.0.1-beta1')
given({
json: json([['1.0.0'], ['1.0.1']]),
includePrereleases: false,
}).expect('1.0.1')
given({ json: json([['1.0.1'], []]), includePrereleases: false }).expect(
'1.0.1'
)
given({ json: json([[], ['1.0.1']]), includePrereleases: false }).expect(
'1.0.1'
)
given({
json: json([['1.0.0'], ['1.0.1-beta1']]),
includePrereleases: false,
}).expect('1.0.0')
given({
json: json([['1.0.0'], ['1.0.1-beta1']]),
includePrereleases: true,
}).expect('1.0.1-beta1')
given({
json: json([['1.0.0+1', '1.0.1-beta1+1']]),
includePrereleases: false,
}).expect('1.0.0')
given({
json: json([['1.0.0+1', '1.0.1-beta1+1']]),
includePrereleases: true,
}).expect('1.0.1-beta1')
given({ json: json([]), includePrereleases: false }).expectError(
'Not Found: package not found'
)
given({ json: json([[]]), includePrereleases: false }).expectError(
'Not Found: package not found'
)
given({ json: json([[], []]), includePrereleases: false }).expectError(
'Not Found: package not found'
)
given({ json: json([]), includePrereleases: true }).expectError(
'Not Found: package not found'
)
given({ json: json([[]]), includePrereleases: true }).expectError(
'Not Found: package not found'
)
given({ json: noItemsJson(), includePrereleases: false }).expectError(
'Not Found: package not found'
)
given({ json: noItemsJson(), includePrereleases: true }).expectError(
'Not Found: package not found'
)
})
})

View File

@@ -1,106 +0,0 @@
'use strict'

const { ServiceTester } = require('../tester')

// Live-service tester for the Feedz version badges (/feedz/v and /feedz/vpre).
const t = (module.exports = new ServiceTester({
  id: 'feedz',
  title: 'Feedz',
  pathPrefix: '',
}))

// The `shieldstests/public` repo is specifically made for these tests. It contains following packages:
// - Shields.NoV1: 0.1.0
// - Shields.TestPackage: 0.0.1, 0.1.0-pre, 1.0.0
// - Shields.TestPreOnly: 0.1.0-pre
// - Shields.MultiPage: 0.1.0-0.1.100 plus 1.0.0 but the response has multiple top-level `items`
// The source code of these packages is here: https://github.com/jakubfijalkowski/shields-test-packages

// version (stable releases preferred)

t.create('version (valid)')
  .get('/feedz/v/shieldstests/public/Shields.TestPackage.json')
  .expectBadge({
    label: 'feedz',
    message: 'v1.0.0',
    color: 'blue',
  })

// Only a prerelease exists, so the badge falls back to it (yellow).
t.create('version (yellow badge)')
  .get('/feedz/v/shieldstests/public/Shields.TestPreOnly.json')
  .expectBadge({
    label: 'feedz',
    message: 'v0.1.0-pre',
    color: 'yellow',
  })

// Latest stable is below 1.0.0 (orange).
t.create('version (orange badge)')
  .get('/feedz/v/shieldstests/public/Shields.NoV1.json')
  .expectBadge({
    label: 'feedz',
    message: 'v0.1.0',
    color: 'orange',
  })

// Versions spread over multiple top-level `items` pages must all be considered.
t.create('multi-page')
  .get('/feedz/v/shieldstests/public/Shields.MultiPage.json')
  .expectBadge({
    label: 'feedz',
    message: 'v1.0.0',
    color: 'blue',
  })

t.create('repository (not found)')
  .get('/feedz/v/foo/bar/not-a-real-package.json')
  .expectBadge({ label: 'feedz', message: 'repository or package not found' })

t.create('version (not found)')
  .get('/feedz/v/shieldstests/public/not-a-real-package.json')
  .expectBadge({ label: 'feedz', message: 'repository or package not found' })

t.create('non-existing repository')
  .get('/feedz/v/shieldstests/does-not-exist/Shields.TestPackage.json')
  .expectBadge({ label: 'feedz', message: 'repository or package not found' })

// version (pre) — prereleases included

t.create('version (pre) (valid)')
  .get('/feedz/vpre/shieldstests/public/Shields.TestPackage.json')
  .expectBadge({
    label: 'feedz',
    message: 'v1.0.0',
    color: 'blue',
  })

t.create('version (pre) (yellow badge)')
  .get('/feedz/vpre/shieldstests/public/Shields.TestPreOnly.json')
  .expectBadge({
    label: 'feedz',
    message: 'v0.1.0-pre',
    color: 'yellow',
  })

t.create('version (pre) (orange badge)')
  .get('/feedz/vpre/shieldstests/public/Shields.NoV1.json')
  .expectBadge({
    label: 'feedz',
    message: 'v0.1.0',
    color: 'orange',
  })

t.create('multi-page (pre)')
  .get('/feedz/vpre/shieldstests/public/Shields.MultiPage.json')
  .expectBadge({
    label: 'feedz',
    message: 'v1.0.0',
    color: 'blue',
  })

t.create('repository (pre) (not found)')
  .get('/feedz/vpre/foo/bar/not-a-real-package.json')
  .expectBadge({ label: 'feedz', message: 'repository or package not found' })

t.create('version (pre) (not found)')
  .get('/feedz/vpre/shieldstests/public/not-a-real-package.json')
  .expectBadge({ label: 'feedz', message: 'repository or package not found' })

// Fixed duplicate title: this test previously reused 'non-existing repository',
// colliding with the stable-version test of the same name and making failure
// reports ambiguous. Suffixed '(pre)' to match the rest of this section.
t.create('non-existing repository (pre)')
  .get('/feedz/vpre/shieldstests/does-not-exist/Shields.TestPackage.json')
  .expectBadge({ label: 'feedz', message: 'repository or package not found' })

Some files were not shown because too many files have changed in this diff. Show More