remove a decent chunk of unneeded stuff from the backend

ihateblueb 2025-06-03 16:51:07 -04:00
commit 59d16b90f5
2932 changed files with 1 addition and 155831 deletions

@@ -1,15 +0,0 @@
# For details, see https://github.com/devcontainers/images/tree/main/src/ruby
FROM mcr.microsoft.com/devcontainers/ruby:1-3.3-bookworm
# Install node version from .nvmrc
WORKDIR /app
COPY .nvmrc .
RUN /bin/bash --login -i -c "nvm install"
# Install additional OS packages
RUN apt-get update && \
export DEBIAN_FRONTEND=noninteractive && \
apt-get -y install --no-install-recommends libicu-dev libidn11-dev ffmpeg imagemagick libvips42 libpam-dev
# Move welcome message to where VS Code expects it
COPY .devcontainer/welcome-message.txt /usr/local/etc/vscode-dev-containers/first-run-notice.txt

@@ -1,51 +0,0 @@
{
"name": "Mastodon on GitHub Codespaces",
"dockerComposeFile": "../compose.yaml",
"service": "app",
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
"features": {
"ghcr.io/devcontainers/features/sshd:1": {}
},
"runServices": ["app", "db", "redis"],
"forwardPorts": [3000, 4000],
"portsAttributes": {
"3000": {
"label": "web",
"onAutoForward": "notify"
},
"4000": {
"label": "stream",
"onAutoForward": "silent"
}
},
"remoteUser": "root",
"otherPortsAttributes": {
"onAutoForward": "silent"
},
"remoteEnv": {
"LOCAL_DOMAIN": "${localEnv:CODESPACE_NAME}-3000.app.github.dev",
"LOCAL_HTTPS": "true",
"STREAMING_API_BASE_URL": "https://${localEnv:CODESPACE_NAME}-4000.app.github.dev",
"DISABLE_FORGERY_REQUEST_PROTECTION": "true",
"ES_ENABLED": "",
"LIBRE_TRANSLATE_ENDPOINT": ""
},
"onCreateCommand": "git config --global --add safe.directory ${containerWorkspaceFolder}",
"postCreateCommand": "COREPACK_ENABLE_DOWNLOAD_PROMPT=0 bin/setup",
"waitFor": "postCreateCommand",
"customizations": {
"vscode": {
"settings": {},
"extensions": ["EditorConfig.EditorConfig", "webben.browserslist"]
}
}
}
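
For illustration, with a hypothetical codespace named fuzzy-example, the remoteEnv block above would expand to:

LOCAL_DOMAIN=fuzzy-example-3000.app.github.dev
STREAMING_API_BASE_URL=https://fuzzy-example-4000.app.github.dev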

@@ -1,89 +0,0 @@
services:
app:
working_dir: /workspaces/mastodon/
build:
context: ..
dockerfile: .devcontainer/Dockerfile
volumes:
- ..:/workspaces/mastodon:cached
environment:
RAILS_ENV: development
NODE_ENV: development
BIND: 0.0.0.0
REDIS_HOST: redis
REDIS_PORT: '6379'
DB_HOST: db
DB_USER: postgres
DB_PASS: postgres
DB_PORT: '5432'
ES_ENABLED: 'true'
ES_HOST: es
ES_PORT: '9200'
LIBRE_TRANSLATE_ENDPOINT: http://libretranslate:5000
# Overrides default command so things don't shut down after the process ends.
command: sleep infinity
ports:
- '127.0.0.1:3000:3000'
- '127.0.0.1:3035:3035'
- '127.0.0.1:4000:4000'
networks:
- external_network
- internal_network
db:
image: postgres:14-alpine
restart: unless-stopped
volumes:
- postgres-data:/var/lib/postgresql/data
environment:
POSTGRES_USER: postgres
POSTGRES_DB: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_HOST_AUTH_METHOD: trust
networks:
- internal_network
redis:
image: redis:7-alpine
restart: unless-stopped
volumes:
- redis-data:/data
networks:
- internal_network
es:
image: docker.elastic.co/elasticsearch/elasticsearch-oss:7.10.2
restart: unless-stopped
environment:
ES_JAVA_OPTS: -Xms512m -Xmx512m
cluster.name: es-mastodon
discovery.type: single-node
bootstrap.memory_lock: 'true'
volumes:
- es-data:/usr/share/elasticsearch/data
networks:
- internal_network
ulimits:
memlock:
soft: -1
hard: -1
libretranslate:
image: libretranslate/libretranslate:v1.5.7
restart: unless-stopped
volumes:
- lt-data:/home/libretranslate/.local
networks:
- external_network
- internal_network
volumes:
postgres-data:
redis-data:
es-data:
lt-data:
networks:
external_network:
internal_network:
internal: true

@@ -1,42 +0,0 @@
{
"name": "Mastodon on local machine",
"dockerComposeFile": "compose.yaml",
"service": "app",
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
"features": {
"ghcr.io/devcontainers/features/sshd:1": {}
},
"forwardPorts": [3000, 4000],
"portsAttributes": {
"3000": {
"label": "web",
"onAutoForward": "notify",
"requireLocalPort": true
},
"4000": {
"label": "stream",
"onAutoForward": "silent",
"requireLocalPort": true
}
},
"remoteUser": "root",
"otherPortsAttributes": {
"onAutoForward": "silent"
},
"onCreateCommand": "git config --global --add safe.directory ${containerWorkspaceFolder}",
"postCreateCommand": "bin/setup",
"waitFor": "postCreateCommand",
"customizations": {
"vscode": {
"settings": {},
"extensions": ["EditorConfig.EditorConfig", "webben.browserslist"]
}
}
}

@@ -1,7 +0,0 @@
👋 Welcome to your Mastodon Dev Container!
🛠️ Your environment is fully set up with all the required software.
💥 Run `bin/dev` to start the application processes.
🥼 Run `RAILS_ENV=test bin/rails assets:precompile && RAILS_ENV=test bin/rspec` to run the test suite.

@@ -1,22 +0,0 @@
.bundle
.env
.env.*
.git
.gitattributes
.gitignore
.github
public/system
public/assets
public/packs
public/packs-test
node_modules
neo4j
vendor/bundle
.DS_Store
*.swp
*~
postgres
postgres14
redis
elasticsearch
chart

@@ -1,4 +0,0 @@
# Required by ActiveRecord encryption feature
ACTIVE_RECORD_ENCRYPTION_DETERMINISTIC_KEY=fkSxKD2bF396kdQbrP1EJ7WbU7ZgNokR
ACTIVE_RECORD_ENCRYPTION_KEY_DERIVATION_SALT=r0hvVmzBVsjxC7AMlwhOzmtc36ZCOS1E
ACTIVE_RECORD_ENCRYPTION_PRIMARY_KEY=PhdFyyfy5xJ7WVd2lWBpcPScRQHzRTNr

@@ -1,311 +0,0 @@
# This is a sample configuration file. You can generate your configuration
# with the `bundle exec rails mastodon:setup` interactive setup wizard, but to customize
# your setup even further, you'll need to edit it manually. This sample does
# not demonstrate all available configuration options. Please look at
# https://docs.joinmastodon.org/admin/config/ for the full documentation.
# Note that this file accepts slightly different syntax depending on whether
# you are using `docker-compose` or not. In particular, if you use
# `docker-compose`, the value of each declared variable will be taken verbatim,
# including surrounding quotes.
# See: https://github.com/mastodon/mastodon/issues/16895
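# For example (a sketch of the difference): FOO="bar" is read as bar by
# dotenv, but docker-compose keeps the quotes and yields "bar" literally.
# Unquoted values such as FOO=bar behave the same in both.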
# Federation
# ----------
# This identifies your server and cannot be changed safely later
# ----------
LOCAL_DOMAIN=example.com
# Use this only if you need to run mastodon on a different domain than the one used for federation.
# You can read more about this option on https://docs.joinmastodon.org/admin/config/#web-domain
# DO *NOT* USE THIS UNLESS YOU KNOW *EXACTLY* WHAT YOU ARE DOING.
# WEB_DOMAIN=mastodon.example.com
# Use this if you want to have several aliases (handler@example1.com,
# handler@example2.com, etc.) for the same user. LOCAL_DOMAIN should not
# be included. Comma-separated values.
# ALTERNATE_DOMAINS=example1.com,example2.com
# Use an HTTP proxy for outgoing requests (optional)
# http_proxy=http://gateway.local:8118
# Access control for hidden service.
# ALLOW_ACCESS_TO_HIDDEN_SERVICE=true
# Authorized fetch mode (optional)
# Require remote servers to authenticate when fetching toots, see
# https://docs.joinmastodon.org/admin/config/#authorized_fetch
# AUTHORIZED_FETCH=true
# Limited federation mode (optional)
# Only allow federation with specific domains, see
# https://docs.joinmastodon.org/admin/config/#whitelist_mode
# LIMITED_FEDERATION_MODE=true
# Redis
# -----
REDIS_HOST=localhost
REDIS_PORT=6379
# PostgreSQL
# ----------
DB_HOST=/var/run/postgresql
DB_USER=mastodon
DB_NAME=mastodon_production
DB_PASS=
DB_PORT=5432
# Elasticsearch (optional)
# ------------------------
#ES_ENABLED=true
#ES_HOST=localhost
#ES_PORT=9200
# Authentication for ES (optional)
#ES_USER=elastic
#ES_PASS=password
# Secrets
# -------
# Generate each with the `RAILS_ENV=production bundle exec rails secret` task (`docker-compose run --rm web bundle exec rails secret` if you use docker compose)
# -------
SECRET_KEY_BASE=
OTP_SECRET=
# Web Push
# --------
# Generate with `bundle exec rails mastodon:webpush:generate_vapid_key` (first is the private key, second is the public one)
# You should only generate this once per instance. If you later decide to change it, all push subscriptions
# will be invalidated, requiring users to access the website again to resubscribe.
# --------
VAPID_PRIVATE_KEY=
VAPID_PUBLIC_KEY=
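# The generator typically prints both keys as ready-to-paste
# VAPID_PRIVATE_KEY=... and VAPID_PUBLIC_KEY=... lines; the exact output
# may vary between Mastodon versions.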
# Registrations
# -------------
# Single user mode will disable registrations and redirect frontpage to the first profile
# SINGLE_USER_MODE=true
# Prevent registrations with following e-mail domains
# EMAIL_DOMAIN_DENYLIST=example1.com|example2.de|etc
# Only allow registrations with the following e-mail domains
# EMAIL_DOMAIN_ALLOWLIST=example1.com|example2.de|etc
# TODO: move this
# Optionally change default language
# DEFAULT_LOCALE=de
# Sending mail
# ------------
SMTP_SERVER=
SMTP_PORT=587
SMTP_LOGIN=
SMTP_PASSWORD=
SMTP_FROM_ADDRESS=notifications@example.com
# File storage (optional)
# -----------------------
# The attachment host must allow cross origin request from WEB_DOMAIN or
# LOCAL_DOMAIN if WEB_DOMAIN is not set. For example, the server may have the
# following header field:
# Access-Control-Allow-Origin: https://192.168.1.123:9000/
# -----------------------
#S3_ENABLED=true
#S3_BUCKET=files.example.com
#AWS_ACCESS_KEY_ID=
#AWS_SECRET_ACCESS_KEY=
#S3_ALIAS_HOST=files.example.com
# Swift (optional)
# The attachment host must allow cross origin request - see the description
# above.
# SWIFT_ENABLED=true
# SWIFT_USERNAME=
# For Keystone V3, the value for SWIFT_TENANT should be the project name
# SWIFT_TENANT=
# SWIFT_PASSWORD=
# Some OpenStack V3 providers require PROJECT_ID (optional)
# SWIFT_PROJECT_ID=
# Keystone V2 and V3 URLs are supported. Use a V3 URL if possible to avoid
# issues with token rate-limiting during high load.
# SWIFT_AUTH_URL=
# SWIFT_CONTAINER=
# SWIFT_OBJECT_URL=
# SWIFT_REGION=
# Defaults to 'default'
# SWIFT_DOMAIN_NAME=
# Defaults to 60 seconds. Set to 0 to disable
# SWIFT_CACHE_TTL=
# Optional asset host for multi-server setups
# The asset host must allow cross origin request from WEB_DOMAIN or LOCAL_DOMAIN
# if WEB_DOMAIN is not set. For example, the server may have the
# following header field:
# Access-Control-Allow-Origin: https://example.com/
# CDN_HOST=https://assets.example.com
# Optional list of hosts that are allowed to serve media for your instance
# This is useful if you include external media in your custom CSS or about page,
# or if your data storage provider makes use of redirects to other domains.
# EXTRA_DATA_HOSTS=https://data.example1.com|https://data.example2.com
# Optional alias for S3 (e.g. to serve files on a custom domain, possibly using Cloudfront or Cloudflare)
# S3_ALIAS_HOST=
# Streaming API integration
# STREAMING_API_BASE_URL=
# External authentication (optional)
# ----------------------------------
# LDAP authentication (optional)
# LDAP_ENABLED=true
# LDAP_HOST=localhost
# LDAP_PORT=389
# LDAP_METHOD=simple_tls
# LDAP_BASE=
# LDAP_BIND_DN=
# LDAP_PASSWORD=
# LDAP_UID=cn
# LDAP_MAIL=mail
# LDAP_SEARCH_FILTER=(|(%{uid}=%{email})(%{mail}=%{email}))
# LDAP_UID_CONVERSION_ENABLED=true
# LDAP_UID_CONVERSION_SEARCH=., -
# LDAP_UID_CONVERSION_REPLACE=_
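# For illustration: with LDAP_UID=cn and LDAP_MAIL=mail, a hypothetical
# login of jane@example.com would expand LDAP_SEARCH_FILTER above to
# (|(cn=jane@example.com)(mail=jane@example.com))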
# PAM authentication (optional)
# PAM authentication uses the "email" pam variable for email generation,
# with PAM_DEFAULT_SUFFIX as an optional fallback
# The pam environment variable "email" is provided by:
# https://github.com/devkral/pam_email_extractor
# PAM_ENABLED=true
# Fallback email domain for email address generation (LOCAL_DOMAIN by default)
# PAM_EMAIL_DOMAIN=example.com
# Name of the pam service (pam "auth" section is evaluated)
# PAM_DEFAULT_SERVICE=rpam
# Name of the pam service used for checking if a user can register (pam "account" section is evaluated) (nil (disabled) by default)
# PAM_CONTROLLED_SERVICE=rpam
# Global OAuth settings (optional) :
# If you have only one strategy, you may want to enable this
# OAUTH_REDIRECT_AT_SIGN_IN=true
# Optional CAS authentication (cf. omniauth-cas) :
# CAS_ENABLED=true
# CAS_URL=https://sso.myserver.com/
# CAS_HOST=sso.myserver.com/
# CAS_PORT=443
# CAS_SSL=true
# CAS_VALIDATE_URL=
# CAS_CALLBACK_URL=
# CAS_LOGOUT_URL=
# CAS_LOGIN_URL=
# CAS_UID_FIELD='user'
# CAS_CA_PATH=
# CAS_DISABLE_SSL_VERIFICATION=false
# CAS_UID_KEY='user'
# CAS_NAME_KEY='name'
# CAS_EMAIL_KEY='email'
# CAS_NICKNAME_KEY='nickname'
# CAS_FIRST_NAME_KEY='firstname'
# CAS_LAST_NAME_KEY='lastname'
# CAS_LOCATION_KEY='location'
# CAS_IMAGE_KEY='image'
# CAS_PHONE_KEY='phone'
# Optional SAML authentication (cf. omniauth-saml)
# SAML_ENABLED=true
# SAML_ACS_URL=http://localhost:3000/auth/auth/saml/callback
# SAML_ISSUER=https://example.com
# SAML_IDP_SSO_TARGET_URL=https://idp.testshib.org/idp/profile/SAML2/Redirect/SSO
# SAML_IDP_CERT=
# SAML_IDP_CERT_FINGERPRINT=
# SAML_NAME_IDENTIFIER_FORMAT=
# SAML_CERT=
# SAML_PRIVATE_KEY=
# SAML_SECURITY_WANT_ASSERTION_SIGNED=true
# SAML_SECURITY_WANT_ASSERTION_ENCRYPTED=true
# SAML_SECURITY_ASSUME_EMAIL_IS_VERIFIED=true
# SAML_ATTRIBUTES_STATEMENTS_UID="urn:oid:0.9.2342.19200300.100.1.1"
# SAML_ATTRIBUTES_STATEMENTS_EMAIL="urn:oid:1.3.6.1.4.1.5923.1.1.1.6"
# SAML_ATTRIBUTES_STATEMENTS_FULL_NAME="urn:oid:2.16.840.1.113730.3.1.241"
# SAML_ATTRIBUTES_STATEMENTS_FIRST_NAME="urn:oid:2.5.4.42"
# SAML_ATTRIBUTES_STATEMENTS_LAST_NAME="urn:oid:2.5.4.4"
# SAML_UID_ATTRIBUTE="urn:oid:0.9.2342.19200300.100.1.1"
# SAML_ATTRIBUTES_STATEMENTS_VERIFIED=
# SAML_ATTRIBUTES_STATEMENTS_VERIFIED_EMAIL=
# Custom settings
# ---------------
# Various ways to customize Mastodon's behavior
# ---------------
# Maximum allowed character count
MAX_TOOT_CHARS=500
# Maximum allowed hashtags to follow in a feed column
# Note that setting this value higher may cause significant
# database load
MAX_FEED_HASHTAGS=4
# Maximum number of pinned posts
MAX_PINNED_TOOTS=5
# Maximum allowed bio characters
MAX_BIO_CHARS=500
# Maximum number of profile fields allowed
MAX_PROFILE_FIELDS=4
# Maximum allowed display name characters
MAX_DISPLAY_NAME_CHARS=30
# Maximum allowed poll options
MAX_POLL_OPTIONS=5
# Maximum allowed poll option characters
MAX_POLL_OPTION_CHARS=100
# Maximum number of emoji reactions per toot and user (minimum 1)
MAX_REACTIONS=1
# Maximum image and video/audio upload sizes
# Units are in bytes
# 1048576 bytes equals 1 megabyte
# MAX_IMAGE_SIZE=8388608
# MAX_VIDEO_SIZE=41943040
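# e.g. the suggested values above work out to 8388608 / 1048576 = 8 MB for
# images and 41943040 / 1048576 = 40 MB for video/audio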
# Maximum search results to display
# Only relevant when elasticsearch is installed
# MAX_SEARCH_RESULTS=20
# Maximum hashtags to display
# Customize the number of hashtags shown in 'Explore'
# MAX_TRENDING_TAGS=10
# Maximum custom emoji file sizes
# If undefined or smaller than MAX_EMOJI_SIZE, the value
# of MAX_EMOJI_SIZE will be used for MAX_REMOTE_EMOJI_SIZE
# Units are in bytes
# MAX_EMOJI_SIZE=262144
# MAX_REMOTE_EMOJI_SIZE=262144
# Optional hCaptcha support
# HCAPTCHA_SECRET_KEY=
# HCAPTCHA_SITE_KEY=
# IP and session retention
# -----------------------
# Make sure to modify the scheduling of ip_cleanup_scheduler in config/sidekiq.yml
# to be less than daily if you lower IP_RETENTION_PERIOD below two days (172800).
# -----------------------
IP_RETENTION_PERIOD=31556952
SESSION_RETENTION_PERIOD=31556952
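# For reference, a hypothetical sketch of the scheduler entry mentioned
# above (the real keys in config/sidekiq.yml may differ):
# ip_cleanup_scheduler:
#   interval: 1 day
#   class: Scheduler::IpCleanupScheduler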

@@ -1,11 +0,0 @@
# In test, compile the NodeJS code as if we are in production
NODE_ENV=production
# Federation
LOCAL_DOMAIN=cb6e6126.ngrok.io
LOCAL_HTTPS=true
# Secret values required by ActiveRecord encryption feature
# Use `bin/rails db:encryption:init` to generate fresh secrets
ACTIVE_RECORD_ENCRYPTION_DETERMINISTIC_KEY=test_determinist_key_DO_NOT_USE_IN_PRODUCTION
ACTIVE_RECORD_ENCRYPTION_KEY_DERIVATION_SALT=test_salt_DO_NOT_USE_IN_PRODUCTION
ACTIVE_RECORD_ENCRYPTION_PRIMARY_KEY=test_primary_key_DO_NOT_USE_IN_PRODUCTION
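# For reference, `bin/rails db:encryption:init` prints three fresh random
# values (primary key, deterministic key, key derivation salt) that map onto
# the three variables above; the exact output format depends on the Rails
# version in use.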

@@ -1,8 +0,0 @@
VAGRANT=true
LOCAL_DOMAIN=mastodon.local
BIND=0.0.0.0
DB_HOST=/var/run/postgresql/
ES_ENABLED=true
ES_HOST=localhost
ES_PORT=9200

@@ -1 +0,0 @@
procfile: Procfile.dev

@@ -1,42 +0,0 @@
name: 'Setup Javascript'
description: 'Setup a Javascript environment ready to run the Mastodon code'
inputs:
onlyProduction:
description: Only install production dependencies
default: 'false'
runs:
using: 'composite'
steps:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version-file: '.nvmrc'
# The following is needed because we can not use `cache: true` for `setup-node`, as it does not support Corepack yet and messes up the cache location if run after Node is installed
- name: Enable corepack
shell: bash
run: corepack enable
- name: Get yarn cache directory path
id: yarn-cache-dir-path
shell: bash
run: echo "dir=$(yarn config get cacheFolder)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install all yarn packages
shell: bash
run: yarn install --immutable
if: inputs.onlyProduction == 'false'
- name: Install all production yarn packages
shell: bash
run: yarn workspaces focus --production
if: inputs.onlyProduction != 'false'

@@ -1,23 +0,0 @@
name: 'Setup Ruby'
description: 'Setup a Ruby environment ready to run the Mastodon code'
inputs:
ruby-version:
description: The Ruby version to install
default: '.ruby-version'
additional-system-dependencies:
description: 'Additional packages to install'
runs:
using: 'composite'
steps:
- name: Install system dependencies
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y libicu-dev libidn11-dev libvips42 ${{ inputs.additional-system-dependencies }}
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: ${{ inputs.ruby-version }}
bundler-cache: true

.github/codecov.yml
@@ -1,11 +0,0 @@
comment: false # Do not leave PR comments
coverage:
status:
project:
default:
# GitHub status check is not blocking
informational: true
patch:
default:
# GitHub status check is not blocking
informational: true

.github/renovate.json5
@@ -1,158 +0,0 @@
{
$schema: 'https://docs.renovatebot.com/renovate-schema.json',
extends: [
'config:recommended',
'customManagers:dockerfileVersions',
':labels(dependencies)',
':prConcurrentLimitNone', // Remove limit for open PRs at any time.
':prHourlyLimit2', // Rate limit PR creation to a maximum of two per hour.
],
minimumReleaseAge: '3 days', // Wait 3 days after the package has been published before upgrading it
// packageRules order is important, they are applied from top to bottom and are merged,
// meaning the most important ones must be at the bottom, for example grouping rules
// If we do not want a package to be grouped with others, we need to set its groupName
// to `null` after any other rule set it to something.
dependencyDashboardHeader: 'This issue lists Renovate updates and detected dependencies. Read the [Dependency Dashboard](https://docs.renovatebot.com/key-concepts/dashboard/) docs to learn more. Before approving any upgrade: read the description and comments in the [`renovate.json5` file](https://github.com/mastodon/mastodon/blob/main/.github/renovate.json5).',
postUpdateOptions: ['yarnDedupeHighest'],
packageRules: [
{
// Require Dependency Dashboard Approval for major version bumps of these node packages
matchManagers: ['npm'],
matchPackageNames: [
'tesseract.js', // Requires code changes
'react-hotkeys', // Requires code changes
// Requires Webpacker upgrade or replacement
'@svgr/webpack',
'@types/webpack',
'babel-loader',
'compression-webpack-plugin',
'css-loader',
'imports-loader',
'mini-css-extract-plugin',
'postcss-loader',
'sass-loader',
'terser-webpack-plugin',
'webpack',
'webpack-assets-manifest',
'webpack-bundle-analyzer',
'webpack-dev-server',
'webpack-cli',
// react-router: Requires manual upgrade
'history',
'react-router-dom',
],
matchUpdateTypes: ['major'],
dependencyDashboardApproval: true,
},
{
// Require Dependency Dashboard Approval for major version bumps of these Ruby packages
matchManagers: ['bundler'],
matchPackageNames: [
'rack', // Needs to be synced with Rails version
'strong_migrations', // Requires manual upgrade
'sidekiq', // Requires manual upgrade
'sidekiq-unique-jobs', // Requires manual upgrades and sync with Sidekiq version
'redis', // Requires manual upgrade and sync with Sidekiq version
],
matchUpdateTypes: ['major'],
dependencyDashboardApproval: true,
},
{
// Update GitHub Actions and Docker images weekly
matchManagers: ['github-actions', 'dockerfile', 'docker-compose'],
extends: ['schedule:weekly'],
},
{
// Require Dependency Dashboard Approval for major & minor bumps for the ruby image, this needs to be synced with .ruby-version
matchManagers: ['dockerfile'],
matchPackageNames: ['moritzheiber/ruby-jemalloc'],
matchUpdateTypes: ['minor', 'major'],
dependencyDashboardApproval: true,
},
{
// Require Dependency Dashboard Approval for major bumps for the node image, this needs to be synced with .nvmrc
matchManagers: ['dockerfile'],
matchPackageNames: ['node'],
matchUpdateTypes: ['major'],
dependencyDashboardApproval: true,
},
{
// Require Dependency Dashboard Approval for major postgres bumps in the docker-compose file, as those break dev environments
matchManagers: ['docker-compose'],
matchPackageNames: ['postgres'],
matchUpdateTypes: ['major'],
dependencyDashboardApproval: true,
},
{
// Update devDependencies every week, with one grouped PR
matchDepTypes: 'devDependencies',
matchUpdateTypes: ['patch', 'minor'],
groupName: 'devDependencies (non-major)',
extends: ['schedule:weekly'],
},
{
// Group all eslint-related packages with `eslint` in the same PR
matchManagers: ['npm'],
matchPackageNames: ['eslint'],
matchPackagePrefixes: ['eslint-', '@typescript-eslint/'],
matchUpdateTypes: ['patch', 'minor'],
groupName: 'eslint (non-major)',
},
{
// Group actions/*-artifact in the same PR
matchManagers: ['github-actions'],
matchPackageNames: [
'actions/download-artifact',
'actions/upload-artifact',
],
matchUpdateTypes: ['major'],
groupName: 'artifact actions (major)',
},
{
// Update @types/* packages every week, with one grouped PR
matchPackagePrefixes: '@types/',
matchUpdateTypes: ['patch', 'minor'],
groupName: 'DefinitelyTyped types (non-major)',
extends: ['schedule:weekly'],
addLabels: ['typescript'],
},
{
// We want those packages to always have their own PR
matchManagers: ['npm'],
matchPackageNames: [
'typescript', // Typescript has code-impacting changes in minor versions
],
groupName: null, // We don't want them to belong to any group
},
{
// Group all RuboCop packages with `rubocop` in the same PR
matchManagers: ['bundler'],
matchPackageNames: ['rubocop'],
matchPackagePrefixes: ['rubocop-'],
matchUpdateTypes: ['patch', 'minor'],
groupName: 'RuboCop (non-major)',
},
{
// Group all RSpec packages with `rspec` in the same PR
matchManagers: ['bundler'],
matchPackageNames: ['rspec'],
matchPackagePrefixes: ['rspec-'],
matchUpdateTypes: ['patch', 'minor'],
groupName: 'RSpec (non-major)',
},
{
// Group all opentelemetry-ruby packages in the same PR
matchManagers: ['bundler'],
matchPackagePrefixes: ['opentelemetry-'],
matchUpdateTypes: ['patch', 'minor'],
groupName: 'opentelemetry-ruby (non-major)',
},
// Add labels depending on package manager
{ matchManagers: ['npm', 'nvm'], addLabels: ['javascript'] },
{ matchManagers: ['bundler', 'ruby-version'], addLabels: ['ruby'] },
{ matchManagers: ['docker-compose', 'dockerfile'], addLabels: ['docker'] },
{ matchManagers: ['github-actions'], addLabels: ['github_actions'] },
],
}

.github/stale.yml
@@ -1,10 +0,0 @@
daysUntilStale: 120
daysUntilClose: 7
exemptLabels:
- security
staleLabel: wontfix
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
only: pulls

@@ -1,102 +0,0 @@
on:
workflow_call:
inputs:
platforms:
required: true
type: string
cache:
type: boolean
default: true
use_native_arm64_builder:
type: boolean
push_to_images:
type: string
version_prerelease:
type: string
version_metadata:
type: string
flavor:
type: string
tags:
type: string
labels:
type: string
file_to_build:
type: string
jobs:
build-image:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: docker/setup-qemu-action@v3
if: contains(inputs.platforms, 'linux/arm64') && !inputs.use_native_arm64_builder
- uses: docker/setup-buildx-action@v3
id: buildx
if: ${{ !(inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')) }}
- name: Start a local Docker Builder
if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
run: |
docker run --rm -d --name buildkitd -p 1234:1234 --privileged moby/buildkit:latest --addr tcp://0.0.0.0:1234
- uses: docker/setup-buildx-action@v3
id: buildx-native
if: inputs.use_native_arm64_builder && contains(inputs.platforms, 'linux/arm64')
with:
driver: remote
endpoint: tcp://localhost:1234
platforms: linux/amd64
append: |
- endpoint: tcp://${{ vars.DOCKER_BUILDER_HETZNER_ARM64_01_HOST }}:13865
platforms: linux/arm64
name: mastodon-docker-builder-arm64-01
driver-opts:
- servername=mastodon-docker-builder-arm64-01
env:
BUILDER_NODE_1_AUTH_TLS_CACERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CACERT }}
BUILDER_NODE_1_AUTH_TLS_CERT: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_CERT }}
BUILDER_NODE_1_AUTH_TLS_KEY: ${{ secrets.DOCKER_BUILDER_HETZNER_ARM64_01_KEY }}
- name: Log in to Docker Hub
if: contains(inputs.push_to_images, 'tootsuite')
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Log in to the GitHub Container registry
if: contains(inputs.push_to_images, 'ghcr.io')
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- uses: docker/metadata-action@v5
id: meta
if: ${{ inputs.push_to_images != '' }}
with:
images: ${{ inputs.push_to_images }}
flavor: ${{ inputs.flavor }}
tags: ${{ inputs.tags }}
labels: ${{ inputs.labels }}
- uses: docker/build-push-action@v5
with:
context: .
file: ${{ inputs.file_to_build }}
build-args: |
MASTODON_VERSION_PRERELEASE=${{ inputs.version_prerelease }}
MASTODON_VERSION_METADATA=${{ inputs.version_metadata }}
platforms: ${{ inputs.platforms }}
provenance: false
builder: ${{ steps.buildx.outputs.name || steps.buildx-native.outputs.name }}
push: ${{ inputs.push_to_images != '' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: ${{ inputs.cache && 'type=gha' || '' }}
cache-to: ${{ inputs.cache && 'type=gha,mode=max' || '' }}

@@ -1,64 +0,0 @@
name: Build nightly container image
on:
workflow_dispatch:
schedule:
- cron: '0 2 * * *' # run at 2 AM UTC
permissions:
contents: read
packages: write
jobs:
compute-suffix:
runs-on: ubuntu-latest
if: github.repository == 'TheEssem/mastodon'
steps:
- id: version_vars
env:
TZ: Etc/UTC
run: |
echo mastodon_version_prerelease=nightly.$(date +'%Y-%m-%d') >> $GITHUB_OUTPUT
outputs:
prerelease: ${{ steps.version_vars.outputs.mastodon_version_prerelease }}
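# For example, a run on 2025-06-03 would set prerelease to
# "nightly.2025-06-03", which feeds version_prerelease and the
# schedule tag pattern in the build jobs below.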
build-image:
needs: compute-suffix
uses: ./.github/workflows/build-container-image.yml
with:
file_to_build: Dockerfile
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: false
cache: false
push_to_images: |
ghcr.io/${{ github.repository_owner }}/mastodon
version_prerelease: ${{ needs.compute-suffix.outputs.prerelease }}
labels: |
org.opencontainers.image.description=Nightly build image used for testing purposes
flavor: |
latest=true
tags: |
type=raw,value=edge
type=raw,value=nightly
type=schedule,pattern=${{ needs.compute-suffix.outputs.prerelease }}
secrets: inherit
build-image-streaming:
needs: compute-suffix
uses: ./.github/workflows/build-container-image.yml
with:
file_to_build: streaming/Dockerfile
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: false
cache: false
push_to_images: |
ghcr.io/${{ github.repository_owner }}/mastodon-streaming
version_prerelease: ${{ needs.compute-suffix.outputs.prerelease }}
labels: |
org.opencontainers.image.description=Nightly build image used for testing purposes
flavor: |
latest=true
tags: |
type=raw,value=edge
type=raw,value=nightly
type=schedule,pattern=${{ needs.compute-suffix.outputs.prerelease }}
secrets: inherit

@@ -1,58 +0,0 @@
name: Build container image for PR
on:
pull_request:
types: [labeled, synchronize, reopened, ready_for_review, opened]
permissions:
contents: read
packages: write
jobs:
compute-suffix:
runs-on: ubuntu-latest
# This is only allowed to run if:
# - the PR branch is in the `mastodon/mastodon` repository
# - the PR is not a draft
# - the PR has the "build-image" label
if: ${{ github.event.pull_request.head.repo.full_name == github.repository && !github.event.pull_request.draft && contains(github.event.pull_request.labels.*.name, 'build-image') }}
steps:
# Repository needs to be cloned so `git rev-parse` below works
- name: Clone repository
uses: actions/checkout@v4
- id: version_vars
run: |
echo mastodon_version_metadata=pr-${{ github.event.pull_request.number }}-$(git rev-parse --short HEAD) >> $GITHUB_OUTPUT
outputs:
metadata: ${{ steps.version_vars.outputs.mastodon_version_metadata }}
build-image:
needs: compute-suffix
uses: ./.github/workflows/build-container-image.yml
with:
file_to_build: Dockerfile
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: false
push_to_images: |
ghcr.io/${{ github.repository_owner }}/mastodon
version_metadata: ${{ needs.compute-suffix.outputs.metadata }}
flavor: |
latest=auto
tags: |
type=ref,event=pr
secrets: inherit
build-image-streaming:
needs: compute-suffix
uses: ./.github/workflows/build-container-image.yml
with:
file_to_build: streaming/Dockerfile
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: false
push_to_images: |
ghcr.io/${{ github.repository_owner }}/mastodon-streaming
version_metadata: ${{ needs.compute-suffix.outputs.metadata }}
flavor: |
latest=auto
tags: |
type=ref,event=pr
secrets: inherit

@@ -1,49 +0,0 @@
name: Build container release images
on:
push:
tags:
- '*'
permissions:
contents: read
packages: write
jobs:
build-image:
uses: ./.github/workflows/build-container-image.yml
with:
file_to_build: Dockerfile
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: false
push_to_images: |
ghcr.io/${{ github.repository_owner }}/mastodon
# Do not use cache when building releases, so apt update is always run and the release always contains the latest packages
cache: false
# Only tag with latest when run against the latest stable branch
# This needs to be updated after each minor version release
flavor: |
latest=${{ startsWith(github.ref, 'refs/tags/v4.2.') }}
tags: |
type=pep440,pattern={{raw}}
type=pep440,pattern=v{{major}}.{{minor}}
secrets: inherit
build-image-streaming:
if: startsWith(github.ref, 'refs/tags/v4.3.')
uses: ./.github/workflows/build-container-image.yml
with:
file_to_build: streaming/Dockerfile
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: false
push_to_images: |
ghcr.io/${{ github.repository_owner }}/mastodon-streaming
# Do not use cache when building releases, so apt update is always run and the release always contains the latest packages
cache: false
# Only tag with latest when run against the latest stable branch
# This needs to be updated after each minor version release
flavor: |
latest=${{ startsWith(github.ref, 'refs/tags/v4.3.') }}
tags: |
type=pep440,pattern={{raw}}
type=pep440,pattern=v{{major}}.{{minor}}
secrets: inherit

@@ -1,61 +0,0 @@
name: Build security nightly container image
on:
workflow_dispatch:
permissions:
contents: read
packages: write
jobs:
compute-suffix:
runs-on: ubuntu-latest
steps:
- id: version_vars
env:
TZ: Etc/UTC
run: |
echo mastodon_version_prerelease=nightly.$(date --date='next day' +'%Y-%m-%d')-security >> $GITHUB_OUTPUT
outputs:
prerelease: ${{ steps.version_vars.outputs.mastodon_version_prerelease }}
build-image:
needs: compute-suffix
uses: ./.github/workflows/build-container-image.yml
with:
file_to_build: Dockerfile
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: false
cache: false
push_to_images: |
ghcr.io/${{ github.repository_owner }}/mastodon
version_prerelease: ${{ needs.compute-suffix.outputs.prerelease }}
labels: |
org.opencontainers.image.description=Nightly build image used for testing purposes
flavor: |
latest=true
tags: |
type=raw,value=edge
type=raw,value=nightly
type=raw,value=${{ needs.compute-suffix.outputs.prerelease }}
secrets: inherit
build-image-streaming:
needs: compute-suffix
uses: ./.github/workflows/build-container-image.yml
with:
file_to_build: streaming/Dockerfile
platforms: linux/amd64,linux/arm64
use_native_arm64_builder: false
cache: false
push_to_images: |
ghcr.io/${{ github.repository_owner }}/mastodon-streaming
version_prerelease: ${{ needs.compute-suffix.outputs.prerelease }}
labels: |
org.opencontainers.image.description=Nightly build image used for testing purposes
flavor: |
latest=true
tags: |
type=raw,value=edge
type=raw,value=nightly
type=raw,value=${{ needs.compute-suffix.outputs.prerelease }}
secrets: inherit

@@ -1,39 +0,0 @@
name: Bundler Audit
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
paths:
- 'Gemfile*'
- '.ruby-version'
- '.github/workflows/bundler-audit.yml'
pull_request:
paths:
- 'Gemfile*'
- '.ruby-version'
- '.github/workflows/bundler-audit.yml'
schedule:
- cron: '0 5 * * 1'
jobs:
security:
runs-on: ubuntu-latest
env:
BUNDLE_ONLY: development
steps:
- name: Clone repository
uses: actions/checkout@v4
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
bundler-cache: true
- name: Run bundler-audit
run: bundle exec bundler-audit check --update

@@ -1,52 +0,0 @@
name: Check i18n
on:
push:
branches:
- 'main'
- 'stable-*'
pull_request:
branches:
- 'main'
- 'stable-*'
env:
RAILS_ENV: test
permissions:
contents: read
jobs:
check-i18n:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- name: Set up Ruby environment
uses: ./.github/actions/setup-ruby
- name: Set up Javascript environment
uses: ./.github/actions/setup-javascript
- name: Check for missing strings in English JSON
run: |
yarn i18n:extract --throws
git diff --exit-code
- name: Check locale file normalization
run: bundle exec i18n-tasks check-normalized
- name: Check for unused strings
run: bundle exec i18n-tasks unused
- name: Check for missing strings in English YML
run: |
bundle exec i18n-tasks add-missing -l en
git diff --exit-code
- name: Check for wrong string interpolations
run: bundle exec i18n-tasks check-consistent-interpolations
- name: Check that all required locale files exist
run: bundle exec rake repo:check_locales_files

@@ -1,66 +0,0 @@
name: 'CodeQL'
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
pull_request:
branches:
- 'main'
- 'stable-*'
schedule:
- cron: '22 6 * * 1'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ['javascript', 'ruby']
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code; if your project needs it, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: '/language:${{matrix.language}}'

@@ -1,72 +0,0 @@
name: Crowdin / Download translations
on:
schedule:
- cron: '17 4 * * *' # Every day
workflow_dispatch:
permissions:
contents: write
pull-requests: write
jobs:
download-translations:
runs-on: ubuntu-latest
if: github.repository == 'glitch-soc/mastodon'
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Increase Git http.postBuffer
# This is needed due to a suspected bug in Ubuntu's cURL version
# See https://github.com/orgs/community/discussions/55820
run: |
git config --global http.version HTTP/1.1
git config --global http.postBuffer 157286400
# Download the translation files from Crowdin
- name: crowdin action
uses: crowdin/github-action@v1
with:
config: crowdin-glitch.yml
upload_sources: false
upload_translations: false
download_translations: true
crowdin_branch_name: main
push_translations: false
create_pull_request: false
env:
CROWDIN_PROJECT_ID: ${{ vars.CROWDIN_PROJECT_ID }}
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
# As the files are extracted from a Docker container, they belong to root:root
# We need to fix this before the next steps
- name: Fix file permissions
run: sudo chown -R runner:docker .
# This is needed to run the normalize step
- name: Set up Ruby environment
uses: ./.github/actions/setup-ruby
- name: Run i18n normalize task
run: bundle exec i18n-tasks normalize
# Create or update the pull request
- name: Create Pull Request
uses: peter-evans/create-pull-request@v6.0.5
with:
commit-message: 'New Crowdin translations'
title: 'New Crowdin Translations (automated)'
author: 'GitHub Actions <noreply@github.com>'
body: |
New Crowdin translations, automated with GitHub Actions
See `.github/workflows/crowdin-download.yml`
This PR will be updated every day with new translations.
Due to a limitation in GitHub Actions, checks are not running on this PR without manual action.
If you want to run the checks, then close and re-open it.
branch: i18n/crowdin/translations
base: main
labels: i18n

@@ -1,39 +0,0 @@
name: Crowdin / Upload translations
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
paths:
- crowdin-glitch.yml
- app/javascript/flavours/glitch/locales/en.json
- config/locales-glitch/en.yml
- config/locales-glitch/simple_form.en.yml
- config/locales-glitch/activerecord.en.yml
- config/locales-glitch/devise.en.yml
- config/locales-glitch/doorkeeper.en.yml
- .github/workflows/crowdin-upload.yml
jobs:
upload-translations:
runs-on: ubuntu-latest
if: github.repository == 'mastodon/mastodon'
steps:
- name: Checkout
uses: actions/checkout@v4
- name: crowdin action
uses: crowdin/github-action@v1
with:
config: crowdin-glitch.yml
upload_sources: true
upload_translations: false
download_translations: false
crowdin_branch_name: main
env:
CROWDIN_PROJECT_ID: ${{ vars.CROWDIN_PROJECT_ID }}
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}

@@ -1,22 +0,0 @@
name: Check formatting
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
pull_request:
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Clone repository
uses: actions/checkout@v4
- name: Set up Javascript environment
uses: ./.github/actions/setup-javascript
- name: Check formatting with Prettier
run: yarn format:check

@@ -1,17 +0,0 @@
{
"problemMatcher": [
{
"owner": "haml-lint",
"severity": "warning",
"pattern": [
{
"regexp": "^(.*):(\\d+)\\s\\[W]\\s(.*):\\s(.*)$",
"file": 1,
"line": 2,
"code": 3,
"message": 4
}
]
}
]
}
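
For illustration, a hypothetical haml-lint warning line such as

app/views/home/index.html.haml:12 [W] LineLength: Line is too long

would be captured by the pattern above as file=app/views/home/index.html.haml, line=12, code=LineLength and message=Line is too long.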

@@ -1,43 +0,0 @@
name: CSS Linting
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
paths:
- 'package.json'
- 'yarn.lock'
- '.nvmrc'
- '.prettier*'
- 'stylelint.config.js'
- '**/*.css'
- '**/*.scss'
- '.github/workflows/lint-css.yml'
- '.github/stylelint-matcher.json'
pull_request:
paths:
- 'package.json'
- 'yarn.lock'
- '.nvmrc'
- '.prettier*'
- 'stylelint.config.js'
- '**/*.css'
- '**/*.scss'
- '.github/workflows/lint-css.yml'
- '.github/stylelint-matcher.json'
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Clone repository
uses: actions/checkout@v4
- name: Set up Javascript environment
uses: ./.github/actions/setup-javascript
- name: Stylelint
run: yarn lint:css -f github

@@ -1,46 +0,0 @@
name: Haml Linting
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
paths:
- '.github/workflows/haml-lint-problem-matcher.json'
- '.github/workflows/lint-haml.yml'
- '.haml-lint*.yml'
- '.rubocop*.yml'
- '.ruby-version'
- '**/*.haml'
- 'Gemfile*'
pull_request:
paths:
- '.github/workflows/haml-lint-problem-matcher.json'
- '.github/workflows/lint-haml.yml'
- '.haml-lint*.yml'
- '.rubocop*.yml'
- '.ruby-version'
- '**/*.haml'
- 'Gemfile*'
jobs:
lint:
runs-on: ubuntu-latest
env:
BUNDLE_ONLY: development
steps:
- name: Clone repository
uses: actions/checkout@v4
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
bundler-cache: true
- name: Run haml-lint
run: |
echo "::add-matcher::.github/workflows/haml-lint-problem-matcher.json"
bundle exec haml-lint --reporter github

@@ -1,50 +0,0 @@
name: JavaScript Linting
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
paths:
- 'package.json'
- 'yarn.lock'
- 'tsconfig.json'
- '.nvmrc'
- '.prettier*'
- '.eslint*'
- '**/*.js'
- '**/*.jsx'
- '**/*.ts'
- '**/*.tsx'
- '.github/workflows/lint-js.yml'
pull_request:
paths:
- 'package.json'
- 'yarn.lock'
- 'tsconfig.json'
- '.nvmrc'
- '.prettier*'
- '.eslint*'
- '**/*.js'
- '**/*.jsx'
- '**/*.ts'
- '**/*.tsx'
- '.github/workflows/lint-js.yml'
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Clone repository
uses: actions/checkout@v4
- name: Set up Javascript environment
uses: ./.github/actions/setup-javascript
- name: ESLint
run: yarn lint:js --max-warnings 0
- name: Typecheck
run: yarn typecheck

@@ -1,51 +0,0 @@
name: Ruby Linting
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
paths:
- 'Gemfile*'
- '.rubocop*.yml'
- '.ruby-version'
- 'config/brakeman.ignore'
- '**/*.rb'
- '**/*.rake'
- '.github/workflows/lint-ruby.yml'
pull_request:
paths:
- 'Gemfile*'
- '.rubocop*.yml'
- '.ruby-version'
- 'config/brakeman.ignore'
- '**/*.rb'
- '**/*.rake'
- '.github/workflows/lint-ruby.yml'
jobs:
lint:
runs-on: ubuntu-latest
env:
BUNDLE_ONLY: development
steps:
- name: Clone repository
uses: actions/checkout@v4
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
bundler-cache: true
- name: Set-up RuboCop Problem Matcher
uses: r7kamura/rubocop-problem-matchers-action@v1
- name: Run rubocop
run: bin/rubocop
- name: Run brakeman
if: always() # Run both checks, even if the first failed
run: bin/brakeman

@@ -1,27 +0,0 @@
name: PR Needs Rebase
on:
schedule:
- cron: '0 * * * *'
permissions:
pull-requests: write
jobs:
label-rebase-needed:
runs-on: ubuntu-latest
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
steps:
- name: Check for merge conflicts
uses: eps1lon/actions-label-merge-conflict@v3
with:
dirtyLabel: 'rebase needed :construction:'
repoToken: '${{ secrets.GITHUB_TOKEN }}'
commentOnClean: This pull request has resolved merge conflicts and is ready for review.
commentOnDirty: This pull request has merge conflicts that must be resolved before it can be merged.
retryMax: 30
continueOnMissingPermissions: false

@@ -1,35 +0,0 @@
name: Test container image build
on:
pull_request:
paths:
- .github/workflows/build-nightly.yml
- .github/workflows/build-push-pr.yml
- .github/workflows/build-releases.yml
- .github/workflows/test-image-build.yml
- Dockerfile
- streaming/Dockerfile
permissions:
contents: read
jobs:
build-image:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
uses: ./.github/workflows/build-container-image.yml
with:
file_to_build: Dockerfile
platforms: linux/amd64 # Testing only on native platform so it is performant
cache: true
build-image-streaming:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-streaming
cancel-in-progress: true
uses: ./.github/workflows/build-container-image.yml
with:
file_to_build: streaming/Dockerfile
platforms: linux/amd64 # Testing only on native platform so it is performant
cache: true

@@ -1,43 +0,0 @@
name: JavaScript Testing
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
paths:
- 'package.json'
- 'yarn.lock'
- '.nvmrc'
- '**/*.js'
- '**/*.jsx'
- '**/*.ts'
- '**/*.tsx'
- '**/*.snap'
- '.github/workflows/test-js.yml'
pull_request:
paths:
- 'package.json'
- 'yarn.lock'
- '.nvmrc'
- '**/*.js'
- '**/*.jsx'
- '**/*.ts'
- '**/*.tsx'
- '**/*.snap'
- '.github/workflows/test-js.yml'
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Clone repository
uses: actions/checkout@v4
- name: Set up Javascript environment
uses: ./.github/actions/setup-javascript
- name: JavaScript testing
run: yarn jest --reporters github-actions summary

@@ -1,93 +0,0 @@
name: Historical data migration test
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
paths:
- 'Gemfile*'
- '.ruby-version'
- '**/*.rb'
- '.github/workflows/test-migrations.yml'
- 'lib/tasks/tests.rake'
pull_request:
paths:
- 'Gemfile*'
- '.ruby-version'
- '**/*.rb'
- '.github/workflows/test-migrations.yml'
- 'lib/tasks/tests.rake'
jobs:
test:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
postgres:
- 14-alpine
- 15-alpine
services:
postgres:
image: postgres:${{ matrix.postgres }}
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
options: >-
--health-cmd pg_isready
--health-interval 10ms
--health-timeout 3s
--health-retries 50
ports:
- 5432:5432
redis:
image: redis:7-alpine
options: >-
--health-cmd "redis-cli ping"
--health-interval 10ms
--health-timeout 3s
--health-retries 50
ports:
- 6379:6379
env:
DB_HOST: localhost
DB_USER: postgres
DB_PASS: postgres
DISABLE_SIMPLECOV: true
RAILS_ENV: test
BUNDLE_CLEAN: true
BUNDLE_FROZEN: true
BUNDLE_WITHOUT: 'development:production'
BUNDLE_JOBS: 3
BUNDLE_RETRY: 3
steps:
- uses: actions/checkout@v4
- name: Set up Ruby environment
uses: ./.github/actions/setup-ruby
- name: Test "one step migration" flow
run: |
bin/rails db:drop
bin/rails db:create
bin/rails tests:migrations:prepare_database
bin/rails db:migrate
bin/rails tests:migrations:check_database
- name: Test "two step migration" flow
run: |
bin/rails db:drop
bin/rails db:create
SKIP_POST_DEPLOYMENT_MIGRATIONS=true bin/rails tests:migrations:prepare_database
SKIP_POST_DEPLOYMENT_MIGRATIONS=true bin/rails db:migrate
bin/rails db:migrate
bin/rails tests:migrations:check_database
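# SKIP_POST_DEPLOYMENT_MIGRATIONS mirrors the zero-downtime upgrade flow:
# pre-deployment migrations run first, then a plain db:migrate applies the
# post-deployment ones.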

@@ -1,444 +0,0 @@
name: Ruby Testing
on:
merge_group:
push:
branches:
- 'main'
- 'stable-*'
pull_request:
env:
BUNDLE_CLEAN: true
BUNDLE_FROZEN: true
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: true
matrix:
mode:
- production
- test
env:
RAILS_ENV: ${{ matrix.mode }}
BUNDLE_WITH: ${{ matrix.mode }}
SECRET_KEY_BASE_DUMMY: 1
steps:
- uses: actions/checkout@v4
- name: Set up Ruby environment
uses: ./.github/actions/setup-ruby
- name: Set up Javascript environment
uses: ./.github/actions/setup-javascript
with:
onlyProduction: 'true'
- name: Precompile assets
# We previously set this, but it's no longer supported:
# export NODE_OPTIONS=--openssl-legacy-provider
run: |-
./bin/rails assets:precompile
- name: Archive asset artifacts
run: |
tar --exclude={"*.br","*.gz"} -zcf artifacts.tar.gz public/assets public/packs*
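# (Excluding *.br and *.gz presumably skips the pre-compressed asset
# variants to keep the artifact small; they can be regenerated later.)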
- uses: actions/upload-artifact@v4
if: matrix.mode == 'test'
with:
path: |-
./artifacts.tar.gz
name: ${{ github.sha }}
retention-days: 0
test:
runs-on: ubuntu-latest
needs:
- build
services:
postgres:
image: postgres:14-alpine
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
options: >-
--health-cmd pg_isready
--health-interval 10ms
--health-timeout 3s
--health-retries 50
ports:
- 5432:5432
redis:
image: redis:7-alpine
options: >-
--health-cmd "redis-cli ping"
--health-interval 10ms
--health-timeout 3s
--health-retries 50
ports:
- 6379:6379
env:
DB_HOST: localhost
DB_USER: postgres
DB_PASS: postgres
DISABLE_SIMPLECOV: ${{ matrix.ruby-version != '.ruby-version' }}
RAILS_ENV: test
ALLOW_NOPAM: true
PAM_ENABLED: true
PAM_DEFAULT_SERVICE: pam_test
PAM_CONTROLLED_SERVICE: pam_test_controlled
OIDC_ENABLED: true
OIDC_SCOPE: read
SAML_ENABLED: true
CAS_ENABLED: true
BUNDLE_WITH: 'pam_authentication test'
GITHUB_RSPEC: ${{ matrix.ruby-version == '.ruby-version' && github.event.pull_request && 'true' }}
strategy:
fail-fast: false
matrix:
ruby-version:
- '3.1'
- '3.2'
- '.ruby-version'
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
path: './'
name: ${{ github.sha }}
- name: Expand archived asset artifacts
run: |
tar xvzf artifacts.tar.gz
- name: Set up Ruby environment
uses: ./.github/actions/setup-ruby
with:
ruby-version: ${{ matrix.ruby-version }}
additional-system-dependencies: ffmpeg libpam-dev
- name: Load database schema
run: |
bin/rails db:setup
bin/flatware fan bin/rails db:test:prepare
- run: bin/flatware rspec -r ./spec/flatware_helper.rb
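# flatware parallelizes RSpec across workers; `fan` above runs the given
# command once per worker, so each gets its own prepared test database
# (an assumption based on flatware's documented behavior).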
- name: Upload coverage reports to Codecov
if: matrix.ruby-version == '.ruby-version'
uses: codecov/codecov-action@v4
with:
files: coverage/lcov/*.lcov
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
test-libvips:
name: Libvips tests
runs-on: ubuntu-24.04
needs:
- build
services:
postgres:
image: postgres:14-alpine
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
options: >-
--health-cmd pg_isready
--health-interval 10ms
--health-timeout 3s
--health-retries 50
ports:
- 5432:5432
redis:
image: redis:7-alpine
options: >-
--health-cmd "redis-cli ping"
--health-interval 10ms
--health-timeout 3s
--health-retries 50
ports:
- 6379:6379
env:
DB_HOST: localhost
DB_USER: postgres
DB_PASS: postgres
DISABLE_SIMPLECOV: ${{ matrix.ruby-version != '.ruby-version' }}
RAILS_ENV: test
ALLOW_NOPAM: true
PAM_ENABLED: true
PAM_DEFAULT_SERVICE: pam_test
PAM_CONTROLLED_SERVICE: pam_test_controlled
OIDC_ENABLED: true
OIDC_SCOPE: read
SAML_ENABLED: true
CAS_ENABLED: true
BUNDLE_WITH: 'pam_authentication test'
GITHUB_RSPEC: ${{ matrix.ruby-version == '.ruby-version' && github.event.pull_request && 'true' }}
MASTODON_USE_LIBVIPS: true
strategy:
fail-fast: false
matrix:
ruby-version:
- '3.1'
- '3.2'
- '.ruby-version'
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
path: './'
name: ${{ github.sha }}
- name: Expand archived asset artifacts
run: |
tar xvzf artifacts.tar.gz
- name: Set up Ruby environment
uses: ./.github/actions/setup-ruby
with:
ruby-version: ${{ matrix.ruby-version }}
additional-system-dependencies: ffmpeg libpam-dev libyaml-dev
- name: Load database schema
run: './bin/rails db:create db:schema:load db:seed'
- run: bin/rspec --tag attachment_processing
- name: Upload coverage reports to Codecov
if: matrix.ruby-version == '.ruby-version'
uses: codecov/codecov-action@v4
with:
files: coverage/lcov/mastodon.lcov
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
test-e2e:
name: End to End testing
runs-on: ubuntu-latest
needs:
- build
services:
postgres:
image: postgres:14-alpine
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
options: >-
--health-cmd pg_isready
--health-interval 10ms
--health-timeout 3s
--health-retries 50
ports:
- 5432:5432
redis:
image: redis:7-alpine
options: >-
--health-cmd "redis-cli ping"
--health-interval 10ms
--health-timeout 3s
--health-retries 50
ports:
- 6379:6379
env:
DB_HOST: localhost
DB_USER: postgres
DB_PASS: postgres
DISABLE_SIMPLECOV: true
RAILS_ENV: test
BUNDLE_WITH: test
LOCAL_DOMAIN: localhost:3000
LOCAL_HTTPS: false
strategy:
fail-fast: false
matrix:
ruby-version:
- '3.1'
- '3.2'
- '.ruby-version'
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
path: './'
name: ${{ github.sha }}
- name: Expand archived asset artifacts
run: |
tar xvzf artifacts.tar.gz
- name: Set up Ruby environment
uses: ./.github/actions/setup-ruby
with:
ruby-version: ${{ matrix.ruby-version }}
additional-system-dependencies: ffmpeg
- name: Set up Javascript environment
uses: ./.github/actions/setup-javascript
- name: Load database schema
run: './bin/rails db:create db:schema:load db:seed'
- run: bin/rspec spec/system --tag streaming --tag js
- name: Archive logs
uses: actions/upload-artifact@v4
if: failure()
with:
name: e2e-logs-${{ matrix.ruby-version }}
path: log/
- name: Archive test screenshots
uses: actions/upload-artifact@v4
if: failure()
with:
name: e2e-screenshots
path: tmp/capybara/
test-search:
name: Elastic Search integration testing
runs-on: ubuntu-latest
needs:
- build
services:
postgres:
image: postgres:14-alpine
env:
POSTGRES_PASSWORD: postgres
POSTGRES_USER: postgres
options: >-
--health-cmd pg_isready
--health-interval 10ms
--health-timeout 3s
--health-retries 50
ports:
- 5432:5432
redis:
image: redis:7-alpine
options: >-
--health-cmd "redis-cli ping"
--health-interval 10ms
--health-timeout 3s
--health-retries 50
ports:
- 6379:6379
elasticsearch:
image: ${{ contains(matrix.search-image, 'elasticsearch') && matrix.search-image || '' }}
env:
discovery.type: single-node
xpack.security.enabled: false
options: >-
--health-cmd "curl http://localhost:9200/_cluster/health"
--health-interval 2s
--health-timeout 3s
--health-retries 50
ports:
- 9200:9200
opensearch:
image: ${{ contains(matrix.search-image, 'opensearch') && matrix.search-image || '' }}
env:
discovery.type: single-node
DISABLE_INSTALL_DEMO_CONFIG: true
DISABLE_SECURITY_PLUGIN: true
options: >-
--health-cmd "curl http://localhost:9200/_cluster/health"
--health-interval 2s
--health-timeout 3s
--health-retries 50
ports:
- 9200:9200
env:
DB_HOST: localhost
DB_USER: postgres
DB_PASS: postgres
DISABLE_SIMPLECOV: true
RAILS_ENV: test
BUNDLE_WITH: test
ES_ENABLED: true
ES_HOST: localhost
ES_PORT: 9200
strategy:
fail-fast: false
matrix:
ruby-version:
- '3.1'
- '3.2'
- '.ruby-version'
search-image:
- docker.elastic.co/elasticsearch/elasticsearch:7.17.13
include:
- ruby-version: '.ruby-version'
search-image: docker.elastic.co/elasticsearch/elasticsearch:8.10.2
- ruby-version: '.ruby-version'
search-image: opensearchproject/opensearch:2
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
path: './'
name: ${{ github.sha }}
- name: Set up Ruby environment
uses: ./.github/actions/setup-ruby
with:
ruby-version: ${{ matrix.ruby-version }}
additional-system-dependencies: ffmpeg
- name: Set up Javascript environment
uses: ./.github/actions/setup-javascript
- name: Load database schema
run: './bin/rails db:create db:schema:load db:seed'
- run: bin/rspec --tag search
- name: Archive logs
uses: actions/upload-artifact@v4
if: failure()
with:
name: test-search-logs-${{ matrix.ruby-version }}
path: log/
- name: Archive test screenshots
uses: actions/upload-artifact@v4
if: failure()
with:
name: test-search-screenshots
path: tmp/capybara/

@@ -1,15 +0,0 @@
exclude:
- 'vendor/**/*'
require:
- ./lib/linter/haml_middle_dot.rb
linters:
AltText:
enabled: true
MiddleDot:
enabled: true
LineLength:
max: 300
ViewLength:
max: 200 # Override default value of 100 inherited from rubocop

@@ -1 +0,0 @@
yarn lint-staged

@@ -1,34 +0,0 @@
---
AllCops:
CacheRootDirectory: tmp
DisplayStyleGuide: true
Exclude:
- Vagrantfile
- config/initializers/json_ld*
- lib/mastodon/migration_helpers.rb
ExtraDetails: true
NewCops: enable
TargetRubyVersion: 3.1 # Oldest supported ruby version
inherit_from:
- .rubocop/layout.yml
- .rubocop/metrics.yml
- .rubocop/naming.yml
- .rubocop/rails.yml
- .rubocop/rspec_rails.yml
- .rubocop/rspec.yml
- .rubocop/style.yml
- .rubocop/custom.yml
- .rubocop_todo.yml
- .rubocop/strict.yml
inherit_mode:
merge:
- Exclude
require:
- rubocop-rails
- rubocop-rspec
- rubocop-rspec_rails
- rubocop-performance
- rubocop-capybara

@@ -1,6 +0,0 @@
---
require:
- ../lib/linter/rubocop_middle_dot
Style/MiddleDot:
Enabled: true

@@ -1,6 +0,0 @@
---
Layout/FirstHashElementIndentation:
EnforcedStyle: consistent
Layout/LineLength:
Max: 300 # Default of 120 causes a duplicate entry in generated todo file

@@ -1,23 +0,0 @@
---
Metrics/AbcSize:
Exclude:
- lib/mastodon/cli/*.rb
Metrics/BlockLength:
Enabled: false
Metrics/ClassLength:
Enabled: false
Metrics/CyclomaticComplexity:
Exclude:
- lib/mastodon/cli/*.rb
Metrics/MethodLength:
Enabled: false
Metrics/ModuleLength:
Enabled: false
Metrics/ParameterLists:
CountKeywordArgs: false

@@ -1,3 +0,0 @@
---
Naming/BlockForwarding:
EnforcedStyle: explicit

View file

@ -1,23 +0,0 @@
---
Rails/BulkChangeTable:
Enabled: false # Conflicts with strong_migrations features
Rails/FilePath:
EnforcedStyle: arguments
Rails/HttpStatus:
EnforcedStyle: numeric
Rails/NegateInclude:
Enabled: false
Rails/RakeEnvironment:
Exclude: # These tasks do local work and do not need the full env loaded
- lib/tasks/auto_annotate_models.rake
- lib/tasks/emojis.rake
- lib/tasks/mastodon.rake
- lib/tasks/repo.rake
- lib/tasks/statistics.rake
Rails/SkipsModelValidations:
Enabled: false

View file

@ -1,27 +0,0 @@
---
RSpec/ExampleLength:
CountAsOne: ['array', 'heredoc', 'method_call']
Max: 20 # Override default of 5
RSpec/MultipleExpectations:
Max: 10 # Overrides default of 1
RSpec/MultipleMemoizedHelpers:
Max: 20 # Overrides default of 5
RSpec/NamedSubject:
EnforcedStyle: named_only
RSpec/NestedGroups:
Max: 10 # Overrides default of 3
RSpec/NotToNot:
EnforcedStyle: to_not
RSpec/SpecFilePathFormat:
CustomTransform:
ActivityPub: activitypub
DeepL: deepl
FetchOEmbedService: fetch_oembed_service
OEmbedController: oembed_controller
OStatus: ostatus

View file

@ -1,3 +0,0 @@
---
RSpecRails/HttpStatus:
EnforcedStyle: numeric

View file

@ -1,19 +0,0 @@
Lint/Debugger: # Remove any `binding.pry`
Enabled: true
Exclude: []
RSpec/Focus: # Require full spec run on CI
Enabled: true
Exclude: []
Rails/Output: # Remove any `puts` debugging
Enabled: true
Exclude: []
Rails/FindEach: # Using `each` could impact performance, use `find_each`
Enabled: true
Exclude: []
Rails/UniqBeforePluck: # Require `uniq.pluck` and not `pluck.uniq`
Enabled: true
Exclude: []

View file

@ -1,47 +0,0 @@
---
Style/ClassAndModuleChildren:
Enabled: false
Style/Documentation:
Enabled: false
Style/FormatStringToken:
AllowedMethods:
- redirect_with_vary # Route redirects are not token-formatted
inherit_mode:
merge:
- AllowedMethods
Style/HashAsLastArrayItem:
Enabled: false
Style/HashSyntax:
EnforcedShorthandSyntax: either
EnforcedStyle: ruby19_no_mixed_keys
Style/NumericLiterals:
AllowedPatterns:
- \d{4}_\d{2}_\d{2}_\d{6}
Style/PercentLiteralDelimiters:
PreferredDelimiters:
'%i': ()
'%w': ()
Style/RedundantBegin:
Enabled: false
Style/RedundantFetchBlock:
Enabled: false
Style/RescueStandardError:
EnforcedStyle: implicit
Style/SymbolArray:
Enabled: false
Style/TrailingCommaInArrayLiteral:
EnforcedStyleForMultiline: comma
Style/TrailingCommaInHashLiteral:
EnforcedStyleForMultiline: comma

View file

@ -1,117 +0,0 @@
# This configuration was generated by
# `rubocop --auto-gen-config --auto-gen-only-exclude --no-offense-counts --no-auto-gen-timestamp`
# using RuboCop version 1.65.0.
# The point is for the user to remove these configuration records
# one by one as the offenses are removed from the code base.
# Note that changes in the inspected code, or installation of new
# versions of RuboCop, may require this file to be generated again.
Lint/NonLocalExitFromIterator:
Exclude:
- 'app/helpers/jsonld_helper.rb'
# Configuration parameters: AllowedMethods, AllowedPatterns, CountRepeatedAttributes.
Metrics/AbcSize:
Max: 90
# Configuration parameters: CountBlocks, CountModifierForms, Max.
Metrics/BlockNesting:
Exclude:
- 'lib/tasks/mastodon.rake'
# Configuration parameters: AllowedMethods, AllowedPatterns.
Metrics/CyclomaticComplexity:
Max: 25
# Configuration parameters: AllowedMethods, AllowedPatterns.
Metrics/PerceivedComplexity:
Max: 27
Rails/OutputSafety:
Exclude:
- 'config/initializers/simple_form.rb'
# This cop supports safe autocorrection (--autocorrect).
# Configuration parameters: AllowedVars.
Style/FetchEnvVar:
Exclude:
- 'app/lib/redis_configuration.rb'
- 'app/lib/translation_service.rb'
- 'config/environments/production.rb'
- 'config/initializers/2_limited_federation_mode.rb'
- 'config/initializers/3_omniauth.rb'
- 'config/initializers/blacklists.rb'
- 'config/initializers/cache_buster.rb'
- 'config/initializers/devise.rb'
- 'config/initializers/paperclip.rb'
- 'config/initializers/vapid.rb'
- 'lib/mastodon/redis_config.rb'
- 'lib/tasks/repo.rake'
# This cop supports safe autocorrection (--autocorrect).
# Configuration parameters: EnforcedStyle, MaxUnannotatedPlaceholdersAllowed, AllowedMethods, AllowedPatterns.
# SupportedStyles: annotated, template, unannotated
# AllowedMethods: redirect
Style/FormatStringToken:
Exclude:
- 'config/initializers/devise.rb'
- 'lib/paperclip/color_extractor.rb'
# This cop supports safe autocorrection (--autocorrect).
# Configuration parameters: MinBodyLength, AllowConsecutiveConditionals.
Style/GuardClause:
Enabled: false
# This cop supports unsafe autocorrection (--autocorrect-all).
Style/HashTransformValues:
Exclude:
- 'app/serializers/rest/web_push_subscription_serializer.rb'
- 'app/services/import_service.rb'
# This cop supports unsafe autocorrection (--autocorrect-all).
Style/MapToHash:
Exclude:
- 'app/models/status.rb'
# This cop supports unsafe autocorrection (--autocorrect-all).
# Configuration parameters: EnforcedStyle.
# SupportedStyles: literals, strict
Style/MutableConstant:
Exclude:
- 'app/models/tag.rb'
- 'app/services/delete_account_service.rb'
- 'lib/mastodon/migration_warning.rb'
# Configuration parameters: AllowedMethods.
# AllowedMethods: respond_to_missing?
Style/OptionalBooleanParameter:
Exclude:
- 'app/helpers/jsonld_helper.rb'
- 'app/lib/admin/system_check/message.rb'
- 'app/lib/request.rb'
- 'app/lib/webfinger.rb'
- 'app/services/block_domain_service.rb'
- 'app/services/fetch_resource_service.rb'
- 'app/workers/domain_block_worker.rb'
- 'app/workers/unfollow_follow_worker.rb'
- 'lib/mastodon/redis_config.rb'
# This cop supports unsafe autocorrection (--autocorrect-all).
# Configuration parameters: EnforcedStyle.
# SupportedStyles: short, verbose
Style/PreferredHashMethods:
Exclude:
- 'config/initializers/paperclip.rb'
# This cop supports safe autocorrection (--autocorrect).
Style/RedundantConstantBase:
Exclude:
- 'config/environments/production.rb'
- 'config/initializers/sidekiq.rb'
# This cop supports safe autocorrection (--autocorrect).
# Configuration parameters: WordRegex.
# SupportedStyles: percent, brackets
Style/WordArray:
EnforcedStyle: percent
MinSize: 3

View file

@ -1,5 +0,0 @@
ffmpeg
libopenblas0-pthread
libpq-dev
libxdamage1
libxfixes3

File diff suppressed because it is too large

View file

@ -1,46 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
- The use of sexualized language or imagery and unwelcome sexual attention or advances
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic address, without explicit permission
- Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at glitch-abuse@sitedethib.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [https://contributor-covenant.org/version/1/4][version]
[homepage]: https://contributor-covenant.org
[version]: https://contributor-covenant.org/version/1/4/

View file

@ -1,96 +0,0 @@
# Contributing to Mastodon Glitch Edition
Thank you for your interest in contributing to the `glitch-soc` project!
Here are some guidelines, and ways you can help.
> (This document is a bit of a work-in-progress, so please bear with us.
> If you don't see what you're looking for here, please don't hesitate to reach out!)
## Translations
You can submit glitch-soc-specific translations via [Crowdin](https://crowdin.com/project/glitch-soc). They are periodically merged into the codebase.
[![Crowdin](https://badges.crowdin.net/glitch-soc/localized.svg)](https://crowdin.com/project/glitch-soc)
## Planning
Right now a lot of the planning for this project takes place in our development Discord, or through GitHub Issues and Projects.
We're working on ways to improve the planning structure and better solicit feedback, and if you feel like you can help in this respect, feel free to give us a holler.
## Documentation
The documentation for this repository is available at [`glitch-soc/docs`](https://github.com/glitch-soc/docs) (online at [glitch-soc.github.io/docs/](https://glitch-soc.github.io/docs/)).
Right now, we've mostly focused on the features that make this fork different from upstream in some manner.
Adding screenshots, improving descriptions, and so forth are all ways to help contribute to the project even if you don't know any code.
## Frontend Development
Check out [the documentation here](https://glitch-soc.github.io/docs/contributing/frontend/) for more information.
## Backend Development
See the guidelines below.
---
You should also try to follow the guidelines set out in the original `CONTRIBUTING.md` from `mastodon/mastodon`, reproduced below.
<blockquote>
# Contributing
Thank you for considering contributing to Mastodon 🐘
You can contribute in the following ways:
- Finding and reporting bugs
- Translating the Mastodon interface into various languages
- Contributing code to Mastodon by fixing bugs or implementing features
- Improving the documentation
If your contributions are accepted into Mastodon, you can request to be paid through [our OpenCollective](https://opencollective.com/mastodon).
Please review the org-level [contribution guidelines] for high-level acceptance
criteria guidance.
[contribution guidelines]: https://github.com/mastodon/.github/blob/main/CONTRIBUTING.md
## API Changes and Additions
Please note that any changes or additions made to the API should have an accompanying pull request on [our documentation repository](https://github.com/mastodon/documentation).
## Bug reports
Bug reports and feature suggestions must use descriptive and concise titles and be submitted to [GitHub Issues](https://github.com/mastodon/mastodon/issues). Please use the search function to make sure that you are not submitting duplicates, and that a similar report or request has not already been resolved or rejected.
## Translations
You can submit translations via [Crowdin](https://crowdin.com/project/mastodon). They are periodically merged into the codebase.
[![Crowdin](https://d322cqt584bo4o.cloudfront.net/mastodon/localized.svg)](https://crowdin.com/project/mastodon)
## Pull requests
**Please use clean, concise titles for your pull requests.** Unless the pull request is about refactoring code, updating dependencies or other internal tasks, assume that the person reading the pull request title is not a programmer or Mastodon developer, but instead a Mastodon user or server administrator, and **try to describe your change or fix from their perspective**. We use commit squashing, so the final commit in the main branch will carry the title of the pull request, and commits from the main branch are fed into the changelog. The changelog is separated into [keepachangelog.com categories](https://keepachangelog.com/en/1.0.0/), and while that spec does not prescribe how the entries ought to be named, for easier sorting, start your pull request titles using one of the verbs "Add", "Change", "Deprecate", "Remove", or "Fix" (present tense).
Example:
| Not ideal | Better |
| ------------------------------------ | ------------------------------------------------------------- |
| Fixed NoMethodError in RemovalWorker | Fix nil error when removing statuses caused by race condition |
It is not always possible to phrase every change in such a manner, but it is desired.
**The smaller the set of changes in the pull request is, the quicker it can be reviewed and merged.** Splitting tasks into multiple smaller pull requests is often preferable.
**Pull requests that do not pass automated checks may not be reviewed**. In particular, you need to keep in mind:
- Unit and integration tests (rspec, jest)
- Code style rules (rubocop, eslint)
- Normalization of locale files (i18n-tasks)
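These can usually be run locally before pushing. A sketch of the corresponding commands (script and task names are assumptions and may vary between Mastodon versions):

bundle exec rspec                        # unit and integration tests (rspec)
yarn jest                                # frontend tests (jest)
bundle exec rubocop                      # Ruby style (rubocop)
yarn lint                                # JS/CSS style (eslint and friends)
bundle exec i18n-tasks check-normalized  # locale file normalization (i18n-tasks)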
## Documentation
The [Mastodon documentation](https://docs.joinmastodon.org) is a statically generated site. You can [submit merge requests to mastodon/documentation](https://github.com/mastodon/documentation).
</blockquote>

View file

@ -1,404 +0,0 @@
# syntax=docker/dockerfile:1.9
# This file is designed for production server deployment, not local development work
# For a containerized local dev environment, see: https://github.com/mastodon/mastodon/blob/main/README.md#docker
# Please see https://docs.docker.com/engine/reference/builder for information about
# the extended buildx capabilities used in this file.
# Make sure multiarch TARGETPLATFORM is available for interpolation
# See: https://docs.docker.com/build/building/multi-platform/
ARG TARGETPLATFORM=${TARGETPLATFORM}
ARG BUILDPLATFORM=${BUILDPLATFORM}
# Ruby image to use for base image, change with [--build-arg RUBY_VERSION="3.3.x"]
# renovate: datasource=docker depName=docker.io/ruby
ARG RUBY_VERSION="3.3.4"
# Node version to use in base image, change with [--build-arg NODE_MAJOR_VERSION="20"]
# renovate: datasource=node-version depName=node
ARG NODE_MAJOR_VERSION="20"
# Debian image to use for base image, change with [--build-arg DEBIAN_VERSION="bookworm"]
ARG DEBIAN_VERSION="bookworm"
# Node image to use for base image based on combined variables (ex: 20-bookworm-slim)
FROM docker.io/node:${NODE_MAJOR_VERSION}-${DEBIAN_VERSION}-slim AS node
# Ruby image to use for base image based on combined variables (ex: 3.3.x-slim-bookworm)
FROM docker.io/ruby:${RUBY_VERSION}-slim-${DEBIAN_VERSION} AS ruby
# Resulting version string is vX.X.X-MASTODON_VERSION_PRERELEASE+MASTODON_VERSION_METADATA
# Example: v4.3.0-nightly.2023.11.09+pr-123456
# Overwrite existence of 'alpha.X' in version.rb [--build-arg MASTODON_VERSION_PRERELEASE="nightly.2023.11.09"]
ARG MASTODON_VERSION_PRERELEASE=""
# Append build metadata or fork information to version.rb [--build-arg MASTODON_VERSION_METADATA="pr-123456"]
ARG MASTODON_VERSION_METADATA=""
# Allow Ruby on Rails to serve static files
# See: https://docs.joinmastodon.org/admin/config/#rails_serve_static_files
ARG RAILS_SERVE_STATIC_FILES="true"
# Allow to use YJIT compiler
# See: https://github.com/ruby/ruby/blob/v3_2_4/doc/yjit/yjit.md
ARG RUBY_YJIT_ENABLE="1"
# Timezone used by the Docker container and runtime, change with [--build-arg TZ=Europe/Berlin]
ARG TZ="Etc/UTC"
# Linux UID (user id) for the mastodon user, change with [--build-arg UID=1234]
ARG UID="991"
# Linux GID (group id) for the mastodon user, change with [--build-arg GID=1234]
ARG GID="991"
# Apply Mastodon build options based on options above
ENV \
# Apply Mastodon version information
MASTODON_VERSION_PRERELEASE="${MASTODON_VERSION_PRERELEASE}" \
MASTODON_VERSION_METADATA="${MASTODON_VERSION_METADATA}" \
# Apply Mastodon static files and YJIT options
RAILS_SERVE_STATIC_FILES=${RAILS_SERVE_STATIC_FILES} \
RUBY_YJIT_ENABLE=${RUBY_YJIT_ENABLE} \
# Apply timezone
TZ=${TZ}
ENV \
# Configure the IP to bind Mastodon to when serving traffic
BIND="0.0.0.0" \
# Use production settings for Yarn, Node and related nodejs based tools
NODE_ENV="production" \
# Use production settings for Ruby on Rails
RAILS_ENV="production" \
# Add Ruby and Mastodon installation to the PATH
DEBIAN_FRONTEND="noninteractive" \
PATH="${PATH}:/opt/ruby/bin:/opt/mastodon/bin" \
# Optimize jemalloc 5.x performance
MALLOC_CONF="narenas:2,background_thread:true,thp:never,dirty_decay_ms:1000,muzzy_decay_ms:0" \
# Enable libvips, should not be changed
MASTODON_USE_LIBVIPS=true \
# Sidekiq will touch tmp/sidekiq_process_has_started_and_will_begin_processing_jobs to indicate it is ready. This can be used for a readiness check in Kubernetes
MASTODON_SIDEKIQ_READY_FILENAME=sidekiq_process_has_started_and_will_begin_processing_jobs
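# As an illustration only (a sketch, not part of this image): a Kubernetes exec
# readiness probe for a Sidekiq container could simply check for that file, e.g.
#   test -f /opt/mastodon/tmp/sidekiq_process_has_started_and_will_begin_processing_jobs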
# Set default shell used for running commands
SHELL ["/bin/bash", "-o", "pipefail", "-o", "errexit", "-c"]
ARG TARGETPLATFORM
RUN echo "Target platform is $TARGETPLATFORM"
RUN \
# Remove automatic apt cache Docker cleanup scripts
rm -f /etc/apt/apt.conf.d/docker-clean; \
# Sets timezone
echo "${TZ}" > /etc/localtime; \
# Creates mastodon user/group and sets home directory
groupadd -g "${GID}" mastodon; \
useradd -l -u "${UID}" -g "${GID}" -m -d /opt/mastodon mastodon; \
# Creates /mastodon symlink to /opt/mastodon
ln -s /opt/mastodon /mastodon;
# Set /opt/mastodon as working directory
WORKDIR /opt/mastodon
# hadolint ignore=DL3008,DL3005
RUN \
# Mount Apt cache and lib directories from Docker buildx caches
--mount=type=cache,id=apt-cache-${TARGETPLATFORM},target=/var/cache/apt,sharing=locked \
--mount=type=cache,id=apt-lib-${TARGETPLATFORM},target=/var/lib/apt,sharing=locked \
# Apt update & upgrade to check for security updates to Debian image
apt-get update; \
apt-get dist-upgrade -yq; \
# Install jemalloc, curl and other necessary components
apt-get install -y --no-install-recommends \
curl \
file \
libjemalloc2 \
patchelf \
procps \
tini \
tzdata \
wget \
; \
# Patch Ruby to use jemalloc
patchelf --add-needed libjemalloc.so.2 /usr/local/bin/ruby; \
# Discard patchelf after use
apt-get purge -y \
patchelf \
;
# Create temporary build layer from base image
FROM ruby AS build
# Copy Node package configuration files into working directory
COPY package.json yarn.lock .yarnrc.yml /opt/mastodon/
COPY .yarn /opt/mastodon/.yarn
COPY --from=node /usr/local/bin /usr/local/bin
COPY --from=node /usr/local/lib /usr/local/lib
ARG TARGETPLATFORM
# hadolint ignore=DL3008
RUN \
# Mount Apt cache and lib directories from Docker buildx caches
--mount=type=cache,id=apt-cache-${TARGETPLATFORM},target=/var/cache/apt,sharing=locked \
--mount=type=cache,id=apt-lib-${TARGETPLATFORM},target=/var/lib/apt,sharing=locked \
# Install build tools and bundler dependencies from APT
apt-get install -y --no-install-recommends \
autoconf \
automake \
build-essential \
cmake \
git \
libgdbm-dev \
libglib2.0-dev \
libgmp-dev \
libicu-dev \
libidn-dev \
libpq-dev \
libssl-dev \
libtool \
meson \
nasm \
pkg-config \
shared-mime-info \
xz-utils \
# libvips components
libcgif-dev \
libexif-dev \
libexpat1-dev \
libgirepository1.0-dev \
libheif-dev \
libimagequant-dev \
libjpeg62-turbo-dev \
liblcms2-dev \
liborc-dev \
libspng-dev \
libtiff-dev \
libwebp-dev \
# ffmpeg components
libdav1d-dev \
liblzma-dev \
libmp3lame-dev \
libopus-dev \
libsnappy-dev \
libvorbis-dev \
libvpx-dev \
libx264-dev \
libx265-dev \
;
RUN \
# Configure Corepack
rm /usr/local/bin/yarn*; \
corepack enable; \
corepack prepare --activate;
# Create temporary libvips specific build layer from build layer
FROM build AS libvips
# libvips version to compile, change with [--build-arg VIPS_VERSION="8.15.2"]
# renovate: datasource=github-releases depName=libvips packageName=libvips/libvips
ARG VIPS_VERSION=8.15.2
# libvips download URL, change with [--build-arg VIPS_URL="https://github.com/libvips/libvips/releases/download"]
ARG VIPS_URL=https://github.com/libvips/libvips/releases/download
WORKDIR /usr/local/libvips/src
RUN \
curl -sSL -o vips-${VIPS_VERSION}.tar.xz ${VIPS_URL}/v${VIPS_VERSION}/vips-${VIPS_VERSION}.tar.xz; \
tar xf vips-${VIPS_VERSION}.tar.xz; \
cd vips-${VIPS_VERSION}; \
meson setup build --prefix /usr/local/libvips --libdir=lib -Ddeprecated=false -Dintrospection=disabled -Dmodules=disabled -Dexamples=false; \
cd build; \
ninja; \
ninja install;
# Create temporary ffmpeg specific build layer from build layer
FROM build AS ffmpeg
# ffmpeg version to compile, change with [--build-arg FFMPEG_VERSION="7.0.x"]
# renovate: datasource=repology depName=ffmpeg packageName=openpkg_current/ffmpeg
ARG FFMPEG_VERSION=7.0.1
# ffmpeg download URL, change with [--build-arg FFMPEG_URL="https://ffmpeg.org/releases"]
ARG FFMPEG_URL=https://ffmpeg.org/releases
WORKDIR /usr/local/ffmpeg/src
RUN \
curl -sSL -o ffmpeg-${FFMPEG_VERSION}.tar.xz ${FFMPEG_URL}/ffmpeg-${FFMPEG_VERSION}.tar.xz; \
tar xf ffmpeg-${FFMPEG_VERSION}.tar.xz; \
cd ffmpeg-${FFMPEG_VERSION}; \
./configure \
--prefix=/usr/local/ffmpeg \
--toolchain=hardened \
--disable-debug \
--disable-devices \
--disable-doc \
--disable-ffplay \
--disable-network \
--disable-static \
--enable-ffmpeg \
--enable-ffprobe \
--enable-gpl \
--enable-libdav1d \
--enable-libmp3lame \
--enable-libopus \
--enable-libsnappy \
--enable-libvorbis \
--enable-libvpx \
--enable-libwebp \
--enable-libx264 \
--enable-libx265 \
--enable-shared \
--enable-version3 \
; \
make -j$(nproc); \
make install;
# Create temporary bundler specific build layer from build layer
FROM build AS bundler
ARG TARGETPLATFORM
# Copy Gemfile config into working directory
COPY Gemfile* /opt/mastodon/
RUN \
# Mount Ruby Gem caches
--mount=type=cache,id=gem-cache-${TARGETPLATFORM},target=/usr/local/bundle/cache/,sharing=locked \
# Configure bundle to prevent changes to Gemfile and Gemfile.lock
bundle config set --global frozen "true"; \
# Configure bundle to not cache downloaded Gems
bundle config set --global cache_all "false"; \
# Configure bundle to only process production Gems
bundle config set --local without "development test"; \
# Configure bundle to not warn about root user
bundle config set silence_root_warning "true"; \
# Download and install required Gems
bundle install -j"$(nproc)";
# Create temporary node specific build layer from build layer
FROM build AS yarn
ARG TARGETPLATFORM
# Copy Node package configuration files into working directory
COPY package.json yarn.lock .yarnrc.yml /opt/mastodon/
COPY streaming/package.json /opt/mastodon/streaming/
COPY .yarn /opt/mastodon/.yarn
# hadolint ignore=DL3008
RUN \
--mount=type=cache,id=corepack-cache-${TARGETPLATFORM},target=/usr/local/share/.cache/corepack,sharing=locked \
--mount=type=cache,id=yarn-cache-${TARGETPLATFORM},target=/usr/local/share/.cache/yarn,sharing=locked \
# Install Node packages
yarn workspaces focus --production @mastodon/mastodon;
# Create temporary assets build layer from build layer
FROM build AS precompiler
# Copy Mastodon sources into precompiler layer
COPY . /opt/mastodon/
# Copy bundler and node packages from build layer to container
COPY --from=yarn /opt/mastodon /opt/mastodon/
COPY --from=bundler /opt/mastodon /opt/mastodon/
COPY --from=bundler /usr/local/bundle/ /usr/local/bundle/
# Copy libvips components to layer for precompiler
COPY --from=libvips /usr/local/libvips/bin /usr/local/bin
COPY --from=libvips /usr/local/libvips/lib /usr/local/lib
ARG TARGETPLATFORM
RUN \
ldconfig; \
# Use Ruby on Rails to create Mastodon assets
SECRET_KEY_BASE_DUMMY=1 \
bundle exec rails assets:precompile; \
# Cleanup temporary files
rm -fr /opt/mastodon/tmp;
# Prep final Mastodon Ruby layer
FROM ruby AS mastodon
ARG TARGETPLATFORM
# hadolint ignore=DL3008
RUN \
# Mount Apt cache and lib directories from Docker buildx caches
--mount=type=cache,id=apt-cache-${TARGETPLATFORM},target=/var/cache/apt,sharing=locked \
--mount=type=cache,id=apt-lib-${TARGETPLATFORM},target=/var/lib/apt,sharing=locked \
# Mount Corepack and Yarn caches from Docker buildx caches
--mount=type=cache,id=corepack-cache-${TARGETPLATFORM},target=/usr/local/share/.cache/corepack,sharing=locked \
--mount=type=cache,id=yarn-cache-${TARGETPLATFORM},target=/usr/local/share/.cache/yarn,sharing=locked \
# Install non-dev versions of necessary components
apt-get install -y --no-install-recommends \
libexpat1 \
libglib2.0-0 \
libicu72 \
libidn12 \
libpq5 \
libreadline8 \
libssl3 \
libyaml-0-2 \
# libvips components
libcgif0 \
libexif12 \
libheif1 \
libimagequant0 \
libjpeg62-turbo \
liblcms2-2 \
liborc-0.4-0 \
libspng0 \
libtiff6 \
libwebp7 \
libwebpdemux2 \
libwebpmux3 \
# ffmpeg components
libdav1d6 \
libmp3lame0 \
libopencore-amrnb0 \
libopencore-amrwb0 \
libopus0 \
libsnappy1v5 \
libtheora0 \
libvorbis0a \
libvorbisenc2 \
libvorbisfile3 \
libvpx7 \
libx264-164 \
libx265-199 \
;
# Copy Mastodon sources into final layer
COPY . /opt/mastodon/
# Copy compiled assets to layer
COPY --from=precompiler /opt/mastodon/public/packs /opt/mastodon/public/packs
COPY --from=precompiler /opt/mastodon/public/assets /opt/mastodon/public/assets
# Copy bundler components to layer
COPY --from=bundler /usr/local/bundle/ /usr/local/bundle/
# Copy libvips components to layer
COPY --from=libvips /usr/local/libvips/bin /usr/local/bin
COPY --from=libvips /usr/local/libvips/lib /usr/local/lib
# Copy ffmpeg components to layer
COPY --from=ffmpeg /usr/local/ffmpeg/bin /usr/local/bin
COPY --from=ffmpeg /usr/local/ffmpeg/lib /usr/local/lib
RUN \
ldconfig; \
# Smoketest media processors
vips -v; \
ffmpeg -version; \
ffprobe -version;
RUN \
# Precompile bootsnap code for faster Rails startup
bundle exec bootsnap precompile --gemfile app/ lib/;
RUN \
# Pre-create and chown system volume to Mastodon user
mkdir -p /opt/mastodon/public/system; \
chown mastodon:mastodon /opt/mastodon/public/system; \
# Set Mastodon user as owner of tmp folder
chown -R mastodon:mastodon /opt/mastodon/tmp;
# Set the running user for resulting container
USER mastodon
# Expose default Puma ports
EXPOSE 3000
# Set container tini as default entry point
ENTRYPOINT ["/usr/bin/tini", "--"]

View file

@ -1,49 +0,0 @@
# Federation
## Supported federation protocols and standards
- [ActivityPub](https://www.w3.org/TR/activitypub/) (Server-to-Server)
- [WebFinger](https://webfinger.net/)
- [HTTP Signatures](https://datatracker.ietf.org/doc/html/draft-cavage-http-signatures)
- [NodeInfo](https://nodeinfo.diaspora.software/)
## Supported FEPs
- [FEP-67ff: FEDERATION.md](https://codeberg.org/fediverse/fep/src/branch/main/fep/67ff/fep-67ff.md)
- [FEP-f1d5: NodeInfo in Fediverse Software](https://codeberg.org/fediverse/fep/src/branch/main/fep/f1d5/fep-f1d5.md)
- [FEP-8fcf: Followers collection synchronization across servers](https://codeberg.org/fediverse/fep/src/branch/main/fep/8fcf/fep-8fcf.md)
- [FEP-5feb: Search indexing consent for actors](https://codeberg.org/fediverse/fep/src/branch/main/fep/5feb/fep-5feb.md)
## ActivityPub in Mastodon
Mastodon largely follows the ActivityPub server-to-server specification, but it makes use of some non-standard extensions, some of which are required for interacting with Mastodon at all.
- [Supported ActivityPub vocabulary](https://docs.joinmastodon.org/spec/activitypub/)
### Required extensions
#### WebFinger
In Mastodon, users are identified by a `username` and `domain` pair (e.g., `Gargron@mastodon.social`).
This is used both for discovery and for unambiguously mentioning users across the fediverse. Furthermore, this is part of Mastodon's database design from its very beginnings.
As a result, Mastodon requires that each ActivityPub actor uniquely maps back to an `acct:` URI that can be resolved via WebFinger.
- [WebFinger information and examples](https://docs.joinmastodon.org/spec/webfinger/)
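As an illustration (a sketch using curl against a real instance; not taken from this repository), the example handle above resolves through a standard WebFinger request, whose JSON response links the `acct:` URI back to the actor's ActivityPub id:

curl 'https://mastodon.social/.well-known/webfinger?resource=acct:Gargron@mastodon.social'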
#### HTTP Signatures
In order to authenticate activities, Mastodon relies on HTTP Signatures, signing every `POST` and `GET` request to other ActivityPub implementations on behalf of the user authoring an activity (for `POST` requests) or an actor representing the Mastodon server itself (for most `GET` requests).
Mastodon requires all `POST` requests to be signed, and MAY require `GET` requests to be signed, depending on the configuration of the Mastodon server.
- [HTTP Signatures information and examples](https://docs.joinmastodon.org/spec/security/#http)
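As a rough sketch of the wire format (the draft-cavage header shape with placeholder values, not an exact Mastodon-generated header), a signed `POST` carries something like:

Signature: keyId="https://example.com/users/alice#main-key",algorithm="rsa-sha256",headers="(request-target) host date digest",signature="<base64-encoded signature>"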
### Optional extensions
- [Linked-Data Signatures](https://docs.joinmastodon.org/spec/security/#ld)
- [Bearcaps](https://docs.joinmastodon.org/spec/bearcaps/)
### Additional documentation
- [Mastodon documentation](https://docs.joinmastodon.org/)

View file

@ -1,14 +0,0 @@
web: bin/heroku-web
worker: bundle exec sidekiq
# For the streaming API, you need a separate app that shares Postgres and Redis:
#
# heroku create
# heroku buildpacks:add heroku/nodejs
# heroku config:set RUN_STREAMING=true
# heroku addons:attach <main-app>::DATABASE
# heroku addons:attach <main-app>::REDIS
#
# and let the main app use the separate app:
#
# heroku config:set STREAMING_API_BASE_URL=wss://<streaming-app>.herokuapp.com -a <main-app>

View file

@ -1,4 +0,0 @@
web: env PORT=3000 RAILS_ENV=development bundle exec puma -C config/puma.rb
sidekiq: env PORT=3000 RAILS_ENV=development bundle exec sidekiq
stream: env PORT=4000 yarn workspace @mastodon/streaming start
webpack: bin/webpack-dev-server

View file

@ -7,6 +7,7 @@
- add bubble timeline to "Getting started" and "Live feeds"
- fix avatar corner rounding to look as intended
- add remlit mascot
- slimmed down the repository. no need for the backend anymore.
for my tweaked version, you can use an express-hosted build instead. rather than following the steps below, just reverse proxy or tunnel localhost:3132 while running frontend-server/server.mjs.
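for example (a sketch; assumes caddy is installed, the domain is a placeholder, and any other reverse proxy or tunnel works just as well):

caddy reverse-proxy --from your.domain.example --to localhost:3132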

View file

@ -1,8 +0,0 @@
# frozen_string_literal: true
# Add your own tasks in files placed in lib/tasks ending in .rake,
# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake.
require File.expand_path('config/application', __dir__)
Rails.application.load_tasks

Vagrantfile vendored
View file

@ -1,201 +0,0 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
ENV["PORT"] ||= "3000"
$provisionA = <<SCRIPT
# Add the yarn repo + yarn repo keys
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
sudo apt-add-repository 'deb https://dl.yarnpkg.com/debian/ stable main'
# Add repo for NodeJS
sudo mkdir -p /etc/apt/keyrings
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | sudo gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg
NODE_MAJOR=20
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main" | sudo tee /etc/apt/sources.list.d/nodesource.list
sudo apt-get update
# Add firewall rule to redirect 80 to PORT and save
sudo iptables -t nat -A PREROUTING -p tcp --dport 80 -j REDIRECT --to-port #{ENV["PORT"]}
echo iptables-persistent iptables-persistent/autosave_v4 boolean true | sudo debconf-set-selections
echo iptables-persistent iptables-persistent/autosave_v6 boolean true | sudo debconf-set-selections
sudo apt-get install iptables-persistent -y
# Add packages to build and run Mastodon
sudo apt-get install \
git-core \
g++ \
libpq-dev \
libxml2-dev \
libxslt1-dev \
imagemagick \
nodejs \
redis-server \
redis-tools \
postgresql \
postgresql-contrib \
libicu-dev \
libidn11-dev \
libreadline6-dev \
autoconf \
bison \
build-essential \
ffmpeg \
file \
gcc \
libffi-dev \
libgdbm-dev \
libjemalloc-dev \
libncurses5-dev \
libprotobuf-dev \
libssl-dev \
libyaml-dev \
pkg-config \
protobuf-compiler \
zlib1g-dev \
-y
# Install rvm
sudo apt-add-repository -y ppa:rael-gc/rvm
sudo apt-get install rvm -y
sudo usermod -a -G rvm $USER
SCRIPT
$provisionElasticsearch = <<SCRIPT
# Install Elastic Search
sudo apt install openjdk-17-jre-headless -y
sudo wget -O /usr/share/keyrings/elasticsearch.asc https://artifacts.elastic.co/GPG-KEY-elasticsearch
sudo sh -c 'echo "deb [signed-by=/usr/share/keyrings/elasticsearch.asc] https://artifacts.elastic.co/packages/7.x/apt stable main" > /etc/apt/sources.list.d/elastic-7.x.list'
sudo apt update
sudo apt install elasticsearch -y
sudo systemctl daemon-reload
sudo systemctl enable --now elasticsearch
echo 'path.data: /var/lib/elasticsearch
path.logs: /var/log/elasticsearch
network.host: 0.0.0.0
http.port: 9200
discovery.seed_hosts: ["localhost"]
cluster.initial_master_nodes: ["node-1"]
xpack.security.enabled: false' > /etc/elasticsearch/elasticsearch.yml
sudo systemctl restart elasticsearch
# Install Kibana
sudo apt install kibana -y
sudo systemctl enable --now kibana
echo 'server.host: "0.0.0.0"
elasticsearch.hosts: ["http://localhost:9200"]' > /etc/kibana/kibana.yml
sudo systemctl restart kibana
SCRIPT
$provisionB = <<SCRIPT
source "/etc/profile.d/rvm.sh"
# Install Ruby
read RUBY_VERSION < /vagrant/.ruby-version
rvm install ruby-$RUBY_VERSION --disable-binary
# Configure database
sudo -u postgres createuser -U postgres vagrant -s
sudo -u postgres createdb -U postgres mastodon_development
cd /vagrant # This is where the host folder/repo is mounted
# Install gems
gem install bundler foreman
bundle install
# Install node modules
sudo corepack enable
corepack prepare
yarn install
# Build Mastodon
export RAILS_ENV=development
export $(cat ".env.vagrant" | xargs)
bundle exec rails db:setup
# Configure automatic loading of environment variable
echo 'export RAILS_ENV=development' >> ~/.bash_profile
echo 'export $(cat "/vagrant/.env.vagrant" | xargs)' >> ~/.bash_profile
SCRIPT
VAGRANTFILE_API_VERSION = "2"
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
config.vm.box = "ubuntu/focal64"
config.vm.provider :virtualbox do |vb|
vb.name = "mastodon"
vb.customize ["modifyvm", :id, "--memory", "8192"]
vb.customize ["modifyvm", :id, "--cpus", "3"]
# Disable VirtualBox DNS proxy to skip long-delay IPv6 resolutions.
# https://github.com/mitchellh/vagrant/issues/1172
vb.customize ["modifyvm", :id, "--natdnsproxy1", "off"]
vb.customize ["modifyvm", :id, "--natdnshostresolver1", "off"]
# Use "virtio" network interfaces for better performance.
vb.customize ["modifyvm", :id, "--nictype1", "virtio"]
vb.customize ["modifyvm", :id, "--nictype2", "virtio"]
end
config.vm.provider :libvirt do |libvirt|
libvirt.cpus = 3
libvirt.memory = 8192
end
# This uses the vagrant-hostsupdater plugin, and lets you
# access the development site at http://mastodon.local.
# If you change it, also change it in .env.vagrant before provisioning
# the vagrant server to update the development build.
#
# To install:
# $ vagrant plugin install vagrant-hostsupdater
config.vm.hostname = "mastodon.local"
if defined?(VagrantPlugins::HostsUpdater)
config.vm.network :private_network, ip: "192.168.42.42", nictype: "virtio"
config.hostsupdater.remove_on_suspend = false
end
if config.vm.networks.any? { |type, options| type == :private_network }
config.vm.synced_folder ".", "/vagrant", type: "nfs", mount_options: ['rw', 'actimeo=1']
else
config.vm.synced_folder ".", "/vagrant"
end
# Otherwise, you can access the site at http://localhost:3000, http://localhost:4000, and http://localhost:8080
config.vm.network :forwarded_port, guest: 3000, host: 3000
config.vm.network :forwarded_port, guest: 3035, host: 3035
config.vm.network :forwarded_port, guest: 4000, host: 4000
config.vm.network :forwarded_port, guest: 8080, host: 8080
config.vm.network :forwarded_port, guest: 9200, host: 9200
config.vm.network :forwarded_port, guest: 9300, host: 9300
config.vm.network :forwarded_port, guest: 9243, host: 9243
config.vm.network :forwarded_port, guest: 5601, host: 5601
# Full provisioning script, only runs on first 'vagrant up' or with 'vagrant provision'
config.vm.provision :shell, inline: $provisionA, privileged: false, reset: true
# Run with elevated privileges for Elasticsearch installation
config.vm.provision :shell, inline: $provisionElasticsearch, privileged: true
config.vm.provision :shell, inline: $provisionB, privileged: false
config.vm.post_up_message = <<MESSAGE
To start server
$ vagrant ssh -c "cd /vagrant && bin/dev"
MESSAGE
end

View file

@ -1,70 +0,0 @@
# frozen_string_literal: true
class AccountsIndex < Chewy::Index
include DatetimeClampingConcern
settings index: index_preset(refresh_interval: '30s'), analysis: {
filter: {
english_stop: {
type: 'stop',
stopwords: '_english_',
},
english_stemmer: {
type: 'stemmer',
language: 'english',
},
english_possessive_stemmer: {
type: 'stemmer',
language: 'possessive_english',
},
},
analyzer: {
natural: {
tokenizer: 'standard',
filter: %w(
lowercase
asciifolding
cjk_width
elision
english_possessive_stemmer
english_stop
english_stemmer
),
},
verbatim: {
tokenizer: 'standard',
filter: %w(lowercase asciifolding cjk_width),
},
edge_ngram: {
tokenizer: 'edge_ngram',
filter: %w(lowercase asciifolding cjk_width),
},
},
tokenizer: {
edge_ngram: {
type: 'edge_ngram',
min_gram: 1,
max_gram: 15,
},
},
}
index_scope ::Account.searchable.includes(:account_stat)
root date_detection: false do
field(:id, type: 'long')
field(:following_count, type: 'long')
field(:followers_count, type: 'long')
field(:properties, type: 'keyword', value: ->(account) { account.searchable_properties })
field(:last_status_at, type: 'date', value: ->(account) { clamp_date(account.last_status_at || account.created_at) })
field(:display_name, type: 'text', analyzer: 'verbatim') { field :edge_ngram, type: 'text', analyzer: 'edge_ngram', search_analyzer: 'verbatim' }
field(:username, type: 'text', analyzer: 'verbatim', value: ->(account) { [account.username, account.domain].compact.join('@') }) { field :edge_ngram, type: 'text', analyzer: 'edge_ngram', search_analyzer: 'verbatim' }
field(:text, type: 'text', analyzer: 'verbatim', value: ->(account) { account.searchable_text }) { field :stemmed, type: 'text', analyzer: 'natural' }
end
end

View file

@ -1,14 +0,0 @@
# frozen_string_literal: true
module DatetimeClampingConcern
extend ActiveSupport::Concern
MIN_ISO8601_DATETIME = '0000-01-01T00:00:00Z'.to_datetime.freeze
MAX_ISO8601_DATETIME = '9999-12-31T23:59:59Z'.to_datetime.freeze
class_methods do
def clamp_date(datetime)
datetime.clamp(MIN_ISO8601_DATETIME, MAX_ISO8601_DATETIME)
end
end
end

View file

@ -1,12 +0,0 @@
# frozen_string_literal: true
class InstancesIndex < Chewy::Index
settings index: index_preset(refresh_interval: '30s')
index_scope ::Instance.searchable
root date_detection: false do
field :domain, type: 'text', index_prefixes: { min_chars: 1, max_chars: 5 }
field :accounts_count, type: 'long'
end
end

View file

@ -1,69 +0,0 @@
# frozen_string_literal: true
class PublicStatusesIndex < Chewy::Index
include DatetimeClampingConcern
settings index: index_preset(refresh_interval: '30s', number_of_shards: 5), analysis: {
filter: {
english_stop: {
type: 'stop',
stopwords: '_english_',
},
english_stemmer: {
type: 'stemmer',
language: 'english',
},
english_possessive_stemmer: {
type: 'stemmer',
language: 'possessive_english',
},
},
analyzer: {
verbatim: {
tokenizer: 'uax_url_email',
filter: %w(lowercase),
},
content: {
tokenizer: 'standard',
filter: %w(
lowercase
asciifolding
cjk_width
elision
english_possessive_stemmer
english_stop
english_stemmer
),
},
hashtag: {
tokenizer: 'keyword',
filter: %w(
word_delimiter_graph
lowercase
asciifolding
cjk_width
),
},
},
}
index_scope ::Status.unscoped
.kept
.indexable
.includes(:media_attachments, :preloadable_poll, :tags, preview_cards_status: :preview_card)
root date_detection: false do
field(:id, type: 'long')
field(:account_id, type: 'long')
field(:text, type: 'text', analyzer: 'verbatim', value: ->(status) { status.searchable_text }) { field(:stemmed, type: 'text', analyzer: 'content') }
field(:tags, type: 'text', analyzer: 'hashtag', value: ->(status) { status.tags.map(&:display_name) })
field(:language, type: 'keyword')
field(:properties, type: 'keyword', value: ->(status) { status.searchable_properties })
field(:created_at, type: 'date', value: ->(status) { clamp_date(status.created_at) })
end
end

View file

@ -1,67 +0,0 @@
# frozen_string_literal: true
class StatusesIndex < Chewy::Index
include DatetimeClampingConcern
settings index: index_preset(refresh_interval: '30s', number_of_shards: 5), analysis: {
filter: {
english_stop: {
type: 'stop',
stopwords: '_english_',
},
english_stemmer: {
type: 'stemmer',
language: 'english',
},
english_possessive_stemmer: {
type: 'stemmer',
language: 'possessive_english',
},
},
analyzer: {
verbatim: {
tokenizer: 'uax_url_email',
filter: %w(lowercase),
},
content: {
tokenizer: 'standard',
filter: %w(
lowercase
asciifolding
cjk_width
elision
english_possessive_stemmer
english_stop
english_stemmer
),
},
hashtag: {
tokenizer: 'keyword',
filter: %w(
word_delimiter_graph
lowercase
asciifolding
cjk_width
),
},
},
}
index_scope ::Status.unscoped.kept.without_reblogs.includes(:media_attachments, :local_mentioned, :local_favorited, :local_reblogged, :local_bookmarked, :tags, preview_cards_status: :preview_card, preloadable_poll: :local_voters), delete_if: ->(status) { status.searchable_by.empty? }
root date_detection: false do
field(:id, type: 'long')
field(:account_id, type: 'long')
field(:text, type: 'text', analyzer: 'verbatim', value: ->(status) { status.searchable_text }) { field(:stemmed, type: 'text', analyzer: 'content') }
field(:tags, type: 'text', analyzer: 'hashtag', value: ->(status) { status.tags.map(&:display_name) })
field(:searchable_by, type: 'long', value: ->(status) { status.searchable_by })
field(:language, type: 'keyword')
field(:properties, type: 'keyword', value: ->(status) { status.searchable_properties })
field(:created_at, type: 'date', value: ->(status) { clamp_date(status.created_at) })
end
end

View file

@ -1,49 +0,0 @@
# frozen_string_literal: true
class TagsIndex < Chewy::Index
include DatetimeClampingConcern
settings index: index_preset(refresh_interval: '30s'), analysis: {
analyzer: {
content: {
tokenizer: 'keyword',
filter: %w(
word_delimiter_graph
lowercase
asciifolding
cjk_width
),
},
edge_ngram: {
tokenizer: 'edge_ngram',
filter: %w(
lowercase
asciifolding
cjk_width
),
},
},
tokenizer: {
edge_ngram: {
type: 'edge_ngram',
min_gram: 2,
max_gram: 15,
},
},
}
index_scope ::Tag.listable
crutch :time_period do
7.days.ago.to_date..0.days.ago.to_date
end
root date_detection: false do
field(:name, type: 'text', analyzer: 'content', value: :display_name) { field(:edge_ngram, type: 'text', analyzer: 'edge_ngram', search_analyzer: 'content') }
field(:reviewed, type: 'boolean', value: ->(tag) { tag.reviewed? })
field(:usage, type: 'long', value: ->(tag, crutches) { tag.history.aggregate(crutches.time_period).accounts })
field(:last_status_at, type: 'date', value: ->(tag) { clamp_date(tag.last_status_at || tag.created_at) })
end
end

View file

@ -1,11 +0,0 @@
# frozen_string_literal: true
class AboutController < ApplicationController
include WebAppControllerConcern
skip_before_action :require_functional!
def show
expires_in(15.seconds, public: true, stale_while_revalidate: 30.seconds, stale_if_error: 1.day) unless user_signed_in?
end
end

View file

@ -1,102 +0,0 @@
# frozen_string_literal: true
class AccountsController < ApplicationController
PAGE_SIZE = 20
PAGE_SIZE_MAX = 200
include AccountControllerConcern
include SignatureAuthentication
vary_by -> { public_fetch_mode? ? 'Accept, Accept-Language, Cookie' : 'Accept, Accept-Language, Cookie, Signature' }
before_action :require_account_signature!, if: -> { request.format == :json && authorized_fetch_mode? }
skip_around_action :set_locale, if: -> { [:json, :rss].include?(request.format&.to_sym) }
skip_before_action :require_functional!, unless: :limited_federation_mode?
def show
respond_to do |format|
format.html do
expires_in(15.seconds, public: true, stale_while_revalidate: 30.seconds, stale_if_error: 1.hour) unless user_signed_in?
end
format.rss do
expires_in 1.minute, public: true
limit = params[:limit].present? ? [params[:limit].to_i, PAGE_SIZE_MAX].min : PAGE_SIZE
@statuses = filtered_statuses.without_reblogs.limit(limit)
@statuses = preload_collection(@statuses, Status)
end
format.json do
expires_in 3.minutes, public: !(authorized_fetch_mode? && signed_request_account.present?)
render_with_cache json: @account, content_type: 'application/activity+json', serializer: ActivityPub::ActorSerializer, adapter: ActivityPub::Adapter
end
end
end
private
def filtered_statuses
default_statuses.tap do |statuses|
statuses.merge!(hashtag_scope) if tag_requested?
statuses.merge!(only_media_scope) if media_requested?
statuses.merge!(no_replies_scope) unless replies_requested?
end
end
def default_statuses
@account.statuses.not_local_only.distributable_visibility
end
def only_media_scope
Status.joins(:media_attachments).merge(@account.media_attachments).group(:id)
end
def no_replies_scope
Status.without_replies
end
def hashtag_scope
tag = Tag.find_normalized(params[:tag])
if tag
Status.tagged_with(tag.id)
else
Status.none
end
end
def username_param
params[:username]
end
def skip_temporary_suspension_response?
request.format == :json
end
def rss_url
if tag_requested?
short_account_tag_url(@account, params[:tag], format: 'rss')
else
short_account_url(@account, format: 'rss')
end
end
helper_method :rss_url
def media_requested?
path_without_format.end_with?('/media') && !tag_requested?
end
def replies_requested?
path_without_format.end_with?('/with_replies') && !tag_requested?
end
def tag_requested?
path_without_format.end_with?(Addressable::URI.parse("/tagged/#{params[:tag]}").normalize)
end
def path_without_format
request.path.split('.').first
end
end

View file

@ -1,16 +0,0 @@
# frozen_string_literal: true
class ActivityPub::BaseController < Api::BaseController
include SignatureVerification
include AccountOwnedConcern
skip_before_action :require_authenticated_user!
skip_before_action :require_not_suspended!
skip_around_action :set_locale
private
def skip_temporary_suspension_response?
false
end
end

View file

@ -1,18 +0,0 @@
# frozen_string_literal: true
class ActivityPub::ClaimsController < ActivityPub::BaseController
skip_before_action :authenticate_user!
before_action :require_account_signature!
before_action :set_claim_result
def create
render json: @claim_result, serializer: ActivityPub::OneTimeKeySerializer
end
private
def set_claim_result
@claim_result = ::Keys::ClaimService.new.call(@account.id, params[:id])
end
end

View file

@ -1,72 +0,0 @@
# frozen_string_literal: true
class ActivityPub::CollectionsController < ActivityPub::BaseController
vary_by -> { 'Signature' if authorized_fetch_mode? }
before_action :require_account_signature!, if: :authorized_fetch_mode?
before_action :set_items
before_action :set_size
before_action :set_type
def show
expires_in 3.minutes, public: public_fetch_mode?
render_with_cache json: collection_presenter, content_type: 'application/activity+json', serializer: ActivityPub::CollectionSerializer, adapter: ActivityPub::Adapter
end
private
def set_items
case params[:id]
when 'featured'
@items = for_signed_account { preload_collection(@account.pinned_statuses.not_local_only, Status) }
@items = @items.map { |item| item.distributable? ? item : ActivityPub::TagManager.instance.uri_for(item) }
when 'tags'
@items = for_signed_account { @account.featured_tags }
when 'devices'
@items = @account.devices
else
not_found
end
end
def set_size
case params[:id]
when 'featured', 'devices', 'tags'
@size = @items.size
else
not_found
end
end
def set_type
case params[:id]
when 'featured'
@type = :ordered
when 'devices', 'tags'
@type = :unordered
else
not_found
end
end
def collection_presenter
ActivityPub::CollectionPresenter.new(
id: account_collection_url(@account, params[:id]),
type: @type,
size: @size,
items: @items
)
end
def for_signed_account
# Because in public fetch mode we cache the response, there would be no
# benefit from performing the check below, since a blocked account or domain
# would likely be served the cache from the reverse proxy anyway
if authorized_fetch_mode? && !signed_request_account.nil? && (@account.blocking?(signed_request_account) || (!signed_request_account.domain.nil? && @account.domain_blocking?(signed_request_account.domain)))
[]
else
yield
end
end
end

View file

@ -1,34 +0,0 @@
# frozen_string_literal: true
class ActivityPub::FollowersSynchronizationsController < ActivityPub::BaseController
vary_by -> { 'Signature' if authorized_fetch_mode? }
before_action :require_account_signature!
before_action :set_items
def show
expires_in 0, public: false
render json: collection_presenter,
serializer: ActivityPub::CollectionSerializer,
adapter: ActivityPub::Adapter,
content_type: 'application/activity+json'
end
private
def uri_prefix
signed_request_account.uri[Account::URL_PREFIX_RE]
end
def set_items
@items = @account.followers.matches_uri_prefix(uri_prefix).pluck(:uri)
end
def collection_presenter
ActivityPub::CollectionPresenter.new(
id: account_followers_synchronization_url(@account),
type: :ordered,
items: @items
)
end
end

View file

@ -1,73 +0,0 @@
# frozen_string_literal: true
class ActivityPub::InboxesController < ActivityPub::BaseController
include JsonLdHelper
before_action :skip_unknown_actor_activity
before_action :require_actor_signature!
skip_before_action :authenticate_user!
def create
upgrade_account
process_collection_synchronization
process_payload
head 202
end
private
def skip_unknown_actor_activity
head 202 if unknown_affected_account?
end
def unknown_affected_account?
json = Oj.load(body, mode: :strict)
json.is_a?(Hash) && %w(Delete Update).include?(json['type']) && json['actor'].present? && json['actor'] == value_or_id(json['object']) && !Account.exists?(uri: json['actor'])
rescue Oj::ParseError
false
end
def account_required?
params[:account_username].present?
end
def skip_temporary_suspension_response?
true
end
def body
return @body if defined?(@body)
@body = request.body.read
@body.force_encoding('UTF-8') if @body.present?
request.body.rewind if request.body.respond_to?(:rewind)
@body
end
def upgrade_account
if signed_request_account&.ostatus?
signed_request_account.update(last_webfingered_at: nil)
ResolveAccountWorker.perform_async(signed_request_account.acct)
end
DeliveryFailureTracker.reset!(signed_request_actor.inbox_url)
end
def process_collection_synchronization
raw_params = request.headers['Collection-Synchronization']
return if raw_params.blank? || ENV['DISABLE_FOLLOWERS_SYNCHRONIZATION'] == 'true' || signed_request_account.nil?
# Re-using the syntax for signature parameters
params = SignatureParser.parse(raw_params)
ActivityPub::PrepareFollowersSynchronizationService.new.call(signed_request_account, params)
rescue SignatureParser::ParsingError
Rails.logger.warn 'Error parsing Collection-Synchronization header'
end
def process_payload
ActivityPub::ProcessingWorker.perform_async(signed_request_actor.id, body, @account&.id, signed_request_actor.class.name)
end
end

View file

@ -1,82 +0,0 @@
# frozen_string_literal: true
class ActivityPub::OutboxesController < ActivityPub::BaseController
LIMIT = 20
vary_by -> { 'Signature' if authorized_fetch_mode? || page_requested? }
before_action :require_account_signature!, if: :authorized_fetch_mode?
before_action :set_statuses
def show
if page_requested?
expires_in(1.minute, public: public_fetch_mode? && signed_request_account.nil?)
else
expires_in(3.minutes, public: public_fetch_mode?)
end
render json: outbox_presenter, serializer: ActivityPub::OutboxSerializer, adapter: ActivityPub::Adapter, content_type: 'application/activity+json'
end
private
def outbox_presenter
if page_requested?
ActivityPub::CollectionPresenter.new(
id: outbox_url(**page_params),
type: :ordered,
part_of: outbox_url,
prev: prev_page,
next: next_page,
items: @statuses
)
else
ActivityPub::CollectionPresenter.new(
id: outbox_url,
type: :ordered,
size: @account.statuses_count,
first: outbox_url(page: true),
last: outbox_url(page: true, min_id: 0)
)
end
end
def outbox_url(**kwargs)
if params[:account_username].present?
account_outbox_url(@account, **kwargs)
else
instance_actor_outbox_url(**kwargs)
end
end
def next_page
outbox_url(page: true, max_id: @statuses.last.id) if @statuses.size == LIMIT
end
def prev_page
outbox_url(page: true, min_id: @statuses.first.id) unless @statuses.empty?
end
def set_statuses
return unless page_requested?
@statuses = preload_collection_paginated_by_id(
AccountStatusesFilter.new(@account, signed_request_account).results,
Status,
LIMIT,
params_slice(:max_id, :min_id, :since_id)
)
end
def page_requested?
truthy_param?(:page)
end
def page_params
{ page: true, max_id: params[:max_id], min_id: params[:min_id] }.compact
end
def set_account
@account = params[:account_username].present? ? Account.find_local!(username_param) : Account.representative
end
end

View file

@ -1,93 +0,0 @@
# frozen_string_literal: true
class ActivityPub::RepliesController < ActivityPub::BaseController
include Authorization
DESCENDANTS_LIMIT = 60
vary_by -> { 'Signature' if authorized_fetch_mode? }
before_action :require_account_signature!, if: :authorized_fetch_mode?
before_action :set_status
before_action :set_replies
def index
expires_in 0, public: public_fetch_mode?
render json: replies_collection_presenter, serializer: ActivityPub::CollectionSerializer, adapter: ActivityPub::Adapter, content_type: 'application/activity+json', skip_activities: true
end
private
def pundit_user
signed_request_account
end
def set_status
@status = @account.statuses.find(params[:status_id])
authorize @status, :show?
rescue Mastodon::NotPermittedError
not_found
end
def set_replies
@replies = only_other_accounts? ? Status.where.not(account_id: @account.id).joins(:account).merge(Account.without_suspended) : @account.statuses
@replies = @replies.distributable_visibility.where(in_reply_to_id: @status.id)
@replies = @replies.paginate_by_min_id(DESCENDANTS_LIMIT, params[:min_id])
end
def replies_collection_presenter
page = ActivityPub::CollectionPresenter.new(
id: account_status_replies_url(@account, @status, page_params),
type: :unordered,
part_of: account_status_replies_url(@account, @status),
next: next_page,
items: @replies.map { |status| status.local? ? status : status.uri }
)
return page if page_requested?
ActivityPub::CollectionPresenter.new(
id: account_status_replies_url(@account, @status),
type: :unordered,
first: page
)
end
def page_requested?
truthy_param?(:page)
end
def only_other_accounts?
truthy_param?(:only_other_accounts)
end
def next_page
if only_other_accounts?
# Only consider remote accounts
return nil if @replies.size < DESCENDANTS_LIMIT
account_status_replies_url(
@account,
@status,
page: true,
min_id: @replies&.last&.id,
only_other_accounts: true
)
else
# For now, we're serving only self-replies, but next page might be other accounts
next_only_other_accounts = @replies&.last&.account_id != @account.id || @replies.size < DESCENDANTS_LIMIT
account_status_replies_url(
@account,
@status,
page: true,
min_id: next_only_other_accounts ? nil : @replies&.last&.id,
only_other_accounts: next_only_other_accounts
)
end
end
def page_params
params_slice(:only_other_accounts, :min_id).merge(page: true)
end
end

View file

@ -1,40 +0,0 @@
# frozen_string_literal: true
module Admin
class AccountActionsController < BaseController
before_action :set_account
def new
authorize @account, :show?
@account_action = Admin::AccountAction.new(type: params[:type], report_id: params[:report_id], send_email_notification: true, include_statuses: true)
@warning_presets = AccountWarningPreset.all
end
def create
authorize @account, :show?
account_action = Admin::AccountAction.new(resource_params)
account_action.target_account = @account
account_action.current_account = current_account
account_action.save!
if account_action.with_report?
redirect_to admin_reports_path, notice: I18n.t('admin.reports.processed_msg', id: resource_params[:report_id])
else
redirect_to admin_account_path(@account.id)
end
end
private
def set_account
@account = Account.find(params[:account_id])
end
def resource_params
params.require(:admin_account_action).permit(:type, :report_id, :warning_preset_id, :text, :send_email_notification, :include_statuses)
end
end
end

View file: app/controllers/admin/account_moderation_notes_controller.rb

@ -1,42 +0,0 @@
# frozen_string_literal: true
module Admin
class AccountModerationNotesController < BaseController
before_action :set_account_moderation_note, only: [:destroy]
def create
authorize AccountModerationNote, :create?
@account_moderation_note = current_account.account_moderation_notes.new(resource_params)
if @account_moderation_note.save
redirect_to admin_account_path(@account_moderation_note.target_account_id), notice: I18n.t('admin.account_moderation_notes.created_msg')
else
@account = @account_moderation_note.target_account
@moderation_notes = @account.targeted_moderation_notes.latest
@warnings = @account.strikes.custom.latest
render 'admin/accounts/show'
end
end
def destroy
authorize @account_moderation_note, :destroy?
@account_moderation_note.destroy!
redirect_to admin_account_path(@account_moderation_note.target_account_id), notice: I18n.t('admin.account_moderation_notes.destroyed_msg')
end
private
def resource_params
params.require(:account_moderation_note).permit(
:content,
:target_account_id
)
end
def set_account_moderation_note
@account_moderation_note = AccountModerationNote.find(params[:id])
end
end
end

View file: app/controllers/admin/accounts_controller.rb

@ -1,174 +0,0 @@
# frozen_string_literal: true
module Admin
class AccountsController < BaseController
before_action :set_account, except: [:index, :batch]
before_action :require_remote_account!, only: [:redownload]
before_action :require_local_account!, only: [:enable, :memorialize, :approve, :reject]
def index
authorize :account, :index?
@accounts = filtered_accounts.page(params[:page])
@form = Form::AccountBatch.new
end
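# Applies the selected bulk action to the checked accounts and always returns to the filtered index.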
def batch
authorize :account, :index?
@form = Form::AccountBatch.new(form_account_batch_params)
@form.current_account = current_account
@form.action = action_from_button
@form.select_all_matching = params[:select_all_matching]
@form.query = filtered_accounts
@form.save
rescue ActionController::ParameterMissing
flash[:alert] = I18n.t('admin.accounts.no_account_selected')
ensure
redirect_to admin_accounts_path(filter_params)
end
def show
authorize @account, :show?
@deletion_request = @account.deletion_request
@account_moderation_note = current_account.account_moderation_notes.new(target_account: @account)
@moderation_notes = @account.targeted_moderation_notes.latest
@warnings = @account.strikes.includes(:target_account, :account, :appeal).latest
@domain_block = DomainBlock.rule_for(@account.domain)
end
def memorialize
authorize @account, :memorialize?
@account.memorialize!
log_action :memorialize, @account
redirect_to admin_account_path(@account.id), notice: I18n.t('admin.accounts.memorialized_msg', username: @account.acct)
end
def enable
authorize @account.user, :enable?
@account.user.enable!
log_action :enable, @account.user
redirect_to admin_account_path(@account.id), notice: I18n.t('admin.accounts.enabled_msg', username: @account.acct)
end
def approve
authorize @account.user, :approve?
@account.user.approve!
log_action :approve, @account.user
redirect_to admin_accounts_path(status: 'pending'), notice: I18n.t('admin.accounts.approved_msg', username: @account.acct)
end
def reject
authorize @account.user, :reject?
DeleteAccountService.new.call(@account, reserve_email: false, reserve_username: false)
log_action :reject, @account.user
redirect_to admin_accounts_path(status: 'pending'), notice: I18n.t('admin.accounts.rejected_msg', username: @account.acct)
end
def destroy
authorize @account, :destroy?
Admin::AccountDeletionWorker.perform_async(@account.id)
redirect_to admin_account_path(@account.id), notice: I18n.t('admin.accounts.destroyed_msg', username: @account.acct)
end
def unsensitive
authorize @account, :unsensitive?
@account.unsensitize!
log_action :unsensitive, @account
redirect_to admin_account_path(@account.id)
end
def unsilence
authorize @account, :unsilence?
@account.unsilence!
log_action :unsilence, @account
redirect_to admin_account_path(@account.id), notice: I18n.t('admin.accounts.unsilenced_msg', username: @account.acct)
end
def unsuspend
authorize @account, :unsuspend?
@account.unsuspend!
Admin::UnsuspensionWorker.perform_async(@account.id)
log_action :unsuspend, @account
redirect_to admin_account_path(@account.id), notice: I18n.t('admin.accounts.unsuspended_msg', username: @account.acct)
end
def redownload
authorize @account, :redownload?
@account.update!(last_webfingered_at: nil)
ResolveAccountService.new.call(@account)
redirect_to admin_account_path(@account.id), notice: I18n.t('admin.accounts.redownloaded_msg', username: @account.acct)
end
def remove_avatar
authorize @account, :remove_avatar?
@account.avatar = nil
@account.save!
log_action :remove_avatar, @account.user
redirect_to admin_account_path(@account.id), notice: I18n.t('admin.accounts.removed_avatar_msg', username: @account.acct)
end
def remove_header
authorize @account, :remove_header?
@account.header = nil
@account.save!
log_action :remove_header, @account.user
redirect_to admin_account_path(@account.id), notice: I18n.t('admin.accounts.removed_header_msg', username: @account.acct)
end
def unblock_email
authorize @account, :unblock_email?
CanonicalEmailBlock.where(reference_account: @account).delete_all
log_action :unblock_email, @account
redirect_to admin_account_path(@account.id), notice: I18n.t('admin.accounts.unblocked_email_msg', username: @account.acct)
end
private
def set_account
@account = Account.find(params[:id])
end
def require_remote_account!
redirect_to admin_account_path(@account.id) if @account.local?
end
def require_local_account!
redirect_to admin_account_path(@account.id) unless @account.local? && @account.user.present?
end
def filtered_accounts
AccountFilter.new(filter_params.with_defaults(order: 'recent')).results
end
def filter_params
params.slice(:page, *AccountFilter::KEYS).permit(:page, *AccountFilter::KEYS)
end
def form_account_batch_params
params.require(:form_account_batch).permit(:action, account_ids: [])
end
def action_from_button
if params[:suspend]
'suspend'
elsif params[:approve]
'approve'
elsif params[:reject]
'reject'
end
end
end
end

View file: app/controllers/admin/action_logs_controller.rb

@ -1,22 +0,0 @@
# frozen_string_literal: true
module Admin
class ActionLogsController < BaseController
before_action :set_action_logs
def index
authorize :audit_log, :index?
@auditable_accounts = Account.auditable.select(:id, :username)
end
private
def set_action_logs
@action_logs = Admin::ActionLogFilter.new(filter_params).results.page(params[:page])
end
def filter_params
params.slice(:page, *Admin::ActionLogFilter::KEYS).permit(:page, *Admin::ActionLogFilter::KEYS)
end
end
end

View file: app/controllers/admin/announcements_controller.rb

@ -1,88 +0,0 @@
# frozen_string_literal: true
class Admin::AnnouncementsController < Admin::BaseController
before_action :set_announcements, only: :index
before_action :set_announcement, except: [:index, :new, :create]
def index
authorize :announcement, :index?
end
def new
authorize :announcement, :create?
@announcement = Announcement.new
end
def edit
authorize :announcement, :update?
end
def create
authorize :announcement, :create?
@announcement = Announcement.new(resource_params)
if @announcement.save
PublishScheduledAnnouncementWorker.perform_async(@announcement.id) if @announcement.published?
log_action :create, @announcement
redirect_to admin_announcements_path, notice: @announcement.published? ? I18n.t('admin.announcements.published_msg') : I18n.t('admin.announcements.scheduled_msg')
else
render :new
end
end
def update
authorize :announcement, :update?
if @announcement.update(resource_params)
PublishScheduledAnnouncementWorker.perform_async(@announcement.id) if @announcement.published?
log_action :update, @announcement
redirect_to admin_announcements_path, notice: I18n.t('admin.announcements.updated_msg')
else
render :edit
end
end
def publish
authorize :announcement, :update?
@announcement.publish!
PublishScheduledAnnouncementWorker.perform_async(@announcement.id)
log_action :update, @announcement
redirect_to admin_announcements_path, notice: I18n.t('admin.announcements.published_msg')
end
def unpublish
authorize :announcement, :update?
@announcement.unpublish!
UnpublishAnnouncementWorker.perform_async(@announcement.id)
log_action :update, @announcement
redirect_to admin_announcements_path, notice: I18n.t('admin.announcements.unpublished_msg')
end
def destroy
authorize :announcement, :destroy?
@announcement.destroy!
UnpublishAnnouncementWorker.perform_async(@announcement.id) if @announcement.published?
log_action :destroy, @announcement
redirect_to admin_announcements_path, notice: I18n.t('admin.announcements.destroyed_msg')
end
private
def set_announcements
@announcements = AnnouncementFilter.new(filter_params).results.reverse_chronological.page(params[:page])
end
def set_announcement
@announcement = Announcement.find(params[:id])
end
def filter_params
params.slice(*AnnouncementFilter::KEYS).permit(*AnnouncementFilter::KEYS)
end
def resource_params
params.require(:announcement).permit(:text, :scheduled_at, :starts_at, :ends_at, :all_day)
end
end

View file: app/controllers/admin/base_controller.rb

@ -1,29 +0,0 @@
# frozen_string_literal: true
module Admin
class BaseController < ApplicationController
include Authorization
include AccountableConcern
layout 'admin'
before_action :set_body_classes
before_action :set_cache_headers
after_action :verify_authorized
private
def set_body_classes
@body_classes = 'admin'
end
def set_cache_headers
response.cache_control.replace(private: true, no_store: true)
end
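# Remote accounts have no local user, so treat them as not found.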
def set_user
@user = Account.find(params[:account_id]).user || raise(ActiveRecord::RecordNotFound)
end
end
end

View file: app/controllers/admin/change_emails_controller.rb

@ -1,49 +0,0 @@
# frozen_string_literal: true
module Admin
class ChangeEmailsController < BaseController
before_action :set_account
before_action :require_local_account!
def show
authorize @user, :change_email?
end
def update
authorize @user, :change_email?
new_email = resource_params.fetch(:unconfirmed_email)
if new_email != @user.email
@user.update!(
unconfirmed_email: new_email,
# Regenerate the confirmation token:
confirmation_token: nil
)
log_action :change_email, @user
@user.send_confirmation_instructions
end
redirect_to admin_account_path(@account.id), notice: I18n.t('admin.accounts.change_email.changed_msg')
end
private
def set_account
@account = Account.find(params[:account_id])
@user = @account.user
end
def require_local_account!
redirect_to admin_account_path(@account.id) unless @account.local? && @account.user.present?
end
def resource_params
params.require(:user).permit(
:unconfirmed_email
)
end
end
end

View file: app/controllers/admin/confirmations_controller.rb

@ -1,37 +0,0 @@
# frozen_string_literal: true
module Admin
class ConfirmationsController < BaseController
before_action :set_user
before_action :redirect_confirmed_user, only: [:resend], if: :user_confirmed?
def create
authorize @user, :confirm?
@user.mark_email_as_confirmed!
log_action :confirm, @user
redirect_to admin_accounts_path
end
def resend
authorize @user, :confirm?
@user.resend_confirmation_instructions
log_action :resend, @user
flash[:notice] = I18n.t('admin.accounts.resend_confirmation.success')
redirect_to admin_accounts_path
end
private
def redirect_confirmed_user
flash[:error] = I18n.t('admin.accounts.resend_confirmation.already_confirmed')
redirect_to admin_accounts_path
end
def user_confirmed?
@user.confirmed?
end
end
end

View file: app/controllers/admin/custom_emojis_controller.rb

@ -1,83 +0,0 @@
# frozen_string_literal: true
module Admin
class CustomEmojisController < BaseController
def index
authorize :custom_emoji, :index?
@custom_emojis = filtered_custom_emojis.eager_load(:local_counterpart).page(params[:page])
@form = Form::CustomEmojiBatch.new
end
def new
authorize :custom_emoji, :create?
@custom_emoji = CustomEmoji.new
end
def create
authorize :custom_emoji, :create?
@custom_emoji = CustomEmoji.new(resource_params)
if @custom_emoji.save
log_action :create, @custom_emoji
redirect_to admin_custom_emojis_path, notice: I18n.t('admin.custom_emojis.created_msg')
else
render :new
end
end
def batch
authorize :custom_emoji, :index?
@form = Form::CustomEmojiBatch.new(form_custom_emoji_batch_params.merge(current_account: current_account, action: action_from_button))
@form.save
rescue ActionController::ParameterMissing
flash[:alert] = I18n.t('admin.custom_emojis.no_emoji_selected')
rescue Mastodon::NotPermittedError
flash[:alert] = I18n.t('admin.custom_emojis.not_permitted')
rescue ActiveRecord::RecordInvalid => e
error_message = action_from_button == 'copy' ? 'admin.custom_emojis.batch_copy_error' : 'admin.custom_emojis.batch_error'
flash[:alert] = I18n.t(error_message, message: e.message)
ensure
redirect_to admin_custom_emojis_path(filter_params)
end
private
def resource_params
params.require(:custom_emoji).permit(:shortcode, :image, :visible_in_picker)
end
def filtered_custom_emojis
CustomEmojiFilter.new(filter_params).results
end
def filter_params
params.slice(:page, *CustomEmojiFilter::KEYS).permit(:page, *CustomEmojiFilter::KEYS)
end
def action_from_button
if params[:update]
'update'
elsif params[:list]
'list'
elsif params[:unlist]
'unlist'
elsif params[:enable]
'enable'
elsif params[:disable]
'disable'
elsif params[:copy]
'copy'
elsif params[:delete]
'delete'
end
end
def form_custom_emoji_batch_params
params.require(:form_custom_emoji_batch).permit(:action, :category_id, :category_name, custom_emoji_ids: [])
end
end
end

View file: app/controllers/admin/dashboard_controller.rb

@ -1,18 +0,0 @@
# frozen_string_literal: true
module Admin
class DashboardController < BaseController
include Redisable
def index
authorize :dashboard, :index?
@system_checks = Admin::SystemCheck.perform(current_user)
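# Chart data covers the preceding 29 days, excluding today (UTC).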
@time_period = (29.days.ago.to_date...Time.now.utc.to_date)
@pending_users_count = User.pending.count
@pending_reports_count = Report.unresolved.count
@pending_tags_count = Tag.pending_review.count
@pending_appeals_count = Appeal.pending.count
end
end
end

View file: app/controllers/admin/disputes/appeals_controller.rb

@ -1,40 +0,0 @@
# frozen_string_literal: true
class Admin::Disputes::AppealsController < Admin::BaseController
before_action :set_appeal, except: :index
def index
authorize :appeal, :index?
@appeals = filtered_appeals.page(params[:page])
end
def approve
authorize @appeal, :approve?
log_action :approve, @appeal
ApproveAppealService.new.call(@appeal, current_account)
redirect_to disputes_strike_path(@appeal.strike)
end
def reject
authorize @appeal, :approve?
log_action :reject, @appeal
@appeal.reject!(current_account)
UserMailer.appeal_rejected(@appeal.account.user, @appeal).deliver_later
redirect_to disputes_strike_path(@appeal.strike)
end
private
def filtered_appeals
Admin::AppealFilter.new(filter_params.with_defaults(status: 'pending')).results.includes(strike: :account)
end
def filter_params
params.slice(:page, *Admin::AppealFilter::KEYS).permit(:page, *Admin::AppealFilter::KEYS)
end
def set_appeal
@appeal = Appeal.find(params[:id])
end
end

View file: app/controllers/admin/domain_allows_controller.rb

@ -1,42 +0,0 @@
# frozen_string_literal: true
class Admin::DomainAllowsController < Admin::BaseController
before_action :set_domain_allow, only: [:destroy]
def new
authorize :domain_allow, :create?
@domain_allow = DomainAllow.new(domain: params[:_domain])
end
def create
authorize :domain_allow, :create?
@domain_allow = DomainAllow.new(resource_params)
if @domain_allow.save
log_action :create, @domain_allow
redirect_to admin_instances_path, notice: I18n.t('admin.domain_allows.created_msg')
else
render :new
end
end
def destroy
authorize @domain_allow, :destroy?
UnallowDomainService.new.call(@domain_allow)
log_action :destroy, @domain_allow
redirect_to admin_instances_path, notice: I18n.t('admin.domain_allows.destroyed_msg')
end
private
def set_domain_allow
@domain_allow = DomainAllow.find(params[:id])
end
def resource_params
params.require(:domain_allow).permit(:domain)
end
end

View file: app/controllers/admin/domain_blocks_controller.rb

@ -1,128 +0,0 @@
# frozen_string_literal: true
module Admin
class DomainBlocksController < BaseController
before_action :set_domain_block, only: [:destroy, :edit, :update]
PERMITTED_PARAMS = %i(
domain
obfuscate
private_comment
public_comment
reject_media
reject_reports
severity
).freeze
PERMITTED_UPDATE_PARAMS = PERMITTED_PARAMS.without(:domain).freeze
def batch
authorize :domain_block, :create?
@form = Form::DomainBlockBatch.new(form_domain_block_batch_params.merge(current_account: current_account, action: action_from_button))
@form.save
rescue ActionController::ParameterMissing
flash[:alert] = I18n.t('admin.domain_blocks.no_domain_block_selected')
rescue Mastodon::NotPermittedError
flash[:alert] = I18n.t('admin.domain_blocks.not_permitted')
else
redirect_to admin_instances_path(limited: '1'), notice: I18n.t('admin.domain_blocks.created_msg')
end
def new
authorize :domain_block, :create?
@domain_block = DomainBlock.new(domain: params[:_domain])
end
def edit
authorize :domain_block, :create?
end
def create
authorize :domain_block, :create?
@domain_block = DomainBlock.new(resource_params)
existing_domain_block = resource_params[:domain].present? ? DomainBlock.rule_for(resource_params[:domain]) : nil
# Disallow accidentally downgrading a domain block
if existing_domain_block.present? && !@domain_block.stricter_than?(existing_domain_block)
@domain_block.validate
flash.now[:alert] = I18n.t('admin.domain_blocks.existing_domain_block_html', name: existing_domain_block.domain, unblock_url: admin_domain_block_path(existing_domain_block)).html_safe
@domain_block.errors.delete(:domain)
return render :new
end
# Allow transparently upgrading a domain block
if existing_domain_block.present? && existing_domain_block.domain == TagManager.instance.normalize_domain(@domain_block.domain.strip)
@domain_block = existing_domain_block
@domain_block.assign_attributes(resource_params)
end
# Require explicit confirmation when suspending
return render :confirm_suspension if requires_confirmation?
if @domain_block.save
DomainBlockWorker.perform_async(@domain_block.id)
log_action :create, @domain_block
redirect_to admin_instances_path(limited: '1'), notice: I18n.t('admin.domain_blocks.created_msg')
else
render :new
end
end
def update
authorize :domain_block, :update?
@domain_block.assign_attributes(update_params)
# Require explicit confirmation when suspending
return render :confirm_suspension if requires_confirmation?
if @domain_block.save
DomainBlockWorker.perform_async(@domain_block.id, @domain_block.severity_previously_changed?)
log_action :update, @domain_block
redirect_to admin_instances_path(limited: '1'), notice: I18n.t('admin.domain_blocks.created_msg')
else
render :edit
end
end
def destroy
authorize @domain_block, :destroy?
UnblockDomainService.new.call(@domain_block)
log_action :destroy, @domain_block
redirect_to admin_instances_path(limited: '1'), notice: I18n.t('admin.domain_blocks.destroyed_msg')
end
private
def set_domain_block
@domain_block = DomainBlock.find(params[:id])
end
def update_params
params
.require(:domain_block)
.slice(*PERMITTED_UPDATE_PARAMS)
.permit(*PERMITTED_UPDATE_PARAMS)
end
def resource_params
params
.require(:domain_block)
.slice(*PERMITTED_PARAMS)
.permit(*PERMITTED_PARAMS)
end
def form_domain_block_batch_params
params.require(:form_domain_block_batch).permit(domain_blocks_attributes: [:enabled, :domain, :severity, :reject_media, :reject_reports, :private_comment, :public_comment, :obfuscate])
end
def action_from_button
'save' if params[:save]
end
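# Suspending a domain is destructive, so a new or upgraded 'suspend' severity requires explicit confirmation.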
def requires_confirmation?
@domain_block.valid? && (@domain_block.new_record? || @domain_block.severity_changed?) && @domain_block.severity.to_s == 'suspend' && !params[:confirm]
end
end
end

View file: app/controllers/admin/email_domain_blocks_controller.rb

@ -1,83 +0,0 @@
# frozen_string_literal: true
module Admin
class EmailDomainBlocksController < BaseController
def index
authorize :email_domain_block, :index?
@email_domain_blocks = EmailDomainBlock.where(parent_id: nil).includes(:children).order(id: :desc).page(params[:page])
@form = Form::EmailDomainBlockBatch.new
end
def batch
authorize :email_domain_block, :index?
@form = Form::EmailDomainBlockBatch.new(form_email_domain_block_batch_params.merge(current_account: current_account, action: action_from_button))
@form.save
rescue ActionController::ParameterMissing
flash[:alert] = I18n.t('admin.email_domain_blocks.no_email_domain_block_selected')
rescue Mastodon::NotPermittedError
flash[:alert] = I18n.t('admin.email_domain_blocks.not_permitted')
ensure
redirect_to admin_email_domain_blocks_path
end
def new
authorize :email_domain_block, :create?
@email_domain_block = EmailDomainBlock.new(domain: params[:_domain])
end
def create
authorize :email_domain_block, :create?
@email_domain_block = EmailDomainBlock.new(resource_params)
if action_from_button == 'save'
EmailDomainBlock.transaction do
@email_domain_block.save!
log_action :create, @email_domain_block
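# Also create child blocks for any additional domains submitted with the form.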
(@email_domain_block.other_domains || []).uniq.each do |domain|
next if EmailDomainBlock.exists?(domain: domain)
other_email_domain_block = EmailDomainBlock.create!(domain: domain, allow_with_approval: @email_domain_block.allow_with_approval, parent: @email_domain_block)
log_action :create, other_email_domain_block
end
end
redirect_to admin_email_domain_blocks_path, notice: I18n.t('admin.email_domain_blocks.created_msg')
else
set_resolved_records
render :new
end
rescue ActiveRecord::RecordInvalid
set_resolved_records
render :new
end
private
def set_resolved_records
Resolv::DNS.open do |dns|
dns.timeouts = 5
@resolved_records = dns.getresources(@email_domain_block.domain, Resolv::DNS::Resource::IN::MX).to_a
end
end
def resource_params
params.require(:email_domain_block).permit(:domain, :allow_with_approval, other_domains: [])
end
def form_email_domain_block_batch_params
params.require(:form_email_domain_block_batch).permit(email_domain_block_ids: [])
end
def action_from_button
if params[:delete]
'delete'
elsif params[:save]
'save'
end
end
end
end

View file: app/controllers/admin/export_domain_allows_controller.rb

@ -1,58 +0,0 @@
# frozen_string_literal: true
require 'csv'
module Admin
class ExportDomainAllowsController < BaseController
include Admin::ExportControllerConcern
before_action :set_dummy_import!, only: [:new]
def new
authorize :domain_allow, :create?
end
def export
authorize :instance, :index?
send_export_file
end
def import
authorize :domain_allow, :create?
begin
@import = Admin::Import.new(import_params)
return render :new unless @import.validate
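# Each CSV row becomes a new domain allow, unless the domain is already allowed.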
@import.csv_rows.each do |row|
domain = row['#domain'].strip
next if DomainAllow.allowed?(domain)
domain_allow = DomainAllow.new(domain: domain)
log_action :create, domain_allow if domain_allow.save
end
flash[:notice] = I18n.t('admin.domain_allows.created_msg')
rescue ActionController::ParameterMissing
flash[:error] = I18n.t('admin.export_domain_allows.no_file')
end
redirect_to admin_instances_path
end
private
def export_filename
'domain_allows.csv'
end
def export_headers
%w(#domain)
end
def export_data
CSV.generate(headers: export_headers, write_headers: true) do |content|
DomainAllow.allowed_domains.each do |instance|
content << [instance.domain]
end
end
end
end
end

View file: app/controllers/admin/export_domain_blocks_controller.rb

@ -1,81 +0,0 @@
# frozen_string_literal: true
require 'csv'
module Admin
class ExportDomainBlocksController < BaseController
include Admin::ExportControllerConcern
before_action :set_dummy_import!, only: [:new]
def new
authorize :domain_block, :create?
end
def export
authorize :instance, :index?
send_export_file
end
def import
authorize :domain_block, :create?
@import = Admin::Import.new(import_params)
return render :new unless @import.validate
@global_private_comment = I18n.t('admin.export_domain_blocks.import.private_comment_template', source: @import.data_file_name, date: I18n.l(Time.now.utc))
@form = Form::DomainBlockBatch.new
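# Build unsaved domain blocks from the CSV for review, skipping domains already covered by an existing rule.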
@domain_blocks = @import.csv_rows.filter_map do |row|
domain = row['#domain'].strip
next if DomainBlock.rule_for(domain).present?
domain_block = DomainBlock.new(domain: domain,
severity: row.fetch('#severity', :suspend),
reject_media: row.fetch('#reject_media', false),
reject_reports: row.fetch('#reject_reports', false),
private_comment: @global_private_comment,
public_comment: row['#public_comment'],
obfuscate: row.fetch('#obfuscate', false))
if domain_block.invalid?
flash.now[:alert] = I18n.t('admin.export_domain_blocks.invalid_domain_block', error: domain_block.errors.full_messages.join(', '))
next
end
domain_block
rescue ArgumentError => e
flash.now[:alert] = I18n.t('admin.export_domain_blocks.invalid_domain_block', error: e.message)
next
end
@warning_domains = instances_from_imported_blocks.pluck(:domain)
rescue ActionController::ParameterMissing
flash.now[:alert] = I18n.t('admin.export_domain_blocks.no_file')
set_dummy_import!
render :new
end
private
def instances_from_imported_blocks
Instance.with_domain_follows(@domain_blocks.map(&:domain))
end
def export_filename
'domain_blocks.csv'
end
def export_headers
%w(#domain #severity #reject_media #reject_reports #public_comment #obfuscate)
end
def export_data
CSV.generate(headers: export_headers, write_headers: true) do |content|
DomainBlock.with_limitations.order(id: :asc).each do |instance|
content << [instance.domain, instance.severity, instance.reject_media, instance.reject_reports, instance.public_comment, instance.obfuscate]
end
end
end
end
end

View file: app/controllers/admin/follow_recommendations_controller.rb

@ -1,55 +0,0 @@
# frozen_string_literal: true
module Admin
class FollowRecommendationsController < BaseController
before_action :set_language
def show
authorize :follow_recommendation, :show?
@form = Form::AccountBatch.new
@accounts = filtered_follow_recommendations.page(params[:page])
end
def update
authorize :follow_recommendation, :show?
@form = Form::AccountBatch.new(form_account_batch_params.merge(current_account: current_account, action: action_from_button))
@form.save
rescue ActionController::ParameterMissing
# Do nothing
ensure
redirect_to admin_follow_recommendations_path(filter_params)
end
private
def set_language
@language = follow_recommendation_filter.language
end
def filtered_follow_recommendations
follow_recommendation_filter.results
end
def follow_recommendation_filter
@follow_recommendation_filter ||= FollowRecommendationFilter.new(filter_params)
end
def form_account_batch_params
params.require(:form_account_batch).permit(:action, account_ids: [])
end
def filter_params
params.slice(*FollowRecommendationFilter::KEYS).permit(*FollowRecommendationFilter::KEYS)
end
def action_from_button
if params[:suppress]
'suppress_follow_recommendation'
elsif params[:unsuppress]
'unsuppress_follow_recommendation'
end
end
end
end

View file: app/controllers/admin/instances_controller.rb

@ -1,76 +0,0 @@
# frozen_string_literal: true
module Admin
class InstancesController < BaseController
before_action :set_instances, only: :index
before_action :set_instance, except: :index
def index
authorize :instance, :index?
preload_delivery_failures!
end
def show
authorize :instance, :show?
@time_period = (6.days.ago.to_date...Time.now.utc.to_date)
@action_logs = Admin::ActionLogFilter.new(target_domain: @instance.domain).results.limit(5)
end
def destroy
authorize :instance, :destroy?
Admin::DomainPurgeWorker.perform_async(@instance.domain)
log_action :destroy, @instance
redirect_to admin_instances_path, notice: I18n.t('admin.instances.destroyed_msg', domain: @instance.domain)
end
def clear_delivery_errors
authorize :delivery, :clear_delivery_errors?
@instance.delivery_failure_tracker.clear_failures!
redirect_to admin_instance_path(@instance.domain)
end
def restart_delivery
authorize :delivery, :restart_delivery?
if @instance.unavailable?
@instance.delivery_failure_tracker.track_success!
log_action :destroy, @instance.unavailable_domain
end
redirect_to admin_instance_path(@instance.domain)
end
def stop_delivery
authorize :delivery, :stop_delivery?
unavailable_domain = UnavailableDomain.create!(domain: @instance.domain)
log_action :create, unavailable_domain
redirect_to admin_instance_path(@instance.domain)
end
private
def set_instance
@instance = Instance.find_or_initialize_by(domain: TagManager.instance.normalize_domain(params[:id]&.strip))
end
def set_instances
@instances = filtered_instances.page(params[:page])
end
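# Look up failure-day counts for all listed domains in one batch rather than per instance.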
def preload_delivery_failures!
warning_domains_map = DeliveryFailureTracker.warning_domains_map(@instances.map(&:domain))
@instances.each do |instance|
instance.failure_days = warning_domains_map[instance.domain]
end
end
def filtered_instances
InstanceFilter.new(limited_federation_mode? ? { allowed: true } : filter_params).results
end
def filter_params
params.slice(*InstanceFilter::KEYS).permit(*InstanceFilter::KEYS)
end
end
end

View file: app/controllers/admin/invites_controller.rb

@ -1,53 +0,0 @@
# frozen_string_literal: true
module Admin
class InvitesController < BaseController
def index
authorize :invite, :index?
@invites = filtered_invites.includes(user: :account).page(params[:page])
@invite = Invite.new
end
def create
authorize :invite, :create?
@invite = Invite.new(resource_params)
@invite.user = current_user
if @invite.save
redirect_to admin_invites_path
else
@invites = Invite.page(params[:page])
render :index
end
end
def destroy
@invite = Invite.find(params[:id])
authorize @invite, :destroy?
@invite.expire!
redirect_to admin_invites_path
end
def deactivate_all
authorize :invite, :deactivate_all?
Invite.available.in_batches.update_all(expires_at: Time.now.utc)
redirect_to admin_invites_path
end
private
def resource_params
params.require(:invite).permit(:max_uses, :expires_in)
end
def filtered_invites
InviteFilter.new(filter_params).results
end
def filter_params
params.slice(*InviteFilter::KEYS).permit(*InviteFilter::KEYS)
end
end
end

View file: app/controllers/admin/ip_blocks_controller.rb

@ -1,58 +0,0 @@
# frozen_string_literal: true
module Admin
class IpBlocksController < BaseController
def index
authorize :ip_block, :index?
@ip_blocks = IpBlock.order(ip: :asc).page(params[:page])
@form = Form::IpBlockBatch.new
end
def new
authorize :ip_block, :create?
@ip_block = IpBlock.new(ip: '', severity: :no_access, expires_in: 1.year)
end
def create
authorize :ip_block, :create?
@ip_block = IpBlock.new(resource_params)
if @ip_block.save
log_action :create, @ip_block
redirect_to admin_ip_blocks_path, notice: I18n.t('admin.ip_blocks.created_msg')
else
render :new
end
end
def batch
authorize :ip_block, :index?
@form = Form::IpBlockBatch.new(form_ip_block_batch_params.merge(current_account: current_account, action: action_from_button))
@form.save
rescue ActionController::ParameterMissing
flash[:alert] = I18n.t('admin.ip_blocks.no_ip_block_selected')
rescue Mastodon::NotPermittedError
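# Note: reuses the custom emoji "not permitted" message rather than an IP-block-specific one.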
flash[:alert] = I18n.t('admin.custom_emojis.not_permitted')
ensure
redirect_to admin_ip_blocks_path
end
private
def resource_params
params.require(:ip_block).permit(:ip, :severity, :comment, :expires_in)
end
def action_from_button
'delete' if params[:delete]
end
def form_ip_block_batch_params
params.require(:form_ip_block_batch).permit(ip_block_ids: [])
end
end
end

View file: app/controllers/admin/relationships_controller.rb

@ -1,26 +0,0 @@
# frozen_string_literal: true
module Admin
class RelationshipsController < BaseController
before_action :set_account
PER_PAGE = 40
def index
authorize @account, :show?
@accounts = RelationshipFilter.new(@account, filter_params).results.includes(:account_stat, user: [:ips, :invite_request]).page(params[:page]).per(PER_PAGE)
@form = Form::AccountBatch.new
end
private
def set_account
@account = Account.find(params[:account_id])
end
def filter_params
params.slice(*RelationshipFilter::KEYS).permit(*RelationshipFilter::KEYS)
end
end
end

View file: app/controllers/admin/relays_controller.rb

@ -1,63 +0,0 @@
# frozen_string_literal: true
module Admin
class RelaysController < BaseController
before_action :set_relay, except: [:index, :new, :create]
before_action :warn_signatures_not_enabled!, only: [:new, :create, :enable]
def index
authorize :relay, :update?
@relays = Relay.all
end
def new
authorize :relay, :update?
@relay = Relay.new
end
def create
authorize :relay, :update?
@relay = Relay.new(resource_params)
if @relay.save
@relay.enable!
redirect_to admin_relays_path
else
render :new
end
end
def destroy
authorize :relay, :update?
@relay.destroy
redirect_to admin_relays_path
end
def enable
authorize :relay, :update?
@relay.enable!
redirect_to admin_relays_path
end
def disable
authorize :relay, :update?
@relay.disable!
redirect_to admin_relays_path
end
private
def set_relay
@relay = Relay.find(params[:id])
end
def resource_params
params.require(:relay).permit(:inbox_url)
end
def warn_signatures_not_enabled!
flash.now[:error] = I18n.t('admin.relays.signatures_not_enabled') if authorized_fetch_mode?
end
end
end

View file: app/controllers/admin/report_notes_controller.rb

@ -1,60 +0,0 @@
# frozen_string_literal: true
module Admin
class ReportNotesController < BaseController
before_action :set_report_note, only: [:destroy]
def create
authorize :report_note, :create?
@report_note = current_account.report_notes.new(resource_params)
@report = @report_note.report
if @report_note.save
if params[:create_and_resolve]
@report.resolve!(current_account)
log_action :resolve, @report
elsif params[:create_and_unresolve]
@report.unresolve!
log_action :reopen, @report
end
redirect_to after_create_redirect_path, notice: I18n.t('admin.report_notes.created_msg')
else
@report_notes = @report.notes.includes(:account).order(id: :desc)
@action_logs = @report.history.includes(:target)
@form = Admin::StatusBatchAction.new
@statuses = @report.statuses.with_includes
render 'admin/reports/show'
end
end
def destroy
authorize @report_note, :destroy?
@report_note.destroy!
redirect_to admin_report_path(@report_note.report_id), notice: I18n.t('admin.report_notes.destroyed_msg')
end
private
def after_create_redirect_path
if params[:create_and_resolve]
admin_reports_path
else
admin_report_path(@report)
end
end
def resource_params
params.require(:report_note).permit(
:content,
:report_id
)
end
def set_report_note
@report_note = ReportNote.find(params[:id])
end
end
end

View file: app/controllers/admin/reports/actions_controller.rb

@ -1,63 +0,0 @@
# frozen_string_literal: true
class Admin::Reports::ActionsController < Admin::BaseController
before_action :set_report
def preview
authorize @report, :show?
@moderation_action = action_from_button
end
def create
authorize @report, :show?
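# Status-level actions run as a batch over the reported statuses; account-level actions target the reported account directly.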
case action_from_button
when 'delete', 'mark_as_sensitive'
status_batch_action = Admin::StatusBatchAction.new(
type: action_from_button,
status_ids: @report.status_ids,
current_account: current_account,
report_id: @report.id,
send_email_notification: !@report.spam?,
text: params[:text]
)
status_batch_action.save!
when 'silence', 'suspend'
account_action = Admin::AccountAction.new(
type: action_from_button,
report_id: @report.id,
target_account: @report.target_account,
current_account: current_account,
send_email_notification: !@report.spam?,
text: params[:text]
)
account_action.save!
else
return redirect_to admin_report_path(@report), alert: I18n.t('admin.reports.unknown_action_msg', action: action_from_button)
end
redirect_to admin_reports_path, notice: I18n.t('admin.reports.processed_msg', id: @report.id)
end
private
def set_report
@report = Report.find(params[:report_id])
end
def action_from_button
if params[:delete]
'delete'
elsif params[:mark_as_sensitive]
'mark_as_sensitive'
elsif params[:silence]
'silence'
elsif params[:suspend]
'suspend'
elsif params[:moderation_action]
params[:moderation_action]
end
end
end

Some files were not shown because too many files have changed in this diff.