Compare commits


No commits in common. "develop" and "v2.13.0" have entirely different histories.

295 changed files with 3267 additions and 19138 deletions

104
.github/dependabot.yml vendored
View File

@ -1,104 +0,0 @@
version: 2
updates:
- package-ecosystem: "npm"
directory: "/backend"
schedule:
interval: "weekly"
groups:
dev-patch-updates:
dependency-type: "development"
update-types:
- "patch"
dev-minor-updates:
dependency-type: "development"
update-types:
- "minor"
prod-patch-updates:
dependency-type: "production"
update-types:
- "patch"
prod-minor-updates:
dependency-type: "production"
update-types:
- "minor"
- package-ecosystem: "npm"
directory: "/frontend"
schedule:
interval: "weekly"
groups:
dev-patch-updates:
dependency-type: "development"
update-types:
- "patch"
dev-minor-updates:
dependency-type: "development"
update-types:
- "minor"
prod-patch-updates:
dependency-type: "production"
update-types:
- "patch"
prod-minor-updates:
dependency-type: "production"
update-types:
- "minor"
- package-ecosystem: "npm"
directory: "/docs"
schedule:
interval: "weekly"
groups:
dev-patch-updates:
dependency-type: "development"
update-types:
- "patch"
dev-minor-updates:
dependency-type: "development"
update-types:
- "minor"
prod-patch-updates:
dependency-type: "production"
update-types:
- "patch"
prod-minor-updates:
dependency-type: "production"
update-types:
- "minor"
- package-ecosystem: "npm"
directory: "/test"
schedule:
interval: "weekly"
groups:
dev-patch-updates:
dependency-type: "development"
update-types:
- "patch"
dev-minor-updates:
dependency-type: "development"
update-types:
- "minor"
prod-patch-updates:
dependency-type: "production"
update-types:
- "patch"
prod-minor-updates:
dependency-type: "production"
update-types:
- "minor"
- package-ecosystem: "docker"
directory: "/docker"
schedule:
interval: "weekly"
groups:
updates:
update-types:
- "patch"
- "minor"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"

View File

@ -8,7 +8,7 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v10
- uses: actions/stale@v9
with:
stale-issue-label: 'stale'
stale-pr-label: 'stale'

View File

@ -1 +1 @@
2.13.7
2.13.0

285
Jenkinsfile vendored Normal file
View File

@ -0,0 +1,285 @@
import groovy.transform.Field
@Field
def shOutput = ""
def buildxPushTags = ""
pipeline {
agent {
label 'docker-multiarch'
}
options {
buildDiscarder(logRotator(numToKeepStr: '5'))
disableConcurrentBuilds()
ansiColor('xterm')
}
environment {
IMAGE = 'nginx-proxy-manager'
BUILD_VERSION = getVersion()
MAJOR_VERSION = '2'
BRANCH_LOWER = "${BRANCH_NAME.toLowerCase().replaceAll('\\\\', '-').replaceAll('/', '-').replaceAll('\\.', '-')}"
BUILDX_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}"
COMPOSE_INTERACTIVE_NO_CLI = 1
}
stages {
stage('Environment') {
parallel {
stage('Master') {
when {
branch 'master'
}
steps {
script {
buildxPushTags = "-t docker.io/jc21/${IMAGE}:${BUILD_VERSION} -t docker.io/jc21/${IMAGE}:${MAJOR_VERSION} -t docker.io/jc21/${IMAGE}:latest"
}
}
}
stage('Other') {
when {
not {
branch 'master'
}
}
steps {
script {
// Defaults to the branch name, which applies to all branches AND PRs
buildxPushTags = "-t docker.io/nginxproxymanager/${IMAGE}-dev:${BRANCH_LOWER}"
}
}
}
stage('Versions') {
steps {
sh 'cat frontend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge frontend/package.json'
sh 'echo -e "\\E[1;36mFrontend Version is:\\E[1;33m $(cat frontend/package.json | jq -r .version)\\E[0m"'
sh 'cat backend/package.json | jq --arg BUILD_VERSION "${BUILD_VERSION}" \'.version = $BUILD_VERSION\' | sponge backend/package.json'
sh 'echo -e "\\E[1;36mBackend Version is:\\E[1;33m $(cat backend/package.json | jq -r .version)\\E[0m"'
sh 'sed -i -E "s/(version-)[0-9]+\\.[0-9]+\\.[0-9]+(-green)/\\1${BUILD_VERSION}\\2/" README.md'
}
}
stage('Docker Login') {
steps {
withCredentials([usernamePassword(credentialsId: 'jc21-dockerhub', passwordVariable: 'dpass', usernameVariable: 'duser')]) {
sh 'docker login -u "${duser}" -p "${dpass}"'
}
}
}
}
}
stage('Builds') {
parallel {
stage('Project') {
steps {
script {
// Frontend and Backend
def shStatusCode = sh(label: 'Checking and Building', returnStatus: true, script: '''
set -e
./scripts/ci/frontend-build > ${WORKSPACE}/tmp-sh-build 2>&1
./scripts/ci/test-and-build > ${WORKSPACE}/tmp-sh-build 2>&1
''')
shOutput = readFile "${env.WORKSPACE}/tmp-sh-build"
if (shStatusCode != 0) {
error "${shOutput}"
}
}
}
post {
always {
sh 'rm -f ${WORKSPACE}/tmp-sh-build'
}
failure {
npmGithubPrComment("CI Error:\n\n```\n${shOutput}\n```", true)
}
}
}
stage('Docs') {
steps {
dir(path: 'docs') {
sh 'yarn install'
sh 'yarn build'
}
}
}
}
}
stage('Test Sqlite') {
environment {
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_sqlite"
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.sqlite.yml'
}
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh 'rm -rf ./test/results/junit/*'
sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug/sqlite'
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/sqlite/docker_fullstack.log 2>&1'
sh 'docker logs $(docker compose ps --all -q stepca) > debug/sqlite/docker_stepca.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns) > debug/sqlite/docker_pdns.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/sqlite/docker_pdns-db.log 2>&1'
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/sqlite/docker_dnsrouter.log 2>&1'
junit 'test/results/junit/*'
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('Test Mysql') {
environment {
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_mysql"
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.mysql.yml'
}
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh 'rm -rf ./test/results/junit/*'
sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug/mysql'
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/mysql/docker_fullstack.log 2>&1'
sh 'docker logs $(docker compose ps --all -q stepca) > debug/mysql/docker_stepca.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns) > debug/mysql/docker_pdns.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/mysql/docker_pdns-db.log 2>&1'
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/mysql/docker_dnsrouter.log 2>&1'
junit 'test/results/junit/*'
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('Test Postgres') {
environment {
COMPOSE_PROJECT_NAME = "npm_${BRANCH_LOWER}_${BUILD_NUMBER}_postgres"
COMPOSE_FILE = 'docker/docker-compose.ci.yml:docker/docker-compose.ci.postgres.yml'
}
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh 'rm -rf ./test/results/junit/*'
sh './scripts/ci/fulltest-cypress'
}
post {
always {
// Dumps to analyze later
sh 'mkdir -p debug/postgres'
sh 'docker logs $(docker compose ps --all -q fullstack) > debug/postgres/docker_fullstack.log 2>&1'
sh 'docker logs $(docker compose ps --all -q stepca) > debug/postgres/docker_stepca.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns) > debug/postgres/docker_pdns.log 2>&1'
sh 'docker logs $(docker compose ps --all -q pdns-db) > debug/postgres/docker_pdns-db.log 2>&1'
sh 'docker logs $(docker compose ps --all -q dnsrouter) > debug/postgres/docker_dnsrouter.log 2>&1'
sh 'docker logs $(docker compose ps --all -q db-postgres) > debug/postgres/docker_db-postgres.log 2>&1'
sh 'docker logs $(docker compose ps --all -q authentik) > debug/postgres/docker_authentik.log 2>&1'
sh 'docker logs $(docker compose ps --all -q authentik-redis) > debug/postgres/docker_authentik-redis.log 2>&1'
sh 'docker logs $(docker compose ps --all -q authentik-ldap) > debug/postgres/docker_authentik-ldap.log 2>&1'
junit 'test/results/junit/*'
sh 'docker compose down --remove-orphans --volumes -t 30 || true'
}
unstable {
dir(path: 'test/results') {
archiveArtifacts(allowEmptyArchive: true, artifacts: '**/*', excludes: '**/*.xml')
}
}
}
}
stage('MultiArch Build') {
when {
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
steps {
sh "./scripts/buildx --push ${buildxPushTags}"
}
}
stage('Docs / Comment') {
parallel {
stage('Docs Job') {
when {
allOf {
branch pattern: "^(develop|master)\$", comparator: "REGEXP"
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
}
steps {
build wait: false, job: 'nginx-proxy-manager-docs', parameters: [string(name: 'docs_branch', value: "$BRANCH_NAME")]
}
}
stage('PR Comment') {
when {
allOf {
changeRequest()
not {
equals expected: 'UNSTABLE', actual: currentBuild.result
}
}
}
steps {
script {
npmGithubPrComment("""Docker Image for build ${BUILD_NUMBER} is available on [DockerHub](https://cloud.docker.com/repository/docker/nginxproxymanager/${IMAGE}-dev):
```
nginxproxymanager/${IMAGE}-dev:${BRANCH_LOWER}
```
> [!NOTE]
> Ensure you backup your NPM instance before testing this image! Especially if there are database changes.
> This is a different docker image namespace than the official image.
> [!WARNING]
> Changes and additions to DNS Providers require verification by at least 2 members of the community!
""", true)
}
}
}
}
}
}
post {
always {
sh 'echo Reverting ownership'
sh 'docker run --rm -v "$(pwd):/data" jc21/ci-tools chown -R "$(id -u):$(id -g)" /data'
printResult(true)
}
failure {
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
}
unstable {
archiveArtifacts(artifacts: 'debug/**/*.*', allowEmptyArchive: true)
}
}
}
def getVersion() {
ver = sh(script: 'cat .version', returnStdout: true)
return ver.trim()
}
def getCommit() {
ver = sh(script: 'git log -n 1 --format=%h', returnStdout: true)
return ver.trim()
}

View File

@ -1,7 +1,7 @@
<p align="center">
<img src="https://nginxproxymanager.com/github.png">
<br><br>
<img src="https://img.shields.io/badge/version-2.13.7-green.svg?style=for-the-badge">
<img src="https://img.shields.io/badge/version-2.13.0-green.svg?style=for-the-badge">
<a href="https://hub.docker.com/repository/docker/jc21/nginx-proxy-manager">
<img src="https://img.shields.io/docker/stars/jc21/nginx-proxy-manager.svg?style=for-the-badge">
</a>

View File

@ -1,5 +1,5 @@
{
"$schema": "https://biomejs.dev/schemas/2.3.14/schema.json",
"$schema": "https://biomejs.dev/schemas/2.3.2/schema.json",
"vcs": {
"enabled": true,
"clientKind": "git",

View File

@ -26,8 +26,8 @@
"azure": {
"name": "Azure",
"package_name": "certbot-dns-azure",
"version": "~=2.6.1",
"dependencies": "azure-mgmt-dns==8.2.0",
"version": "~=1.2.0",
"dependencies": "",
"credentials": "# This plugin supported API authentication using either Service Principals or utilizing a Managed Identity assigned to the virtual machine.\n# Regardless which authentication method used, the identity will need the “DNS Zone Contributor” role assigned to it.\n# As multiple Azure DNS Zones in multiple resource groups can exist, the config file needs a mapping of zone to resource group ID. Multiple zones -> ID mappings can be listed by using the key dns_azure_zoneX where X is a unique number. At least 1 zone mapping is required.\n\n# Using a service principal (option 1)\ndns_azure_sp_client_id = 912ce44a-0156-4669-ae22-c16a17d34ca5\ndns_azure_sp_client_secret = E-xqXU83Y-jzTI6xe9fs2YC~mck3ZzUih9\ndns_azure_tenant_id = ed1090f3-ab18-4b12-816c-599af8a88cf7\n\n# Using used assigned MSI (option 2)\n# dns_azure_msi_client_id = 912ce44a-0156-4669-ae22-c16a17d34ca5\n\n# Using system assigned MSI (option 3)\n# dns_azure_msi_system_assigned = true\n\n# Zones (at least one always required)\ndns_azure_zone1 = example.com:/subscriptions/c135abce-d87d-48df-936c-15596c6968a5/resourceGroups/dns1\ndns_azure_zone2 = example.org:/subscriptions/99800903-fb14-4992-9aff-12eaf2744622/resourceGroups/dns2",
"full_plugin_name": "dns-azure"
},
@ -74,7 +74,7 @@
"cloudns": {
"name": "ClouDNS",
"package_name": "certbot-dns-cloudns",
"version": "~=0.7.0",
"version": "~=0.6.0",
"dependencies": "",
"credentials": "# Target user ID (see https://www.cloudns.net/api-settings/)\n\tdns_cloudns_auth_id=1234\n\t# Alternatively, one of the following two options can be set:\n\t# dns_cloudns_sub_auth_id=1234\n\t# dns_cloudns_sub_auth_user=foobar\n\n\t# API password\n\tdns_cloudns_auth_password=password1",
"full_plugin_name": "dns-cloudns"
@ -255,14 +255,6 @@
"credentials": "dns_gcore_apitoken = 0123456789abcdef0123456789abcdef01234567",
"full_plugin_name": "dns-gcore"
},
"glesys": {
"name": "Glesys",
"package_name": "certbot-dns-glesys",
"version": "~=2.1.0",
"dependencies": "",
"credentials": "dns_glesys_user = CL00000\ndns_glesys_password = apikeyvalue",
"full_plugin_name": "dns-glesys"
},
"godaddy": {
"name": "GoDaddy",
"package_name": "certbot-dns-godaddy",
@ -295,14 +287,6 @@
"credentials": "dns_he_user = Me\ndns_he_pass = my HE password",
"full_plugin_name": "dns-he"
},
"he-ddns": {
"name": "Hurricane Electric - DDNS",
"package_name": "certbot-dns-he-ddns",
"version": "~=0.1.0",
"dependencies": "",
"credentials": "dns_he_ddns_password = verysecurepassword",
"full_plugin_name": "dns-he-ddns"
},
"hetzner": {
"name": "Hetzner",
"package_name": "certbot-dns-hetzner",
@ -383,18 +367,10 @@
"credentials": "dns_joker_username = <Dynamic DNS Authentication Username>\ndns_joker_password = <Dynamic DNS Authentication Password>\ndns_joker_domain = <Dynamic DNS Domain>",
"full_plugin_name": "dns-joker"
},
"kas": {
"name": "All-Inkl",
"package_name": "certbot-dns-kas",
"version": "~=0.1.1",
"dependencies": "kasserver",
"credentials": "dns_kas_user = your_kas_user\ndns_kas_password = your_kas_password",
"full_plugin_name": "dns-kas"
},
"leaseweb": {
"name": "LeaseWeb",
"package_name": "certbot-dns-leaseweb",
"version": "~=1.0.3",
"version": "~=1.0.1",
"dependencies": "",
"credentials": "dns_leaseweb_api_token = 01234556789",
"full_plugin_name": "dns-leaseweb"
@ -423,14 +399,6 @@
"credentials": "dns_luadns_email = user@example.com\ndns_luadns_token = 0123456789abcdef0123456789abcdef",
"full_plugin_name": "dns-luadns"
},
"mchost24": {
"name": "MC-HOST24",
"package_name": "certbot-dns-mchost24",
"version": "",
"dependencies": "",
"credentials": "# Obtain API token using https://github.com/JoeJoeTV/mchost24-api-python\ndns_mchost24_api_token=<insert obtained API token here>",
"full_plugin_name": "dns-mchost24"
},
"mijnhost": {
"name": "mijn.host",
"package_name": "certbot-dns-mijn-host",
@ -506,7 +474,7 @@
"porkbun": {
"name": "Porkbun",
"package_name": "certbot-dns-porkbun",
"version": "~=0.11.0",
"version": "~=0.9",
"dependencies": "",
"credentials": "dns_porkbun_key=your-porkbun-api-key\ndns_porkbun_secret=your-porkbun-api-secret",
"full_plugin_name": "dns-porkbun"
@ -551,14 +519,6 @@
"credentials": "[default]\naws_access_key_id=AKIAIOSFODNN7EXAMPLE\naws_secret_access_key=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
"full_plugin_name": "dns-route53"
},
"simply": {
"name": "Simply",
"package_name": "certbot-dns-simply",
"version": "~=0.1.2",
"dependencies": "",
"credentials": "dns_simply_account_name = UExxxxxx\ndns_simply_api_key = DsHJdsjh2812872sahj",
"full_plugin_name": "dns-simply"
},
"spaceship": {
"name": "Spaceship",
"package_name": "certbot-dns-spaceship",

View File

@ -1,8 +1,6 @@
import knex from "knex";
import {configGet, configHas} from "./lib/config.js";
let instance = null;
const generateDbConfig = () => {
if (!configHas("database")) {
throw new Error(
@ -32,11 +30,4 @@ const generateDbConfig = () => {
};
};
const getInstance = () => {
if (!instance) {
instance = knex(generateDbConfig());
}
return instance;
}
export default getInstance;
export default knex(generateDbConfig());

View File

@ -1,302 +0,0 @@
import crypto from "node:crypto";
import bcrypt from "bcrypt";
import { createGuardrails, generateSecret, generateURI, verify } from "otplib";
import errs from "../lib/error.js";
import authModel from "../models/auth.js";
import internalUser from "./user.js";
const APP_NAME = "Nginx Proxy Manager";
const BACKUP_CODE_COUNT = 8;
/**
* Generate backup codes
* @returns {Promise<{plain: string[], hashed: string[]}>}
*/
const generateBackupCodes = async () => {
const plain = [];
const hashed = [];
for (let i = 0; i < BACKUP_CODE_COUNT; i++) {
const code = crypto.randomBytes(4).toString("hex").toUpperCase();
plain.push(code);
const hash = await bcrypt.hash(code, 10);
hashed.push(hash);
}
return { plain, hashed };
};
const internal2fa = {
/**
* Check if user has 2FA enabled
* @param {number} userId
* @returns {Promise<boolean>}
*/
isEnabled: async (userId) => {
const auth = await internal2fa.getUserPasswordAuth(userId);
return auth?.meta?.totp_enabled === true;
},
/**
* Get 2FA status for user
* @param {Access} access
* @param {number} userId
* @returns {Promise<{enabled: boolean, backup_codes_remaining: number}>}
*/
getStatus: async (access, userId) => {
await access.can("users:password", userId);
await internalUser.get(access, { id: userId });
const auth = await internal2fa.getUserPasswordAuth(userId);
const enabled = auth?.meta?.totp_enabled === true;
let backup_codes_remaining = 0;
if (enabled) {
const backupCodes = auth.meta.backup_codes || [];
backup_codes_remaining = backupCodes.length;
}
return {
enabled,
backup_codes_remaining,
};
},
/**
* Start 2FA setup - store pending secret
*
* @param {Access} access
* @param {number} userId
* @returns {Promise<{secret: string, otpauth_url: string}>}
*/
startSetup: async (access, userId) => {
await access.can("users:password", userId);
const user = await internalUser.get(access, { id: userId });
const secret = generateSecret();
const otpauth_url = generateURI({
issuer: APP_NAME,
label: user.email,
secret: secret,
});
const auth = await internal2fa.getUserPasswordAuth(userId);
// ensure user isn't already setup for 2fa
const enabled = auth?.meta?.totp_enabled === true;
if (enabled) {
throw new errs.ValidationError("2FA is already enabled");
}
const meta = auth.meta || {};
meta.totp_pending_secret = secret;
await authModel
.query()
.where("id", auth.id)
.andWhere("user_id", userId)
.andWhere("type", "password")
.patch({ meta });
return { secret, otpauth_url };
},
/**
* Enable 2FA after verifying code
*
* @param {Access} access
* @param {number} userId
* @param {string} code
* @returns {Promise<{backup_codes: string[]}>}
*/
enable: async (access, userId, code) => {
await access.can("users:password", userId);
await internalUser.get(access, { id: userId });
const auth = await internal2fa.getUserPasswordAuth(userId);
const secret = auth?.meta?.totp_pending_secret || false;
if (!secret) {
throw new errs.ValidationError("No pending 2FA setup found");
}
const result = await verify({ token: code, secret });
if (!result.valid) {
throw new errs.ValidationError("Invalid verification code");
}
const { plain, hashed } = await generateBackupCodes();
const meta = {
...auth.meta,
totp_secret: secret,
totp_enabled: true,
totp_enabled_at: new Date().toISOString(),
backup_codes: hashed,
};
delete meta.totp_pending_secret;
await authModel
.query()
.where("id", auth.id)
.andWhere("user_id", userId)
.andWhere("type", "password")
.patch({ meta });
return { backup_codes: plain };
},
/**
* Disable 2FA
*
* @param {Access} access
* @param {number} userId
* @param {string} code
* @returns {Promise<void>}
*/
disable: async (access, userId, code) => {
await access.can("users:password", userId);
await internalUser.get(access, { id: userId });
const auth = await internal2fa.getUserPasswordAuth(userId);
const enabled = auth?.meta?.totp_enabled === true;
if (!enabled) {
throw new errs.ValidationError("2FA is not enabled");
}
const result = await verify({
token: code,
secret: auth.meta.totp_secret,
});
if (!result.valid) {
throw new errs.AuthError("Invalid verification code");
}
const meta = { ...auth.meta };
delete meta.totp_secret;
delete meta.totp_enabled;
delete meta.totp_enabled_at;
delete meta.backup_codes;
await authModel
.query()
.where("id", auth.id)
.andWhere("user_id", userId)
.andWhere("type", "password")
.patch({ meta });
},
/**
* Verify 2FA code for login
*
* @param {number} userId
* @param {string} token
* @returns {Promise<boolean>}
*/
verifyForLogin: async (userId, token) => {
const auth = await internal2fa.getUserPasswordAuth(userId);
const secret = auth?.meta?.totp_secret || false;
if (!secret) {
return false;
}
// Try the TOTP code first if the token is 6 chars; verification throws errors when it isn't 6 chars,
// and the backup codes are 8 chars.
if (token.length === 6) {
const result = await verify({
token,
secret,
// These guardrails lower the minimum length requirement for secrets.
// In v12 of otplib the default minimum length is 10 and in v13 it is 16.
// Since there are 2fa secrets in the wild generated with v12, we need to allow shorter secrets
// so people won't be locked out when upgrading.
guardrails: createGuardrails({
MIN_SECRET_BYTES: 10,
}),
});
if (result.valid) {
return true;
}
}
// Try backup codes
const backupCodes = auth?.meta?.backup_codes || [];
for (let i = 0; i < backupCodes.length; i++) {
const match = await bcrypt.compare(token.toUpperCase(), backupCodes[i]);
if (match) {
// Remove used backup code
const updatedCodes = [...backupCodes];
updatedCodes.splice(i, 1);
const meta = { ...auth.meta, backup_codes: updatedCodes };
await authModel
.query()
.where("id", auth.id)
.andWhere("user_id", userId)
.andWhere("type", "password")
.patch({ meta });
return true;
}
}
return false;
},
/**
* Regenerate backup codes
*
* @param {Access} access
* @param {number} userId
* @param {string} token
* @returns {Promise<{backup_codes: string[]}>}
*/
regenerateBackupCodes: async (access, userId, token) => {
await access.can("users:password", userId);
await internalUser.get(access, { id: userId });
const auth = await internal2fa.getUserPasswordAuth(userId);
const enabled = auth?.meta?.totp_enabled === true;
const secret = auth?.meta?.totp_secret || false;
if (!enabled) {
throw new errs.ValidationError("2FA is not enabled");
}
if (!secret) {
throw new errs.ValidationError("No 2FA secret found");
}
const result = await verify({
token,
secret,
});
if (!result.valid) {
throw new errs.ValidationError("Invalid verification code");
}
const { plain, hashed } = await generateBackupCodes();
const meta = { ...auth.meta, backup_codes: hashed };
await authModel
.query()
.where("id", auth.id)
.andWhere("user_id", userId)
.andWhere("type", "password")
.patch({ meta });
return { backup_codes: plain };
},
getUserPasswordAuth: async (userId) => {
const auth = await authModel
.query()
.where("user_id", userId)
.andWhere("type", "password")
.first();
if (!auth) {
throw new errs.ItemNotFoundError("Auth not found");
}
return auth;
},
};
export default internal2fa;

View File

@ -4,7 +4,6 @@ import path from "path";
import archiver from "archiver";
import _ from "lodash";
import moment from "moment";
import { ProxyAgent } from "proxy-agent";
import tempWrite from "temp-write";
import dnsPlugins from "../certbot/dns-plugins.json" with { type: "json" };
import { installPlugin } from "../lib/certbot.js";
@ -798,11 +797,6 @@ const internalCertificate = {
certificate.domain_names.join(","),
];
// Add key-type parameter if specified
if (certificate.meta?.key_type) {
args.push("--key-type", certificate.meta.key_type);
}
const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id);
args.push(...adds.args);
@ -863,11 +857,6 @@ const internalCertificate = {
);
}
// Add key-type parameter if specified
if (certificate.meta?.key_type) {
args.push("--key-type", certificate.meta.key_type);
}
const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id, certificate.meta.dns_provider);
args.push(...adds.args);
@ -948,11 +937,6 @@ const internalCertificate = {
"--disable-hook-validation",
];
// Add key-type parameter if specified
if (certificate.meta?.key_type) {
args.push("--key-type", certificate.meta.key_type);
}
const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id, certificate.meta.dns_provider);
args.push(...adds.args);
@ -994,11 +978,6 @@ const internalCertificate = {
"--no-random-sleep-on-renew",
];
// Add key-type parameter if specified
if (certificate.meta?.key_type) {
args.push("--key-type", certificate.meta.key_type);
}
const adds = internalCertificate.getAdditionalCertbotArgs(certificate.id, certificate.meta.dns_provider);
args.push(...adds.args);
@ -1135,7 +1114,6 @@ const internalCertificate = {
performTestForDomain: async (domain) => {
logger.info(`Testing http challenge for ${domain}`);
const agent = new ProxyAgent();
const url = `http://${domain}/.well-known/acme-challenge/test-challenge`;
const formBody = `method=G&url=${encodeURI(url)}&bodytype=T&requestbody=&headername=User-Agent&headervalue=None&locationid=1&ch=false&cc=false`;
const options = {
@ -1145,7 +1123,6 @@ const internalCertificate = {
"Content-Type": "application/x-www-form-urlencoded",
"Content-Length": Buffer.byteLength(formBody),
},
agent,
};
const result = await new Promise((resolve) => {

View File

@ -2,7 +2,6 @@ import fs from "node:fs";
import https from "node:https";
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import { ProxyAgent } from "proxy-agent";
import errs from "../lib/error.js";
import utils from "../lib/utils.js";
import { ipRanges as logger } from "../logger.js";
@ -30,11 +29,10 @@ const internalIpRanges = {
},
fetchUrl: (url) => {
const agent = new ProxyAgent();
return new Promise((resolve, reject) => {
logger.info(`Fetching ${url}`);
return https
.get(url, { agent }, (res) => {
.get(url, (res) => {
res.setEncoding("utf8");
let raw_data = "";
res.on("data", (chunk) => {

View File

@ -216,11 +216,6 @@ const internalNginx = {
}
}
// For redirection hosts, if the scheme is not http or https, set it to $scheme
if (nice_host_type === "redirection_host" && ['http', 'https'].indexOf(host.forward_scheme.toLowerCase()) === -1) {
host.forward_scheme = "$scheme";
}
if (host.locations) {
//logger.info ('host.locations = ' + JSON.stringify(host.locations, null, 2));
origLocations = [].concat(host.locations);

View File

@ -1,84 +0,0 @@
import https from "node:https";
import { ProxyAgent } from "proxy-agent";
import { debug, remoteVersion as logger } from "../logger.js";
import pjson from "../package.json" with { type: "json" };
const VERSION_URL = "https://api.github.com/repos/NginxProxyManager/nginx-proxy-manager/releases/latest";
const internalRemoteVersion = {
cache_timeout: 1000 * 60 * 15, // 15 minutes
last_result: null,
last_fetch_time: null,
/**
* Fetch the latest version info, using a cached result if within the cache timeout period.
* @return {Promise<{current: string, latest: string, update_available: boolean}>} Version info
*/
get: async () => {
if (
!internalRemoteVersion.last_result ||
!internalRemoteVersion.last_fetch_time ||
Date.now() - internalRemoteVersion.last_fetch_time > internalRemoteVersion.cache_timeout
) {
const raw = await internalRemoteVersion.fetchUrl(VERSION_URL);
const data = JSON.parse(raw);
internalRemoteVersion.last_result = data;
internalRemoteVersion.last_fetch_time = Date.now();
} else {
debug(logger, "Using cached remote version result");
}
const latestVersion = internalRemoteVersion.last_result.tag_name;
const version = pjson.version.split("-").shift().split(".");
const currentVersion = `v${version[0]}.${version[1]}.${version[2]}`;
return {
current: currentVersion,
latest: latestVersion,
update_available: internalRemoteVersion.compareVersions(currentVersion, latestVersion),
};
},
fetchUrl: (url) => {
const agent = new ProxyAgent();
const headers = {
"User-Agent": `NginxProxyManager v${pjson.version}`,
};
return new Promise((resolve, reject) => {
logger.info(`Fetching ${url}`);
return https
.get(url, { agent, headers }, (res) => {
res.setEncoding("utf8");
let raw_data = "";
res.on("data", (chunk) => {
raw_data += chunk;
});
res.on("end", () => {
resolve(raw_data);
});
})
.on("error", (err) => {
reject(err);
});
});
},
compareVersions: (current, latest) => {
const cleanCurrent = current.replace(/^v/, "");
const cleanLatest = latest.replace(/^v/, "");
const currentParts = cleanCurrent.split(".").map(Number);
const latestParts = cleanLatest.split(".").map(Number);
for (let i = 0; i < Math.max(currentParts.length, latestParts.length); i++) {
const curr = currentParts[i] || 0;
const lat = latestParts[i] || 0;
if (lat > curr) return true;
if (lat < curr) return false;
}
return false;
},
};
export default internalRemoteVersion;

View File

@ -15,10 +15,10 @@ const internalReport = {
const userId = access.token.getUserId(1);
const promises = [
internalProxyHost.getCount(userId, access_data.permission_visibility),
internalRedirectionHost.getCount(userId, access_data.permission_visibility),
internalStream.getCount(userId, access_data.permission_visibility),
internalDeadHost.getCount(userId, access_data.permission_visibility),
internalProxyHost.getCount(userId, access_data.visibility),
internalRedirectionHost.getCount(userId, access_data.visibility),
internalStream.getCount(userId, access_data.visibility),
internalDeadHost.getCount(userId, access_data.visibility),
];
return Promise.all(promises);

View File

@ -4,12 +4,9 @@ import { parseDatePeriod } from "../lib/helpers.js";
import authModel from "../models/auth.js";
import TokenModel from "../models/token.js";
import userModel from "../models/user.js";
import twoFactor from "./2fa.js";
const ERROR_MESSAGE_INVALID_AUTH = "Invalid email or password";
const ERROR_MESSAGE_INVALID_AUTH_I18N = "error.invalid-auth";
const ERROR_MESSAGE_INVALID_2FA = "Invalid verification code";
const ERROR_MESSAGE_INVALID_2FA_I18N = "error.invalid-2fa";
export default {
/**
@ -62,25 +59,6 @@ export default {
throw new errs.AuthError(`Invalid scope: ${data.scope}`);
}
// Check if 2FA is enabled
const has2FA = await twoFactor.isEnabled(user.id);
if (has2FA) {
// Return challenge token instead of full token
const challengeToken = await Token.create({
iss: issuer || "api",
attrs: {
id: user.id,
},
scope: ["2fa-challenge"],
expiresIn: "5m",
});
return {
requires_2fa: true,
challenge_token: challengeToken.token,
};
}
// Create a moment of the expiry expression
const expiry = parseDatePeriod(data.expiry);
if (expiry === null) {
@ -151,65 +129,6 @@ export default {
throw new error.AssertionFailedError("Existing token contained invalid user data");
},
/**
* Verify 2FA code and return full token
* @param {string} challengeToken
* @param {string} code
* @param {string} [expiry]
* @returns {Promise}
*/
verify2FA: async (challengeToken, code, expiry) => {
const Token = TokenModel();
const tokenExpiry = expiry || "1d";
// Verify challenge token
let tokenData;
try {
tokenData = await Token.load(challengeToken);
} catch {
throw new errs.AuthError("Invalid or expired challenge token");
}
// Check scope
if (!tokenData.scope || tokenData.scope[0] !== "2fa-challenge") {
throw new errs.AuthError("Invalid challenge token");
}
const userId = tokenData.attrs?.id;
if (!userId) {
throw new errs.AuthError("Invalid challenge token");
}
// Verify 2FA code
const valid = await twoFactor.verifyForLogin(userId, code);
if (!valid) {
throw new errs.AuthError(
ERROR_MESSAGE_INVALID_2FA,
ERROR_MESSAGE_INVALID_2FA_I18N,
);
}
// Create full token
const expiryDate = parseDatePeriod(tokenExpiry);
if (expiryDate === null) {
throw new errs.AuthError(`Invalid expiry time: ${tokenExpiry}`);
}
const signed = await Token.create({
iss: "api",
attrs: {
id: userId,
},
scope: ["user"],
expiresIn: tokenExpiry,
});
return {
token: signed.token,
expires: expiryDate.toISOString(),
};
},
/**
* @param {Object} user
* @returns {Promise}

View File

@ -5,7 +5,7 @@ import { global as logger } from "../logger.js";
const keysFile = '/data/keys.json';
const mysqlEngine = 'mysql2';
const postgresEngine = 'pg';
const sqliteClientName = 'better-sqlite3';
const sqliteClientName = 'sqlite3';
let instance = null;
@ -25,12 +25,6 @@ const configure = () => {
if (configData?.database) {
logger.info(`Using configuration from file: ${filename}`);
// Migrate those who have "mysql" engine to "mysql2"
if (configData.database.engine === "mysql") {
configData.database.engine = mysqlEngine;
}
instance = configData;
instance.keys = getKeys();
return;
@ -39,12 +33,12 @@ const configure = () => {
const toBool = (v) => /^(1|true|yes|on)$/i.test((v || '').trim());
const envMysqlHost = process.env.DB_MYSQL_HOST || null;
const envMysqlUser = process.env.DB_MYSQL_USER || null;
const envMysqlName = process.env.DB_MYSQL_NAME || null;
const envMysqlSSL = toBool(process.env.DB_MYSQL_SSL);
const envMysqlSSLRejectUnauthorized = process.env.DB_MYSQL_SSL_REJECT_UNAUTHORIZED === undefined ? true : toBool(process.env.DB_MYSQL_SSL_REJECT_UNAUTHORIZED);
const envMysqlSSLVerifyIdentity = process.env.DB_MYSQL_SSL_VERIFY_IDENTITY === undefined ? true : toBool(process.env.DB_MYSQL_SSL_VERIFY_IDENTITY);
const envMysqlHost = process.env.DB_MYSQL_HOST || null;
const envMysqlUser = process.env.DB_MYSQL_USER || null;
const envMysqlName = process.env.DB_MYSQL_NAME || null;
const envMysqlSSL = toBool(process.env.DB_MYSQL_SSL);
const envMysqlSSLRejectUnauthorized = process.env.DB_MYSQL_SSL_REJECT_UNAUTHORIZED === undefined ? true : toBool(process.env.DB_MYSQL_SSL_REJECT_UNAUTHORIZED);
const envMysqlSSLVerifyIdentity = process.env.DB_MYSQL_SSL_VERIFY_IDENTITY === undefined ? true : toBool(process.env.DB_MYSQL_SSL_VERIFY_IDENTITY);
if (envMysqlHost && envMysqlUser && envMysqlName) {
// we have enough mysql creds to go with mysql
logger.info("Using MySQL configuration");
@ -84,7 +78,6 @@ const configure = () => {
}
const envSqliteFile = process.env.DB_SQLITE_FILE || "/data/database.sqlite";
logger.info(`Using Sqlite: ${envSqliteFile}`);
instance = {
database: {

View File

@ -15,7 +15,6 @@ const certbot = new signale.Signale({ scope: "Certbot ", ...opts });
const importer = new signale.Signale({ scope: "Importer ", ...opts });
const setup = new signale.Signale({ scope: "Setup ", ...opts });
const ipRanges = new signale.Signale({ scope: "IP Ranges", ...opts });
const remoteVersion = new signale.Signale({ scope: "Remote Version", ...opts });
const debug = (logger, ...args) => {
if (isDebugMode()) {
@ -23,4 +22,4 @@ const debug = (logger, ...args) => {
}
};
export { debug, global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges, remoteVersion };
export { debug, global, migrate, express, access, nginx, ssl, certbot, importer, setup, ipRanges };

View File

@ -2,9 +2,9 @@ import db from "./db.js";
import { migrate as logger } from "./logger.js";
const migrateUp = async () => {
const version = await db().migrate.currentVersion();
const version = await db.migrate.currentVersion();
logger.info("Current database version:", version);
return await db().migrate.latest({
return await db.migrate.latest({
tableName: "migrations",
directory: "migrations",
});

View File

@ -1,50 +0,0 @@
import { migrate as logger } from "../logger.js";
const migrateName = "redirect_auto_scheme";
/**
* Migrate
*
* @see http://knexjs.org/#Schema
*
* @param {Object} knex
* @returns {Promise}
*/
const up = (knex) => {
logger.info(`[${migrateName}] Migrating Up...`);
return knex.schema
.table("redirection_host", async (table) => {
// change the column default from $scheme to auto
await table.string("forward_scheme").notNull().defaultTo("auto").alter();
await knex('redirection_host')
.where('forward_scheme', '$scheme')
.update({ forward_scheme: 'auto' });
})
.then(() => {
logger.info(`[${migrateName}] redirection_host Table altered`);
});
};
/**
* Undo Migrate
*
* @param {Object} knex
* @returns {Promise}
*/
const down = (knex) => {
logger.info(`[${migrateName}] Migrating Down...`);
return knex.schema
.table("redirection_host", async (table) => {
await table.string("forward_scheme").notNull().defaultTo("$scheme").alter();
await knex('redirection_host')
.where('forward_scheme', 'auto')
.update({ forward_scheme: '$scheme' });
})
.then(() => {
logger.info(`[${migrateName}] redirection_host Table altered`);
});
};
export { up, down };

View File

@ -10,7 +10,7 @@ import now from "./now_helper.js";
import ProxyHostModel from "./proxy_host.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted", "satisfy_any", "pass_auth"];

View File

@ -6,7 +6,7 @@ import db from "../db.js";
import accessListModel from "./access_list.js";
import now from "./now_helper.js";
Model.knex(db());
Model.knex(db);
class AccessListAuth extends Model {
$beforeInsert() {

View File

@ -6,7 +6,7 @@ import db from "../db.js";
import accessListModel from "./access_list.js";
import now from "./now_helper.js";
Model.knex(db());
Model.knex(db);
class AccessListClient extends Model {
$beforeInsert() {

View File

@ -6,7 +6,7 @@ import db from "../db.js";
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
class AuditLog extends Model {
$beforeInsert() {

View File

@ -8,7 +8,7 @@ import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.j
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted"];

View File

@ -11,7 +11,7 @@ import redirectionHostModel from "./redirection_host.js";
import streamModel from "./stream.js";
import userModel from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted"];

View File

@ -8,7 +8,7 @@ import Certificate from "./certificate.js";
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted", "ssl_forced", "http2_support", "enabled", "hsts_enabled", "hsts_subdomains"];

View File

@ -2,7 +2,7 @@ import { Model } from "objection";
import db from "../db.js";
import { isSqlite } from "../lib/config.js";
Model.knex(db());
Model.knex(db);
export default () => {
if (isSqlite()) {

View File

@ -9,7 +9,7 @@ import Certificate from "./certificate.js";
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = [
"is_deleted",

View File

@ -8,7 +8,7 @@ import Certificate from "./certificate.js";
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = [
"is_deleted",

View File

@ -4,7 +4,7 @@
import { Model } from "objection";
import db from "../db.js";
Model.knex(db());
Model.knex(db);
class Setting extends Model {
$beforeInsert () {

View File

@ -5,7 +5,7 @@ import Certificate from "./certificate.js";
import now from "./now_helper.js";
import User from "./user.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted", "enabled", "tcp_forwarding", "udp_forwarding"];

View File

@ -7,7 +7,7 @@ import { convertBoolFieldsToInt, convertIntFieldsToBool } from "../lib/helpers.j
import now from "./now_helper.js";
import UserPermission from "./user_permission.js";
Model.knex(db());
Model.knex(db);
const boolFields = ["is_deleted", "is_disabled"];

View File

@ -5,7 +5,7 @@ import { Model } from "objection";
import db from "../db.js";
import now from "./now_helper.js";
Model.knex(db());
Model.knex(db);
class UserPermission extends Model {
$beforeInsert () {

View File

@ -12,38 +12,35 @@
"validate-schema": "node validate-schema.js"
},
"dependencies": {
"@apidevtools/json-schema-ref-parser": "^14.1.1",
"@apidevtools/json-schema-ref-parser": "^11.7.0",
"ajv": "^8.17.1",
"archiver": "^7.0.1",
"archiver": "^5.3.0",
"batchflow": "^0.4.0",
"bcrypt": "^6.0.0",
"better-sqlite3": "^12.6.2",
"body-parser": "^2.2.2",
"bcrypt": "^5.0.0",
"body-parser": "^1.20.3",
"compression": "^1.7.4",
"express": "^5.2.1",
"express": "^4.20.0",
"express-fileupload": "^1.5.2",
"gravatar": "^1.8.2",
"jsonwebtoken": "^9.0.3",
"knex": "3.1.0",
"liquidjs": "10.24.0",
"lodash": "^4.17.23",
"jsonwebtoken": "^9.0.2",
"knex": "2.4.2",
"liquidjs": "10.6.1",
"lodash": "^4.17.21",
"moment": "^2.30.1",
"mysql2": "^3.16.3",
"mysql2": "^3.15.3",
"node-rsa": "^1.1.1",
"objection": "3.1.5",
"otplib": "^13.2.1",
"objection": "3.0.1",
"path": "^0.12.7",
"pg": "^8.18.0",
"proxy-agent": "^6.5.0",
"pg": "^8.16.3",
"signale": "1.4.0",
"sqlite3": "^5.1.7",
"temp-write": "^4.0.0"
},
"devDependencies": {
"@apidevtools/swagger-parser": "^12.1.0",
"@biomejs/biome": "^2.3.14",
"chalk": "5.6.2",
"nodemon": "^3.1.11"
"@apidevtools/swagger-parser": "^10.1.0",
"@biomejs/biome": "^2.3.2",
"chalk": "4.1.2",
"nodemon": "^2.0.2"
},
"signale": {
"displayDate": true,

View File

@ -14,7 +14,6 @@ import schemaRoutes from "./schema.js";
import settingsRoutes from "./settings.js";
import tokensRoutes from "./tokens.js";
import usersRoutes from "./users.js";
import versionRoutes from "./version.js";
const router = express.Router({
caseSensitive: true,
@ -47,7 +46,6 @@ router.use("/users", usersRoutes);
router.use("/audit-log", auditLogRoutes);
router.use("/reports", reportsRoutes);
router.use("/settings", settingsRoutes);
router.use("/version", versionRoutes);
router.use("/nginx/proxy-hosts", proxyHostsRoutes);
router.use("/nginx/redirection-hosts", redirectionHostsRoutes);
router.use("/nginx/dead-hosts", deadHostsRoutes);

View File

@ -53,26 +53,4 @@ router
}
});
router
.route("/2fa")
.options((_, res) => {
res.sendStatus(204);
})
/**
* POST /tokens/2fa
*
* Verify 2FA code and get full token
*/
.post(async (req, res, next) => {
try {
const { challenge_token, code } = await apiValidator(getValidationSchema("/tokens/2fa", "post"), req.body);
const result = await internalToken.verify2FA(challenge_token, code);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
export default router;

View File

@ -1,5 +1,4 @@
import express from "express";
import internal2FA from "../internal/2fa.js";
import internalUser from "../internal/user.js";
import Access from "../lib/access.js";
import { isCI } from "../lib/config.js";
@ -326,130 +325,4 @@ router
}
});
/**
* User 2FA status
*
* /api/users/123/2fa
*/
router
.route("/:user_id/2fa")
.options((_, res) => {
res.sendStatus(204);
})
.all(jwtdecode())
.all(userIdFromMe)
/**
* POST /api/users/123/2fa
*
* Start 2FA setup, returns QR code URL
*/
.post(async (req, res, next) => {
try {
const result = await internal2FA.startSetup(res.locals.access, req.params.user_id);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
/**
* GET /api/users/123/2fa
*
* Get 2FA status for a user
*/
.get(async (req, res, next) => {
try {
const status = await internal2FA.getStatus(res.locals.access, req.params.user_id);
res.status(200).send(status);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
})
/**
* DELETE /api/users/123/2fa?code=XXXXXX
*
* Disable 2FA for a user
*/
.delete(async (req, res, next) => {
try {
const code = typeof req.query.code === "string" ? req.query.code : null;
if (!code) {
throw new errs.ValidationError("Missing required parameter: code");
}
await internal2FA.disable(res.locals.access, req.params.user_id, code);
res.status(200).send(true);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
/**
* User 2FA enable
*
* /api/users/123/2fa/enable
*/
router
.route("/:user_id/2fa/enable")
.options((_, res) => {
res.sendStatus(204);
})
.all(jwtdecode())
.all(userIdFromMe)
/**
* POST /api/users/123/2fa/enable
*
* Verify code and enable 2FA
*/
.post(async (req, res, next) => {
try {
const { code } = await apiValidator(
getValidationSchema("/users/{userID}/2fa/enable", "post"),
req.body,
);
const result = await internal2FA.enable(res.locals.access, req.params.user_id, code);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
/**
* User 2FA backup codes
*
* /api/users/123/2fa/backup-codes
*/
router
.route("/:user_id/2fa/backup-codes")
.options((_, res) => {
res.sendStatus(204);
})
.all(jwtdecode())
.all(userIdFromMe)
/**
* POST /api/users/123/2fa/backup-codes
*
* Regenerate backup codes
*/
.post(async (req, res, next) => {
try {
const { code } = await apiValidator(
getValidationSchema("/users/{userID}/2fa/backup-codes", "post"),
req.body,
);
const result = await internal2FA.regenerateBackupCodes(res.locals.access, req.params.user_id, code);
res.status(200).send(result);
} catch (err) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${err}`);
next(err);
}
});
export default router;

View File

@ -1,40 +0,0 @@
import express from "express";
import internalRemoteVersion from "../internal/remote-version.js";
import { debug, express as logger } from "../logger.js";
const router = express.Router({
caseSensitive: true,
strict: true,
mergeParams: true,
});
/**
* /api/version/check
*/
router
.route("/check")
.options((_, res) => {
res.sendStatus(204);
})
/**
* GET /api/version/check
*
* Check for available updates
*/
.get(async (req, res, _next) => {
try {
const data = await internalRemoteVersion.get();
res.status(200).send(data);
} catch (error) {
debug(logger, `${req.method.toUpperCase()} ${req.path}: ${error}`);
// Send 200 even though there's an error to avoid triggering update checks repeatedly
res.status(200).send({
current: null,
latest: null,
update_available: false,
});
}
});
export default router;

View File

@ -71,11 +71,6 @@
"propagation_seconds": {
"type": "integer",
"minimum": 0
},
"key_type": {
"type": "string",
"enum": ["rsa", "ecdsa"],
"default": "rsa"
}
},
"example": {

View File

@ -1,23 +0,0 @@
{
"type": "object",
"description": "Check Version object",
"additionalProperties": false,
"required": ["current", "latest", "update_available"],
"properties": {
"current": {
"type": ["string", "null"],
"description": "Current version string",
"example": "v2.10.1"
},
"latest": {
"type": ["string", "null"],
"description": "Latest version string",
"example": "v2.13.4"
},
"update_available": {
"type": "boolean",
"description": "Whether there's an update available",
"example": true
}
}
}

View File

@ -1,18 +0,0 @@
{
"type": "object",
"description": "Token object",
"required": ["requires_2fa", "challenge_token"],
"additionalProperties": false,
"properties": {
"requires_2fa": {
"description": "Whether this token request requires two-factor authentication",
"example": true,
"type": "boolean"
},
"challenge_token": {
"description": "Challenge Token used in subsequent 2FA verification",
"example": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
"type": "string"
}
}
}

View File

@ -1,55 +0,0 @@
{
"operationId": "loginWith2FA",
"summary": "Verify 2FA code and get full token",
"tags": ["tokens"],
"requestBody": {
"description": "2fa Challenge Payload",
"required": true,
"content": {
"application/json": {
"schema": {
"additionalProperties": false,
"properties": {
"challenge_token": {
"minLength": 1,
"type": "string",
"example": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4"
},
"code": {
"minLength": 6,
"maxLength": 8,
"type": "string",
"example": "012345"
}
},
"required": ["challenge_token", "code"],
"type": "object"
},
"example": {
"challenge_token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4",
"code": "012345"
}
}
}
},
"responses": {
"200": {
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"expires": "2025-02-04T20:40:46.340Z",
"token": "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.ey...xaHKYr3Kk6MvkUjcC4"
}
}
},
"schema": {
"$ref": "../../../components/token-object.json"
}
}
},
"description": "200 response"
}
}
}

View File

@ -50,14 +50,7 @@
}
},
"schema": {
"oneOf": [
{
"$ref": "../../components/token-object.json"
},
{
"$ref": "../../components/token-challenge.json"
}
]
"$ref": "../../components/token-object.json"
}
}
},

View File

@ -1,92 +0,0 @@
{
"operationId": "regenUser2faCodes",
"summary": "Regenerate 2FA backup codes",
"tags": ["users"],
"parameters": [
{
"in": "path",
"name": "userID",
"schema": {
"type": "integer",
"minimum": 1
},
"required": true,
"description": "User ID",
"example": 2
}
],
"requestBody": {
"description": "Verification Payload",
"required": true,
"content": {
"application/json": {
"schema": {
"additionalProperties": false,
"properties": {
"code": {
"minLength": 6,
"maxLength": 8,
"type": "string",
"example": "123456"
}
},
"required": ["code"],
"type": "object"
},
"example": {
"code": "123456"
}
}
}
},
"responses": {
"200": {
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"backup_codes": [
"6CD7CB06",
"495302F3",
"D8037852",
"A6FFC956",
"BC1A1851",
"A05E644F",
"A406D2E8",
"0AE3C522"
]
}
}
},
"schema": {
"type": "object",
"required": ["backup_codes"],
"additionalProperties": false,
"properties": {
"backup_codes": {
"description": "Backup codes",
"example": [
"6CD7CB06",
"495302F3",
"D8037852",
"A6FFC956",
"BC1A1851",
"A05E644F",
"A406D2E8",
"0AE3C522"
],
"type": "array",
"items": {
"type": "string",
"example": "6CD7CB06"
}
}
}
}
}
},
"description": "200 response"
}
}
}

View File

@ -1,48 +0,0 @@
{
"operationId": "disableUser2fa",
"summary": "Disable 2fa for user",
"tags": ["users"],
"parameters": [
{
"in": "path",
"name": "userID",
"schema": {
"type": "integer",
"minimum": 1
},
"required": true,
"description": "User ID",
"example": 2
},
{
"in": "query",
"name": "code",
"schema": {
"type": "string",
"minLength": 6,
"maxLength": 6,
"example": "012345"
},
"required": true,
"description": "2fa Code",
"example": "012345"
}
],
"responses": {
"200": {
"content": {
"application/json": {
"examples": {
"default": {
"value": true
}
},
"schema": {
"type": "boolean"
}
}
},
"description": "200 response"
}
}
}

View File

@ -1,92 +0,0 @@
{
"operationId": "enableUser2fa",
"summary": "Verify code and enable 2FA",
"tags": ["users"],
"parameters": [
{
"in": "path",
"name": "userID",
"schema": {
"type": "integer",
"minimum": 1
},
"required": true,
"description": "User ID",
"example": 2
}
],
"requestBody": {
"description": "Verification Payload",
"required": true,
"content": {
"application/json": {
"schema": {
"additionalProperties": false,
"properties": {
"code": {
"minLength": 6,
"maxLength": 8,
"type": "string",
"example": "123456"
}
},
"required": ["code"],
"type": "object"
},
"example": {
"code": "123456"
}
}
}
},
"responses": {
"200": {
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"backup_codes": [
"6CD7CB06",
"495302F3",
"D8037852",
"A6FFC956",
"BC1A1851",
"A05E644F",
"A406D2E8",
"0AE3C522"
]
}
}
},
"schema": {
"type": "object",
"required": ["backup_codes"],
"additionalProperties": false,
"properties": {
"backup_codes": {
"description": "Backup codes",
"example": [
"6CD7CB06",
"495302F3",
"D8037852",
"A6FFC956",
"BC1A1851",
"A05E644F",
"A406D2E8",
"0AE3C522"
],
"type": "array",
"items": {
"type": "string",
"example": "6CD7CB06"
}
}
}
}
}
},
"description": "200 response"
}
}
}

View File

@ -1,57 +0,0 @@
{
"operationId": "getUser2faStatus",
"summary": "Get user 2fa Status",
"tags": ["users"],
"security": [
{
"bearerAuth": []
}
],
"parameters": [
{
"in": "path",
"name": "userID",
"schema": {
"type": "integer",
"minimum": 1
},
"required": true,
"description": "User ID",
"example": 2
}
],
"responses": {
"200": {
"description": "200 response",
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"enabled": false,
"backup_codes_remaining": 0
}
}
},
"schema": {
"type": "object",
"additionalProperties": false,
"required": ["enabled", "backup_codes_remaining"],
"properties": {
"enabled": {
"type": "boolean",
"description": "Is 2FA enabled for this user",
"example": true
},
"backup_codes_remaining": {
"type": "integer",
"description": "Number of remaining backup codes for this user",
"example": 5
}
}
}
}
}
}
}
}

View File

@ -1,52 +0,0 @@
{
"operationId": "setupUser2fa",
"summary": "Start 2FA setup, returns QR code URL",
"tags": ["users"],
"parameters": [
{
"in": "path",
"name": "userID",
"schema": {
"type": "integer",
"minimum": 1
},
"required": true,
"description": "User ID",
"example": 2
}
],
"responses": {
"200": {
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"secret": "JZYCEBIEEJYUGPQM",
"otpauth_url": "otpauth://totp/Nginx%20Proxy%20Manager:jc%40jc21.com?secret=JZYCEBIEEJYUGPQM&period=30&digits=6&algorithm=SHA1&issuer=Nginx%20Proxy%20Manager"
}
}
},
"schema": {
"type": "object",
"required": ["secret", "otpauth_url"],
"additionalProperties": false,
"properties": {
"secret": {
"description": "TOTP Secret",
"example": "JZYCEBIEEJYUGPQM",
"type": "string"
},
"otpauth_url": {
"description": "OTP Auth URL for QR Code generation",
"example": "otpauth://totp/Nginx%20Proxy%20Manager:jc%40jc21.com?secret=JZYCEBIEEJYUGPQM&period=30&digits=6&algorithm=SHA1&issuer=Nginx%20Proxy%20Manager",
"type": "string"
}
}
}
}
},
"description": "200 response"
}
}
}

View File

@ -1,26 +0,0 @@
{
"operationId": "checkVersion",
"summary": "Returns any new version data from github",
"tags": ["public"],
"responses": {
"200": {
"description": "200 response",
"content": {
"application/json": {
"examples": {
"default": {
"value": {
"current": "v2.12.0",
"latest": "v2.13.4",
"update_available": true
}
}
},
"schema": {
"$ref": "../../../components/check-version-object.json"
}
}
}
}
}
}

View File

@ -293,16 +293,6 @@
"$ref": "./paths/tokens/post.json"
}
},
"/tokens/2fa": {
"post": {
"$ref": "./paths/tokens/2fa/post.json"
}
},
"/version/check": {
"get": {
"$ref": "./paths/version/check/get.json"
}
},
"/users": {
"get": {
"$ref": "./paths/users/get.json"
@ -322,27 +312,6 @@
"$ref": "./paths/users/userID/delete.json"
}
},
"/users/{userID}/2fa": {
"post": {
"$ref": "./paths/users/userID/2fa/post.json"
},
"get": {
"$ref": "./paths/users/userID/2fa/get.json"
},
"delete": {
"$ref": "./paths/users/userID/2fa/delete.json"
}
},
"/users/{userID}/2fa/enable": {
"post": {
"$ref": "./paths/users/userID/2fa/enable/post.json"
}
},
"/users/{userID}/2fa/backup-codes": {
"post": {
"$ref": "./paths/users/userID/2fa/backup-codes/post.json"
}
},
"/users/{userID}/auth": {
"put": {
"$ref": "./paths/users/userID/auth/put.json"

View File

@ -37,7 +37,7 @@ const setupDefaultUser = async () => {
const data = {
is_deleted: 0,
email: initialAdminEmail,
email: email,
name: "Administrator",
nickname: "Admin",
avatar: "",
@ -53,7 +53,7 @@ const setupDefaultUser = async () => {
.insert({
user_id: user.id,
type: "password",
secret: initialAdminPassword,
secret: password,
meta: {},
});

View File

@ -4,7 +4,7 @@
auth_basic "Authorization required";
auth_basic_user_file /data/access/{{ access_list_id }};
{% if access_list.pass_auth == 0 or access_list.pass_auth == false %}
{% if access_list.pass_auth == 0 or access_list.pass_auth == true %}
proxy_set_header Authorization "";
{% endif %}

View File

@ -12,9 +12,6 @@ server {
proxy_pass {{ forwarding_host }}:{{ forwarding_port }};
access_log /data/logs/stream-{{ id }}_access.log stream;
error_log /data/logs/stream-{{ id }}_error.log warn;
# Custom
include /data/nginx/custom/server_stream[.]conf;
include /data/nginx/custom/server_stream_tcp[.]conf;
@ -28,12 +25,9 @@ server {
proxy_pass {{ forwarding_host }}:{{ forwarding_port }};
access_log /data/logs/stream-{{ id }}_access.log stream;
error_log /data/logs/stream-{{ id }}_error.log warn;
# Custom
include /data/nginx/custom/server_stream[.]conf;
include /data/nginx/custom/server_stream_udp[.]conf;
}
{% endif %}
{% endif %}
{% endif %}

File diff suppressed because it is too large.

View File

@ -4,6 +4,7 @@
# This file assumes that the frontend has been built using ./scripts/frontend-build
FROM nginxproxymanager/testca AS testca
FROM letsencrypt/pebble AS pebbleca
FROM nginxproxymanager/nginx-full:certbot-node
ARG TARGETPLATFORM
@ -45,6 +46,7 @@ RUN yarn install \
# add late to limit cache-busting by modifications
COPY docker/rootfs /
COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
# Remove frontend service not required for prod, dev nginx config as well

View File

@ -1,6 +1,6 @@
AUTHENTIK_SECRET_KEY=gl8woZe8L6IIX8SC0c5Ocsj0xPkX5uJo5DVZCFl+L/QGbzuplfutYuua2ODNLEiDD3aFd9H2ylJmrke0
AUTHENTIK_REDIS__HOST=authentik-redis
AUTHENTIK_POSTGRESQL__HOST=pgdb.internal
AUTHENTIK_POSTGRESQL__HOST=db-postgres
AUTHENTIK_POSTGRESQL__USER=authentik
AUTHENTIK_POSTGRESQL__NAME=authentik
AUTHENTIK_POSTGRESQL__PASSWORD=07EKS5NLI6Tpv68tbdvrxfvj

View File

@ -1,4 +1,5 @@
FROM nginxproxymanager/testca AS testca
FROM letsencrypt/pebble AS pebbleca
FROM nginxproxymanager/nginx-full:certbot-node
LABEL maintainer="Jamie Curnow <jc@jc21.com>"
@ -32,6 +33,7 @@ RUN rm -f /etc/nginx/conf.d/production.conf \
&& chmod 644 -R /root/.cache
# Certs for testing purposes
COPY --from=pebbleca /test/certs/pebble.minica.pem /etc/ssl/certs/pebble.minica.pem
COPY --from=testca /home/step/certs/root_ca.crt /etc/ssl/certs/NginxProxyManager.crt
EXPOSE 80 81 443

View File

@ -0,0 +1,12 @@
{
"pebble": {
"listenAddress": "0.0.0.0:443",
"managementListenAddress": "0.0.0.0:15000",
"certificate": "test/certs/localhost/cert.pem",
"privateKey": "test/certs/localhost/key.pem",
"httpPort": 80,
"tlsPort": 443,
"ocspResponderURL": "",
"externalAccountBindingRequired": false
}
}

View File

@ -6,7 +6,7 @@ services:
fullstack:
environment:
DB_POSTGRES_HOST: "pgdb.internal"
DB_POSTGRES_HOST: "db-postgres"
DB_POSTGRES_PORT: "5432"
DB_POSTGRES_USER: "npm"
DB_POSTGRES_PASSWORD: "npmpass"
@ -27,9 +27,7 @@ services:
- psql_vol:/var/lib/postgresql/data
- ./ci/postgres:/docker-entrypoint-initdb.d
networks:
fulltest:
aliases:
- pgdb.internal
- fulltest
authentik-redis:
image: "redis:alpine"
@ -43,8 +41,6 @@ services:
timeout: 3s
volumes:
- redis_vol:/data
networks:
- fulltest
authentik:
image: ghcr.io/goauthentik/server:2024.10.1
@ -55,8 +51,6 @@ services:
depends_on:
- authentik-redis
- db-postgres
networks:
- fulltest
authentik-worker:
image: ghcr.io/goauthentik/server:2024.10.1
@ -67,8 +61,6 @@ services:
depends_on:
- authentik-redis
- db-postgres
networks:
- fulltest
authentik-ldap:
image: ghcr.io/goauthentik/ldap:2024.10.1
@ -79,8 +71,6 @@ services:
restart: unless-stopped
depends_on:
- authentik
networks:
- fulltest
volumes:
psql_vol:

View File

@ -3,34 +3,31 @@
# This is a base compose file, it should be extended with a
# docker-compose.ci.*.yml file
services:
fullstack:
image: "${IMAGE}:${BRANCH_LOWER}-ci-${BUILD_NUMBER}"
environment:
TZ: "${TZ:-Australia/Brisbane}"
DEBUG: "true"
CI: "true"
DEBUG: 'true'
CI: 'true'
FORCE_COLOR: 1
# Required for DNS Certificate provisioning in CI
LE_SERVER: "https://ca.internal/acme/acme/directory"
REQUESTS_CA_BUNDLE: "/etc/ssl/certs/NginxProxyManager.crt"
LE_SERVER: 'https://ca.internal/acme/acme/directory'
REQUESTS_CA_BUNDLE: '/etc/ssl/certs/NginxProxyManager.crt'
volumes:
- "npm_data_ci:/data"
- "npm_le_ci:/etc/letsencrypt"
- "./dev/letsencrypt.ini:/etc/letsencrypt.ini:ro"
- "./dev/resolv.conf:/etc/resolv.conf:ro"
- "/etc/localtime:/etc/localtime:ro"
- 'npm_data_ci:/data'
- 'npm_le_ci:/etc/letsencrypt'
- './dev/letsencrypt.ini:/etc/letsencrypt.ini:ro'
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
healthcheck:
test: ["CMD", "/usr/bin/check-health"]
interval: 10s
timeout: 3s
expose:
- "80/tcp"
- "81/tcp"
- "443/tcp"
- "1500/tcp"
- "1501/tcp"
- "1502/tcp"
- "1503/tcp"
- '80-81/tcp'
- '443/tcp'
- '1500-1503/tcp'
networks:
fulltest:
aliases:
@ -41,8 +38,8 @@ services:
stepca:
image: jc21/testca
volumes:
- "./dev/resolv.conf:/etc/resolv.conf:ro"
- "/etc/localtime:/etc/localtime:ro"
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
networks:
fulltest:
aliases:
@ -51,18 +48,18 @@ services:
pdns:
image: pschiffe/pdns-mysql:4.8
volumes:
- "/etc/localtime:/etc/localtime:ro"
- '/etc/localtime:/etc/localtime:ro'
environment:
PDNS_master: "yes"
PDNS_api: "yes"
PDNS_api_key: "npm"
PDNS_webserver: "yes"
PDNS_webserver_address: "0.0.0.0"
PDNS_webserver_password: "npm"
PDNS_webserver-allow-from: "127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8"
PDNS_version_string: "anonymous"
PDNS_master: 'yes'
PDNS_api: 'yes'
PDNS_api_key: 'npm'
PDNS_webserver: 'yes'
PDNS_webserver_address: '0.0.0.0'
PDNS_webserver_password: 'npm'
PDNS_webserver-allow-from: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
PDNS_version_string: 'anonymous'
PDNS_default_ttl: 1500
PDNS_allow_axfr_ips: "127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8"
PDNS_allow_axfr_ips: '127.0.0.0/8,192.0.0.0/8,10.0.0.0/8,172.0.0.0/8'
PDNS_gmysql_host: pdns-db
PDNS_gmysql_port: 3306
PDNS_gmysql_user: pdns
@ -79,14 +76,14 @@ services:
pdns-db:
image: mariadb
environment:
MYSQL_ROOT_PASSWORD: "pdns"
MYSQL_DATABASE: "pdns"
MYSQL_USER: "pdns"
MYSQL_PASSWORD: "pdns"
MYSQL_ROOT_PASSWORD: 'pdns'
MYSQL_DATABASE: 'pdns'
MYSQL_USER: 'pdns'
MYSQL_PASSWORD: 'pdns'
volumes:
- "pdns_mysql_vol:/var/lib/mysql"
- "/etc/localtime:/etc/localtime:ro"
- "./dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro"
- 'pdns_mysql_vol:/var/lib/mysql'
- '/etc/localtime:/etc/localtime:ro'
- './dev/pdns-db.sql:/docker-entrypoint-initdb.d/01_init.sql:ro'
networks:
- fulltest
@ -103,22 +100,22 @@ services:
context: ../
dockerfile: test/cypress/Dockerfile
environment:
HTTP_PROXY: "squid:3128"
HTTPS_PROXY: "squid:3128"
HTTP_PROXY: 'squid:3128'
HTTPS_PROXY: 'squid:3128'
volumes:
- "cypress_logs:/test/results"
- "./dev/resolv.conf:/etc/resolv.conf:ro"
- "/etc/localtime:/etc/localtime:ro"
command: cypress run --browser chrome --config-file=cypress/config/ci.mjs
- 'cypress_logs:/test/results'
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
command: cypress run --browser chrome --config-file=cypress/config/ci.js
networks:
- fulltest
squid:
image: ubuntu/squid
volumes:
- "./dev/squid.conf:/etc/squid/squid.conf:ro"
- "./dev/resolv.conf:/etc/resolv.conf:ro"
- "/etc/localtime:/etc/localtime:ro"
- './dev/squid.conf:/etc/squid/squid.conf:ro'
- './dev/resolv.conf:/etc/resolv.conf:ro'
- '/etc/localtime:/etc/localtime:ro'
networks:
- fulltest

View File

@ -32,7 +32,7 @@ services:
# DB_MYSQL_PASSWORD: 'npm'
# DB_MYSQL_NAME: 'npm'
# db-postgres:
DB_POSTGRES_HOST: "pgdb.internal"
DB_POSTGRES_HOST: "db-postgres"
DB_POSTGRES_PORT: "5432"
DB_POSTGRES_USER: "npm"
DB_POSTGRES_PASSWORD: "npmpass"
@ -81,6 +81,8 @@ services:
db-postgres:
image: postgres:17
container_name: npm2dev.db-postgres
networks:
- nginx_proxy_manager
environment:
POSTGRES_USER: "npm"
POSTGRES_PASSWORD: "npmpass"
@ -88,10 +90,6 @@ services:
volumes:
- psql_data:/var/lib/postgresql/data
- ./ci/postgres:/docker-entrypoint-initdb.d
networks:
nginx_proxy_manager:
aliases:
- pgdb.internal
stepca:
image: jc21/testca
@ -192,7 +190,7 @@ services:
- "../test/results:/results"
- "./dev/resolv.conf:/etc/resolv.conf:ro"
- "/etc/localtime:/etc/localtime:ro"
command: cypress run --browser chrome --config-file=cypress/config/ci.mjs
command: cypress run --browser chrome --config-file=cypress/config/ci.js
networks:
- nginx_proxy_manager

View File

@ -8,8 +8,8 @@ server {
set $port "80";
server_name localhost-nginx-proxy-manager;
access_log /data/logs/fallback_http_access.log standard;
error_log /data/logs/fallback_http_error.log warn;
access_log /data/logs/fallback_access.log standard;
error_log /data/logs/fallback_error.log warn;
include conf.d/include/assets.conf;
include conf.d/include/block-exploits.conf;
include conf.d/include/letsencrypt-acme-challenge.conf;
@ -30,7 +30,7 @@ server {
set $port "443";
server_name localhost;
access_log /data/logs/fallback_http_access.log standard;
access_log /data/logs/fallback_access.log standard;
error_log /dev/null crit;
include conf.d/include/ssl-ciphers.conf;
ssl_reject_handshake on;

View File

@ -5,9 +5,6 @@ if ($scheme = "http") {
if ($request_uri = /.well-known/acme-challenge/test-challenge) {
set $test "${test}T";
}
if ($http_x_forwarded_proto = "https") {
set $test "${test}S";
}
if ($test = H) {
return 301 https://$host$request_uri;
}

View File

@ -1,3 +0,0 @@
log_format stream '[$time_local] [Client $remote_addr:$remote_port] $protocol $status $bytes_sent $bytes_received $session_time [Sent-to $upstream_addr] [Sent $upstream_bytes_sent] [Received $upstream_bytes_received] [Time $upstream_connect_time] $ssl_protocol $ssl_cipher';
access_log /data/logs/fallback_stream_access.log stream;

View File

@ -1,4 +1,4 @@
log_format proxy '[$time_local] $upstream_cache_status $upstream_status $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] [Sent-to $server] "$http_user_agent" "$http_referer"';
log_format standard '[$time_local] $status - $request_method $scheme $host "$request_uri" [Client $remote_addr] [Length $body_bytes_sent] [Gzip $gzip_ratio] "$http_user_agent" "$http_referer"';
access_log /data/logs/fallback_http_access.log proxy;
access_log /data/logs/fallback_access.log proxy;

View File

@ -47,7 +47,7 @@ http {
proxy_cache_path /var/lib/nginx/cache/private levels=1:2 keys_zone=private-cache:5m max_size=1024m;
# Log format and fallback log file
include /etc/nginx/conf.d/include/log-proxy.conf;
include /etc/nginx/conf.d/include/log.conf;
# Dynamically generated resolvers file
include /etc/nginx/conf.d/include/resolvers.conf;
@ -85,9 +85,6 @@ http {
}
stream {
# Log format and fallback log file
include /etc/nginx/conf.d/include/log-stream.conf;
# Files generated by NPM
include /data/nginx/stream/*.conf;

File diff suppressed because it is too large


2
frontend/.gitignore vendored
View File

@ -1,5 +1,3 @@
src/locale/lang
# Logs
logs
*.log

View File

@ -1,5 +1,5 @@
{
"$schema": "https://biomejs.dev/schemas/2.3.14/schema.json",
"$schema": "https://biomejs.dev/schemas/2.3.2/schema.json",
"vcs": {
"enabled": true,
"clientKind": "git",

View File

@ -7,27 +7,13 @@
// - Also checks the error messages returned by the backend
const allLocales = [
["en", "en-US"],
["de", "de-DE"],
["pt", "pt-PT"],
["es", "es-ES"],
["fr", "fr-FR"],
["it", "it-IT"],
["ja", "ja-JP"],
["nl", "nl-NL"],
["pl", "pl-PL"],
["ru", "ru-RU"],
["sk", "sk-SK"],
["vi", "vi-VN"],
["zh", "zh-CN"],
["ko", "ko-KR"],
["bg", "bg-BG"],
["id", "id-ID"],
["tr", "tr-TR"],
["hu", "hu-HU"],
["en", "en-US"],
["fa", "fa-IR"],
];
const ignoreUnused = [/^.*$/];
const ignoreUnused = [
/^.*$/,
];
const { spawnSync } = require("child_process");
const fs = require("fs");
@ -68,95 +54,105 @@ const allWarnings = [];
const allKeys = [];
const checkLangList = (fullCode) => {
const key = "locale-" + fullCode;
if (typeof langList[key] === "undefined") {
allErrors.push("ERROR: `" + key + "` language does not exist in lang-list.json");
}
const key = "locale-" + fullCode;
if (typeof langList[key] === "undefined") {
allErrors.push(
"ERROR: `" + key + "` language does not exist in lang-list.json",
);
}
};
const compareLocale = (locale) => {
const projectLocaleKeys = Object.keys(allLocalesInProject);
// Check that locale contains the items used in the codebase
projectLocaleKeys.map((key) => {
if (typeof locale.data[key] === "undefined") {
allErrors.push("ERROR: `" + locale[0] + "` does not contain item: `" + key + "`");
}
return null;
});
// Check that locale contains all error.* items
BACKEND_ERRORS.forEach((key) => {
if (typeof locale.data[key] === "undefined") {
allErrors.push("ERROR: `" + locale[0] + "` does not contain item: `" + key + "`");
}
return null;
});
const projectLocaleKeys = Object.keys(allLocalesInProject);
// Check that locale contains the items used in the codebase
projectLocaleKeys.map((key) => {
if (typeof locale.data[key] === "undefined") {
allErrors.push(
"ERROR: `" + locale[0] + "` does not contain item: `" + key + "`",
);
}
return null;
});
// Check that locale contains all error.* items
BACKEND_ERRORS.forEach((key) => {
if (typeof locale.data[key] === "undefined") {
allErrors.push(
"ERROR: `" + locale[0] + "` does not contain item: `" + key + "`",
);
}
return null;
});
// Check that locale does not contain items not used in the codebase
const localeKeys = Object.keys(locale.data);
localeKeys.map((key) => {
let ignored = false;
ignoreUnused.map((regex) => {
if (key.match(regex)) {
ignored = true;
}
return null;
});
// Check that locale does not contain items not used in the codebase
const localeKeys = Object.keys(locale.data);
localeKeys.map((key) => {
let ignored = false;
ignoreUnused.map((regex) => {
if (key.match(regex)) {
ignored = true;
}
return null;
});
if (!ignored && typeof allLocalesInProject[key] === "undefined") {
// ensure this key doesn't exist in the backend errors either
if (!BACKEND_ERRORS.includes(key)) {
allErrors.push("ERROR: `" + locale[0] + "` contains unused item: `" + key + "`");
}
}
if (!ignored && typeof allLocalesInProject[key] === "undefined") {
// ensure this key doesn't exist in the backend errors either
if (!BACKEND_ERRORS.includes(key)) {
allErrors.push(
"ERROR: `" + locale[0] + "` contains unused item: `" + key + "`",
);
}
}
// Add this key to allKeys
if (allKeys.indexOf(key) === -1) {
allKeys.push(key);
}
return null;
});
// Add this key to allKeys
if (allKeys.indexOf(key) === -1) {
allKeys.push(key);
}
return null;
});
};
// Checks for any keys missing from this locale, that
// have been defined in any other locales
const checkForMissing = (locale) => {
allKeys.forEach((key) => {
if (typeof locale.data[key] === "undefined") {
allWarnings.push("WARN: `" + locale[0] + "` does not contain item: `" + key + "`");
}
return null;
});
allKeys.forEach((key) => {
if (typeof locale.data[key] === "undefined") {
allWarnings.push(
"WARN: `" + locale[0] + "` does not contain item: `" + key + "`",
);
}
return null;
});
};
// Load all locale data
allLocales.map((locale, idx) => {
checkLangList(locale[1]);
allLocales[idx].data = require("./src/locale/src/" + locale[0] + ".json");
return null;
checkLangList(locale[1]);
allLocales[idx].data = require("./src/locale/src/" + locale[0] + ".json");
return null;
});
// Verify all locale data
allLocales.map((locale) => {
compareLocale(locale);
checkForMissing(locale);
return null;
compareLocale(locale);
checkForMissing(locale);
return null;
});
if (allErrors.length) {
allErrors.map((err) => {
console.log("\x1b[31m%s\x1b[0m", err);
return null;
});
allErrors.map((err) => {
console.log("\x1b[31m%s\x1b[0m", err);
return null;
});
}
if (allWarnings.length) {
allWarnings.map((err) => {
console.log("\x1b[33m%s\x1b[0m", err);
return null;
});
allWarnings.map((err) => {
console.log("\x1b[33m%s\x1b[0m", err);
return null;
});
}
if (allErrors.length) {
process.exit(1);
process.exit(1);
}
console.log("\x1b[32m%s\x1b[0m", "Locale check passed");

View File

@ -5,7 +5,6 @@
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Nginx Proxy Manager</title>
<meta name="description" content="In The Office Planner" />
<link rel="preload" href="/images/logo-no-text.svg" as="image" type="image/svg+xml" fetchPriority="high">
<link
rel="apple-touch-icon"
sizes="180x180"

View File

@ -17,50 +17,50 @@
},
"dependencies": {
"@tabler/core": "^1.4.0",
"@tabler/icons-react": "^3.36.1",
"@tanstack/react-query": "^5.90.20",
"@tabler/icons-react": "^3.35.0",
"@tanstack/react-query": "^5.90.6",
"@tanstack/react-table": "^8.21.3",
"@uiw/react-textarea-code-editor": "^3.1.1",
"classnames": "^2.5.1",
"country-flag-icons": "^1.6.12",
"country-flag-icons": "^1.5.21",
"date-fns": "^4.1.0",
"ez-modal-react": "^1.0.5",
"formik": "^2.4.9",
"formik": "^2.4.6",
"generate-password-browser": "^1.1.0",
"humps": "^2.0.1",
"query-string": "^9.3.1",
"react": "^19.2.4",
"react": "^19.2.0",
"react-bootstrap": "^2.10.10",
"react-dom": "^19.2.4",
"react-intl": "^8.1.3",
"react-dom": "^19.2.0",
"react-intl": "^7.1.14",
"react-markdown": "^10.1.0",
"react-router-dom": "^7.13.0",
"react-router-dom": "^7.9.5",
"react-select": "^5.10.2",
"react-toastify": "^11.0.5",
"rooks": "^9.5.0"
"rooks": "^9.3.0"
},
"devDependencies": {
"@biomejs/biome": "^2.3.14",
"@formatjs/cli": "^6.12.2",
"@tanstack/react-query-devtools": "^5.91.3",
"@biomejs/biome": "^2.3.2",
"@formatjs/cli": "^6.7.4",
"@tanstack/react-query-devtools": "^5.90.2",
"@testing-library/dom": "^10.4.1",
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/react": "^16.3.2",
"@testing-library/react": "^16.3.0",
"@types/country-flag-icons": "^1.2.2",
"@types/humps": "^2.0.6",
"@types/react": "^19.2.10",
"@types/react-dom": "^19.2.3",
"@types/react": "^19.2.2",
"@types/react-dom": "^19.2.2",
"@types/react-table": "^7.7.20",
"@vitejs/plugin-react": "^5.1.3",
"happy-dom": "^20.5.0",
"@vitejs/plugin-react": "^5.1.0",
"happy-dom": "^20.0.10",
"postcss": "^8.5.6",
"postcss-simple-vars": "^7.0.1",
"sass": "^1.97.3",
"sass": "^1.93.3",
"tmp": "^0.2.5",
"typescript": "5.9.3",
"vite": "^7.3.1",
"vite-plugin-checker": "^0.12.0",
"vite-tsconfig-paths": "^6.0.5",
"vitest": "^4.0.18"
"vite": "^7.1.12",
"vite-plugin-checker": "^0.11.0",
"vite-tsconfig-paths": "^5.1.4",
"vitest": "^4.0.6"
}
}

View File

@ -13,15 +13,6 @@
--tblr-backdrop-opacity: 0.8 !important;
}
[data-bs-theme="dark"] .modal-content {
--tblr-modal-box-shadow: 0 0.5rem 1rem rgba(0, 0, 0, 0.15) !important;
}
[data-bs-theme="dark"] .modal-backdrop {
--tblr-backdrop-bg: #000 !important;
--tblr-backdrop-opacity: 0.65 !important;
}
.domain-name {
font-family: monospace;
}
@ -104,15 +95,3 @@ label.row {
border-radius: var(--tblr-border-radius) 0 0 var(--tblr-border-radius);
}
}
/* Fix for dropdown menus being clipped by table-responsive containers. */
.table-responsive .dropdown {
position: static;
}
/* Fix for Tabler scrollbar compensation */
@media (min-width: 992px) {
:host, :root {
margin-left: 0;
}
}

View File

@ -156,6 +156,7 @@ export async function del({ url, params }: DeleteArgs, abortController?: AbortCo
const method = "DELETE";
const headers = {
...buildAuthHeader(),
[contentTypeHeader]: "application/json",
};
const signal = abortController?.signal;
const response = await fetch(apiUrl, { method, headers, signal });

View File

@ -1,8 +0,0 @@
import * as api from "./base";
import type { VersionCheckResponse } from "./responseTypes";
export async function checkVersion(): Promise<VersionCheckResponse> {
return await api.get({
url: "/version/check",
});
}

View File

@ -1,22 +1,9 @@
import * as api from "./base";
import type { TokenResponse, TwoFactorChallengeResponse } from "./responseTypes";
import type { TokenResponse } from "./responseTypes";
export type LoginResponse = TokenResponse | TwoFactorChallengeResponse;
export function isTwoFactorChallenge(response: LoginResponse): response is TwoFactorChallengeResponse {
return "requires2fa" in response && response.requires2fa === true;
}
export async function getToken(identity: string, secret: string): Promise<LoginResponse> {
export async function getToken(identity: string, secret: string): Promise<TokenResponse> {
return await api.post({
url: "/tokens",
data: { identity, secret },
});
}
export async function verify2FA(challengeToken: string, code: string): Promise<TokenResponse> {
return await api.post({
url: "/tokens/2fa",
data: { challengeToken, code },
});
}
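A short usage sketch of the develop-side helpers above: `getToken` can resolve to either a plain token or a 2FA challenge, and the `isTwoFactorChallenge` guard decides whether a second call to `verify2FA` is needed (the `promptForCode` callback is a placeholder, not part of this API):

import { getToken, isTwoFactorChallenge, verify2FA } from "src/api/backend";

async function loginWithOptional2FA(identity: string, secret: string, promptForCode: () => Promise<string>) {
    const res = await getToken(identity, secret);
    if (isTwoFactorChallenge(res)) {
        // Server answered with { requires2fa: true, challengeToken } instead of a token.
        return verify2FA(res.challengeToken, await promptForCode());
    }
    return res; // already a TokenResponse
}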

View File

@ -1,4 +1,3 @@
export * from "./checkVersion";
export * from "./createAccessList";
export * from "./createCertificate";
export * from "./createDeadHost";
@ -60,4 +59,3 @@ export * from "./updateStream";
export * from "./updateUser";
export * from "./uploadCertificate";
export * from "./validateCertificate";
export * from "./twoFactor";

View File

@ -19,28 +19,3 @@ export interface ValidatedCertificateResponse {
export interface LoginAsTokenResponse extends TokenResponse {
user: User;
}
export interface VersionCheckResponse {
current: string | null;
latest: string | null;
updateAvailable: boolean;
}
export interface TwoFactorChallengeResponse {
requires2fa: boolean;
challengeToken: string;
}
export interface TwoFactorStatusResponse {
enabled: boolean;
backupCodesRemaining: number;
}
export interface TwoFactorSetupResponse {
secret: string;
otpauthUrl: string;
}
export interface TwoFactorEnableResponse {
backupCodes: string[];
}

View File

@ -1,37 +0,0 @@
import * as api from "./base";
import type { TwoFactorEnableResponse, TwoFactorSetupResponse, TwoFactorStatusResponse } from "./responseTypes";
export async function get2FAStatus(userId: number | "me"): Promise<TwoFactorStatusResponse> {
return await api.get({
url: `/users/${userId}/2fa`,
});
}
export async function start2FASetup(userId: number | "me"): Promise<TwoFactorSetupResponse> {
return await api.post({
url: `/users/${userId}/2fa`,
});
}
export async function enable2FA(userId: number | "me", code: string): Promise<TwoFactorEnableResponse> {
return await api.post({
url: `/users/${userId}/2fa/enable`,
data: { code },
});
}
export async function disable2FA(userId: number | "me", code: string): Promise<boolean> {
return await api.del({
url: `/users/${userId}/2fa`,
params: {
code,
},
});
}
export async function regenerateBackupCodes(userId: number | "me", code: string): Promise<TwoFactorEnableResponse> {
return await api.post({
url: `/users/${userId}/2fa/backup-codes`,
data: { code },
});
}
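A hedged sketch of the enrolment flow these helpers imply: request a setup secret, confirm it with a TOTP code, then keep the returned backup codes (the QR rendering and the `promptForCode` callback are placeholders, not part of the client):

import { enable2FA, start2FASetup } from "src/api/backend";

async function enrolTwoFactor(promptForCode: () => Promise<string>) {
    const { otpauthUrl } = await start2FASetup("me"); // otpauth:// URL for the authenticator app
    // ...render otpauthUrl as a QR code for the user, then confirm with a one-time code:
    const { backupCodes } = await enable2FA("me", await promptForCode());
    return backupCodes; // shown once; disable2FA and regenerateBackupCodes also require a code
}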

View File

@ -3,7 +3,7 @@ import cn from "classnames";
import { useFormikContext } from "formik";
import { useState } from "react";
import type { AccessListClient } from "src/api/backend";
import { intl, T } from "src/locale";
import { T } from "src/locale";
interface Props {
initialValues: AccessListClient[];
@ -65,8 +65,8 @@ export function AccessClientFields({ initialValues, name = "clients" }: Props) {
value={client.directive}
onChange={(e) => handleChange(idx, "directive", e.target.value)}
>
<option value="allow"><T id="action.allow" /></option>
<option value="deny"><T id="action.deny" /></option>
<option value="allow">Allow</option>
<option value="deny">Deny</option>
</select>
</span>
<input
@ -76,7 +76,7 @@ export function AccessClientFields({ initialValues, name = "clients" }: Props) {
autoComplete="off"
value={client.address}
onChange={(e) => handleChange(idx, "address", e.target.value)}
placeholder={intl.formatMessage({ id: "access-list.rule-source.placeholder" })}
placeholder="192.168.1.100 or 192.168.1.0/24 or 2001:0db8::/32"
/>
</div>
</div>
@ -112,7 +112,7 @@ export function AccessClientFields({ initialValues, name = "clients" }: Props) {
value="deny"
disabled
>
<option value="deny"><T id="action.deny" /></option>
<option value="deny">Deny</option>
</select>
</span>
<input

View File

@ -3,9 +3,8 @@ import { Field, useFormikContext } from "formik";
import type { ReactNode } from "react";
import Select, { type ActionMeta, components, type OptionProps } from "react-select";
import type { AccessList } from "src/api/backend";
import { useLocaleState } from "src/context";
import { useAccessLists } from "src/hooks";
import { formatDateTime, intl, T } from "src/locale";
import { DateTimeFormat, intl, T } from "src/locale";
interface AccessOption {
readonly value: number;
@ -33,7 +32,6 @@ interface Props {
label?: string;
}
export function AccessField({ name = "accessListId", label = "access-list", id = "accessListId" }: Props) {
const { locale } = useLocaleState();
const { isLoading, isError, error, data } = useAccessLists(["owner", "items", "clients"]);
const { setFieldValue } = useFormikContext();
@ -50,7 +48,7 @@ export function AccessField({ name = "accessListId", label = "access-list", id =
{
users: item?.items?.length,
rules: item?.clients?.length,
date: item?.createdOn ? formatDateTime(item?.createdOn, locale) : "N/A",
date: item?.createdOn ? DateTimeFormat(item?.createdOn) : "N/A",
},
),
icon: <IconLock size={14} className="text-lime" />,

View File

@ -5,7 +5,7 @@ import { useState } from "react";
import Select, { type ActionMeta } from "react-select";
import type { DNSProvider } from "src/api/backend";
import { useDnsProviders } from "src/hooks";
import { intl, T } from "src/locale";
import { T } from "src/locale";
import styles from "./DNSProviderFields.module.css";
interface DNSProviderOption {
@ -57,7 +57,7 @@ export function DNSProviderFields({ showBoundaryBox = false }: Props) {
id="dnsProvider"
closeMenuOnSelect={true}
isClearable={false}
placeholder={intl.formatMessage({ id: "certificates.dns.provider.placeholder" })}
placeholder="Select a Provider..."
isLoading={isLoading}
isSearchable
onChange={handleChange}
@ -116,7 +116,7 @@ export function DNSProviderFields({ showBoundaryBox = false }: Props) {
type="number"
className="form-control"
min={0}
max={7200}
max={600}
{...field}
/>
<small className="text-muted">

View File

@ -2,9 +2,8 @@ import { IconShield } from "@tabler/icons-react";
import { Field, useFormikContext } from "formik";
import Select, { type ActionMeta, components, type OptionProps } from "react-select";
import type { Certificate } from "src/api/backend";
import { useLocaleState } from "src/context";
import { useCertificates } from "src/hooks";
import { formatDateTime, intl, T } from "src/locale";
import { DateTimeFormat, intl, T } from "src/locale";
interface CertOption {
readonly value: number | "new";
@ -42,7 +41,6 @@ export function SSLCertificateField({
allowNew,
forHttp = true,
}: Props) {
const { locale } = useLocaleState();
const { isLoading, isError, error, data } = useCertificates();
const { values, setFieldValue } = useFormikContext();
const v: any = values || {};
@ -77,7 +75,7 @@ export function SSLCertificateField({
data?.map((cert: Certificate) => ({
value: cert.id,
label: cert.niceName,
subLabel: `${cert.provider === "letsencrypt" ? intl.formatMessage({ id: "lets-encrypt" }) : cert.provider} ${intl.formatMessage({ id: "expires.on" }, { date: cert.expiresOn ? formatDateTime(cert.expiresOn, locale) : "N/A" })}`,
subLabel: `${cert.provider === "letsencrypt" ? intl.formatMessage({ id: "lets-encrypt" }) : cert.provider} &mdash; ${intl.formatMessage({ id: "expires.on" }, { date: cert.expiresOn ? DateTimeFormat(cert.expiresOn) : "N/A" })}`,
icon: <IconShield size={14} className="text-pink" />,
})) || [];

View File

@ -5,11 +5,7 @@ import { useTheme } from "src/hooks";
import { changeLocale, getFlagCodeForLocale, localeOptions, T } from "src/locale";
import styles from "./LocalePicker.module.css";
interface Props {
menuAlign?: "start" | "end";
}
function LocalePicker({ menuAlign = "start" }: Props) {
function LocalePicker() {
const { locale, setLocale } = useLocaleState();
const { getTheme } = useTheme();
@ -27,24 +23,22 @@ function LocalePicker({ menuAlign = "start" }: Props) {
<button type="button" className={cns} data-bs-toggle="dropdown">
<Flag countryCode={getFlagCodeForLocale(locale)} />
</button>
<div
className={cn("dropdown-menu", {
"dropdown-menu-end": menuAlign === "end",
<div className="dropdown-menu">
{localeOptions.map((item) => {
return (
<a
className="dropdown-item"
href={`/locale/${item[0]}`}
key={`locale-${item[0]}`}
onClick={(e) => {
e.preventDefault();
changeTo(item[0]);
}}
>
<Flag countryCode={getFlagCodeForLocale(item[0])} /> <T id={`locale-${item[1]}`} />
</a>
);
})}
>
{localeOptions.map((item: any) => (
<a
className="dropdown-item"
href={`/locale/${item[0]}`}
key={`locale-${item[0]}`}
onClick={(e) => {
e.preventDefault();
changeTo(item[0]);
}}
>
<Flag countryCode={getFlagCodeForLocale(item[0])} /> <T id={`locale-${item[1]}`} />
</a>
))}
</div>
</div>
);

View File

@ -2,5 +2,5 @@ interface Props {
children: React.ReactNode;
}
export function SiteContainer({ children }: Props) {
return <div className="container-xl py-3 min-w-0 overflow-x-auto">{children}</div>;
return <div className="container-xl py-3">{children}</div>;
}

View File

@ -1,9 +1,8 @@
import { useCheckVersion, useHealth } from "src/hooks";
import { useHealth } from "src/hooks";
import { T } from "src/locale";
export function SiteFooter() {
const health = useHealth();
const { data: versionData } = useCheckVersion();
const getVersion = () => {
if (!health.data) {
@ -56,19 +55,6 @@ export function SiteFooter() {
{getVersion()}{" "}
</a>
</li>
{versionData?.updateAvailable && versionData?.latest && (
<li className="list-inline-item">
<a
href={`https://github.com/NginxProxyManager/nginx-proxy-manager/releases/tag/${versionData.latest}`}
className="link-warning fw-bold"
target="_blank"
rel="noopener"
title={`New version ${versionData.latest} is available`}
>
<T id="update-available" data={{ latestVersion: versionData.latest }} />
</a>
</li>
)}
</ul>
</div>
</div>

View File

@ -1,9 +1,9 @@
import { IconLock, IconLogout, IconShieldLock, IconUser } from "@tabler/icons-react";
import { IconLock, IconLogout, IconUser } from "@tabler/icons-react";
import { LocalePicker, NavLink, ThemeSwitcher } from "src/components";
import { useAuthState } from "src/context";
import { useUser } from "src/hooks";
import { T } from "src/locale";
import { showChangePasswordModal, showTwoFactorModal, showUserModal } from "src/modals";
import { showChangePasswordModal, showUserModal } from "src/modals";
import styles from "./SiteHeader.module.css";
export function SiteHeader() {
@ -25,7 +25,7 @@ export function SiteHeader() {
>
<span className="navbar-toggler-icon" />
</button>
<div className="navbar-brand navbar-brand-autodark pe-0 pe-md-3">
<div className="navbar-brand navbar-brand-autodark d-none-navbar-horizontal pe-0 pe-md-3">
<NavLink to="/">
<div className={styles.logo}>
<img
@ -48,11 +48,11 @@ export function SiteHeader() {
<ThemeSwitcher />
</div>
</div>
<div className="nav-item d-md-flex">
<div className="nav-item d-none d-md-flex me-3">
<div className="nav-item dropdown">
<a
href="/"
className="nav-link d-flex lh-1"
className="nav-link d-flex lh-1 p-0 px-2"
data-bs-toggle="dropdown"
aria-label="Open user menu"
>
@ -70,22 +70,6 @@ export function SiteHeader() {
</div>
</a>
<div className="dropdown-menu dropdown-menu-end dropdown-menu-arrow">
<div className="d-md-none">
{/* biome-ignore lint/a11y/noStaticElementInteractions lint/a11y/useKeyWithClickEvents: This div is not interactive. */}
<div className="p-2 pb-1 pe-1 d-flex align-items-center" onClick={e => e.stopPropagation()}>
<div className="ps-2 pe-1 me-auto">
<div>{currentUser?.nickname}</div>
<div className="mt-1 small text-secondary text-nowrap">
<T id={isAdmin ? "role.admin" : "role.standard-user"} />
</div>
</div>
<div className="d-flex align-items-center">
<ThemeSwitcher className="me-n2" />
<LocalePicker menuAlign="end" />
</div>
</div>
<div className="dropdown-divider" />
</div>
<a
href="?"
className="dropdown-item"
@ -108,17 +92,6 @@ export function SiteHeader() {
<IconLock width={18} />
<T id="user.change-password" />
</a>
<a
href="?"
className="dropdown-item"
onClick={(e) => {
e.preventDefault();
showTwoFactorModal("me");
}}
>
<IconShieldLock width={18} />
<T id="user.two-factor" />
</a>
<div className="dropdown-divider" />
<a
href="?"

View File

@ -176,17 +176,21 @@ const getMenuDropown = (item: MenuItem, onClick?: () => void) => {
};
export function SiteMenu() {
const closeMenu = () => setTimeout(() => {
const navbarToggler = document.querySelector<HTMLElement>(".navbar-toggler");
const navbarMenu = document.querySelector("#navbar-menu");
if (navbarToggler && navbarMenu?.classList.contains("show")) {
navbarToggler.click();
}
}, 300);
// This is hacky AF. But that's the price of using a non-react UI kit.
const closeMenus = () => {
const navMenus = document.querySelectorAll(".nav-item.dropdown");
navMenus.forEach((menu) => {
menu.classList.remove("show");
const dropdown = menu.querySelector(".dropdown-menu");
if (dropdown) {
dropdown.classList.remove("show");
}
});
};
return (
<header className="navbar-expand-md">
<div className="collapse navbar-collapse" id="navbar-menu">
<div className="collapse navbar-collapse">
<div className="navbar">
<div className="container-xl">
<div className="row flex-column flex-md-row flex-fill align-items-center">
@ -194,7 +198,7 @@ export function SiteMenu() {
<ul className="navbar-nav">
{menuItems.length > 0 &&
menuItems.map((item) => {
return getMenuItem(item, closeMenu);
return getMenuItem(item, closeMenus);
})}
</ul>
</div>

View File

@ -5,14 +5,5 @@ interface Props {
certificate?: Certificate;
}
export function CertificateFormatter({ certificate }: Props) {
let translation = "http-only";
if (certificate) {
translation = certificate.provider;
if (translation === "letsencrypt") {
translation = "lets-encrypt";
} else if (translation === "other") {
translation = "certificates.custom";
}
}
return <T id={translation} />;
return <T id={certificate ? "lets-encrypt" : "http-only"} />;
}

View File

@ -1,7 +1,6 @@
import cn from "classnames";
import { differenceInDays, isPast } from "date-fns";
import { useLocaleState } from "src/context";
import { formatDateTime, parseDate } from "src/locale";
import { differenceInDays, isPast, parseISO } from "date-fns";
import { DateTimeFormat } from "src/locale";
interface Props {
value: string;
@ -9,13 +8,11 @@ interface Props {
highlistNearlyExpired?: boolean;
}
export function DateFormatter({ value, highlightPast, highlistNearlyExpired }: Props) {
const { locale } = useLocaleState();
const d = parseDate(value);
const dateIsPast = d ? isPast(d) : false;
const days = d ? differenceInDays(d, new Date()) : 0;
const dateIsPast = isPast(parseISO(value));
const days = differenceInDays(parseISO(value), new Date());
const cl = cn({
"text-danger": highlightPast && dateIsPast,
"text-warning": highlistNearlyExpired && !dateIsPast && days <= 30 && days >= 0,
});
return <span className={cl}>{formatDateTime(value, locale)}</span>;
return <span className={cl}>{DateTimeFormat(value)}</span>;
}

View File

@ -1,7 +1,6 @@
import cn from "classnames";
import type { ReactNode } from "react";
import { useLocaleState } from "src/context";
import { formatDateTime, T } from "src/locale";
import { DateTimeFormat, T } from "src/locale";
interface Props {
domains: string[];
@ -11,44 +10,35 @@ interface Props {
color?: string;
}
const DomainLink = ({ domain, color }: { domain?: string; color?: string }) => {
const DomainLink = ({ domain, color }: { domain: string; color?: string }) => {
// when domain contains a wildcard, make the link go nowhere.
// Apparently the domain can be null or undefined sometimes.
// This try is just a safeguard to prevent the whole formatter from breaking.
if (!domain) return null;
try {
let onClick: ((e: React.MouseEvent) => void) | undefined;
if (domain.includes("*")) {
onClick = (e: React.MouseEvent) => e.preventDefault();
}
return (
<a
key={domain}
href={`http://${domain}`}
target="_blank"
onClick={onClick}
className={cn("badge", color ? `bg-${color}-lt` : null, "domain-name", "me-2")}
>
{domain}
</a>
);
} catch {
return null;
let onClick: ((e: React.MouseEvent) => void) | undefined;
if (domain.includes("*")) {
onClick = (e: React.MouseEvent) => e.preventDefault();
}
return (
<a
key={domain}
href={`http://${domain}`}
target="_blank"
onClick={onClick}
className={cn("badge", color ? `bg-${color}-lt` : null, "domain-name", "me-2")}
>
{domain}
</a>
);
};
export function DomainsFormatter({ domains, createdOn, niceName, provider, color }: Props) {
const { locale } = useLocaleState();
const elms: ReactNode[] = [];
if ((!domains || domains.length === 0) && !niceName) {
if (domains.length === 0 && !niceName) {
elms.push(
<span key="nice-name" className="badge bg-danger-lt me-2">
Unknown
</span>,
);
}
if (!domains || (niceName && provider !== "letsencrypt")) {
if (niceName && provider !== "letsencrypt") {
elms.push(
<span key="nice-name" className="badge bg-info-lt me-2">
{niceName}
@ -56,16 +46,14 @@ export function DomainsFormatter({ domains, createdOn, niceName, provider, color
);
}
if (domains) {
domains.map((domain: string) => elms.push(<DomainLink key={domain} domain={domain} color={color} />));
}
domains.map((domain: string) => elms.push(<DomainLink key={domain} domain={domain} color={color} />));
return (
<div className="flex-fill">
<div className="font-weight-medium">{...elms}</div>
{createdOn ? (
<div className="text-secondary mt-1">
<T id="created-on" data={{ date: formatDateTime(createdOn, locale) }} />
<T id="created-on" data={{ date: DateTimeFormat(createdOn) }} />
</div>
) : null}
</div>

View File

@ -1,8 +1,7 @@
import { IconArrowsCross, IconBolt, IconBoltOff, IconDisc, IconLock, IconShield, IconUser } from "@tabler/icons-react";
import cn from "classnames";
import type { AuditLog } from "src/api/backend";
import { useLocaleState } from "src/context";
import { formatDateTime, T } from "src/locale";
import { DateTimeFormat, T } from "src/locale";
const getEventValue = (event: AuditLog) => {
switch (event.objectType) {
@ -67,7 +66,6 @@ interface Props {
row: AuditLog;
}
export function EventFormatter({ row }: Props) {
const { locale } = useLocaleState();
return (
<div className="flex-fill">
<div className="font-weight-medium">
@ -75,7 +73,7 @@ export function EventFormatter({ row }: Props) {
<T id={`object.event.${row.action}`} tData={{ object: row.objectType }} />
&nbsp; &mdash; <span className="badge">{getEventValue(row)}</span>
</div>
<div className="text-secondary mt-1">{formatDateTime(row.createdOn, locale)}</div>
<div className="text-secondary mt-1">{DateTimeFormat(row.createdOn)}</div>
</div>
);
}

View File

@ -1,7 +1,5 @@
const defaultImg = "/images/default-avatar.jpg";
interface Props {
url?: string;
url: string;
name?: string;
}
export function GravatarFormatter({ url, name }: Props) {
@ -11,7 +9,7 @@ export function GravatarFormatter({ url, name }: Props) {
title={name}
className="avatar avatar-2 me-2"
style={{
backgroundImage: `url(${url || defaultImg})`,
backgroundImage: `url(${url})`,
}}
/>
</div>

View File

@ -1,5 +1,4 @@
import { useLocaleState } from "src/context";
import { formatDateTime, T } from "src/locale";
import { DateTimeFormat, T } from "src/locale";
interface Props {
value: string;
@ -7,7 +6,6 @@ interface Props {
disabled?: boolean;
}
export function ValueWithDateFormatter({ value, createdOn, disabled }: Props) {
const { locale } = useLocaleState();
return (
<div className="flex-fill">
<div className="font-weight-medium">
@ -15,7 +13,7 @@ export function ValueWithDateFormatter({ value, createdOn, disabled }: Props) {
</div>
{createdOn ? (
<div className={`text-secondary mt-1 ${disabled ? "text-red" : ""}`}>
<T id={disabled ? "disabled" : "created-on"} data={{ date: formatDateTime(createdOn, locale) }} />
<T id={disabled ? "disabled" : "created-on"} data={{ date: DateTimeFormat(createdOn) }} />
</div>
) : null}
</div>

View File

@ -12,12 +12,10 @@ interface TableLayoutProps<TFields> {
function TableLayout<TFields>(props: TableLayoutProps<TFields>) {
const hasRows = props.tableInstance.getRowModel().rows.length > 0;
return (
<div className="table-responsive">
<table className="table table-vcenter table-selectable mb-0">
{hasRows ? <TableHeader tableInstance={props.tableInstance} /> : null}
<TableBody {...props} />
</table>
</div>
<table className="table table-vcenter table-selectable mb-0">
{hasRows ? <TableHeader tableInstance={props.tableInstance} /> : null}
<TableBody {...props} />
</table>
);
}

View File

@ -1,28 +1,13 @@
import { useQueryClient } from "@tanstack/react-query";
import { createContext, type ReactNode, useContext, useState } from "react";
import { useIntervalWhen } from "rooks";
import {
getToken,
isTwoFactorChallenge,
loginAsUser,
refreshToken,
verify2FA,
type TokenResponse,
} from "src/api/backend";
import { getToken, loginAsUser, refreshToken, type TokenResponse } from "src/api/backend";
import AuthStore from "src/modules/AuthStore";
// 2FA challenge state
export interface TwoFactorChallenge {
challengeToken: string;
}
// Context
export interface AuthContextType {
authenticated: boolean;
twoFactorChallenge: TwoFactorChallenge | null;
login: (username: string, password: string) => Promise<void>;
verifyTwoFactor: (code: string) => Promise<void>;
cancelTwoFactor: () => void;
loginAs: (id: number) => Promise<void>;
logout: () => void;
token?: string;
@ -39,35 +24,17 @@ interface Props {
function AuthProvider({ children, tokenRefreshInterval = 5 * 60 * 1000 }: Props) {
const queryClient = useQueryClient();
const [authenticated, setAuthenticated] = useState(AuthStore.hasActiveToken());
const [twoFactorChallenge, setTwoFactorChallenge] = useState<TwoFactorChallenge | null>(null);
const handleTokenUpdate = (response: TokenResponse) => {
AuthStore.set(response);
setAuthenticated(true);
setTwoFactorChallenge(null);
};
const login = async (identity: string, secret: string) => {
const response = await getToken(identity, secret);
if (isTwoFactorChallenge(response)) {
setTwoFactorChallenge({ challengeToken: response.challengeToken });
return;
}
handleTokenUpdate(response);
};
const verifyTwoFactor = async (code: string) => {
if (!twoFactorChallenge) {
throw new Error("No 2FA challenge pending");
}
const response = await verify2FA(twoFactorChallenge.challengeToken, code);
handleTokenUpdate(response);
};
const cancelTwoFactor = () => {
setTwoFactorChallenge(null);
};
const loginAs = async (id: number) => {
const response = await loginAsUser(id);
AuthStore.add(response);
@ -102,15 +69,7 @@ function AuthProvider({ children, tokenRefreshInterval = 5 * 60 * 1000 }: Props)
true,
);
const value = {
authenticated,
twoFactorChallenge,
login,
verifyTwoFactor,
cancelTwoFactor,
loginAs,
logout,
};
const value = { authenticated, login, logout, loginAs };
return <AuthContext.Provider value={value}>{children}</AuthContext.Provider>;
}
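A minimal sketch of how a login form might consume the extended context on the develop side, assuming the provider is exposed through the `useAuthState` hook imported elsewhere in this diff (the form field names are illustrative):

import { useAuthState } from "src/context"; // assumption: the context above is exposed via this hook

function useLoginSubmit() {
    const { login, twoFactorChallenge, verifyTwoFactor } = useAuthState();
    // cancelTwoFactor() (also on the context) would return the form to the username/password step.
    return async (values: { email: string; password: string; code?: string }) => {
        if (twoFactorChallenge) {
            await verifyTwoFactor(values.code || ""); // completes the pending challenge
        } else {
            await login(values.email, values.password); // may set twoFactorChallenge instead of authenticating
        }
    };
}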

View File

@ -4,7 +4,6 @@ export * from "./useAuditLog";
export * from "./useAuditLogs";
export * from "./useCertificate";
export * from "./useCertificates";
export * from "./useCheckVersion";
export * from "./useDeadHost";
export * from "./useDeadHosts";
export * from "./useDnsProviders";

View File

@ -1,18 +0,0 @@
import { useQuery } from "@tanstack/react-query";
import { checkVersion, type VersionCheckResponse } from "src/api/backend";
const fetchVersion = () => checkVersion();
const useCheckVersion = (options = {}) => {
return useQuery<VersionCheckResponse, Error>({
queryKey: ["version-check"],
queryFn: fetchVersion,
refetchOnWindowFocus: false,
retry: 5,
refetchInterval: 30 * 1000, // 30 seconds
staleTime: 5 * 60 * 1000, // 5 mins
...options,
});
};
export { fetchVersion, useCheckVersion };

View File

@ -0,0 +1,15 @@
import { intlFormat, parseISO } from "date-fns";
const DateTimeFormat = (isoDate: string) =>
intlFormat(parseISO(isoDate), {
weekday: "long",
year: "numeric",
month: "numeric",
day: "numeric",
hour: "numeric",
minute: "numeric",
second: "numeric",
hour12: true,
});
export { DateTimeFormat };
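The develop side of this diff replaces `DateTimeFormat` with locale-aware `formatDateTime(value, locale)` and `parseDate(value)` helpers whose implementation is not shown here. A minimal sketch of what they might look like, built on the same date-fns primitives used above:

import { intlFormat, parseISO } from "date-fns";

export const parseDate = (isoDate?: string) => (isoDate ? parseISO(isoDate) : undefined);

export const formatDateTime = (isoDate: string, locale?: string) =>
    intlFormat(
        parseISO(isoDate),
        { year: "numeric", month: "numeric", day: "numeric", hour: "numeric", minute: "numeric" },
        { locale: locale || "en" },
    );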

149
frontend/src/locale/IntlProvider.tsx Executable file → Normal file
View File

@ -1,91 +1,36 @@
import { createIntl, createIntlCache } from "react-intl";
import langBg from "./lang/bg.json";
import langDe from "./lang/de.json";
import langPt from "./lang/pt.json";
import langEn from "./lang/en.json";
import langEs from "./lang/es.json";
import langFr from "./lang/fr.json";
import langGa from "./lang/ga.json";
import langId from "./lang/id.json";
import langIt from "./lang/it.json";
import langJa from "./lang/ja.json";
import langKo from "./lang/ko.json";
import langNl from "./lang/nl.json";
import langPl from "./lang/pl.json";
import langRu from "./lang/ru.json";
import langSk from "./lang/sk.json";
import langVi from "./lang/vi.json";
import langZh from "./lang/zh.json";
import langTr from "./lang/tr.json";
import langHu from "./lang/hu.json";
import langList from "./lang/lang-list.json";
// first item of each array should be the language code,
// not the country code
// Remember when adding to this list, also update check-locales.js script
const localeOptions = [
["en", "en-US", langEn],
["de", "de-DE", langDe],
["es", "es-ES", langEs],
["pt", "pt-PT", langPt],
["fr", "fr-FR", langFr],
["ga", "ga-IE", langGa],
["ja", "ja-JP", langJa],
["it", "it-IT", langIt],
["nl", "nl-NL", langNl],
["pl", "pl-PL", langPl],
["ru", "ru-RU", langRu],
["sk", "sk-SK", langSk],
["vi", "vi-VN", langVi],
["zh", "zh-CN", langZh],
["ko", "ko-KR", langKo],
["bg", "bg-BG", langBg],
["id", "id-ID", langId],
["tr", "tr-TR", langTr],
["hu", "hu-HU", langHu],
];
const localeOptions = [["en", "en-US"]];
const loadMessages = (locale?: string): typeof langList & typeof langEn => {
const thisLocale = (locale || "en").slice(0, 2);
// ensure this lang exists in localeOptions above, otherwise fallback to en
if (thisLocale === "en" || !localeOptions.some(([code]) => code === thisLocale)) {
return Object.assign({}, langList, langEn);
}
return Object.assign({}, langList, langEn, localeOptions.find(([code]) => code === thisLocale)?.[2]);
const thisLocale = locale || "en";
switch (thisLocale.slice(0, 2)) {
default:
return Object.assign({}, langList, langEn);
}
};
const getFlagCodeForLocale = (locale?: string) => {
const thisLocale = (locale || "en").slice(0, 2);
// only add to this if your flag is different from the locale code
const specialCases: Record<string, string> = {
ja: "jp", // Japan
zh: "cn", // China
vi: "vn", // Vietnam
ko: "kr", // Korea
};
if (specialCases[thisLocale]) {
return specialCases[thisLocale].toUpperCase();
}
return thisLocale.toUpperCase();
switch (locale) {
default:
return "EN";
}
};
const getLocale = (short = false) => {
let loc = window.localStorage.getItem("locale");
if (!loc) {
loc = document.documentElement.lang;
}
if (short) {
return loc.slice(0, 2);
}
// finally, fallback
if (!loc) {
loc = "en";
}
return loc;
let loc = window.localStorage.getItem("locale");
if (!loc) {
loc = document.documentElement.lang;
}
if (short) {
return loc.slice(0, 2);
}
return loc;
};
const cache = createIntlCache();
@ -94,43 +39,41 @@ const initialMessages = loadMessages(getLocale());
let intl = createIntl({ locale: getLocale(), messages: initialMessages }, cache);
const changeLocale = (locale: string): void => {
const messages = loadMessages(locale);
intl = createIntl({ locale, messages }, cache);
window.localStorage.setItem("locale", locale);
document.documentElement.lang = locale;
const messages = loadMessages(locale);
intl = createIntl({ locale, messages }, cache);
window.localStorage.setItem("locale", locale);
document.documentElement.lang = locale;
};
// This is a translation component that wraps the translation in a span with a data
// attribute so devs can inspect the element to see the translation ID
const T = ({
id,
data,
tData,
id,
data,
tData,
}: {
id: string;
data?: Record<string, string | number | undefined>;
tData?: Record<string, string>;
id: string;
data?: Record<string, string | number | undefined>;
tData?: Record<string, string>;
}) => {
const translatedData: Record<string, string> = {};
if (tData) {
// iterate over tData and translate each value
Object.entries(tData).forEach(([key, value]) => {
translatedData[key] = intl.formatMessage({ id: value });
});
}
return (
<span data-translation-id={id}>
{intl.formatMessage(
{ id },
{
...data,
...translatedData,
},
)}
</span>
);
const translatedData: Record<string, string> = {};
if (tData) {
// iterate over tData and translate each value
Object.entries(tData).forEach(([key, value]) => {
translatedData[key] = intl.formatMessage({ id: value });
});
}
return (
<span data-translation-id={id}>
{intl.formatMessage(
{ id },
{
...data,
...translatedData,
},
)}
</span>
);
};
//console.log("L:", localeOptions);
export { localeOptions, getFlagCodeForLocale, getLocale, createIntl, changeLocale, intl, T };
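Adding a locale on the develop side appears to take three registrations: import the messages, list them in `localeOptions` above, and mirror the entry in check-locales.js shown earlier. A comment-only sketch with a placeholder locale code `xx` (purely illustrative):

// IntlProvider.tsx (develop side), illustrative entries only:
//   import langXx from "./lang/xx.json";
//   ["xx", "xx-XX", langXx],   // appended to localeOptions
//
// check-locales.js:
//   ["xx", "xx-XX"],           // appended to allLocales
//
// lang-list.json must also contain a "locale-xx-XX" label,
// otherwise checkLangList() in check-locales.js reports an error.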

Some files were not shown because too many files have changed in this diff