Mirror of https://github.com/dani-garcia/vaultwarden.git (synced 2025-11-06 20:33:12 -05:00)
Optimizations and build speedup (#6339)
With this commit I have changed several components to be more efficient; this shows up as fewer generated llvm-lines and fewer `clone()` calls.

### Config
- Re-ordered the `make_config` macro to be more efficient
- Created a custom Deserializer for `ConfigBuilder`, which needs less code and is more efficient
- Used structs for the `prepare_json` function instead of generating a custom JSON object. This generates less code and is more efficient.
- Updated the `get_support_string` function to handle the masking differently. This generates less code and also allowed removing some sub-macro calls.

### Error
- Added an extra `new` call to prevent duplicate Strings in the generated macro code. This generates fewer llvm-lines and seems to be more efficient.
- Created a custom Serializer for `ApiError` and `CompactApiError`. This makes those structs smaller, which is better for memory, and also produces fewer llvm-lines.

### General
- Removed `once_cell` and replaced it everywhere with Rust's std `LazyLock` (see the sketch below)
- Added and fixed some Clippy lints, which reduced `clone()` calls, for example
- Updated the build profiles for more efficiency. Also added a new profile specifically for CI, which should shorten the build checks.
- Updated several GitHub Workflows for better security and to use the new `ci` build profile
- Updated to Rust v1.90.0, which uses the new `rust-lld` linker and should help builds go faster
- Updated the Cargo.toml for all crates to better use the `workspace` variables
- Added a `typos` workflow and pre-commit hook, which should help detect spelling errors. Also fixed a few it found.

Follow-up commits squashed into this PR:

* Fix release profile
* Update typos and remove mimalloc check from pre-commit checks
* Misc fixes and updated typos
* Update crates and workflows
* Fix formatting and pre-commit
* Update to Rust v1.91 and update crates
* Update web-vault to v2025.10.1 and xx to v1.8.0

Signed-off-by: BlackDex <black.dex@gmail.com>
Committed by: GitHub
Parent: 8d30285160
Commit: 9017ca265a
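The `once_cell` to `LazyLock` change called out under *General* follows the standard-library pattern sketched below. This is a minimal, hypothetical example (the static name and value are made up, not taken from this commit):

```rust
use std::path::PathBuf;
use std::sync::LazyLock;

// Hypothetical static: with the once_cell crate this would have been
// `static DATA_DIR: once_cell::sync::Lazy<PathBuf> = Lazy::new(|| ...);`
// std's LazyLock (stable since Rust 1.80) needs no extra dependency.
static DATA_DIR: LazyLock<PathBuf> = LazyLock::new(|| PathBuf::from("data"));

fn main() {
    // The closure runs on first access; later accesses reuse the cached value.
    println!("data dir: {}", DATA_DIR.display());
}
```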
.github/workflows/build.yml (vendored): 41 changed lines

@@ -14,6 +14,7 @@ on:
       - "diesel.toml"
       - "docker/Dockerfile.j2"
       - "docker/DockerSettings.yaml"
+      - "macros/**"

   pull_request:
     paths:
@@ -27,13 +28,11 @@ on:
       - "diesel.toml"
       - "docker/Dockerfile.j2"
       - "docker/DockerSettings.yaml"
+      - "macros/**"

 jobs:
   build:
     name: Build and Test ${{ matrix.channel }}
-    permissions:
-      actions: write
-      contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 120
     # Make warnings errors, this is to prevent warnings slipping through.
@@ -81,7 +80,7 @@ jobs:

      # Only install the clippy and rustfmt components on the default rust-toolchain
      - name: "Install rust-toolchain version"
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @ Aug 23, 2025, 3:20 AM GMT+2
+        uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2
        if: ${{ matrix.channel == 'rust-toolchain' }}
        with:
          toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -91,7 +90,7 @@ jobs:

      # Install the any other channel to be used for which we do not execute clippy and rustfmt
      - name: "Install MSRV version"
-        uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 # master @ Aug 23, 2025, 3:20 AM GMT+2
+        uses: dtolnay/rust-toolchain@6d653acede28d24f02e3cd41383119e8b1b35921 # master @ Sep 16, 2025, 8:37 PM GMT+2
        if: ${{ matrix.channel != 'rust-toolchain' }}
        with:
          toolchain: "${{steps.toolchain.outputs.RUST_TOOLCHAIN}}"
@@ -121,43 +120,55 @@ jobs:
      # Use a custom prefix-key to force a fresh start. This is sometimes needed with bigger changes.
      # Like changing the build host from Ubuntu 20.04 to 22.04 for example.
      # Only update when really needed! Use a <year>.<month>[.<inc>] format.
-          prefix-key: "v2023.07-rust"
+          prefix-key: "v2025.09-rust"
      # End Enable Rust Caching

      # Run cargo tests
      # First test all features together, afterwards test them separately.
+      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc,s3"
+        id: test_sqlite_mysql_postgresql_mimalloc_s3
+        if: ${{ !cancelled() }}
+        run: |
+          cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3
+
+      - name: "test features: sqlite,mysql,postgresql,enable_mimalloc"
+        id: test_sqlite_mysql_postgresql_mimalloc
+        if: ${{ !cancelled() }}
+        run: |
+          cargo test --profile ci --features sqlite,mysql,postgresql,enable_mimalloc
+
      - name: "test features: sqlite,mysql,postgresql"
        id: test_sqlite_mysql_postgresql
        if: ${{ !cancelled() }}
        run: |
-          cargo test --features sqlite,mysql,postgresql
+          cargo test --profile ci --features sqlite,mysql,postgresql

      - name: "test features: sqlite"
        id: test_sqlite
        if: ${{ !cancelled() }}
        run: |
-          cargo test --features sqlite
+          cargo test --profile ci --features sqlite

      - name: "test features: mysql"
        id: test_mysql
        if: ${{ !cancelled() }}
        run: |
-          cargo test --features mysql
+          cargo test --profile ci --features mysql

      - name: "test features: postgresql"
        id: test_postgresql
        if: ${{ !cancelled() }}
        run: |
-          cargo test --features postgresql
+          cargo test --profile ci --features postgresql
      # End Run cargo tests


      # Run cargo clippy, and fail on warnings
-      - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc"
+      - name: "clippy features: sqlite,mysql,postgresql,enable_mimalloc,s3"
        id: clippy
        if: ${{ !cancelled() && matrix.channel == 'rust-toolchain' }}
        run: |
-          cargo clippy --features sqlite,mysql,postgresql,enable_mimalloc
+          cargo clippy --profile ci --features sqlite,mysql,postgresql,enable_mimalloc,s3
      # End Run cargo clippy


@@ -175,6 +186,8 @@ jobs:
      - name: "Some checks failed"
        if: ${{ failure() }}
        env:
+          TEST_DB_M_S3: ${{ steps.test_sqlite_mysql_postgresql_mimalloc_s3.outcome }}
+          TEST_DB_M: ${{ steps.test_sqlite_mysql_postgresql_mimalloc.outcome }}
          TEST_DB: ${{ steps.test_sqlite_mysql_postgresql.outcome }}
          TEST_SQLITE: ${{ steps.test_sqlite.outcome }}
          TEST_MYSQL: ${{ steps.test_mysql.outcome }}
@@ -186,11 +199,13 @@ jobs:
          echo "" >> "${GITHUB_STEP_SUMMARY}"
          echo "|Job|Status|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|---|------|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql,enable_mimalloc,s3)|${TEST_DB_M_S3}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|test (sqlite,mysql,postgresql,enable_mimalloc)|${TEST_DB_M}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (sqlite,mysql,postgresql)|${TEST_DB}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (sqlite)|${TEST_SQLITE}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (mysql)|${TEST_MYSQL}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|test (postgresql)|${TEST_POSTGRESQL}|" >> "${GITHUB_STEP_SUMMARY}"
-          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
+          echo "|clippy (sqlite,mysql,postgresql,enable_mimalloc,s3)|${CLIPPY}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "|fmt|${FMT}|" >> "${GITHUB_STEP_SUMMARY}"
          echo "" >> "${GITHUB_STEP_SUMMARY}"
          echo "Please check the failed jobs and fix where needed." >> "${GITHUB_STEP_SUMMARY}"
.github/workflows/check-templates.yml (vendored): 2 changed lines

@@ -6,8 +6,6 @@ on: [ push, pull_request ]
 jobs:
   docker-templates:
     name: Validate docker templates
-    permissions:
-      contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 30

.github/workflows/hadolint.yml (vendored): 7 changed lines

@@ -1,13 +1,12 @@
 name: Hadolint
-permissions: {}

 on: [ push, pull_request ]
+permissions: {}

 jobs:
   hadolint:
     name: Validate Dockerfile syntax
-    permissions:
-      contents: read
     runs-on: ubuntu-24.04
     timeout-minutes: 30

@@ -31,7 +30,7 @@ jobs:
          sudo curl -L https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VERSION}/hadolint-$(uname -s)-$(uname -m) -o /usr/local/bin/hadolint && \
          sudo chmod +x /usr/local/bin/hadolint
        env:
-          HADOLINT_VERSION: 2.13.1
+          HADOLINT_VERSION: 2.14.0
      # End Download hadolint
      # Checkout the repo
      - name: Checkout
.github/workflows/release.yml (vendored): 20 changed lines

@@ -21,10 +21,10 @@ jobs:
     name: Build Vaultwarden containers
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     permissions:
-      packages: write
+      packages: write # Needed to upload packages and artifacts
       contents: read
-      attestations: write
-      id-token: write
+      attestations: write # Needed to generate an artifact attestation for a build
+      id-token: write # Needed to mint the OIDC token necessary to request a Sigstore signing certificate
     runs-on: ubuntu-24.04
     timeout-minutes: 120
     # Start a local docker registry to extract the compiled binaries to upload as artifacts and attest them
@@ -103,7 +103,7 @@ jobs:

      # Login to Docker Hub
      - name: Login to Docker Hub
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
@@ -119,7 +119,7 @@ jobs:

      # Login to GitHub Container Registry
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -136,7 +136,7 @@ jobs:

      # Login to Quay.io
      - name: Login to Quay.io
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
@@ -275,25 +275,25 @@ jobs:

      # Upload artifacts to Github Actions and Attest the binaries
      - name: "Upload amd64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-amd64-${{ matrix.base_image }}
          path: vaultwarden-amd64-${{ matrix.base_image }}

      - name: "Upload arm64 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-arm64-${{ matrix.base_image }}
          path: vaultwarden-arm64-${{ matrix.base_image }}

      - name: "Upload armv7 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv7-${{ matrix.base_image }}
          path: vaultwarden-armv7-${{ matrix.base_image }}

      - name: "Upload armv6 artifact ${{ matrix.base_image }}"
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: vaultwarden-${{ env.SOURCE_VERSION }}-linux-armv6-${{ matrix.base_image }}
          path: vaultwarden-armv6-${{ matrix.base_image }}
.github/workflows/releasecache-cleanup.yml (vendored): 2 changed lines

@@ -16,7 +16,7 @@ jobs:
   releasecache-cleanup:
     name: Releasecache Cleanup
     permissions:
-      packages: write
+      packages: write # To be able to cleanup old caches
     runs-on: ubuntu-24.04
     continue-on-error: true
     timeout-minutes: 30
.github/workflows/trivy.yml (vendored): 6 changed lines

@@ -23,9 +23,7 @@ jobs:
     if: ${{ github.repository == 'dani-garcia/vaultwarden' }}
     name: Trivy Scan
     permissions:
-      contents: read
-      actions: read
-      security-events: write
+      security-events: write # To write the security report
     runs-on: ubuntu-24.04
     timeout-minutes: 30

@@ -48,6 +46,6 @@ jobs:
          severity: CRITICAL,HIGH

      - name: Upload Trivy scan results to GitHub Security tab
-        uses: github/codeql-action/upload-sarif@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
+        uses: github/codeql-action/upload-sarif@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
        with:
          sarif_file: 'trivy-results.sarif'
.github/workflows/typos.yml (vendored, new file): 22 added lines

@@ -0,0 +1,22 @@
+name: Code Spell Checking
+
+on: [ push, pull_request ]
+permissions: {}
+
+jobs:
+  typos:
+    name: Run typos spell checking
+    runs-on: ubuntu-24.04
+    timeout-minutes: 30
+
+    steps:
+      # Checkout the repo
+      - name: Checkout
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
+        with:
+          persist-credentials: false
+      # End Checkout the repo
+
+      # When this version is updated, do not forget to update this in `.pre-commit-config.yaml` too
+      - name: Spell Check Repo
+        uses: crate-ci/typos@07d900b8fa1097806b8adb6391b0d3e0ac2fdea7 # v1.39.0
.github/workflows/zizmor.yml (vendored): 2 changed lines

@@ -13,7 +13,7 @@ jobs:
     name: Run zizmor
     runs-on: ubuntu-latest
     permissions:
-      security-events: write
+      security-events: write # To write the security report
     steps:
       - name: Checkout repository
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
@@ -1,7 +1,7 @@
 ---
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v6.0.0
+    rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # v6.0.0
     hooks:
       - id: check-yaml
       - id: check-json
@@ -22,14 +22,15 @@ repos:
        description: Format files with cargo fmt.
        entry: cargo fmt
        language: system
-        types: [rust]
+        always_run: true
+        pass_filenames: false
        args: ["--", "--check"]
      - id: cargo-test
        name: cargo test
        description: Test the package for errors.
        entry: cargo test
        language: system
-        args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--"]
+        args: ["--features", "sqlite,mysql,postgresql", "--"]
        types_or: [rust, file]
        files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
        pass_filenames: false
@@ -38,7 +39,7 @@ repos:
        description: Lint Rust sources
        entry: cargo clippy
        language: system
-        args: ["--features", "sqlite,mysql,postgresql,enable_mimalloc", "--", "-D", "warnings"]
+        args: ["--features", "sqlite,mysql,postgresql", "--", "-D", "warnings"]
        types_or: [rust, file]
        files: (Cargo.toml|Cargo.lock|rust-toolchain.toml|rustfmt.toml|.*\.rs$)
        pass_filenames: false
@@ -50,3 +51,8 @@ repos:
        args:
          - "-c"
          - "cd docker && make"
+  # When this version is updated, do not forget to update this in `.github/workflows/typos.yaml` too
+  - repo: https://github.com/crate-ci/typos
+    rev: 07d900b8fa1097806b8adb6391b0d3e0ac2fdea7 # v1.39.0
+    hooks:
+      - id: typos
.typos.toml (new file): 26 added lines

@@ -0,0 +1,26 @@
+[files]
+extend-exclude = [
+    ".git/",
+    "playwright/",
+    "*.js", # Ignore all JavaScript files
+    "!admin*.js", # Except our own JavaScript files
+]
+ignore-hidden = false
+
+[default]
+extend-ignore-re = [
+    # We use this in place of the reserved type identifier at some places
+    "typ",
+    # In SMTP it's called HELO, so ignore it
+    "(?i)helo_name",
+    "Server name sent during.+HELO",
+    # COSE Is short for CBOR Object Signing and Encryption, ignore these specific items
+    "COSEKey",
+    "COSEAlgorithm",
+    # Ignore this specific string as it's valid
+    "Ensure they are valid OTPs",
+    # This word is misspelled upstream
+    # https://github.com/bitwarden/server/blob/dff9f1cf538198819911cf2c20f8cda3307701c5/src/Notifications/HubHelpers.cs#L86
+    # https://github.com/bitwarden/clients/blob/9612a4ac45063e372a6fbe87eb253c7cb3c588fb/libs/common/src/auth/services/anonymous-hub.service.ts#L45
+    "AuthRequestResponseRecieved",
+]
Cargo.lock (generated): 160 changed lines

@@ -33,9 +33,9 @@ dependencies = [
 name = "aho-corasick"
-version = "1.1.3"
+version = "1.1.4"
-checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301"
@@ -76,6 +76,15 @@ version = "1.0.100"
+[[package]]
+name = "ar_archive_writer"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0c269894b6fe5e9d7ada0cf69b5bf847ff35bc25fc271f08e1d080fce80339a"
+dependencies = [
+ "object",
+]
@@ -589,9 +598,9 @@ dependencies = [
 name = "aws-smithy-runtime-api"
-version = "1.9.1"
+version = "1.9.2"
-checksum = "3683c5b152d2ad753607179ed71988e8cfd52964443b4f74fd8e552d0bbfeb46"
+checksum = "ec7204f9fd94749a7c53b26da1b961b4ac36bf070ef1e0b94bb09f79d4f6c193"
@@ -606,9 +615,9 @@ dependencies = [
 name = "aws-smithy-types"
-version = "1.3.3"
+version = "1.3.4"
-checksum = "9f5b3a7486f6690ba25952cabf1e7d75e34d69eaff5081904a47bc79074d6457"
+checksum = "25f535879a207fce0db74b679cfc3e91a3159c8144d717d55f5832aea9eef46e"
@@ -2482,9 +2491,9 @@ dependencies = [
 name = "icu_collections"
-version = "2.0.0"
+version = "2.1.1"
-checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
+checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43"
@@ -2495,9 +2504,9 @@ dependencies = [
 name = "icu_locale_core"
-version = "2.0.0"
+version = "2.1.1"
-checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
+checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6"
@@ -2508,11 +2517,10 @@ dependencies = [
 name = "icu_normalizer"
-version = "2.0.0"
+version = "2.1.1"
-checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
+checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599"
 dependencies = [
- "displaydoc",
 "icu_collections",
@@ -2523,42 +2531,38 @@ dependencies = [
 name = "icu_normalizer_data"
-version = "2.0.0"
+version = "2.1.1"
-checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
+checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"

 name = "icu_properties"
-version = "2.0.1"
+version = "2.1.1"
-checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
+checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99"
 dependencies = [
- "displaydoc",
 "icu_collections",
 "icu_locale_core",
 "icu_properties_data",
 "icu_provider",
- "potential_utf",
 "zerotrie",
 "zerovec",
 ]

 name = "icu_properties_data"
-version = "2.0.1"
+version = "2.1.1"
-checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
+checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899"

 name = "icu_provider"
-version = "2.0.0"
+version = "2.1.1"
-checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
+checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614"
 dependencies = [
 "displaydoc",
 "icu_locale_core",
- "stable_deref_trait",
- "tinystr",
 "writeable",
 "yoke",
 "zerofrom",
@@ -2715,9 +2719,9 @@ dependencies = [
 name = "js-sys"
-version = "0.3.81"
+version = "0.3.82"
-checksum = "ec48937a97411dcb524a265206ccd4c90bb711fca92b2792c407f268825b9305"
+checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65"
@@ -2838,9 +2842,9 @@ checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
 name = "litemap"
-version = "0.8.0"
+version = "0.8.1"
-checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
+checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
@@ -3225,6 +3229,15 @@ dependencies = [
+[[package]]
+name = "object"
+version = "0.32.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
+dependencies = [
+ "memchr",
+]
@@ -3714,9 +3727,9 @@ checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
 name = "potential_utf"
-version = "0.1.3"
+version = "0.1.4"
-checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a"
+checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77"
@@ -3786,10 +3799,11 @@ checksum = "33cb294fe86a74cbcf50d4445b37da762029549ebeea341421c7c70370f86cac"
 name = "psm"
-version = "0.1.27"
+version = "0.1.28"
-checksum = "e66fcd288453b748497d8fb18bccc83a16b0518e3906d4b8df0a8d42d93dbb1c"
+checksum = "d11f2fedc3b7dafdc2851bc52f277377c5473d378859be234bc7ebb593144d01"
 dependencies = [
+ "ar_archive_writer",
 "cc",
 ]
@@ -4427,7 +4441,7 @@ dependencies = [
 "once_cell",
 "ring",
 "rustls-pki-types",
- "rustls-webpki 0.103.7",
+ "rustls-webpki 0.103.8",
 "subtle",
 "zeroize",
@@ -4475,9 +4489,9 @@ dependencies = [
 name = "rustls-webpki"
-version = "0.103.7"
+version = "0.103.8"
-checksum = "e10b3f4191e8a80e6b43eebabfac91e5dcecebb27a71f04e820c47ec41d314bf"
+checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52"
@@ -5218,9 +5232,9 @@ dependencies = [
 name = "tinystr"
-version = "0.8.1"
+version = "0.8.2"
-checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
+checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869"
@@ -5597,9 +5611,9 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
 name = "unicode-ident"
-version = "1.0.20"
+version = "1.0.22"
-checksum = "462eeb75aeb73aea900253ce739c8e18a67423fadf006037cd3ff27e82748a06"
+checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5"
@@ -5710,7 +5724,6 @@ dependencies = [
 "mini-moka",
 "num-derive",
 "num-traits",
- "once_cell",
 "opendal",
 "openidconnect",
 "openssl",
@@ -5800,9 +5813,9 @@ dependencies = [
 name = "wasm-bindgen"
-version = "0.2.104"
+version = "0.2.105"
-checksum = "c1da10c01ae9f1ae40cbfac0bac3b1e724b320abfcf52229f80b547c0d250e2d"
+checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60"
@@ -5811,25 +5824,11 @@ dependencies = [
 "wasm-bindgen-shared",
 ]

-[[package]]
-name = "wasm-bindgen-backend"
-version = "0.2.104"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "671c9a5a66f49d8a47345ab942e2cb93c7d1d0339065d4f8139c486121b43b19"
-dependencies = [
- "bumpalo",
- "log",
- "proc-macro2",
- "quote",
- "syn",
- "wasm-bindgen-shared",
-]
-
 [[package]]
 name = "wasm-bindgen-futures"
-version = "0.4.54"
+version = "0.4.55"
-checksum = "7e038d41e478cc73bae0ff9b36c60cff1c98b8f38f8d7e8061e79ee63608ac5c"
+checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0"
@@ -5840,9 +5839,9 @@ dependencies = [
 name = "wasm-bindgen-macro"
-version = "0.2.104"
+version = "0.2.105"
-checksum = "7ca60477e4c59f5f2986c50191cd972e3a50d8a95603bc9434501cf156a9a119"
+checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2"
@@ -5850,22 +5849,22 @@ dependencies = [
 name = "wasm-bindgen-macro-support"
-version = "0.2.104"
+version = "0.2.105"
-checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7"
+checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc"
 dependencies = [
+ "bumpalo",
 "proc-macro2",
 "quote",
 "syn",
- "wasm-bindgen-backend",
 "wasm-bindgen-shared",
 ]

 name = "wasm-bindgen-shared"
-version = "0.2.104"
+version = "0.2.105"
-checksum = "bad67dc8b2a1a6e5448428adec4c3e84c43e561d8c9ee8a9e5aabeb193ec41d1"
+checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76"
@@ -5885,9 +5884,9 @@ dependencies = [
 name = "web-sys"
-version = "0.3.81"
+version = "0.3.82"
-checksum = "9367c417a924a74cae129e6a2ae3b47fabb1f8995595ab474029da749a8be120"
+checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1"
@@ -6404,9 +6403,9 @@ checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
 name = "writeable"
-version = "0.6.1"
+version = "0.6.2"
-checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
+checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
@@ -6448,11 +6447,10 @@ dependencies = [
 name = "yoke"
-version = "0.8.0"
+version = "0.8.1"
-checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
+checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954"
 dependencies = [
- "serde",
 "stable_deref_trait",
 "yoke-derive",
 "zerofrom",
@@ -6460,9 +6458,9 @@ dependencies = [
 name = "yoke-derive"
-version = "0.8.0"
+version = "0.8.1"
-checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
+checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d"
@@ -6535,9 +6533,9 @@ checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
 name = "zerotrie"
-version = "0.2.2"
+version = "0.2.3"
-checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
+checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851"
@@ -6546,9 +6544,9 @@ dependencies = [
 name = "zerovec"
-version = "0.11.4"
+version = "0.11.5"
-checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b"
+checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002"
@@ -6557,9 +6555,9 @@ dependencies = [
 name = "zerovec-derive"
-version = "0.11.1"
+version = "0.11.2"
-checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
+checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3"
Cargo.toml: 77 changed lines

@@ -1,3 +1,10 @@
+[workspace.package]
+edition = "2021"
+rust-version = "1.89.0"
+license = "AGPL-3.0-only"
+repository = "https://github.com/dani-garcia/vaultwarden"
+publish = false
+
 [workspace]
 members = ["macros"]

@@ -5,15 +12,14 @@ members = ["macros"]
 name = "vaultwarden"
 version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]
-edition = "2021"
-rust-version = "1.87.0"
-resolver = "2"
-
-repository = "https://github.com/dani-garcia/vaultwarden"
 readme = "README.md"
-license = "AGPL-3.0-only"
-publish = false
 build = "build.rs"
+resolver = "2"
+repository.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+license.workspace = true
+publish.workspace = true

 [features]
 default = [
@@ -56,9 +62,6 @@ tracing = { version = "0.1.41", features = ["log"] } # Needed to have lettre and
 # A `dotenv` implementation for Rust
 dotenvy = { version = "0.15.7", default-features = false }

-# Lazy initialization
-once_cell = "1.21.3"
-
 # Numerical libraries
 num-traits = "0.2.19"
 num-derive = "0.4.2"
@@ -196,7 +199,7 @@ opendal = { version = "0.54.1", features = ["services-fs"], default-features = f
 anyhow = { version = "1.0.100", optional = true }
 aws-config = { version = "1.8.8", features = ["behavior-version-latest", "rt-tokio", "credentials-process", "sso"], default-features = false, optional = true }
 aws-credential-types = { version = "1.2.8", optional = true }
-aws-smithy-runtime-api = { version = "1.9.1", optional = true }
+aws-smithy-runtime-api = { version = "1.9.2", optional = true }
 http = { version = "1.3.1", optional = true }
 reqsign = { version = "0.16.5", optional = true }

@@ -207,23 +210,13 @@ reqsign = { version = "0.16.5", optional = true }
 strip = "debuginfo"
 lto = "fat"
 codegen-units = 1
+debug = false

-# A little bit of a speedup
-[profile.dev]
-split-debuginfo = "unpacked"
-
-# Always build argon2 using opt-level 3
-# This is a huge speed improvement during testing
-[profile.dev.package.argon2]
-opt-level = 3
-
 # Optimize for size
 [profile.release-micro]
 inherits = "release"
-opt-level = "z"
 strip = "symbols"
-lto = "fat"
-codegen-units = 1
+opt-level = "z"
 panic = "abort"

 # Profile for systems with low resources
@@ -234,6 +227,32 @@ strip = "symbols"
 lto = "thin"
 codegen-units = 16

+# Used for profiling and debugging like valgrind or heaptrack
+# Inherits release to be sure all optimizations have been done
+[profile.dbg]
+inherits = "release"
+strip = "none"
+split-debuginfo = "off"
+debug = "full"
+
+# A little bit of a speedup for generic building
+[profile.dev]
+split-debuginfo = "unpacked"
+debug = "line-tables-only"
+
+# Used for CI builds to improve compile time
+[profile.ci]
+inherits = "dev"
+debug = false
+debug-assertions = false
+strip = "symbols"
+panic = "abort"
+
+# Always build argon2 using opt-level 3
+# This is a huge speed improvement during testing
+[profile.dev.package.argon2]
+opt-level = 3
+
 # Linting config
 # https://doc.rust-lang.org/rustc/lints/groups.html
 [workspace.lints.rust]
@@ -243,15 +262,16 @@ non_ascii_idents = "forbid"

 # Deny
 deprecated_in_future = "deny"
+deprecated_safe = { level = "deny", priority = -1 }
 future_incompatible = { level = "deny", priority = -1 }
 keyword_idents = { level = "deny", priority = -1 }
 let_underscore = { level = "deny", priority = -1 }
+nonstandard_style = { level = "deny", priority = -1 }
 noop_method_call = "deny"
 refining_impl_trait = { level = "deny", priority = -1 }
 rust_2018_idioms = { level = "deny", priority = -1 }
 rust_2021_compatibility = { level = "deny", priority = -1 }
 rust_2024_compatibility = { level = "deny", priority = -1 }
-edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macro's should be validated again
 single_use_lifetimes = "deny"
 trivial_casts = "deny"
 trivial_numeric_casts = "deny"
@@ -261,7 +281,8 @@ unused_lifetimes = "deny"
 unused_qualifications = "deny"
 variant_size_differences = "deny"
 # Allow the following lints since these cause issues with Rust v1.84.0 or newer
-# Building Vaultwarden with Rust v1.85.0 and edition 2024 also works without issues
+# Building Vaultwarden with Rust v1.85.0 with edition 2024 also works without issues
+edition_2024_expr_fragment_specifier = "allow" # Once changed to Rust 2024 this should be removed and macro's should be validated again
 if_let_rescope = "allow"
 tail_expr_drop_order = "allow"

@@ -275,10 +296,12 @@ todo = "warn"
 result_large_err = "allow"

 # Deny
+branches_sharing_code = "deny"
 case_sensitive_file_extension_comparisons = "deny"
 cast_lossless = "deny"
 clone_on_ref_ptr = "deny"
 equatable_if_let = "deny"
+excessive_precision = "deny"
 filter_map_next = "deny"
 float_cmp_const = "deny"
 implicit_clone = "deny"
@@ -292,15 +315,19 @@ manual_instant_elapsed = "deny"
 manual_string_new = "deny"
 match_wildcard_for_single_variants = "deny"
 mem_forget = "deny"
+needless_borrow = "deny"
+needless_collect = "deny"
 needless_continue = "deny"
 needless_lifetimes = "deny"
 option_option = "deny"
+redundant_clone = "deny"
 string_add_assign = "deny"
 unnecessary_join = "deny"
 unnecessary_self_imports = "deny"
 unnested_or_patterns = "deny"
 unused_async = "deny"
 unused_self = "deny"
+useless_let_if_seq = "deny"
 verbose_file_reads = "deny"
 zero_sized_map_values = "deny"

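Several of the Clippy lints newly denied above, for example `redundant_clone` and `needless_borrow`, target exactly the extra `clone()` calls and borrows the PR description mentions. An illustrative sketch of what these lints flag (not code from this repository):

```rust
fn print_len(s: &str) {
    println!("{}", s.len());
}

fn main() {
    let name = String::from("vaultwarden");

    // clippy::redundant_clone: `name` is never used again after this point,
    // so cloning it is wasted work; the value could simply be moved.
    let copy = name.clone();

    // clippy::needless_borrow: `&copy` already coerces to `&str`; the extra `&`
    // only creates a reference that is immediately dereferenced again.
    print_len(&&copy);
}
```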
@@ -1,11 +1,11 @@
 ---
-vault_version: "v2025.9.1"
-vault_image_digest: "sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4"
-# Cross Compile Docker Helper Scripts v1.6.1
+vault_version: "v2025.10.1"
+vault_image_digest: "sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa"
+# Cross Compile Docker Helper Scripts v1.8.0
 # We use the linux/amd64 platform shell scripts since there is no difference between the different platform scripts
 # https://github.com/tonistiigi/xx | https://hub.docker.com/r/tonistiigi/xx/tags
-xx_image_digest: "sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894"
-rust_version: 1.89.0 # Rust version to be used
+xx_image_digest: "sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6"
+rust_version: 1.91.0 # Rust version to be used
 debian_version: trixie # Debian release name to be used
 alpine_version: "3.22" # Alpine version to be used
 # For which platforms/architectures will we try to build images
@@ -19,23 +19,23 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 # click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-# $ docker pull docker.io/vaultwarden/web-vault:v2025.9.1
+# $ docker pull docker.io/vaultwarden/web-vault:v2025.10.1
-# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.9.1
+# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.10.1
-# [docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4]
+# [docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa]
 #
 # - Conversely, to get the tag name from the digest:
-# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4
+# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa
-# [docker.io/vaultwarden/web-vault:v2025.9.1]
+# [docker.io/vaultwarden/web-vault:v2025.10.1]
 #
-FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4 AS vault
+FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault

 ########################## ALPINE BUILD IMAGES ##########################
 ## NOTE: The Alpine Base Images do not support other platforms then linux/amd64
 ## And for Alpine we define all build images here, they will only be loaded when actually used
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.89.0 AS build_amd64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:x86_64-musl-stable-1.91.0 AS build_amd64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.89.0 AS build_arm64
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:aarch64-musl-stable-1.91.0 AS build_arm64
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.89.0 AS build_armv7
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:armv7-musleabihf-stable-1.91.0 AS build_armv7
-FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.89.0 AS build_armv6
+FROM --platform=linux/amd64 ghcr.io/blackdex/rust-musl:arm-musleabi-stable-1.91.0 AS build_armv6

 ########################## BUILD IMAGE ##########################
 # hadolint ignore=DL3006
@@ -19,24 +19,24 @@
 # - From https://hub.docker.com/r/vaultwarden/web-vault/tags,
 # click the tag name to view the digest of the image it currently points to.
 # - From the command line:
-# $ docker pull docker.io/vaultwarden/web-vault:v2025.9.1
+# $ docker pull docker.io/vaultwarden/web-vault:v2025.10.1
-# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.9.1
+# $ docker image inspect --format "{{.RepoDigests}}" docker.io/vaultwarden/web-vault:v2025.10.1
-# [docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4]
+# [docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa]
 #
 # - Conversely, to get the tag name from the digest:
-# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4
+# $ docker image inspect --format "{{.RepoTags}}" docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa
-# [docker.io/vaultwarden/web-vault:v2025.9.1]
+# [docker.io/vaultwarden/web-vault:v2025.10.1]
 #
-FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:15a126ca967cd2efc4c9625fec49f0b972a3f7d7d81d7770bb0a2502d5e4b8a4 AS vault
+FROM --platform=linux/amd64 docker.io/vaultwarden/web-vault@sha256:50662dccf4908ac2128cd44981c52fcb4e3e8dd56f21823c8d5e91267ff741fa AS vault

 ########################## Cross Compile Docker Helper Scripts ##########################
 ## We use the linux/amd64 no matter which Build Platform, since these are all bash scripts
 ## And these bash scripts do not have any significant difference if at all
-FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:9c207bead753dda9430bdd15425c6518fc7a03d866103c516a2c6889188f5894 AS xx
+FROM --platform=linux/amd64 docker.io/tonistiigi/xx@sha256:add602d55daca18914838a78221f6bbe4284114b452c86a48f96d59aeb00f5c6 AS xx

 ########################## BUILD IMAGE ##########################
 # hadolint ignore=DL3006
-FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.89.0-slim-trixie AS build
+FROM --platform=$BUILDPLATFORM docker.io/library/rust:1.91.0-slim-trixie AS build
 COPY --from=xx / /
 ARG TARGETARCH
 ARG TARGETVARIANT
@@ -116,7 +116,7 @@ docker/bake.sh
 ```

 You can append both `alpine` and `debian` with `-amd64`, `-arm64`, `-armv7` or `-armv6`, which will trigger a build for that specific platform.<br>
-This will also append those values to the tag so you can see the builded container when running `docker images`.
+This will also append those values to the tag so you can see the built container when running `docker images`.

 You can also append extra arguments after the target if you want. This can be useful for example to print what bake will use.
 ```bash
@@ -162,7 +162,7 @@ You can append extra arguments after the target if you want. This can be useful

 For the podman builds you can, just like the `bake.sh` script, also append the architecture to build for that specific platform.<br>

-### Testing podman builded images
+### Testing podman built images

 The command to start a podman built container is almost the same as for the docker/bake built containers. The images start with `localhost/`, so you need to prepend that.
@@ -1,7 +1,11 @@
 [package]
 name = "macros"
 version = "0.1.0"
-edition = "2021"
+repository.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+license.workspace = true
+publish.workspace = true

 [lib]
 name = "macros"
@@ -1,4 +1,4 @@
 [toolchain]
-channel = "1.89.0"
+channel = "1.91.0"
 components = [ "rustfmt", "clippy" ]
 profile = "minimal"
@@ -1,17 +1,16 @@
-use once_cell::sync::Lazy;
+use std::{env, sync::LazyLock};
-use reqwest::Method;
-use serde::de::DeserializeOwned;
-use serde_json::Value;
-use std::env;

-use rocket::serde::json::Json;
+use reqwest::Method;
 use rocket::{
 form::Form,
 http::{Cookie, CookieJar, MediaType, SameSite, Status},
 request::{FromRequest, Outcome, Request},
 response::{content::RawHtml as Html, Redirect},
+serde::json::Json,
 Catcher, Route,
 };
+use serde::de::DeserializeOwned;
+use serde_json::Value;

 use crate::{
 api::{
@@ -82,7 +81,7 @@ pub fn catchers() -> Vec<Catcher> {
 }
 }

-static DB_TYPE: Lazy<&str> = Lazy::new(|| match ACTIVE_DB_TYPE.get() {
+static DB_TYPE: LazyLock<&str> = LazyLock::new(|| match ACTIVE_DB_TYPE.get() {
 #[cfg(mysql)]
 Some(DbConnType::Mysql) => "MySQL",
 #[cfg(postgresql)]
@@ -93,9 +92,10 @@ static DB_TYPE: Lazy<&str> = Lazy::new(|| match ACTIVE_DB_TYPE.get() {
 });

 #[cfg(sqlite)]
-static CAN_BACKUP: Lazy<bool> = Lazy::new(|| ACTIVE_DB_TYPE.get().map(|t| *t == DbConnType::Sqlite).unwrap_or(false));
+static CAN_BACKUP: LazyLock<bool> =
+LazyLock::new(|| ACTIVE_DB_TYPE.get().map(|t| *t == DbConnType::Sqlite).unwrap_or(false));
 #[cfg(not(sqlite))]
-static CAN_BACKUP: Lazy<bool> = Lazy::new(|| false);
+static CAN_BACKUP: LazyLock<bool> = LazyLock::new(|| false);

 #[get("/")]
 fn admin_disabled() -> &'static str {
@@ -157,10 +157,10 @@ fn admin_login(request: &Request<'_>) -> ApiResult<Html<String>> {
 err_code!("Authorization failed.", Status::Unauthorized.code);
 }
 let redirect = request.segments::<std::path::PathBuf>(0..).unwrap_or_default().display().to_string();
-render_admin_login(None, Some(redirect))
+render_admin_login(None, Some(&redirect))
 }

-fn render_admin_login(msg: Option<&str>, redirect: Option<String>) -> ApiResult<Html<String>> {
+fn render_admin_login(msg: Option<&str>, redirect: Option<&str>) -> ApiResult<Html<String>> {
 // If there is an error, show it
 let msg = msg.map(|msg| format!("Error: {msg}"));
 let json = json!({
@@ -194,14 +194,17 @@ fn post_admin_login(
 if crate::ratelimit::check_limit_admin(&ip.ip).is_err() {
 return Err(AdminResponse::TooManyRequests(render_admin_login(
 Some("Too many requests, try again later."),
-redirect,
+redirect.as_deref(),
 )));
 }

 // If the token is invalid, redirect to login page
 if !_validate_token(&data.token) {
 error!("Invalid admin token. IP: {}", ip.ip);
-Err(AdminResponse::Unauthorized(render_admin_login(Some("Invalid admin token, please try again."), redirect)))
+Err(AdminResponse::Unauthorized(render_admin_login(
+Some("Invalid admin token, please try again."),
+redirect.as_deref(),
+)))
 } else {
 // If the token received is valid, generate JWT and save it as a cookie
 let claims = generate_admin_claims();
@@ -308,7 +311,7 @@ async fn invite_user(data: Json<InviteData>, _token: AdminToken, conn: DbConn) -
 err_code!("User already exists", Status::Conflict.code)
 }

-let mut user = User::new(data.email, None);
+let mut user = User::new(&data.email, None);

 async fn _generate_invite(user: &User, conn: &DbConn) -> EmptyResult {
 if CONFIG.mail_enabled() {
@@ -825,11 +828,7 @@ impl<'r> FromRequest<'r> for AdminToken {
 _ => err_handler!("Error getting Client IP"),
 };

-if CONFIG.disable_admin_token() {
+if !CONFIG.disable_admin_token() {
-Outcome::Success(Self {
-ip,
-})
-} else {
 let cookies = request.cookies();

 let access_token = match cookies.get(COOKIE_NAME) {
@@ -853,10 +852,10 @@ impl<'r> FromRequest<'r> for AdminToken {
 error!("Invalid or expired admin JWT. IP: {}.", &ip.ip);
 return Outcome::Error((Status::Unauthorized, "Session expired"));
 }
+}

 Outcome::Success(Self {
 ip,
 })
 }
 }
-}
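A pattern that repeats through this and the following hunks is replacing `once_cell::sync::Lazy` with the standard library's `std::sync::LazyLock` (stable since Rust 1.80), which drops a dependency without changing behaviour. A minimal, self-contained sketch of the swap; `detect_db_type` is a made-up helper, not a Vaultwarden function:

```rust
use std::sync::LazyLock;

// Before (once_cell):
//     use once_cell::sync::Lazy;
//     static DB_TYPE: Lazy<&str> = Lazy::new(|| detect_db_type());
// After (std):
static DB_TYPE: LazyLock<&'static str> = LazyLock::new(|| detect_db_type());

// Hypothetical helper so the sketch compiles on its own.
fn detect_db_type() -> &'static str {
    "SQLite"
}

fn main() {
    // The closure runs once on first access; later reads reuse the cached value.
    println!("database backend: {}", *DB_TYPE);
}
```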
@@ -285,7 +285,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn:
 || CONFIG.is_signup_allowed(&email)
 || pending_emergency_access.is_some()
 {
-User::new(email.clone(), None)
+User::new(&email, None)
 } else {
 err!("Registration not allowed or user already exists")
 }
@@ -295,7 +295,7 @@ pub async fn _register(data: Json<RegisterData>, email_verification: bool, conn:
 // Make sure we don't leave a lingering invitation.
 Invitation::take(&email, &conn).await;

-set_kdf_data(&mut user, data.kdf)?;
+set_kdf_data(&mut user, &data.kdf)?;

 user.set_password(&data.master_password_hash, Some(data.key), true, None);
 user.password_hint = password_hint;
@@ -358,7 +358,7 @@ async fn post_set_password(data: Json<SetPasswordData>, headers: Headers, conn:
 let password_hint = clean_password_hint(&data.master_password_hint);
 enforce_password_hint_setting(&password_hint)?;

-set_kdf_data(&mut user, data.kdf)?;
+set_kdf_data(&mut user, &data.kdf)?;

 user.set_password(
 &data.master_password_hash,
@@ -556,7 +556,7 @@ struct ChangeKdfData {
 key: String,
 }

-fn set_kdf_data(user: &mut User, data: KDFData) -> EmptyResult {
+fn set_kdf_data(user: &mut User, data: &KDFData) -> EmptyResult {
 if data.kdf == UserKdfType::Pbkdf2 as i32 && data.kdf_iterations < 100_000 {
 err!("PBKDF2 KDF iterations must be at least 100000.")
 }
@@ -600,7 +600,7 @@ async fn post_kdf(data: Json<ChangeKdfData>, headers: Headers, conn: DbConn, nt:
 err!("Invalid password")
 }

-set_kdf_data(&mut user, data.kdf)?;
+set_kdf_data(&mut user, &data.kdf)?;

 user.set_password(&data.new_master_password_hash, Some(data.key), true, None);
 let save_result = user.save(&conn).await;
@@ -1279,10 +1279,11 @@ async fn rotate_api_key(data: Json<PasswordOrOtpData>, headers: Headers, conn: D

 #[get("/devices/knowndevice")]
 async fn get_known_device(device: KnownDevice, conn: DbConn) -> JsonResult {
-let mut result = false;
+let result = if let Some(user) = User::find_by_mail(&device.email, &conn).await {
-if let Some(user) = User::find_by_mail(&device.email, &conn).await {
+Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &conn).await.is_some()
-result = Device::find_by_uuid_and_user(&device.uuid, &user.uuid, &conn).await.is_some();
+} else {
-}
+false
+};
 Ok(Json(json!(result)))
 }

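The `get_known_device` hunk above replaces a mutable `result` flag with a single `if let … else` expression, which is the shape the newly denied `useless_let_if_seq` lint pushes toward. A small stand-alone sketch of the same rewrite, with a closure standing in for the database lookups:

```rust
// Sketch only: `lookup` stands in for the User/Device queries in the real code.
fn known_device(user_id: Option<&str>, lookup: impl Fn(&str) -> bool) -> bool {
    // Before: `let mut result = false;` followed by an `if let` that mutates it.
    // After: bind the whole conditional expression directly.
    if let Some(id) = user_id {
        lookup(id)
    } else {
        false
    }
}

fn main() {
    let lookup = |id: &str| id == "user-1";
    assert!(known_device(Some("user-1"), &lookup));
    assert!(!known_device(None, &lookup));
    println!("ok");
}
```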
@@ -1269,7 +1269,7 @@ async fn save_attachment(
 attachment.save(&conn).await.expect("Error saving attachment");
 }

-save_temp_file(PathType::Attachments, &format!("{cipher_id}/{file_id}"), data.data, true).await?;
+save_temp_file(&PathType::Attachments, &format!("{cipher_id}/{file_id}"), data.data, true).await?;

 nt.send_cipher_update(
 UpdateType::SyncCipherUpdate,
@@ -245,7 +245,7 @@ async fn send_invite(data: Json<EmergencyAccessInviteData>, headers: Headers, co
 invitation.save(&conn).await?;
 }

-let mut user = User::new(email.clone(), None);
+let mut user = User::new(&email, None);
 user.save(&conn).await?;
 (user, true)
 }
@@ -202,7 +202,7 @@ async fn create_organization(headers: Headers, data: Json<OrgData>, conn: DbConn
 (None, None)
 };

-let org = Organization::new(data.name, data.billing_email, private_key, public_key);
+let org = Organization::new(data.name, &data.billing_email, private_key, public_key);
 let mut member = Membership::new(headers.user.uuid, org.uuid.clone(), None);
 let collection = Collection::new(org.uuid.clone(), data.collection_name, None);

@@ -1124,7 +1124,7 @@ async fn send_invite(
 Invitation::new(email).save(&conn).await?;
 }

-let mut new_user = User::new(email.clone(), None);
+let mut new_user = User::new(email, None);
 new_user.save(&conn).await?;
 user_created = true;
 new_user
@@ -1591,7 +1591,7 @@ async fn edit_member(
 // HACK: We need the raw user-type to be sure custom role is selected to determine the access_all permission
 // The from_str() will convert the custom role type into a manager role type
 let raw_type = &data.r#type.into_string();
-// MembershipTyp::from_str will convert custom (4) to manager (3)
+// MembershipType::from_str will convert custom (4) to manager (3)
 let Some(new_type) = MembershipType::from_str(raw_type) else {
 err!("Invalid type")
 };
@@ -94,7 +94,7 @@ async fn ldap_import(data: Json<OrgImportData>, token: PublicToken, conn: DbConn
 Some(user) => user, // exists in vaultwarden
 None => {
 // User does not exist yet
-let mut new_user = User::new(user_data.email.clone(), None);
+let mut new_user = User::new(&user_data.email, None);
 new_user.save(&conn).await?;

 if !CONFIG.mail_enabled() {
@@ -1,13 +1,12 @@
-use std::path::Path;
+use std::{path::Path, sync::LazyLock, time::Duration};
-use std::time::Duration;

 use chrono::{DateTime, TimeDelta, Utc};
 use num_traits::ToPrimitive;
-use once_cell::sync::Lazy;
+use rocket::{
-use rocket::form::Form;
+form::Form,
-use rocket::fs::NamedFile;
+fs::{NamedFile, TempFile},
-use rocket::fs::TempFile;
+serde::json::Json,
-use rocket::serde::json::Json;
+};
 use serde_json::Value;

 use crate::{
@@ -23,7 +22,7 @@ use crate::{
 };

 const SEND_INACCESSIBLE_MSG: &str = "Send does not exist or is no longer available";
-static ANON_PUSH_DEVICE: Lazy<Device> = Lazy::new(|| {
+static ANON_PUSH_DEVICE: LazyLock<Device> = LazyLock::new(|| {
 let dt = crate::util::parse_date("1970-01-01T00:00:00.000000Z");
 Device {
 uuid: String::from("00000000-0000-0000-0000-000000000000").into(),
@@ -274,7 +273,7 @@ async fn post_send_file(data: Form<UploadData<'_>>, headers: Headers, conn: DbCo

 let file_id = crate::crypto::generate_send_file_id();

-save_temp_file(PathType::Sends, &format!("{}/{file_id}", send.uuid), data, true).await?;
+save_temp_file(&PathType::Sends, &format!("{}/{file_id}", send.uuid), data, true).await?;

 let mut data_value: Value = serde_json::from_str(&send.data)?;
 if let Some(o) = data_value.as_object_mut() {
@@ -426,7 +425,7 @@ async fn post_send_file_v2_data(

 let file_path = format!("{send_id}/{file_id}");

-save_temp_file(PathType::Sends, &file_path, data.data, false).await?;
+save_temp_file(&PathType::Sends, &file_path, data.data, false).await?;

 nt.send_send_update(
 UpdateType::SyncSendCreate,
@@ -567,7 +566,7 @@ async fn post_access_file(
 }

 async fn download_url(host: &Host, send_id: &SendId, file_id: &SendFileId) -> Result<String, crate::Error> {
-let operator = CONFIG.opendal_operator_for_path_type(PathType::Sends)?;
+let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?;

 if operator.info().scheme() == opendal::Scheme::Fs {
 let token_claims = crate::auth::generate_send_claims(send_id, file_id);
@@ -31,7 +31,7 @@ async fn generate_authenticator(data: Json<PasswordOrOtpData>, headers: Headers,

 let (enabled, key) = match twofactor {
 Some(tf) => (true, tf.data),
-_ => (false, crypto::encode_random_bytes::<20>(BASE32)),
+_ => (false, crypto::encode_random_bytes::<20>(&BASE32)),
 };

 // Upstream seems to also return `userVerificationToken`, but doesn't seem to be used at all.
@@ -126,7 +126,7 @@ async fn recover(data: Json<RecoverTwoFactor>, client_headers: ClientHeaders, co

 async fn _generate_recover_code(user: &mut User, conn: &DbConn) {
 if user.totp_recover.is_none() {
-let totp_recover = crypto::encode_random_bytes::<20>(BASE32);
+let totp_recover = crypto::encode_random_bytes::<20>(&BASE32);
 user.totp_recover = Some(totp_recover);
 user.save(conn).await.ok();
 }
@@ -1,13 +1,13 @@
 use std::{
 collections::HashMap,
 net::IpAddr,
-sync::Arc,
+sync::{Arc, LazyLock},
 time::{Duration, SystemTime},
 };

 use bytes::{Bytes, BytesMut};
 use futures::{stream::StreamExt, TryFutureExt};
-use once_cell::sync::Lazy;
+use html5gum::{Emitter, HtmlString, Readable, StringReader, Tokenizer};
 use regex::Regex;
 use reqwest::{
 header::{self, HeaderMap, HeaderValue},
@@ -16,8 +16,6 @@ use reqwest::{
 use rocket::{http::ContentType, response::Redirect, Route};
 use svg_hush::{data_url_filter, Filter};

-use html5gum::{Emitter, HtmlString, Readable, StringReader, Tokenizer};
-
 use crate::{
 config::PathType,
 error::Error,
@@ -33,7 +31,7 @@ pub fn routes() -> Vec<Route> {
 }
 }

-static CLIENT: Lazy<Client> = Lazy::new(|| {
+static CLIENT: LazyLock<Client> = LazyLock::new(|| {
 // Generate the default headers
 let mut default_headers = HeaderMap::new();
 default_headers.insert(
@@ -78,7 +76,7 @@ static CLIENT: Lazy<Client> = Lazy::new(|| {
 });

 // Build Regex only once since this takes a lot of time.
-static ICON_SIZE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());
+static ICON_SIZE_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"(?x)(\d+)\D*(\d+)").unwrap());

 // The function name `icon_external` is checked in the `on_response` function in `AppHeaders`
 // It is used to prevent sending a specific header which breaks icon downloads.
@@ -220,7 +218,7 @@ async fn get_cached_icon(path: &str) -> Option<Vec<u8>> {
 }

 // Try to read the cached icon, and return it if it exists
-if let Ok(operator) = CONFIG.opendal_operator_for_path_type(PathType::IconCache) {
+if let Ok(operator) = CONFIG.opendal_operator_for_path_type(&PathType::IconCache) {
 if let Ok(buf) = operator.read(path).await {
 return Some(buf.to_vec());
 }
@@ -230,7 +228,7 @@ async fn get_cached_icon(path: &str) -> Option<Vec<u8>> {
 }

 async fn file_is_expired(path: &str, ttl: u64) -> Result<bool, Error> {
-let operator = CONFIG.opendal_operator_for_path_type(PathType::IconCache)?;
+let operator = CONFIG.opendal_operator_for_path_type(&PathType::IconCache)?;
 let meta = operator.stat(path).await?;
 let modified =
 meta.last_modified().ok_or_else(|| std::io::Error::other(format!("No last modified time for `{path}`")))?;
@@ -246,7 +244,7 @@ async fn icon_is_negcached(path: &str) -> bool {
 match expired {
 // No longer negatively cached, drop the marker
 Ok(true) => {
-match CONFIG.opendal_operator_for_path_type(PathType::IconCache) {
+match CONFIG.opendal_operator_for_path_type(&PathType::IconCache) {
 Ok(operator) => {
 if let Err(e) = operator.delete(&miss_indicator).await {
 error!("Could not remove negative cache indicator for icon {path:?}: {e:?}");
@@ -462,8 +460,8 @@ async fn get_page_with_referer(url: &str, referer: &str) -> Result<Response, Err
 /// priority2 = get_icon_priority("https://example.com/path/to/a/favicon.ico", "");
 /// ```
 fn get_icon_priority(href: &str, sizes: &str) -> u8 {
-static PRIORITY_MAP: Lazy<HashMap<&'static str, u8>> =
+static PRIORITY_MAP: LazyLock<HashMap<&'static str, u8>> =
-Lazy::new(|| [(".png", 10), (".jpg", 20), (".jpeg", 20)].into_iter().collect());
+LazyLock::new(|| [(".png", 10), (".jpg", 20), (".jpeg", 20)].into_iter().collect());

 // Check if there is a dimension set
 let (width, height) = parse_sizes(sizes);
@@ -597,7 +595,7 @@ async fn download_icon(domain: &str) -> Result<(Bytes, Option<&str>), Error> {
 }

 async fn save_icon(path: &str, icon: Vec<u8>) {
-let operator = match CONFIG.opendal_operator_for_path_type(PathType::IconCache) {
+let operator = match CONFIG.opendal_operator_for_path_type(&PathType::IconCache) {
 Ok(operator) => operator,
 Err(e) => {
 warn!("Failed to get OpenDAL operator while saving icon: {e}");
@@ -248,7 +248,7 @@ async fn _sso_login(
 _ => (),
 }

-let mut user = User::new(user_infos.email, user_infos.user_name);
+let mut user = User::new(&user_infos.email, user_infos.user_name);
 user.verified_at = Some(now);
 user.save(conn).await?;

@@ -1061,7 +1061,7 @@ async fn oidcsignin_redirect(
 wrapper: impl FnOnce(OIDCState) -> sso::OIDCCodeWrapper,
 conn: &DbConn,
 ) -> ApiResult<Redirect> {
-let state = sso::decode_state(base64_state)?;
+let state = sso::decode_state(&base64_state)?;
 let code = sso::encode_code_claims(wrapper(state.clone()));

 let nonce = match SsoNonce::find(&state, conn).await {
@@ -1,11 +1,14 @@
-use std::{net::IpAddr, sync::Arc, time::Duration};
+use std::{
+net::IpAddr,
+sync::{Arc, LazyLock},
+time::Duration,
+};

 use chrono::{NaiveDateTime, Utc};
 use rmpv::Value;
 use rocket::{futures::StreamExt, Route};
-use tokio::sync::mpsc::Sender;

 use rocket_ws::{Message, WebSocket};
+use tokio::sync::mpsc::Sender;

 use crate::{
 auth::{ClientIp, WsAccessTokenHeader},
@@ -16,15 +19,13 @@ use crate::{
 Error, CONFIG,
 };

-use once_cell::sync::Lazy;
+pub static WS_USERS: LazyLock<Arc<WebSocketUsers>> = LazyLock::new(|| {
-
-pub static WS_USERS: Lazy<Arc<WebSocketUsers>> = Lazy::new(|| {
 Arc::new(WebSocketUsers {
 map: Arc::new(dashmap::DashMap::new()),
 })
 });

-pub static WS_ANONYMOUS_SUBSCRIPTIONS: Lazy<Arc<AnonymousWebSocketSubscriptions>> = Lazy::new(|| {
+pub static WS_ANONYMOUS_SUBSCRIPTIONS: LazyLock<Arc<AnonymousWebSocketSubscriptions>> = LazyLock::new(|| {
 Arc::new(AnonymousWebSocketSubscriptions {
 map: Arc::new(dashmap::DashMap::new()),
 })
@@ -35,7 +36,7 @@ use super::{
 push_send_update, push_user_update,
 };

-static NOTIFICATIONS_DISABLED: Lazy<bool> = Lazy::new(|| !CONFIG.enable_websocket() && !CONFIG.push_enabled());
+static NOTIFICATIONS_DISABLED: LazyLock<bool> = LazyLock::new(|| !CONFIG.enable_websocket() && !CONFIG.push_enabled());

 pub fn routes() -> Vec<Route> {
 if CONFIG.enable_websocket() {
@@ -109,8 +110,7 @@ fn websockets_hub<'r>(
 ip: ClientIp,
 header_token: WsAccessTokenHeader,
 ) -> Result<rocket_ws::Stream!['r], Error> {
-let addr = ip.ip;
+info!("Accepting Rocket WS connection from {}", ip.ip);
-info!("Accepting Rocket WS connection from {addr}");

 let token = if let Some(token) = data.access_token {
 token
@@ -133,7 +133,7 @@ fn websockets_hub<'r>(
 users.map.entry(claims.sub.to_string()).or_default().push((entry_uuid, tx));

 // Once the guard goes out of scope, the connection will have been closed and the entry will be deleted from the map
-(rx, WSEntryMapGuard::new(users, claims.sub, entry_uuid, addr))
+(rx, WSEntryMapGuard::new(users, claims.sub, entry_uuid, ip.ip))
 };

 Ok({
@@ -189,8 +189,7 @@ fn websockets_hub<'r>(
 #[allow(tail_expr_drop_order)]
 #[get("/anonymous-hub?<token..>")]
 fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> Result<rocket_ws::Stream!['r], Error> {
-let addr = ip.ip;
+info!("Accepting Anonymous Rocket WS connection from {}", ip.ip);
-info!("Accepting Anonymous Rocket WS connection from {addr}");

 let (mut rx, guard) = {
 let subscriptions = Arc::clone(&WS_ANONYMOUS_SUBSCRIPTIONS);
@@ -200,7 +199,7 @@ fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> R
 subscriptions.map.insert(token.clone(), tx);

 // Once the guard goes out of scope, the connection will have been closed and the entry will be deleted from the map
-(rx, WSAnonymousEntryMapGuard::new(subscriptions, token, addr))
+(rx, WSAnonymousEntryMapGuard::new(subscriptions, token, ip.ip))
 };

 Ok({
@@ -257,11 +256,11 @@ fn anonymous_websockets_hub<'r>(ws: WebSocket, token: String, ip: ClientIp) -> R
 // Websockets server
 //

-fn serialize(val: Value) -> Vec<u8> {
+fn serialize(val: &Value) -> Vec<u8> {
 use rmpv::encode::write_value;

 let mut buf = Vec::new();
-write_value(&mut buf, &val).expect("Error encoding MsgPack");
+write_value(&mut buf, val).expect("Error encoding MsgPack");

 // Add size bytes at the start
 // Extracted from BinaryMessageFormat.js
@@ -552,7 +551,7 @@ impl AnonymousWebSocketSubscriptions {
 let data = create_anonymous_update(
 vec![("Id".into(), auth_request_id.to_string().into()), ("UserId".into(), user_id.to_string().into())],
 UpdateType::AuthRequestResponse,
-user_id.clone(),
+user_id,
 );
 self.send_update(auth_request_id, &data).await;
 }
@@ -588,16 +587,19 @@ fn create_update(payload: Vec<(Value, Value)>, ut: UpdateType, acting_device_id:
 ])]),
 ]);

-serialize(value)
+serialize(&value)
 }

-fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType, user_id: UserId) -> Vec<u8> {
+fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType, user_id: &UserId) -> Vec<u8> {
 use rmpv::Value as V;

 let value = V::Array(vec![
 1.into(),
 V::Map(vec![]),
 V::Nil,
+// This word is misspelled, but upstream has this too
+// https://github.com/bitwarden/server/blob/dff9f1cf538198819911cf2c20f8cda3307701c5/src/Notifications/HubHelpers.cs#L86
+// https://github.com/bitwarden/clients/blob/9612a4ac45063e372a6fbe87eb253c7cb3c588fb/libs/common/src/auth/services/anonymous-hub.service.ts#L45
 "AuthRequestResponseRecieved".into(),
 V::Array(vec![V::Map(vec![
 ("Type".into(), (ut as i32).into()),
@@ -606,11 +608,11 @@ fn create_anonymous_update(payload: Vec<(Value, Value)>, ut: UpdateType, user_id
 ])]),
 ]);

-serialize(value)
+serialize(&value)
 }

 fn create_ping() -> Vec<u8> {
-serialize(Value::Array(vec![6.into()]))
+serialize(&Value::Array(vec![6.into()]))
 }

 // https://github.com/bitwarden/server/blob/375af7c43b10d9da03525d41452f95de3f921541/src/Core/Enums/PushType.cs
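The `serialize` change above (taking `&Value` instead of `Value`) is the same move-to-borrow refactor applied elsewhere in the commit to cut `clone()` calls: callers that still need the value afterwards no longer have to clone it just to pass it in. A trivial sketch of the idea using `&str` instead of `rmpv::Value`:

```rust
// Sketch only; not the actual MessagePack encoder.
fn encode(val: &str) -> Vec<u8> {
    val.as_bytes().to_vec()
}

fn main() {
    let value = String::from("payload");
    // Borrowing lets the caller reuse `value` without cloning it for each call.
    let first = encode(&value);
    let second = encode(&value);
    assert_eq!(first, second);
    println!("{value} encoded twice without cloning");
}
```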
@@ -1,3 +1,8 @@
+use std::{
+sync::LazyLock,
+time::{Duration, Instant},
+};
+
 use reqwest::{
 header::{ACCEPT, AUTHORIZATION, CONTENT_TYPE},
 Method,
@@ -16,9 +21,6 @@ use crate::{
 CONFIG,
 };

-use once_cell::sync::Lazy;
-use std::time::{Duration, Instant};
-
 #[derive(Deserialize)]
 struct AuthPushToken {
 access_token: String,
@@ -32,7 +34,7 @@ struct LocalAuthPushToken {
 }

 async fn get_auth_api_token() -> ApiResult<String> {
-static API_TOKEN: Lazy<RwLock<LocalAuthPushToken>> = Lazy::new(|| {
+static API_TOKEN: LazyLock<RwLock<LocalAuthPushToken>> = LazyLock::new(|| {
 RwLock::new(LocalAuthPushToken {
 access_token: String::new(),
 valid_until: Instant::now(),
56 src/auth.rs
@@ -1,12 +1,15 @@
-// JWT Handling
+use std::{
+env,
+net::IpAddr,
+sync::{LazyLock, OnceLock},
+};
+
 use chrono::{DateTime, TimeDelta, Utc};
 use jsonwebtoken::{errors::ErrorKind, Algorithm, DecodingKey, EncodingKey, Header};
 use num_traits::FromPrimitive;
-use once_cell::sync::{Lazy, OnceCell};
 use openssl::rsa::Rsa;
 use serde::de::DeserializeOwned;
 use serde::ser::Serialize;
-use std::{env, net::IpAddr};

 use crate::{
 api::ApiResult,
@@ -22,27 +25,30 @@ use crate::{
 const JWT_ALGORITHM: Algorithm = Algorithm::RS256;

 // Limit when BitWarden consider the token as expired
-pub static BW_EXPIRATION: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_minutes(5).unwrap());
+pub static BW_EXPIRATION: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_minutes(5).unwrap());

-pub static DEFAULT_REFRESH_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_days(30).unwrap());
+pub static DEFAULT_REFRESH_VALIDITY: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_days(30).unwrap());
-pub static MOBILE_REFRESH_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_days(90).unwrap());
+pub static MOBILE_REFRESH_VALIDITY: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_days(90).unwrap());
-pub static DEFAULT_ACCESS_VALIDITY: Lazy<TimeDelta> = Lazy::new(|| TimeDelta::try_hours(2).unwrap());
+pub static DEFAULT_ACCESS_VALIDITY: LazyLock<TimeDelta> = LazyLock::new(|| TimeDelta::try_hours(2).unwrap());
-static JWT_HEADER: Lazy<Header> = Lazy::new(|| Header::new(JWT_ALGORITHM));
+static JWT_HEADER: LazyLock<Header> = LazyLock::new(|| Header::new(JWT_ALGORITHM));

-pub static JWT_LOGIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|login", CONFIG.domain_origin()));
+pub static JWT_LOGIN_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|login", CONFIG.domain_origin()));
-static JWT_INVITE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|invite", CONFIG.domain_origin()));
+static JWT_INVITE_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|invite", CONFIG.domain_origin()));
-static JWT_EMERGENCY_ACCESS_INVITE_ISSUER: Lazy<String> =
+static JWT_EMERGENCY_ACCESS_INVITE_ISSUER: LazyLock<String> =
-Lazy::new(|| format!("{}|emergencyaccessinvite", CONFIG.domain_origin()));
+LazyLock::new(|| format!("{}|emergencyaccessinvite", CONFIG.domain_origin()));
-static JWT_DELETE_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|delete", CONFIG.domain_origin()));
+static JWT_DELETE_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|delete", CONFIG.domain_origin()));
-static JWT_VERIFYEMAIL_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|verifyemail", CONFIG.domain_origin()));
+static JWT_VERIFYEMAIL_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|verifyemail", CONFIG.domain_origin()));
-static JWT_ADMIN_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|admin", CONFIG.domain_origin()));
+static JWT_ADMIN_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|admin", CONFIG.domain_origin()));
-static JWT_SEND_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|send", CONFIG.domain_origin()));
+static JWT_SEND_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|send", CONFIG.domain_origin()));
-static JWT_ORG_API_KEY_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|api.organization", CONFIG.domain_origin()));
+static JWT_ORG_API_KEY_ISSUER: LazyLock<String> =
-static JWT_FILE_DOWNLOAD_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|file_download", CONFIG.domain_origin()));
+LazyLock::new(|| format!("{}|api.organization", CONFIG.domain_origin()));
-static JWT_REGISTER_VERIFY_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|register_verify", CONFIG.domain_origin()));
+static JWT_FILE_DOWNLOAD_ISSUER: LazyLock<String> =
+LazyLock::new(|| format!("{}|file_download", CONFIG.domain_origin()));
+static JWT_REGISTER_VERIFY_ISSUER: LazyLock<String> =
+LazyLock::new(|| format!("{}|register_verify", CONFIG.domain_origin()));

-static PRIVATE_RSA_KEY: OnceCell<EncodingKey> = OnceCell::new();
+static PRIVATE_RSA_KEY: OnceLock<EncodingKey> = OnceLock::new();
-static PUBLIC_RSA_KEY: OnceCell<DecodingKey> = OnceCell::new();
+static PUBLIC_RSA_KEY: OnceLock<DecodingKey> = OnceLock::new();

 pub async fn initialize_keys() -> Result<(), Error> {
 use std::io::Error;
@@ -54,7 +60,7 @@ pub async fn initialize_keys() -> Result<(), Error> {
 .ok_or_else(|| Error::other("Private RSA key path filename is not valid UTF-8"))?
 .to_string();

-let operator = CONFIG.opendal_operator_for_path_type(PathType::RsaKey).map_err(Error::other)?;
+let operator = CONFIG.opendal_operator_for_path_type(&PathType::RsaKey).map_err(Error::other)?;

 let priv_key_buffer = match operator.read(&rsa_key_filename).await {
 Ok(buffer) => Some(buffer),
@@ -457,7 +463,7 @@ pub fn generate_delete_claims(uuid: String) -> BasicJwtClaims {
 }
 }

-pub fn generate_verify_email_claims(user_id: UserId) -> BasicJwtClaims {
+pub fn generate_verify_email_claims(user_id: &UserId) -> BasicJwtClaims {
 let time_now = Utc::now();
 let expire_hours = i64::from(CONFIG.invitation_expiration_hours());
 BasicJwtClaims {
@@ -696,9 +702,9 @@ impl<'r> FromRequest<'r> for OrgHeaders {
 // First check the path, if this is not a valid uuid, try the query values.
 let url_org_id: Option<OrganizationId> = {
 if let Some(Ok(org_id)) = request.param::<OrganizationId>(1) {
-Some(org_id.clone())
+Some(org_id)
 } else if let Some(Ok(org_id)) = request.query_value::<OrganizationId>("organizationId") {
-Some(org_id.clone())
+Some(org_id)
 } else {
 None
 }
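Alongside the `Lazy` → `LazyLock` swap, the `src/auth.rs` hunks above also move the RSA key holders from `once_cell::sync::OnceCell` to `std::sync::OnceLock`. A minimal sketch of that API under simplified assumptions (the key is reduced to a `String` so the example stands alone):

```rust
use std::sync::OnceLock;

// Stand-in for the EncodingKey/DecodingKey statics in the real code.
static PRIVATE_RSA_KEY: OnceLock<String> = OnceLock::new();

fn initialize_keys() -> Result<(), &'static str> {
    // `set` stores the value exactly once; a second call fails instead of overwriting.
    PRIVATE_RSA_KEY.set("generated-or-loaded-key".to_string()).map_err(|_| "keys already initialized")
}

fn private_key() -> &'static String {
    PRIVATE_RSA_KEY.get().expect("initialize_keys must be called first")
}

fn main() {
    initialize_keys().unwrap();
    println!("key loaded: {} bytes", private_key().len());
}
```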
316
src/config.rs
316
src/config.rs
@@ -1,5 +1,6 @@
|
|||||||
use std::{
|
use std::{
|
||||||
env::consts::EXE_SUFFIX,
|
env::consts::EXE_SUFFIX,
|
||||||
|
fmt,
|
||||||
process::exit,
|
process::exit,
|
||||||
sync::{
|
sync::{
|
||||||
atomic::{AtomicBool, Ordering},
|
atomic::{AtomicBool, Ordering},
|
||||||
@@ -8,15 +9,15 @@ use std::{
|
|||||||
};
|
};
|
||||||
|
|
||||||
use job_scheduler_ng::Schedule;
|
use job_scheduler_ng::Schedule;
|
||||||
use once_cell::sync::Lazy;
|
|
||||||
use reqwest::Url;
|
use reqwest::Url;
|
||||||
|
use serde::de::{self, Deserialize, Deserializer, MapAccess, Visitor};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
error::Error,
|
error::Error,
|
||||||
util::{get_env, get_env_bool, get_web_vault_version, is_valid_email, parse_experimental_client_feature_flags},
|
util::{get_env, get_env_bool, get_web_vault_version, is_valid_email, parse_experimental_client_feature_flags},
|
||||||
};
|
};
|
||||||
|
|
||||||
static CONFIG_FILE: Lazy<String> = Lazy::new(|| {
|
static CONFIG_FILE: LazyLock<String> = LazyLock::new(|| {
|
||||||
let data_folder = get_env("DATA_FOLDER").unwrap_or_else(|| String::from("data"));
|
let data_folder = get_env("DATA_FOLDER").unwrap_or_else(|| String::from("data"));
|
||||||
get_env("CONFIG_FILE").unwrap_or_else(|| format!("{data_folder}/config.json"))
|
get_env("CONFIG_FILE").unwrap_or_else(|| format!("{data_folder}/config.json"))
|
||||||
});
|
});
|
||||||
@@ -33,7 +34,7 @@ static CONFIG_FILENAME: LazyLock<String> = LazyLock::new(|| {
|
|||||||
|
|
||||||
pub static SKIP_CONFIG_VALIDATION: AtomicBool = AtomicBool::new(false);
|
pub static SKIP_CONFIG_VALIDATION: AtomicBool = AtomicBool::new(false);
|
||||||
|
|
||||||
pub static CONFIG: Lazy<Config> = Lazy::new(|| {
|
pub static CONFIG: LazyLock<Config> = LazyLock::new(|| {
|
||||||
std::thread::spawn(|| {
|
std::thread::spawn(|| {
|
||||||
let rt = tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap_or_else(|e| {
|
let rt = tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap_or_else(|e| {
|
||||||
println!("Error loading config:\n {e:?}\n");
|
println!("Error loading config:\n {e:?}\n");
|
||||||
@@ -55,6 +56,41 @@ pub static CONFIG: Lazy<Config> = Lazy::new(|| {
|
|||||||
pub type Pass = String;
|
pub type Pass = String;
|
||||||
|
|
||||||
macro_rules! make_config {
|
macro_rules! make_config {
|
||||||
|
// Support string print
|
||||||
|
( @supportstr $name:ident, $value:expr, Pass, option ) => { serde_json::to_value(&$value.as_ref().map(|_| String::from("***"))).unwrap() }; // Optional pass, we map to an Option<String> with "***"
|
||||||
|
( @supportstr $name:ident, $value:expr, Pass, $none_action:ident ) => { "***".into() }; // Required pass, we return "***"
|
||||||
|
( @supportstr $name:ident, $value:expr, $ty:ty, option ) => { serde_json::to_value(&$value).unwrap() }; // Optional other or string, we convert to json
|
||||||
|
( @supportstr $name:ident, $value:expr, String, $none_action:ident ) => { $value.as_str().into() }; // Required string value, we convert to json
|
||||||
|
( @supportstr $name:ident, $value:expr, $ty:ty, $none_action:ident ) => { ($value).into() }; // Required other value, we return as is or convert to json
|
||||||
|
|
||||||
|
// Group or empty string
|
||||||
|
( @show ) => { "" };
|
||||||
|
( @show $lit:literal ) => { $lit };
|
||||||
|
|
||||||
|
// Wrap the optionals in an Option type
|
||||||
|
( @type $ty:ty, option) => { Option<$ty> };
|
||||||
|
( @type $ty:ty, $id:ident) => { $ty };
|
||||||
|
|
||||||
|
// Generate the values depending on none_action
|
||||||
|
( @build $value:expr, $config:expr, option, ) => { $value };
|
||||||
|
( @build $value:expr, $config:expr, def, $default:expr ) => { $value.unwrap_or($default) };
|
||||||
|
( @build $value:expr, $config:expr, auto, $default_fn:expr ) => {{
|
||||||
|
match $value {
|
||||||
|
Some(v) => v,
|
||||||
|
None => {
|
||||||
|
let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
|
||||||
|
f($config)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}};
|
||||||
|
( @build $value:expr, $config:expr, generated, $default_fn:expr ) => {{
|
||||||
|
let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
|
||||||
|
f($config)
|
||||||
|
}};
|
||||||
|
|
||||||
|
( @getenv $name:expr, bool ) => { get_env_bool($name) };
|
||||||
|
( @getenv $name:expr, $ty:ident ) => { get_env($name) };
|
||||||
|
|
||||||
($(
|
($(
|
||||||
$(#[doc = $groupdoc:literal])?
|
$(#[doc = $groupdoc:literal])?
|
||||||
$group:ident $(: $group_enabled:ident)? {
|
$group:ident $(: $group_enabled:ident)? {
|
||||||
@@ -74,10 +110,103 @@ macro_rules! make_config {
 _env: ConfigBuilder,
 _usr: ConfigBuilder,
 
-_overrides: Vec<String>,
+_overrides: Vec<&'static str>,
 }
 
-#[derive(Clone, Default, Deserialize, Serialize)]
+// Custom Deserialize for ConfigBuilder, mainly based upon https://serde.rs/deserialize-struct.html
+// This deserialize doesn't care if there are keys missing, or if there are duplicate keys
+// In case of duplicate keys (which should never be possible unless manually edited), the last value is used!
+// Main reason for this is removing the `visit_seq` function, which causes a lot of code generation not needed or used for this struct.
+impl<'de> Deserialize<'de> for ConfigBuilder {
+fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+where
+D: Deserializer<'de>,
+{
+const FIELDS: &[&str] = &[
+$($(
+stringify!($name),
+)+)+
+];
+
+#[allow(non_camel_case_types)]
+enum Field {
+$($(
+$name,
+)+)+
+__ignore,
+}
+
+impl<'de> Deserialize<'de> for Field {
+fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+where
+D: Deserializer<'de>,
+{
+struct FieldVisitor;
+
+impl Visitor<'_> for FieldVisitor {
+type Value = Field;
+
+fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+formatter.write_str("ConfigBuilder field identifier")
+}
+
+#[inline]
+fn visit_str<E>(self, value: &str) -> Result<Field, E>
+where
+E: de::Error,
+{
+match value {
+$($(
+stringify!($name) => Ok(Field::$name),
+)+)+
+_ => Ok(Field::__ignore),
+}
+}
+}
+
+deserializer.deserialize_identifier(FieldVisitor)
+}
+}
+
+struct ConfigBuilderVisitor;
+
+impl<'de> Visitor<'de> for ConfigBuilderVisitor {
+type Value = ConfigBuilder;
+
+fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+formatter.write_str("struct ConfigBuilder")
+}
+
+#[inline]
+fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
+where
+A: MapAccess<'de>,
+{
+let mut builder = ConfigBuilder::default();
+while let Some(key) = map.next_key()? {
+match key {
+$($(
+Field::$name => {
+if builder.$name.is_some() {
+return Err(de::Error::duplicate_field(stringify!($name)));
+}
+builder.$name = map.next_value()?;
+}
+)+)+
+Field::__ignore => {
+let _ = map.next_value::<de::IgnoredAny>()?;
+}
+}
+}
+Ok(builder)
+}
+}
+
+deserializer.deserialize_struct("ConfigBuilder", FIELDS, ConfigBuilderVisitor)
+}
+}
+
+#[derive(Clone, Default, Serialize)]
 pub struct ConfigBuilder {
 $($(
 #[serde(skip_serializing_if = "Option::is_none")]
@@ -86,7 +215,6 @@ macro_rules! make_config {
 }
 
 impl ConfigBuilder {
-#[allow(clippy::field_reassign_with_default)]
 fn from_env() -> Self {
 let env_file = get_env("ENV_FILE").unwrap_or_else(|| String::from(".env"));
 match dotenvy::from_path(&env_file) {
@@ -148,14 +276,14 @@ macro_rules! make_config {
 
 /// Merges the values of both builders into a new builder.
 /// If both have the same element, `other` wins.
-fn merge(&self, other: &Self, show_overrides: bool, overrides: &mut Vec<String>) -> Self {
+fn merge(&self, other: &Self, show_overrides: bool, overrides: &mut Vec<&str>) -> Self {
 let mut builder = self.clone();
 $($(
 if let v @Some(_) = &other.$name {
 builder.$name = v.clone();
 
 if self.$name.is_some() {
-overrides.push(pastey::paste!(stringify!([<$name:upper>])).into());
+overrides.push(pastey::paste!(stringify!([<$name:upper>])));
 }
 }
 )+)+
@@ -196,6 +324,32 @@ macro_rules! make_config {
 #[derive(Clone, Default)]
 struct ConfigItems { $($( $name: make_config! {@type $ty, $none_action}, )+)+ }
 
+#[derive(Serialize)]
+struct ElementDoc {
+name: &'static str,
+description: &'static str,
+}
+
+#[derive(Serialize)]
+struct ElementData {
+editable: bool,
+name: &'static str,
+value: serde_json::Value,
+default: serde_json::Value,
+#[serde(rename = "type")]
+r#type: &'static str,
+doc: ElementDoc,
+overridden: bool,
+}
+
+#[derive(Serialize)]
+pub struct GroupData {
+group: &'static str,
+grouptoggle: &'static str,
+groupdoc: &'static str,
+elements: Vec<ElementData>,
+}
+
 #[allow(unused)]
 impl Config {
 $($(
@@ -207,11 +361,12 @@ macro_rules! make_config {
 
 pub fn prepare_json(&self) -> serde_json::Value {
 let (def, cfg, overridden) = {
+// Lock the inner as short as possible and clone what is needed to prevent deadlocks
 let inner = &self.inner.read().unwrap();
 (inner._env.build(), inner.config.clone(), inner._overrides.clone())
 };
 
-fn _get_form_type(rust_type: &str) -> &'static str {
+fn _get_form_type(rust_type: &'static str) -> &'static str {
 match rust_type {
 "Pass" => "password",
 "String" => "text",
@@ -220,48 +375,36 @@ macro_rules! make_config {
 }
 }
 
-fn _get_doc(doc: &str) -> serde_json::Value {
-let mut split = doc.split("|>").map(str::trim);
-// We do not use the json!() macro here since that causes a lot of macro recursion.
-// This slows down compile time and it also causes issues with rust-analyzer
-serde_json::Value::Object({
-let mut doc_json = serde_json::Map::new();
-doc_json.insert("name".into(), serde_json::to_value(split.next()).unwrap());
-doc_json.insert("description".into(), serde_json::to_value(split.next()).unwrap());
-doc_json
-})
+fn _get_doc(doc_str: &'static str) -> ElementDoc {
+let mut split = doc_str.split("|>").map(str::trim);
+ElementDoc {
+name: split.next().unwrap_or_default(),
+description: split.next().unwrap_or_default(),
+}
 }
 
-// We do not use the json!() macro here since that causes a lot of macro recursion.
-// This slows down compile time and it also causes issues with rust-analyzer
-serde_json::Value::Array(<[_]>::into_vec(Box::new([
-$(
-serde_json::Value::Object({
-let mut group = serde_json::Map::new();
-group.insert("group".into(), (stringify!($group)).into());
-group.insert("grouptoggle".into(), (stringify!($($group_enabled)?)).into());
-group.insert("groupdoc".into(), (make_config! { @show $($groupdoc)? }).into());
+let data: Vec<GroupData> = vec![
+$( // This repetition is for each group
+GroupData {
+group: stringify!($group),
+grouptoggle: stringify!($($group_enabled)?),
+groupdoc: (make_config! { @show $($groupdoc)? }),
 
-group.insert("elements".into(), serde_json::Value::Array(<[_]>::into_vec(Box::new([
-$(
-serde_json::Value::Object({
-let mut element = serde_json::Map::new();
-element.insert("editable".into(), ($editable).into());
-element.insert("name".into(), (stringify!($name)).into());
-element.insert("value".into(), serde_json::to_value(cfg.$name).unwrap());
-element.insert("default".into(), serde_json::to_value(def.$name).unwrap());
-element.insert("type".into(), (_get_form_type(stringify!($ty))).into());
-element.insert("doc".into(), (_get_doc(concat!($($doc),+))).into());
-element.insert("overridden".into(), (overridden.contains(&pastey::paste!(stringify!([<$name:upper>])).into())).into());
-element
-}),
-)+
-]))));
-group
-}),
-)+
-])))
+elements: vec![
+$( // This repetition is for each element within a group
+ElementData {
+editable: $editable,
+name: stringify!($name),
+value: serde_json::to_value(&cfg.$name).unwrap_or_default(),
+default: serde_json::to_value(&def.$name).unwrap_or_default(),
+r#type: _get_form_type(stringify!($ty)),
+doc: _get_doc(concat!($($doc),+)),
+overridden: overridden.contains(&pastey::paste!(stringify!([<$name:upper>]))),
+},
+)+], // End of elements repetition
+},
+)+]; // End of groups repetition
+serde_json::to_value(data).unwrap()
 }
 
 pub fn get_support_json(&self) -> serde_json::Value {
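As an aside, a minimal sketch (not part of the diff) of the pattern the new prepare_json relies on: describe the admin-page JSON shape once with #[derive(Serialize)] structs and let serde_json::to_value build the nested value, instead of inserting into serde_json::Map by hand for every element. The struct and field values below ("folders", "icon_cache_ttl") are illustrative placeholders, assuming serde and serde_json as already used in this repository:

use serde::Serialize;

// Derived serialization produces the JSON shape once; the call sites only
// build plain structs, which keeps the generated code per config element small.
#[derive(Serialize)]
struct Element {
    name: &'static str,
    value: serde_json::Value,
}

#[derive(Serialize)]
struct Group {
    group: &'static str,
    elements: Vec<Element>,
}

fn prepare() -> serde_json::Value {
    let data = vec![Group {
        group: "folders",
        elements: vec![Element {
            name: "icon_cache_ttl",
            value: serde_json::Value::from(2_592_000u64),
        }],
    }];
    // Same nested JSON the hand-built Map produced, with far less macro expansion.
    serde_json::to_value(data).unwrap()
}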
@@ -269,8 +412,8 @@ macro_rules! make_config {
 // Pass types will always be masked and no need to put them in the list.
 // Besides Pass, only String types will be masked via _privacy_mask.
 const PRIVACY_CONFIG: &[&str] = &[
-"allowed_iframe_ancestors",
 "allowed_connect_src",
+"allowed_iframe_ancestors",
 "database_url",
 "domain_origin",
 "domain_path",
@@ -278,16 +421,18 @@ macro_rules! make_config {
 "helo_name",
 "org_creation_users",
 "signups_domains_whitelist",
+"_smtp_img_src",
+"smtp_from_name",
 "smtp_from",
 "smtp_host",
 "smtp_username",
-"_smtp_img_src",
-"sso_client_id",
 "sso_authority",
 "sso_callback_path",
+"sso_client_id",
 ];
 
 let cfg = {
+// Lock the inner as short as possible and clone what is needed to prevent deadlocks
 let inner = &self.inner.read().unwrap();
 inner.config.clone()
 };
@@ -317,13 +462,21 @@ macro_rules! make_config {
 serde_json::Value::Object({
 let mut json = serde_json::Map::new();
 $($(
-json.insert(stringify!($name).into(), make_config! { @supportstr $name, cfg.$name, $ty, $none_action });
+json.insert(String::from(stringify!($name)), make_config! { @supportstr $name, cfg.$name, $ty, $none_action });
 )+)+;
+// Loop through all privacy sensitive keys and mask them
+for mask_key in PRIVACY_CONFIG {
+if let Some(value) = json.get_mut(*mask_key) {
+if let Some(s) = value.as_str() {
+*value = _privacy_mask(s).into();
+}
+}
+}
 json
 })
 }
 
-pub fn get_overrides(&self) -> Vec<String> {
+pub fn get_overrides(&self) -> Vec<&'static str> {
 let overrides = {
 let inner = &self.inner.read().unwrap();
 inner._overrides.clone()
@@ -332,55 +485,6 @@ macro_rules! make_config {
 }
 }
 };
-
-// Support string print
-( @supportstr $name:ident, $value:expr, Pass, option ) => { serde_json::to_value($value.as_ref().map(|_| String::from("***"))).unwrap() }; // Optional pass, we map to an Option<String> with "***"
-( @supportstr $name:ident, $value:expr, Pass, $none_action:ident ) => { "***".into() }; // Required pass, we return "***"
-( @supportstr $name:ident, $value:expr, String, option ) => { // Optional other value, we return as is or convert to string to apply the privacy config
-if PRIVACY_CONFIG.contains(&stringify!($name)) {
-serde_json::to_value($value.as_ref().map(|x| _privacy_mask(x) )).unwrap()
-} else {
-serde_json::to_value($value).unwrap()
-}
-};
-( @supportstr $name:ident, $value:expr, String, $none_action:ident ) => { // Required other value, we return as is or convert to string to apply the privacy config
-if PRIVACY_CONFIG.contains(&stringify!($name)) {
-_privacy_mask(&$value).into()
-} else {
-($value).into()
-}
-};
-( @supportstr $name:ident, $value:expr, $ty:ty, option ) => { serde_json::to_value($value).unwrap() }; // Optional other value, we return as is or convert to string to apply the privacy config
-( @supportstr $name:ident, $value:expr, $ty:ty, $none_action:ident ) => { ($value).into() }; // Required other value, we return as is or convert to string to apply the privacy config
-
-// Group or empty string
-( @show ) => { "" };
-( @show $lit:literal ) => { $lit };
-
-// Wrap the optionals in an Option type
-( @type $ty:ty, option) => { Option<$ty> };
-( @type $ty:ty, $id:ident) => { $ty };
-
-// Generate the values depending on none_action
-( @build $value:expr, $config:expr, option, ) => { $value };
-( @build $value:expr, $config:expr, def, $default:expr ) => { $value.unwrap_or($default) };
-( @build $value:expr, $config:expr, auto, $default_fn:expr ) => {{
-match $value {
-Some(v) => v,
-None => {
-let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
-f($config)
-}
-}
-}};
-( @build $value:expr, $config:expr, generated, $default_fn:expr ) => {{
-let f: &dyn Fn(&ConfigItems) -> _ = &$default_fn;
-f($config)
-}};
-
-( @getenv $name:expr, bool ) => { get_env_bool($name) };
-( @getenv $name:expr, $ty:ident ) => { get_env($name) };
-
 }
 
 //STRUCTURE:
@@ -1518,7 +1622,7 @@ impl Config {
 if let Some(akey) = self._duo_akey() {
 akey
 } else {
-let akey_s = crate::crypto::encode_random_bytes::<64>(data_encoding::BASE64);
+let akey_s = crate::crypto::encode_random_bytes::<64>(&data_encoding::BASE64);
 
 // Save the new value
 let builder = ConfigBuilder {
@@ -1542,7 +1646,7 @@ impl Config {
 token.is_some() && !token.unwrap().trim().is_empty()
 }
 
-pub fn opendal_operator_for_path_type(&self, path_type: PathType) -> Result<opendal::Operator, Error> {
+pub fn opendal_operator_for_path_type(&self, path_type: &PathType) -> Result<opendal::Operator, Error> {
 let path = match path_type {
 PathType::Data => self.data_folder(),
 PathType::IconCache => self.icon_cache_folder(),
@@ -1735,7 +1839,7 @@ fn to_json<'reg, 'rc>(
 
 // Configure the web-vault version as an integer so it can be used as a comparison smaller or greater then.
 // The default is based upon the version since this feature is added.
-static WEB_VAULT_VERSION: Lazy<semver::Version> = Lazy::new(|| {
+static WEB_VAULT_VERSION: LazyLock<semver::Version> = LazyLock::new(|| {
 let vault_version = get_web_vault_version();
 // Use a single regex capture to extract version components
 let re = regex::Regex::new(r"(\d{4})\.(\d{1,2})\.(\d{1,2})").unwrap();
@@ -1751,7 +1855,7 @@ static WEB_VAULT_VERSION: Lazy<semver::Version> = Lazy::new(|| {
 
 // Configure the Vaultwarden version as an integer so it can be used as a comparison smaller or greater then.
 // The default is based upon the version since this feature is added.
-static VW_VERSION: Lazy<semver::Version> = Lazy::new(|| {
+static VW_VERSION: LazyLock<semver::Version> = LazyLock::new(|| {
 let vw_version = crate::VERSION.unwrap_or("1.32.5");
 // Use a single regex capture to extract version components
 let re = regex::Regex::new(r"(\d{1})\.(\d{1,2})\.(\d{1,2})").unwrap();
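For reference, the hand-written serde visitor used above for ConfigBuilder can be sketched for a plain two-field struct. This is illustrative only; the names Builder, domain, and port are made up and not part of the diff, and it assumes serde/serde_json as already present in the workspace:

use serde::de::{Deserializer, IgnoredAny, MapAccess, Visitor};
use serde::Deserialize;
use std::fmt;

#[derive(Default)]
struct Builder {
    domain: Option<String>,
    port: Option<u16>,
}

impl<'de> Deserialize<'de> for Builder {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        struct BuilderVisitor;

        impl<'de> Visitor<'de> for BuilderVisitor {
            type Value = Builder;

            fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("struct Builder")
            }

            // Only visit_map is implemented; skipping visit_seq is what keeps the
            // generated code small, at the cost of not accepting sequence input.
            fn visit_map<A: MapAccess<'de>>(self, mut map: A) -> Result<Builder, A::Error> {
                let mut out = Builder::default();
                while let Some(key) = map.next_key::<String>()? {
                    match key.as_str() {
                        "domain" => out.domain = map.next_value()?,
                        "port" => out.port = map.next_value()?,
                        // Unknown keys are ignored instead of raising an error.
                        _ => {
                            let _ = map.next_value::<IgnoredAny>()?;
                        }
                    }
                }
                Ok(out)
            }
        }

        deserializer.deserialize_struct("Builder", &["domain", "port"], BuilderVisitor)
    }
}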
@@ -48,7 +48,7 @@ pub fn get_random_bytes<const N: usize>() -> [u8; N] {
 }
 
 /// Encode random bytes using the provided function.
-pub fn encode_random_bytes<const N: usize>(e: Encoding) -> String {
+pub fn encode_random_bytes<const N: usize>(e: &Encoding) -> String {
 e.encode(&get_random_bytes::<N>())
 }
 
@@ -81,7 +81,7 @@ pub fn get_random_string_alphanum(num_chars: usize) -> String {
 }
 
 pub fn generate_id<const N: usize>() -> String {
-encode_random_bytes::<N>(HEXLOWER)
+encode_random_bytes::<N>(&HEXLOWER)
 }
 
 pub fn generate_send_file_id() -> String {
@@ -44,7 +44,7 @@ impl Attachment {
 }
 
 pub async fn get_url(&self, host: &str) -> Result<String, crate::Error> {
-let operator = CONFIG.opendal_operator_for_path_type(PathType::Attachments)?;
+let operator = CONFIG.opendal_operator_for_path_type(&PathType::Attachments)?;
 
 if operator.info().scheme() == opendal::Scheme::Fs {
 let token = encode_jwt(&generate_file_download_claims(self.cipher_uuid.clone(), self.id.clone()));
@@ -117,7 +117,7 @@ impl Attachment {
 .map_res("Error deleting attachment")
 }}?;
 
-let operator = CONFIG.opendal_operator_for_path_type(PathType::Attachments)?;
+let operator = CONFIG.opendal_operator_for_path_type(&PathType::Attachments)?;
 let file_path = self.get_file_path();
 
 if let Err(e) = operator.delete(&file_path).await {
@@ -48,7 +48,7 @@ impl Device {
 }
 
 pub fn refresh_twofactor_remember(&mut self) -> String {
-let twofactor_remember = crypto::encode_random_bytes::<180>(BASE64);
+let twofactor_remember = crypto::encode_random_bytes::<180>(&BASE64);
 self.twofactor_remember = Some(twofactor_remember.clone());
 
 twofactor_remember
@@ -129,7 +129,7 @@ impl Device {
 
 push_uuid: Some(PushId(get_uuid())),
 push_token: None,
-refresh_token: crypto::encode_random_bytes::<64>(BASE64URL),
+refresh_token: crypto::encode_random_bytes::<64>(&BASE64URL),
 twofactor_remember: None,
 };
 
@@ -172,7 +172,7 @@ impl PartialOrd<MembershipType> for i32 {
 
 /// Local methods
 impl Organization {
-pub fn new(name: String, billing_email: String, private_key: Option<String>, public_key: Option<String>) -> Self {
+pub fn new(name: String, billing_email: &str, private_key: Option<String>, public_key: Option<String>) -> Self {
 let billing_email = billing_email.to_lowercase();
 Self {
 uuid: OrganizationId(crate::util::get_uuid()),
@@ -225,7 +225,7 @@ impl Send {
 self.update_users_revision(conn).await;
 
 if self.atype == SendType::File as i32 {
-let operator = CONFIG.opendal_operator_for_path_type(PathType::Sends)?;
+let operator = CONFIG.opendal_operator_for_path_type(&PathType::Sends)?;
 operator.remove_all(&self.uuid).await.ok();
 }
 
@@ -106,7 +106,7 @@ impl User {
 pub const CLIENT_KDF_TYPE_DEFAULT: i32 = UserKdfType::Pbkdf2 as i32;
 pub const CLIENT_KDF_ITER_DEFAULT: i32 = 600_000;
 
-pub fn new(email: String, name: Option<String>) -> Self {
+pub fn new(email: &str, name: Option<String>) -> Self {
 let now = Utc::now().naive_utc();
 let email = email.to_lowercase();
 
170
src/error.rs
@@ -3,6 +3,7 @@
 //
 use crate::db::models::EventType;
 use crate::http_client::CustomHttpClientError;
+use serde::ser::{Serialize, SerializeStruct, Serializer};
 use std::error::Error as StdError;
 
 macro_rules! make_error {
@@ -73,7 +74,7 @@ make_error! {
 Empty(Empty): _no_source, _serialize,
 // Used to represent err! calls
 Simple(String): _no_source, _api_error,
-Compact(Compact): _no_source, _api_error_small,
+Compact(Compact): _no_source, _compact_api_error,
 
 // Used in our custom http client to handle non-global IPs and blocked domains
 CustomHttpClient(CustomHttpClientError): _has_source, _api_error,
@@ -130,6 +131,10 @@ impl Error {
 (usr_msg, log_msg.into()).into()
 }
 
+pub fn new_msg<M: Into<String> + Clone>(usr_msg: M) -> Self {
+(usr_msg.clone(), usr_msg.into()).into()
+}
+
 pub fn empty() -> Self {
 Empty {}.into()
 }
@@ -196,38 +201,97 @@ fn _no_source<T, S>(_: T) -> Option<S> {
 None
 }
 
-fn _serialize(e: &impl serde::Serialize, _msg: &str) -> String {
+fn _serialize(e: &impl Serialize, _msg: &str) -> String {
 serde_json::to_string(e).unwrap()
 }
 
-fn _api_error(_: &impl std::any::Any, msg: &str) -> String {
-let json = json!({
-"message": msg,
-"error": "",
-"error_description": "",
-"validationErrors": {"": [ msg ]},
-"errorModel": {
-"message": msg,
-"object": "error"
-},
-"exceptionMessage": null,
-"exceptionStackTrace": null,
-"innerExceptionMessage": null,
-"object": "error"
-});
-_serialize(&json, "")
+/// This will serialize the default ApiErrorResponse
+/// It will add the needed fields which are mostly empty or have multiple copies of the message
+/// This is more efficient than having a larger struct and use the Serialize derive
+/// It also prevents using `json!()` calls to create the final output
+impl Serialize for ApiErrorResponse<'_> {
+fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+where
+S: Serializer,
+{
+#[derive(serde::Serialize)]
+struct ErrorModel<'a> {
+message: &'a str,
+object: &'static str,
 }
 
-fn _api_error_small(_: &impl std::any::Any, msg: &str) -> String {
-let json = json!({
-"message": msg,
-"validationErrors": null,
-"exceptionMessage": null,
-"exceptionStackTrace": null,
-"innerExceptionMessage": null,
-"object": "error"
-});
-_serialize(&json, "")
+let mut state = serializer.serialize_struct("ApiErrorResponse", 9)?;
+
+state.serialize_field("message", self.0.message)?;
+
+let mut validation_errors = std::collections::HashMap::with_capacity(1);
+validation_errors.insert("", vec![self.0.message]);
+state.serialize_field("validationErrors", &validation_errors)?;
+
+let error_model = ErrorModel {
+message: self.0.message,
+object: "error",
+};
+state.serialize_field("errorModel", &error_model)?;
+
+state.serialize_field("error", "")?;
+state.serialize_field("error_description", "")?;
+state.serialize_field("exceptionMessage", &None::<()>)?;
+state.serialize_field("exceptionStackTrace", &None::<()>)?;
+state.serialize_field("innerExceptionMessage", &None::<()>)?;
+state.serialize_field("object", "error")?;
+
+state.end()
+}
+}
+
+/// This will serialize the smaller CompactApiErrorResponse
+/// It will add the needed fields which are mostly empty
+/// This is more efficient than having a larger struct and use the Serialize derive
+/// It also prevents using `json!()` calls to create the final output
+impl Serialize for CompactApiErrorResponse<'_> {
+fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+where
+S: Serializer,
+{
+let mut state = serializer.serialize_struct("CompactApiErrorResponse", 6)?;
+
+state.serialize_field("message", self.0.message)?;
+state.serialize_field("validationErrors", &None::<()>)?;
+state.serialize_field("exceptionMessage", &None::<()>)?;
+state.serialize_field("exceptionStackTrace", &None::<()>)?;
+state.serialize_field("innerExceptionMessage", &None::<()>)?;
+state.serialize_field("object", "error")?;
+
+state.end()
+}
+}
+
+/// Main API Error struct template
+/// This struct which we can be used by both ApiErrorResponse and CompactApiErrorResponse
+/// is small and doesn't contain unneeded empty fields. This is more memory efficient, but also less code to compile
+struct ApiErrorMsg<'a> {
+message: &'a str,
+}
+/// Default API Error response struct
+/// The custom serialization adds all other needed fields
+struct ApiErrorResponse<'a>(ApiErrorMsg<'a>);
+/// Compact API Error response struct used for some newer error responses
+/// The custom serialization adds all other needed fields
+struct CompactApiErrorResponse<'a>(ApiErrorMsg<'a>);
+
+fn _api_error(_: &impl std::any::Any, msg: &str) -> String {
+let response = ApiErrorMsg {
+message: msg,
+};
+serde_json::to_string(&ApiErrorResponse(response)).unwrap()
+}
+
+fn _compact_api_error(_: &impl std::any::Any, msg: &str) -> String {
+let response = ApiErrorMsg {
+message: msg,
+};
+serde_json::to_string(&CompactApiErrorResponse(response)).unwrap()
 }
 
 //
@@ -258,34 +322,41 @@ impl Responder<'_, 'static> for Error {
 #[macro_export]
 macro_rules! err {
 ($kind:ident, $msg:expr) => {{
-error!("{}", $msg);
-return Err($crate::error::Error::new($msg, $msg).with_kind($crate::error::ErrorKind::$kind($crate::error::$kind {})));
+let msg = $msg;
+error!("{msg}");
+return Err($crate::error::Error::new_msg(msg).with_kind($crate::error::ErrorKind::$kind($crate::error::$kind {})));
 }};
 ($msg:expr) => {{
-error!("{}", $msg);
-return Err($crate::error::Error::new($msg, $msg));
+let msg = $msg;
+error!("{msg}");
+return Err($crate::error::Error::new_msg(msg));
 }};
 ($msg:expr, ErrorEvent $err_event:tt) => {{
-error!("{}", $msg);
-return Err($crate::error::Error::new($msg, $msg).with_event($crate::error::ErrorEvent $err_event));
+let msg = $msg;
+error!("{msg}");
+return Err($crate::error::Error::new_msg(msg).with_event($crate::error::ErrorEvent $err_event));
 }};
 ($usr_msg:expr, $log_value:expr) => {{
-error!("{}. {}", $usr_msg, $log_value);
-return Err($crate::error::Error::new($usr_msg, $log_value));
+let usr_msg = $usr_msg;
+let log_value = $log_value;
+error!("{usr_msg}. {log_value}");
+return Err($crate::error::Error::new(usr_msg, log_value));
 }};
 ($usr_msg:expr, $log_value:expr, ErrorEvent $err_event:tt) => {{
-error!("{}. {}", $usr_msg, $log_value);
-return Err($crate::error::Error::new($usr_msg, $log_value).with_event($crate::error::ErrorEvent $err_event));
+let usr_msg = $usr_msg;
+let log_value = $log_value;
+error!("{usr_msg}. {log_value}");
+return Err($crate::error::Error::new(usr_msg, log_value).with_event($crate::error::ErrorEvent $err_event));
 }};
 }
 
 #[macro_export]
 macro_rules! err_silent {
 ($msg:expr) => {{
-return Err($crate::error::Error::new($msg, $msg));
+return Err($crate::error::Error::new_msg($msg));
 }};
 ($msg:expr, ErrorEvent $err_event:tt) => {{
-return Err($crate::error::Error::new($msg, $msg).with_event($crate::error::ErrorEvent $err_event));
+return Err($crate::error::Error::new_msg($msg).with_event($crate::error::ErrorEvent $err_event));
 }};
 ($usr_msg:expr, $log_value:expr) => {{
 return Err($crate::error::Error::new($usr_msg, $log_value));
@@ -298,12 +369,15 @@ macro_rules! err_silent {
 #[macro_export]
 macro_rules! err_code {
 ($msg:expr, $err_code:expr) => {{
-error!("{}", $msg);
-return Err($crate::error::Error::new($msg, $msg).with_code($err_code));
+let msg = $msg;
+error!("{msg}");
+return Err($crate::error::Error::new_msg(msg).with_code($err_code));
 }};
 ($usr_msg:expr, $log_value:expr, $err_code:expr) => {{
-error!("{}. {}", $usr_msg, $log_value);
-return Err($crate::error::Error::new($usr_msg, $log_value).with_code($err_code));
+let usr_msg = $usr_msg;
+let log_value = $log_value;
+error!("{usr_msg}. {log_value}");
+return Err($crate::error::Error::new(usr_msg, log_value).with_code($err_code));
 }};
 }
 
@@ -311,7 +385,7 @@ macro_rules! err_code {
 macro_rules! err_discard {
 ($msg:expr, $data:expr) => {{
 std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
-return Err($crate::error::Error::new($msg, $msg));
+return Err($crate::error::Error::new_msg($msg));
 }};
 ($usr_msg:expr, $log_value:expr, $data:expr) => {{
 std::io::copy(&mut $data.open(), &mut std::io::sink()).ok();
@@ -336,7 +410,9 @@ macro_rules! err_handler {
 return ::rocket::request::Outcome::Error((rocket::http::Status::Unauthorized, $expr));
 }};
 ($usr_msg:expr, $log_value:expr) => {{
-error!(target: "auth", "Unauthorized Error: {}. {}", $usr_msg, $log_value);
-return ::rocket::request::Outcome::Error((rocket::http::Status::Unauthorized, $usr_msg));
+let usr_msg = $usr_msg;
+let log_value = $log_value;
+error!(target: "auth", "Unauthorized Error: {usr_msg}. {log_value}");
+return ::rocket::request::Outcome::Error((rocket::http::Status::Unauthorized, usr_msg));
 }};
 }
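A minimal sketch (outside the diff, names ErrMsg and to_json are hypothetical) of the manual-Serialize approach the error responses now use: the struct only stores a borrowed message, and the fixed boilerplate fields are emitted during serialization rather than stored and cloned. It assumes serde with the ser module and serde_json, both already dependencies here:

use serde::ser::{Serialize, SerializeStruct, Serializer};

struct ErrMsg<'a> {
    message: &'a str,
}

impl Serialize for ErrMsg<'_> {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // Declare three fields and write them one by one; no json!() macro expansion.
        let mut state = serializer.serialize_struct("ErrMsg", 3)?;
        state.serialize_field("message", self.message)?;
        state.serialize_field("exceptionMessage", &None::<()>)?; // serializes as null
        state.serialize_field("object", "error")?;
        state.end()
    }
}

fn to_json(msg: &str) -> String {
    serde_json::to_string(&ErrMsg { message: msg }).unwrap()
}

The output shape is the same as a hand-built json!() object would give, but the struct stays a single &str wide, which is the memory and codegen argument made in the commit description.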
@@ -2,12 +2,11 @@ use std::{
 fmt,
 net::{IpAddr, SocketAddr},
 str::FromStr,
-sync::{Arc, Mutex},
+sync::{Arc, LazyLock, Mutex},
 time::Duration,
 };
 
 use hickory_resolver::{name_server::TokioConnectionProvider, TokioResolver};
-use once_cell::sync::Lazy;
 use regex::Regex;
 use reqwest::{
 dns::{Name, Resolve, Resolving},
@@ -25,9 +24,10 @@ pub fn make_http_request(method: reqwest::Method, url: &str) -> Result<reqwest::
 err!("Invalid host");
 };
 
-should_block_host(host)?;
+should_block_host(&host)?;
 
-static INSTANCE: Lazy<Client> = Lazy::new(|| get_reqwest_client_builder().build().expect("Failed to build client"));
+static INSTANCE: LazyLock<Client> =
+LazyLock::new(|| get_reqwest_client_builder().build().expect("Failed to build client"));
 
 Ok(INSTANCE.request(method, url))
 }
@@ -45,7 +45,7 @@ pub fn get_reqwest_client_builder() -> ClientBuilder {
 return attempt.error("Invalid host");
 };
 
-if let Err(e) = should_block_host(host) {
+if let Err(e) = should_block_host(&host) {
 return attempt.error(e);
 }
 
@@ -100,11 +100,11 @@ fn should_block_address_regex(domain_or_ip: &str) -> bool {
 is_match
 }
 
-fn should_block_host(host: Host<&str>) -> Result<(), CustomHttpClientError> {
+fn should_block_host(host: &Host<&str>) -> Result<(), CustomHttpClientError> {
 let (ip, host_str): (Option<IpAddr>, String) = match host {
-Host::Ipv4(ip) => (Some(ip.into()), ip.to_string()),
-Host::Ipv6(ip) => (Some(ip.into()), ip.to_string()),
-Host::Domain(d) => (None, d.to_string()),
+Host::Ipv4(ip) => (Some(IpAddr::V4(*ip)), ip.to_string()),
+Host::Ipv6(ip) => (Some(IpAddr::V6(*ip)), ip.to_string()),
+Host::Domain(d) => (None, (*d).to_string()),
 };
 
 if let Some(ip) = ip {
@@ -179,7 +179,7 @@ type BoxError = Box<dyn std::error::Error + Send + Sync>;
 
 impl CustomDnsResolver {
 fn instance() -> Arc<Self> {
-static INSTANCE: Lazy<Arc<CustomDnsResolver>> = Lazy::new(CustomDnsResolver::new);
+static INSTANCE: LazyLock<Arc<CustomDnsResolver>> = LazyLock::new(CustomDnsResolver::new);
 Arc::clone(&*INSTANCE)
 }
 
@@ -184,7 +184,7 @@ pub async fn send_delete_account(address: &str, user_id: &UserId) -> EmptyResult
 }
 
 pub async fn send_verify_email(address: &str, user_id: &UserId) -> EmptyResult {
-let claims = generate_verify_email_claims(user_id.clone());
+let claims = generate_verify_email_claims(user_id);
 let verify_email_token = encode_jwt(&claims);
 
 let (subject, body_html, body_text) = get_text(
@@ -235,7 +235,7 @@ pub async fn send_welcome(address: &str) -> EmptyResult {
 }
 
 pub async fn send_welcome_must_verify(address: &str, user_id: &UserId) -> EmptyResult {
-let claims = generate_verify_email_claims(user_id.clone());
+let claims = generate_verify_email_claims(user_id);
 let verify_email_token = encode_jwt(&claims);
 
 let (subject, body_html, body_text) = get_text(
@@ -448,7 +448,7 @@ async fn check_data_folder() {
 
 if data_folder.starts_with("s3://") {
 if let Err(e) = CONFIG
-.opendal_operator_for_path_type(PathType::Data)
+.opendal_operator_for_path_type(&PathType::Data)
 .unwrap_or_else(|e| {
 error!("Failed to create S3 operator for data folder '{data_folder}': {e:?}");
 exit(1);
@@ -1,5 +1,4 @@
-use once_cell::sync::Lazy;
-use std::{net::IpAddr, num::NonZeroU32, time::Duration};
+use std::{net::IpAddr, num::NonZeroU32, sync::LazyLock, time::Duration};
 
 use governor::{clock::DefaultClock, state::keyed::DashMapStateStore, Quota, RateLimiter};
 
@@ -7,13 +6,13 @@ use crate::{Error, CONFIG};
 
 type Limiter<T = IpAddr> = RateLimiter<T, DashMapStateStore<T>, DefaultClock>;
 
-static LIMITER_LOGIN: Lazy<Limiter> = Lazy::new(|| {
+static LIMITER_LOGIN: LazyLock<Limiter> = LazyLock::new(|| {
 let seconds = Duration::from_secs(CONFIG.login_ratelimit_seconds());
 let burst = NonZeroU32::new(CONFIG.login_ratelimit_max_burst()).expect("Non-zero login ratelimit burst");
 RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero login ratelimit seconds").allow_burst(burst))
 });
 
-static LIMITER_ADMIN: Lazy<Limiter> = Lazy::new(|| {
+static LIMITER_ADMIN: LazyLock<Limiter> = LazyLock::new(|| {
 let seconds = Duration::from_secs(CONFIG.admin_ratelimit_seconds());
 let burst = NonZeroU32::new(CONFIG.admin_ratelimit_max_burst()).expect("Non-zero admin ratelimit burst");
 RateLimiter::keyed(Quota::with_period(seconds).expect("Non-zero admin ratelimit seconds").allow_burst(burst))
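The once_cell-to-std migration seen throughout these hunks is mechanical; a minimal sketch, with SSO_ISSUER and the example URL as placeholder values rather than anything from the diff:

use std::sync::LazyLock;

// Equivalent of `static X: once_cell::sync::Lazy<T> = Lazy::new(...)` using only
// the standard library (stable since Rust 1.80): the closure runs on first access
// and the result is cached for every later use.
static SSO_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|sso", "https://example.com"));

fn main() {
    assert_eq!(SSO_ISSUER.as_str(), "https://example.com|sso");
}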
19
src/sso.rs
@@ -1,11 +1,10 @@
+use std::{sync::LazyLock, time::Duration};
+
 use chrono::Utc;
 use derive_more::{AsRef, Deref, Display, From};
-use regex::Regex;
-use std::time::Duration;
-use url::Url;
-
 use mini_moka::sync::Cache;
-use once_cell::sync::Lazy;
+use regex::Regex;
+use url::Url;
 
 use crate::{
 api::ApiResult,
@@ -21,12 +20,12 @@ use crate::{
 
 pub static FAKE_IDENTIFIER: &str = "VW_DUMMY_IDENTIFIER_FOR_OIDC";
 
-static AC_CACHE: Lazy<Cache<OIDCState, AuthenticatedUser>> =
-Lazy::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build());
+static AC_CACHE: LazyLock<Cache<OIDCState, AuthenticatedUser>> =
+LazyLock::new(|| Cache::builder().max_capacity(1000).time_to_live(Duration::from_secs(10 * 60)).build());
 
-static SSO_JWT_ISSUER: Lazy<String> = Lazy::new(|| format!("{}|sso", CONFIG.domain_origin()));
+static SSO_JWT_ISSUER: LazyLock<String> = LazyLock::new(|| format!("{}|sso", CONFIG.domain_origin()));
 
-pub static NONCE_EXPIRATION: Lazy<chrono::Duration> = Lazy::new(|| chrono::TimeDelta::try_minutes(10).unwrap());
+pub static NONCE_EXPIRATION: LazyLock<chrono::Duration> = LazyLock::new(|| chrono::TimeDelta::try_minutes(10).unwrap());
 
 #[derive(
 Clone,
@@ -151,7 +150,7 @@ fn decode_token_claims(token_name: &str, token: &str) -> ApiResult<BasicTokenCla
 }
 }
 
-pub fn decode_state(base64_state: String) -> ApiResult<OIDCState> {
+pub fn decode_state(base64_state: &str) -> ApiResult<OIDCState> {
 let state = match data_encoding::BASE64.decode(base64_state.as_bytes()) {
 Ok(vec) => match String::from_utf8(vec) {
 Ok(valid) => OIDCState(valid),
@@ -1,13 +1,9 @@
-use regex::Regex;
-use std::borrow::Cow;
-use std::time::Duration;
-use url::Url;
+use std::{borrow::Cow, sync::LazyLock, time::Duration};
 
 use mini_moka::sync::Cache;
-use once_cell::sync::Lazy;
-use openidconnect::core::*;
-use openidconnect::reqwest;
-use openidconnect::*;
+use openidconnect::{core::*, reqwest, *};
+use regex::Regex;
+use url::Url;
 
 use crate::{
 api::{ApiResult, EmptyResult},
@@ -16,8 +12,8 @@ use crate::{
 CONFIG,
 };
 
-static CLIENT_CACHE_KEY: Lazy<String> = Lazy::new(|| "sso-client".to_string());
-static CLIENT_CACHE: Lazy<Cache<String, Client>> = Lazy::new(|| {
+static CLIENT_CACHE_KEY: LazyLock<String> = LazyLock::new(|| "sso-client".to_string());
+static CLIENT_CACHE: LazyLock<Cache<String, Client>> = LazyLock::new(|| {
 Cache::builder().max_capacity(1).time_to_live(Duration::from_secs(CONFIG.sso_client_cache_expiration())).build()
 });
 
@@ -162,7 +158,7 @@ impl Client {
 if CONFIG.sso_pkce() {
 match nonce.verifier {
 None => err!(format!("Missing verifier in the DB nonce table")),
-Some(secret) => exchange = exchange.set_pkce_verifier(PkceCodeVerifier::new(secret.clone())),
+Some(secret) => exchange = exchange.set_pkce_verifier(PkceCodeVerifier::new(secret)),
 }
 }
 
@@ -842,7 +842,7 @@ pub fn is_global(ip: std::net::IpAddr) -> bool {
 
 /// Saves a Rocket temporary file to the OpenDAL Operator at the given path.
 pub async fn save_temp_file(
-path_type: PathType,
+path_type: &PathType,
 path: &str,
 temp_file: rocket::fs::TempFile<'_>,
 overwrite: bool,