diff --git a/.buildkite/linux/entrypoint b/.buildkite/linux/entrypoint
index a519cfc3d82..4a595b860fa 100755
--- a/.buildkite/linux/entrypoint
+++ b/.buildkite/linux/entrypoint
@@ -16,13 +16,14 @@
 if [ "$CLEAR_RUST" = "1" ]; then
     rm -rf $BUILD_ROOT/rust
 fi
+rm -f out/build.ninja
 ./ninja pylib qt check
 
 echo "--- Ensure libs importable"
 SKIP_RUN=1 ./run
 
 echo "--- Check Rust libs"
-cargo install cargo-deny --version 0.14.24
+cargo install cargo-deny@0.19.0
 cargo deny check
 
 echo "--- Cleanup"
diff --git a/.deny.toml b/.deny.toml
index 7cdf0cf9933..e370898133f 100644
--- a/.deny.toml
+++ b/.deny.toml
@@ -7,6 +7,17 @@ db-urls = ["https://github.com/rustsec/advisory-db"]
 ignore = [
   # burn depends on an unmaintained package 'paste'
   "RUSTSEC-2024-0436",
+  # bincode is unmaintained (via burn). Alternatives: postcard, bitcode, rkyv, wincode
+  "RUSTSEC-2025-0141",
+  # rustls-pemfile is unmaintained. Alternative: use rustls-pki-types directly (PemObject trait)
+  "RUSTSEC-2025-0134",
+  # unic-* crates are unmaintained (used for Unicode category detection).
+  # Alternative: icu_properties
+  "RUSTSEC-2025-0081", # unic-char-property
+  "RUSTSEC-2025-0075", # unic-char-range (or use native Rust char ranges since 1.45.0)
+  "RUSTSEC-2025-0080", # unic-common
+  "RUSTSEC-2025-0094", # unic-ucd-category
+  "RUSTSEC-2025-0098", # unic-ucd-version
 ]
 
 [licenses]
diff --git a/.idea.dist/repo.iml b/.idea.dist/repo.iml
new file mode 100644
index 00000000000..a9ec5ee1a6c
--- /dev/null
+++ b/.idea.dist/repo.iml
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/.version b/.version
index 834db75c674..381fa89eff4 100644
--- a/.version
+++ b/.version
@@ -1 +1 @@
-25.09
+25.09.2
diff --git a/.vscode.dist/tasks.json b/.vscode.dist/tasks.json
index 72eab960453..b89704d2e2c 100644
--- a/.vscode.dist/tasks.json
+++ b/.vscode.dist/tasks.json
@@ -12,8 +12,7 @@
             "command": "tools/ninja.bat",
             "args": [
                 "pylib",
-                "qt",
-                "extract:win_amd64_audio"
+                "qt"
             ]
         }
     }
diff --git a/.yarnrc.yml b/.yarnrc.yml
index 3186f3f0795..94f5c254e79 100644
--- a/.yarnrc.yml
+++ b/.yarnrc.yml
@@ -1 +1,2 @@
 nodeLinker: node-modules
+enableScripts: false
diff --git a/CONTRIBUTORS b/CONTRIBUTORS
index b03108e16c8..36a4daae210 100644
--- a/CONTRIBUTORS
+++ b/CONTRIBUTORS
@@ -49,6 +49,7 @@ Sander Santema
 Thomas Brownback
 Andrew Gaul
 kenden
+Emil Hamrin
 Nickolay Yudin
 neitrinoweb
 Andreas Reis
@@ -188,7 +189,7 @@ Christian Donat
 Asuka Minato
 Dillon Baldwin
 Voczi
-Ben Nguyen <105088397+bpnguyen107@users.noreply.github.com>
+Ben Nguyen <105088397+bpnguyen107@users.noreply.github.com>
 Themis Demetriades
 Luke Bartholomew
 Gregory Abrasaldo
@@ -242,6 +243,24 @@ Lee Doughty <32392044+leedoughty@users.noreply.github.com>
 memchr
 Max Romanowski
 Aldlss
+Hanna Nilsén
+Elias Johansson Lara
+Toby Penner
+Danilo Spillebeen
+Matbe766
+Amanda Sternberg
+arold0
+nav1s
+Ranjit Odedra
+Eltaurus
+jariji
+Francisco Esteva
+Emma Plante
+SelfishPig
+defkorean
+Michael Lappas
+Brett Schwartz
+Lovro Boban
 
 ********************
 
diff --git a/Cargo.lock b/Cargo.lock
index fe88eb3aba3..e9c74f6ea1f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -46,9 +46,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
 
 [[package]]
 name = "ammonia"
-version = "4.1.1"
+version = "4.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d6b346764dd0814805de8abf899fe03065bcee69bb1a4771c785817e39f3978f"
+checksum = "17e913097e1a2124b46746c980134e8c954bc17a6a59bb3fde96f088d126dde6"
 dependencies = [
  "cssparser",
  "html5ever 0.35.0",
@@ -3555,6 +3555,7 @@
dependencies = [ name = "launcher" version = "1.0.0" dependencies = [ + "anki_i18n", "anki_io", "anki_process", "anyhow", @@ -3563,6 +3564,7 @@ dependencies = [ "embed-resource", "libc", "libc-stdhandle", + "locale_config", "serde_json", "widestring", "windows 0.61.3", @@ -3702,6 +3704,19 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" +[[package]] +name = "locale_config" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d2c35b16f4483f6c26f0e4e9550717a2f6575bcd6f12a53ff0c490a94a6934" +dependencies = [ + "lazy_static", + "objc", + "objc-foundation", + "regex", + "winapi", +] + [[package]] name = "lock_api" version = "0.4.13" @@ -4380,6 +4395,26 @@ dependencies = [ "malloc_buf", ] +[[package]] +name = "objc-foundation" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1add1b659e36c9607c7aab864a76c7a4c2760cd0cd2e120f3fb8b952c7e22bf9" +dependencies = [ + "block", + "objc", + "objc_id", +] + +[[package]] +name = "objc_id" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c92d4ddb4bd7b50d730c215ff871754d0da6b2178849f8a2a2ab69712d0c073b" +dependencies = [ + "objc", +] + [[package]] name = "object" version = "0.36.7" diff --git a/Cargo.toml b/Cargo.toml index 2e9489cb89f..fe7f5acd54b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -51,7 +51,7 @@ ninja_gen = { "path" = "build/ninja_gen" } unicase = "=2.6.0" # any changes could invalidate sqlite indexes # normal -ammonia = "4.1.0" +ammonia = "4.1.2" anyhow = "1.0.98" async-compression = { version = "0.4.24", features = ["zstd", "tokio"] } async-stream = "0.3.6" @@ -92,6 +92,7 @@ itertools = "0.14.0" junction = "1.2.0" libc = "0.2" libc-stdhandle = "0.1" +locale_config = "0.3.0" maplit = "1.0.2" nom = "8.0.0" num-format = "0.4.4" diff --git a/build/runner/src/yarn.rs b/build/runner/src/yarn.rs index 9e1bd5b583f..7724ed04ab3 100644 --- a/build/runner/src/yarn.rs +++ b/build/runner/src/yarn.rs @@ -28,7 +28,11 @@ pub fn setup_yarn(args: YarnArgs) { .arg("--ignore-scripts"), ); } else { - run_command(Command::new(&args.yarn_bin).arg("install")); + run_command( + Command::new(&args.yarn_bin) + .arg("install") + .arg("--immutable"), + ); } std::fs::write(args.stamp, b"").unwrap(); diff --git a/cargo/licenses.json b/cargo/licenses.json index 274c060be70..53b832fdaed 100644 --- a/cargo/licenses.json +++ b/cargo/licenses.json @@ -2226,7 +2226,7 @@ { "authors": "Ibraheem Ahmed ", "description": "A high performance, zero-copy URL router.", - "license": "MIT AND BSD-3-Clause", + "license": "BSD-3-Clause AND MIT", "license_file": null, "name": "matchit", "repository": "https://github.com/ibraheemdev/matchit" @@ -4154,7 +4154,7 @@ { "authors": "David Tolnay ", "description": "Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31", - "license": "(MIT OR Apache-2.0) AND Unicode-3.0", + "license": "(Apache-2.0 OR MIT) AND Unicode-3.0", "license_file": null, "name": "unicode-ident", "repository": "https://github.com/dtolnay/unicode-ident" diff --git a/docs/docker/Dockerfile b/docs/docker/Dockerfile index 6682f70f682..381d27d1cd2 100644 --- a/docs/docker/Dockerfile +++ b/docs/docker/Dockerfile @@ -1,35 +1,78 @@ -# This Dockerfile uses three stages. -# 1. Compile anki (and dependencies) and build python wheels. -# 2. 
Create a virtual environment containing anki and its dependencies. -# 3. Create a final image that only includes anki's virtual environment and required -# system packages. +# This is a user-contributed Dockerfile. No official support is available. -ARG PYTHON_VERSION="3.9" ARG DEBIAN_FRONTEND="noninteractive" -# Build anki. -FROM python:$PYTHON_VERSION AS build -RUN curl -fsSL https://github.com/bazelbuild/bazelisk/releases/download/v1.7.4/bazelisk-linux-amd64 \ - > /usr/local/bin/bazel \ - && chmod +x /usr/local/bin/bazel \ - # Bazel expects /usr/bin/python - && ln -s /usr/local/bin/python /usr/bin/python +FROM ubuntu:24.04 AS build WORKDIR /opt/anki -COPY . . -# Build python wheels. +ENV PYTHON_VERSION="3.13" + + +# System deps +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl \ + git \ + build-essential \ + pkg-config \ + libssl-dev \ + libbz2-dev \ + libreadline-dev \ + libsqlite3-dev \ + libffi-dev \ + zlib1g-dev \ + liblzma-dev \ + ca-certificates \ + ninja-build \ + rsync \ + libglib2.0-0 \ + libgl1 \ + libx11-6 \ + libxext6 \ + libxrender1 \ + libxkbcommon0 \ + libxkbcommon-x11-0 \ + libxcb1 \ + libxcb-render0 \ + libxcb-shm0 \ + libxcb-icccm4 \ + libxcb-image0 \ + libxcb-keysyms1 \ + libxcb-randr0 \ + libxcb-shape0 \ + libxcb-xfixes0 \ + libxcb-xinerama0 \ + libxcb-xinput0 \ + libsm6 \ + libice6 \ + && rm -rf /var/lib/apt/lists/* + +# install rust with rustup +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +ENV PATH="/root/.cargo/bin:${PATH}" + +# Install uv and Python 3.13 with uv +RUN curl -LsSf https://astral.sh/uv/install.sh | sh \ + && ln -s /root/.local/bin/uv /usr/local/bin/uv +ENV PATH="/root/.local/bin:${PATH}" + +RUN uv python install ${PYTHON_VERSION} --default + +COPY . . + RUN ./tools/build + # Install pre-compiled Anki. -FROM python:${PYTHON_VERSION}-slim as installer +FROM python:3.13-slim AS installer WORKDIR /opt/anki/ -COPY --from=build /opt/anki/wheels/ wheels/ +COPY --from=build /opt/anki/out/wheels/ wheels/ # Use virtual environment. RUN python -m venv venv \ && ./venv/bin/python -m pip install --no-cache-dir setuptools wheel \ && ./venv/bin/python -m pip install --no-cache-dir /opt/anki/wheels/*.whl + # We use another build stage here so we don't include the wheels in the final image. -FROM python:${PYTHON_VERSION}-slim as final +FROM python:3.13-slim AS final COPY --from=installer /opt/anki/venv /opt/anki/venv ENV PATH=/opt/anki/venv/bin:$PATH # Install run-time dependencies. @@ -59,9 +102,9 @@ RUN apt-get update \ libxrender1 \ libxtst6 \ && rm -rf /var/lib/apt/lists/* + # Add non-root user. RUN useradd --create-home anki USER anki WORKDIR /work -ENTRYPOINT ["/opt/anki/venv/bin/anki"] -LABEL maintainer="Jakub Kaczmarzyk " +ENTRYPOINT ["/opt/anki/venv/bin/anki"] \ No newline at end of file diff --git a/docs/editing.md b/docs/editing.md index ba3fd6fce91..42a92c5a8fe 100644 --- a/docs/editing.md +++ b/docs/editing.md @@ -46,10 +46,14 @@ see and install a number of recommended extensions. ## PyCharm/IntelliJ -If you decide to use PyCharm instead of VS Code, there are somethings to be -aware of. +### Setting up Python environment -### Pylib References +To make PyCharm recognize `anki` and `aqt` imports, you need to add source paths to _Settings > Project Structure_. +You can copy the provided .idea.dist directory to set up the paths automatically: -You'll need to use File>Project Structure to tell IntelliJ that pylib/ is a -sources root, so it knows references to 'anki' in aqt are valid. 
+``` +mkdir .idea && cd .idea +ln -sf ../.idea.dist/* . +``` + +You also need to add a new Python interpreter under _Settings > Python > Interpreter_ pointing to the Python executable under `out/pyenv` (available after building Anki). diff --git a/ftl/core-repo b/ftl/core-repo index 6552c95a81d..ec5e4cad624 160000 --- a/ftl/core-repo +++ b/ftl/core-repo @@ -1 +1 @@ -Subproject commit 6552c95a81d162422b2a50126547cc7f1b50c2fd +Subproject commit ec5e4cad6242e538cacf52265243668f0de5da80 diff --git a/ftl/core/custom-study.ftl b/ftl/core/custom-study.ftl index ac2b652571c..8355e3f962c 100644 --- a/ftl/core/custom-study.ftl +++ b/ftl/core/custom-study.ftl @@ -1,33 +1,58 @@ -custom-study-must-rename-deck = Please rename the existing Custom Study deck first. -custom-study-all-cards-in-random-order-dont = All cards in random order (don't reschedule) -custom-study-all-review-cards-in-random-order = All review cards in random order -custom-study-cards = cards -custom-study-cards-from-the-deck = cards from the deck -custom-study-choose-tags = Choose Tags -custom-study-custom-study-session = Custom Study Session -custom-study-due-cards-only = Due cards only +### options related to the Custom Study window custom-study-increase-todays-new-card-limit = Increase today's new card limit +# increase limit by {amount} cards custom-study-increase-todays-new-card-limit-by = Increase today's new card limit by +# the last word in the sentence "increase today's [new/review] card limit by {amount} cards" +custom-study-cards = + { $count -> + [one] card + *[other] cards + } +custom-study-available-new-cards-2 = Available new cards: { $countString } custom-study-increase-todays-review-card-limit = Increase today's review card limit +# increase limit by {amount} cards custom-study-increase-todays-review-limit-by = Increase today's review limit by -custom-study-new-cards-only = New cards only -custom-study-no-cards-matched-the-criteria-you = No cards matched the criteria you provided. -custom-study-ok = OK -custom-study-preview-new-cards = Preview new cards -custom-study-preview-new-cards-added-in-the = Preview new cards added in the last -custom-study-require-one-or-more-of-these = Require one or more of these tags: +custom-study-available-review-cards-2 = Available review cards: { $countString } +custom-study-review-forgotten-cards = Review forgotten cards +custom-study-review-cards-forgotten-in-last = Review cards forgotten in the last +custom-study-days = + { $count -> + [one] day + *[other] days + } custom-study-review-ahead = Review ahead custom-study-review-ahead-by = Review ahead by -custom-study-review-cards-forgotten-in-last = Review cards forgotten in last -custom-study-review-forgotten-cards = Review forgotten cards -custom-study-select = Select -custom-study-select-tags-to-exclude = Select tags to exclude: -custom-study-selective-study = Selective Study +custom-study-preview-new-cards = Preview new cards +custom-study-preview-new-cards-added-in-the = Preview new cards added in the last + +## options for the "study by card state or tag" subsection custom-study-study-by-card-state-or-tag = Study by card state or tag -custom-study-available-new-cards-2 = Available new cards: { $countString } -custom-study-available-review-cards-2 = Available review cards: { $countString } +# verb, not noun. 
As in "Select {amount} cards from the deck" +custom-study-select = Select +# As in "select {amount} cards from the deck" +custom-study-cards-from-the-deck = + { $count -> + [one] card from the deck + *[other] cards from the deck + } +custom-study-new-cards-only = New cards only +custom-study-due-cards-only = Due cards only +custom-study-all-review-cards-in-random-order = All review cards in random order +custom-study-all-cards-in-random-order-dont = All cards in random order (don't reschedule) +custom-study-choose-tags = Choose Tags + +## +custom-study-ok = OK +custom-study-no-cards-matched-the-criteria-you = No cards matched the criteria you provided. +custom-study-must-rename-deck = Please rename the existing Custom Study deck first. +custom-study-custom-study-session = Custom Study Session custom-study-available-child-count = ({ $count } in subdecks) +## inside the Selective Study window, accessible by selecting "Study by card state or tag" and then clicking "Choose Tags" +custom-study-selective-study = Selective Study +custom-study-require-one-or-more-of-these = Require one or more of these tags: +custom-study-select-tags-to-exclude = Select tags to exclude: + ## NO NEED TO TRANSLATE. This text is no longer used by Anki, and will be removed in the future. custom-study-available-new-cards = Available new cards: { $count } diff --git a/ftl/core/deck-config.ftl b/ftl/core/deck-config.ftl index 1e193dc04eb..4930dbe0efd 100644 --- a/ftl/core/deck-config.ftl +++ b/ftl/core/deck-config.ftl @@ -382,7 +382,7 @@ deck-config-which-deck = Which deck would you like to display options for? ## Messages related to the FSRS scheduler deck-config-updating-cards = Updating cards: { $current_cards_count }/{ $total_cards_count }... -deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default parameters. +deck-config-invalid-parameters = The provided FSRS parameters are invalid. Leave them blank to use the default values. deck-config-not-enough-history = Insufficient review history to perform this operation. deck-config-must-have-400-reviews = { $count -> diff --git a/ftl/core/decks.ftl b/ftl/core/decks.ftl index 409ec7fbeac..01321df1066 100644 --- a/ftl/core/decks.ftl +++ b/ftl/core/decks.ftl @@ -1,33 +1,41 @@ +## In the options window of a filtered deck +decks-limit-to = Limit to +decks-cards-selected-by = cards selected by +decks-reschedule-cards-based-on-my-answers = Reschedule cards based on my answers in this deck +decks-enable-second-filter = Enable second filter +decks_create_even_if_empty = Create/update this deck even if empty +# e.g. 
"Delay for Again", "Delay for Hard", "Delay for Good" +decks-delay-for-button = Delay for { $button } +# The count of cards waiting to be reviewed +decks-zero-minutes-hint = (0 = return card to original deck) +# filter is a noun here +decks-filter = Filter: +decks-filter-2 = Filter 2 + +## column names on the main "Decks" window +decks-deck = Deck +decks-learn-header = Learn +decks-review-header = Due + +## +decks-unmovable-cards = Show any excluded cards decks-add-new-deck-ctrlandn = Add New Deck (Ctrl+N) decks-build = Build -decks-cards-selected-by = cards selected by decks-create-deck = Create Deck -decks_create_even_if_empty = Create/update this deck even if empty decks-custom-steps-in-minutes = Custom steps (in minutes) -decks-deck = Deck decks-delete-deck = Delete Deck -decks-enable-second-filter = Enable second filter -decks-filter = Filter: -decks-filter-2 = Filter 2 +# a button that links to AnkiWeb for browsing shared decks decks-get-shared = Get Shared +# import deck from file decks-import-file = Import File -decks-limit-to = Limit to decks-minutes = minutes decks-new-deck-name = New deck name: decks-no-deck = [no deck] decks-please-select-something = Please select something. decks-repeat-failed-cards-after = Delay Repeat failed cards after -# e.g. "Delay for Again", "Delay for Hard", "Delay for Good" -decks-delay-for-button = Delay for { $button } -decks-reschedule-cards-based-on-my-answers = Reschedule cards based on my answers in this deck decks-study = Study decks-study-deck = Study Deck decks-filtered-deck-search-empty = No cards matched the provided search. Some cards may have been excluded because they are in a different filtered deck, or suspended. -decks-unmovable-cards = Show any excluded cards -decks-learn-header = Learn -# The count of cards waiting to be reviewed -decks-review-header = Due -decks-zero-minutes-hint = (0 = return card to original deck) ## Sort order of cards @@ -48,8 +56,5 @@ decks-oldest-seen-first = Oldest seen first # Combobox entry: Sort the cards in random order decks-random = Random -## These strings are no longer used - you do not need to translate them if they -## are not already translated. - -# Combobox entry: Sort the cards by relative overdueness, in descending order (most overdue to least overdue) +## NO NEED TO TRANSLATE - these strings are no longer used decks-relative-overdueness = Relative overdueness diff --git a/ftl/core/exporting.ftl b/ftl/core/exporting.ftl index 422a580e4aa..4cea298420c 100644 --- a/ftl/core/exporting.ftl +++ b/ftl/core/exporting.ftl @@ -3,6 +3,7 @@ exporting-anki-20-deck = Anki 2.0 Deck exporting-anki-collection-package = Anki Collection Package exporting-anki-deck-package = Anki Deck Package exporting-cards-in-plain-text = Cards in Plain Text +# used in the filename during the export of a collection package exporting-collection = collection exporting-collection-exported = Collection exported. exporting-colpkg-too-new = Please update to the latest Anki version, then import the .colpkg/.apkg file again. diff --git a/ftl/core/launcher.ftl b/ftl/core/launcher.ftl new file mode 100644 index 00000000000..ee3aa632067 --- /dev/null +++ b/ftl/core/launcher.ftl @@ -0,0 +1,38 @@ +launcher-title = Anki Launcher +launcher-press-enter-to-install = Press the Enter/Return key on your keyboard to install or update Anki. +launcher-press-enter-to-start = Press enter to start Anki. +launcher-anki-will-start-shortly = Anki will start shortly. +launcher-you-can-close-this-window = You can close this window. 
+launcher-updating-anki = Updating Anki... +launcher-latest-anki = Install Latest Anki (default) +launcher-choose-a-version = Choose a version +launcher-sync-project-changes = Sync project changes +launcher-keep-existing-version = Keep existing version ({ $current }) +launcher-revert-to-previous = Revert to previous version ({ $prev }) +launcher-allow-betas = Allow betas: { $state } +launcher-on = on +launcher-off = off +launcher-cache-downloads = Cache downloads: { $state } +launcher-download-mirror = Download mirror: { $state } +launcher-uninstall = Uninstall Anki +launcher-invalid-input = Invalid input. Please try again. +launcher-latest-releases = Latest releases: { $releases } +launcher-enter-the-version-you-want = Enter the version you want to install: +launcher-versions-before-cant-be-installed = Versions before 2.1.50 can't be installed. +launcher-invalid-version = Invalid version. +launcher-unable-to-check-for-versions = Unable to check for Anki versions. Please check your internet connection. +launcher-checking-for-updates = Checking for updates... +launcher-uninstall-confirm = Uninstall Anki's program files? (y/n) +launcher-uninstall-cancelled = Uninstall cancelled. +launcher-program-files-removed = Program files removed. +launcher-remove-all-profiles-confirm = Remove all profiles/cards? (y/n) +launcher-user-data-removed = User data removed. +launcher-download-mirror-options = Download mirror options: +launcher-mirror-no-mirror = No mirror +launcher-mirror-china = China +launcher-mirror-disabled = Mirror disabled. +launcher-mirror-china-enabled = China mirror enabled. +launcher-beta-releases-enabled = Beta releases enabled. +launcher-beta-releases-disabled = Beta releases disabled. +launcher-download-caching-enabled = Download caching enabled. +launcher-download-caching-disabled = Download caching disabled and cache cleared. diff --git a/ftl/core/studying.ftl b/ftl/core/studying.ftl index ed3f8eb30e5..a317a68bae8 100644 --- a/ftl/core/studying.ftl +++ b/ftl/core/studying.ftl @@ -46,6 +46,20 @@ studying-type-answer-unknown-field = Type answer: unknown field { $val } studying-unbury = Unbury studying-what-would-you-like-to-unbury = What would you like to unbury? studying-you-havent-recorded-your-voice-yet = You haven't recorded your voice yet. +studying-card-studied-in-minute = + { $cards -> + [one] { $cards } card + *[other] { $cards } cards + } studied in + { $minutes -> + [one] { $minutes } minute. + *[other] { $minutes } minutes. + } +studying-question-time-elapsed = Question time elapsed +studying-answer-time-elapsed = Answer time elapsed + +## OBSOLETE; you do not need to translate this + studying-card-studied-in = { $count -> [one] { $count } card studied in @@ -56,5 +70,3 @@ studying-minute = [one] { $count } minute. *[other] { $count } minutes. 
} -studying-question-time-elapsed = Question time elapsed -studying-answer-time-elapsed = Answer time elapsed diff --git a/ftl/qt-repo b/ftl/qt-repo index dad4e2736a2..0b7c5302333 160000 --- a/ftl/qt-repo +++ b/ftl/qt-repo @@ -1 +1 @@ -Subproject commit dad4e2736a2b53dcdb52d79b5703dd464c05d666 +Subproject commit 0b7c530233390d73b706f012bbe7489539925c7d diff --git a/proto/anki/collection.proto b/proto/anki/collection.proto index de0ff08d63a..33041361334 100644 --- a/proto/anki/collection.proto +++ b/proto/anki/collection.proto @@ -20,6 +20,7 @@ service CollectionService { rpc LatestProgress(generic.Empty) returns (Progress); rpc SetWantsAbort(generic.Empty) returns (generic.Empty); rpc SetLoadBalancerEnabled(generic.Bool) returns (OpChanges); + rpc GetCustomColours(generic.Empty) returns (GetCustomColoursResponse); } // Implicitly includes any of the above methods that are not listed in the @@ -163,3 +164,7 @@ message CreateBackupRequest { bool force = 2; bool wait_for_completion = 3; } + +message GetCustomColoursResponse { + repeated string colours = 1; +} diff --git a/proto/anki/frontend.proto b/proto/anki/frontend.proto index 95b929c5ca5..1d733a369bb 100644 --- a/proto/anki/frontend.proto +++ b/proto/anki/frontend.proto @@ -27,6 +27,9 @@ service FrontendService { rpc deckOptionsRequireClose(generic.Empty) returns (generic.Empty); // Warns python that the deck option web view is ready to receive requests. rpc deckOptionsReady(generic.Empty) returns (generic.Empty); + + // Save colour picker's custom colour palette + rpc SaveCustomColours(generic.Empty) returns (generic.Empty); } service BackendFrontendService {} diff --git a/proto/anki/stats.proto b/proto/anki/stats.proto index a5639f8415b..1bd0fe630ea 100644 --- a/proto/anki/stats.proto +++ b/proto/anki/stats.proto @@ -37,6 +37,8 @@ message CardStatsResponse { uint32 ease = 5; float taken_secs = 6; optional cards.FsrsMemoryState memory_state = 7; + // seconds + uint32 last_interval = 8; } repeated StatsRevlogEntry revlog = 1; int64 card_id = 2; diff --git a/pylib/anki/lang.py b/pylib/anki/lang.py index b639b041622..1b0599a2f37 100644 --- a/pylib/anki/lang.py +++ b/pylib/anki/lang.py @@ -18,7 +18,7 @@ TR = anki._fluent.LegacyTranslationEnum FormatTimeSpan = _pb.FormatTimespanRequest - +# When adding new languages here, check lang_to_disk_lang() below langs = sorted( [ ("Afrikaans", "af_ZA"), @@ -38,6 +38,7 @@ ("Italiano", "it_IT"), ("lo jbobau", "jbo_EN"), ("Lenga d'òc", "oc_FR"), + ("Қазақша", "kk_KZ"), ("Magyar", "hu_HU"), ("Nederlands", "nl_NL"), ("Norsk", "nb_NO"), @@ -64,6 +65,7 @@ ("Українська мова", "uk_UA"), ("Հայերեն", "hy_AM"), ("עִבְרִית", "he_IL"), + ("ייִדיש", "yi"), ("العربية", "ar_SA"), ("فارسی", "fa_IR"), ("ภาษาไทย", "th_TH"), @@ -104,6 +106,7 @@ "it": "it_IT", "ja": "ja_JP", "jbo": "jbo_EN", + "kk": "kk_KZ", "ko": "ko_KR", "la": "la_LA", "mn": "mn_MN", @@ -126,6 +129,7 @@ "uk": "uk_UA", "uz": "uz_UZ", "vi": "vi_VN", + "yi": "yi", } @@ -233,7 +237,7 @@ def get_index_of_language(wanted_locale: str) -> int | None: def is_rtl(lang: str) -> bool: - return lang in ("he", "ar", "fa", "ug") + return lang in ("he", "ar", "fa", "ug", "yi") # strip off unicode isolation markers from a translated string diff --git a/pylib/tests/test_find.py b/pylib/tests/test_find.py index 2360965722b..72e7fdb8a93 100644 --- a/pylib/tests/test_find.py +++ b/pylib/tests/test_find.py @@ -32,6 +32,7 @@ def test_find_cards(): note = col.newNote() note["Front"] = "cat" note["Back"] = "sheep" + note.tags.append("conjunção größte") col.addNote(note) catCard 
= note.cards()[0] m = col.models.current() @@ -68,6 +69,8 @@ def test_find_cards(): col.tags.bulk_remove(col.db.list("select id from notes"), "foo") assert len(col.find_cards("tag:foo")) == 0 assert len(col.find_cards("tag:bar")) == 5 + assert len(col.find_cards("tag:conjuncao tag:groste")) == 0 + assert len(col.find_cards("tag:nc:conjuncao tag:nc:groste")) == 1 # text searches assert len(col.find_cards("cat")) == 2 assert len(col.find_cards("cat -dog")) == 1 diff --git a/qt/aqt/about.py b/qt/aqt/about.py index 03e989f2ced..95e03403742 100644 --- a/qt/aqt/about.py +++ b/qt/aqt/about.py @@ -226,6 +226,7 @@ def on_dialog_destroyed() -> None: "Anon_0000", "Bilolbek Normuminov", "Sagiv Marzini", + "Zhanibek Rassululy", ) ) diff --git a/qt/aqt/addcards.py b/qt/aqt/addcards.py index a27d8623430..01d7423d848 100644 --- a/qt/aqt/addcards.py +++ b/qt/aqt/addcards.py @@ -289,6 +289,10 @@ def add_current_note(self) -> None: def _add_current_note(self) -> None: note = self.editor.note + # Prevent adding a note that has already been added (e.g., from double-clicking) + if note.id != 0: + return + if not self._note_can_be_added(note): return diff --git a/qt/aqt/browser/browser.py b/qt/aqt/browser/browser.py index e222f62c2a4..d935905f6c0 100644 --- a/qt/aqt/browser/browser.py +++ b/qt/aqt/browser/browser.py @@ -521,7 +521,7 @@ def search_for(self, search: str, prompt: str | None = None) -> None: self.search() def current_search(self) -> str: - return self._line_edit().text() + return self._line_edit().text().replace("\n", " ") def search(self) -> None: """Search triggered programmatically. Caller must have saved note first.""" diff --git a/qt/aqt/browser/previewer.py b/qt/aqt/browser/previewer.py index 4c9a97fb8b1..61096b5b3f6 100644 --- a/qt/aqt/browser/previewer.py +++ b/qt/aqt/browser/previewer.py @@ -13,7 +13,7 @@ from anki.cards import Card from anki.collection import Config from anki.tags import MARKED_TAG -from aqt import AnkiQt, gui_hooks +from aqt import AnkiQt, gui_hooks, is_mac from aqt.qt import ( QCheckBox, QDialog, @@ -81,10 +81,15 @@ def _create_gui(self) -> None: qconnect(self.finished, self._on_finished) self.silentlyClose = True self.vbox = QVBoxLayout() + spacing = 6 self.vbox.setContentsMargins(0, 0, 0, 0) + self.vbox.setSpacing(spacing) self._web: AnkiWebView | None = AnkiWebView(kind=AnkiWebViewKind.PREVIEWER) self.vbox.addWidget(self._web) self.bbox = QDialogButtonBox() + self.bbox.setContentsMargins( + spacing, spacing if is_mac else 0, spacing, spacing + ) self.bbox.setLayoutDirection(Qt.LayoutDirection.LeftToRight) gui_hooks.card_review_webview_did_init(self._web, AnkiWebViewKind.PREVIEWER) diff --git a/qt/aqt/browser/sidebar/item.py b/qt/aqt/browser/sidebar/item.py index ce5ccb62f92..b51910d4b0d 100644 --- a/qt/aqt/browser/sidebar/item.py +++ b/qt/aqt/browser/sidebar/item.py @@ -80,7 +80,7 @@ def __init__( self.search_node = search_node self.on_expanded = on_expanded self.children: list[SidebarItem] = [] - self.tooltip: str | None = None + self.tooltip: str = name self._parent_item: SidebarItem | None = None self._expanded = expanded self._row_in_parent: int | None = None diff --git a/qt/aqt/customstudy.py b/qt/aqt/customstudy.py index ce4e68a306a..f22a071d1fd 100644 --- a/qt/aqt/customstudy.py +++ b/qt/aqt/customstudy.py @@ -35,14 +35,17 @@ class CustomStudy(QDialog): def fetch_data_and_show(mw: aqt.AnkiQt) -> None: def fetch_data( col: Collection, - ) -> tuple[DeckId, CustomStudyDefaults]: + ) -> tuple[DeckId, CustomStudyDefaults, Any]: deck_id = 
mw.col.decks.get_current_id() defaults = col.sched.custom_study_defaults(deck_id) - return (deck_id, defaults) + card_count = col.decks.card_count(deck_id, True) + return (deck_id, defaults, card_count) - def show_dialog(data: tuple[DeckId, CustomStudyDefaults]) -> None: - deck_id, defaults = data - CustomStudy(mw=mw, deck_id=deck_id, defaults=defaults) + def show_dialog(data: tuple[DeckId, CustomStudyDefaults, Any]) -> None: + deck_id, defaults, card_count = data + CustomStudy( + mw=mw, deck_id=deck_id, card_count=card_count, defaults=defaults + ) QueryOp( parent=mw, op=fetch_data, success=show_dialog @@ -52,12 +55,14 @@ def __init__( self, mw: aqt.AnkiQt, deck_id: DeckId, + card_count: Any, defaults: CustomStudyDefaults, ) -> None: "Don't call this directly; use CustomStudy.fetch_data_and_show()." QDialog.__init__(self, mw) self.mw = mw self.deck_id = deck_id + self.card_count = card_count self.defaults = defaults self.form = aqt.forms.customstudy.Ui_Dialog() self.form.setupUi(self) @@ -75,6 +80,7 @@ def setupSignals(self) -> None: qconnect(f.radioAhead.clicked, lambda: self.onRadioChange(RADIO_AHEAD)) qconnect(f.radioPreview.clicked, lambda: self.onRadioChange(RADIO_PREVIEW)) qconnect(f.radioCram.clicked, lambda: self.onRadioChange(RADIO_CRAM)) + qconnect(f.spin.valueChanged, self.setTextAfterSpinner) def count_with_children(self, parent: int, children: int) -> str: if children: @@ -83,13 +89,14 @@ def count_with_children(self, parent: int, children: int) -> str: return str(parent) def onRadioChange(self, idx: int) -> None: + self.radioIdx = idx form = self.form min_spinner_value = 1 max_spinner_value = DYN_MAX_SIZE current_spinner_value = 1 - text_after_spinner = tr.custom_study_cards() title_text = "" show_cram_type = False + enable_ok_button = self.card_count is not None and self.card_count > 0 ok = tr.custom_study_ok() if idx == RADIO_NEW: @@ -102,6 +109,7 @@ def onRadioChange(self, idx: int) -> None: text_before_spinner = tr.custom_study_increase_todays_new_card_limit_by() current_spinner_value = self.defaults.extend_new min_spinner_value = -DYN_MAX_SIZE + enable_ok_button = True elif idx == RADIO_REV: title_text = tr.custom_study_available_review_cards_2( count_string=self.count_with_children( @@ -112,20 +120,17 @@ def onRadioChange(self, idx: int) -> None: text_before_spinner = tr.custom_study_increase_todays_review_limit_by() current_spinner_value = self.defaults.extend_review min_spinner_value = -DYN_MAX_SIZE + enable_ok_button = True elif idx == RADIO_FORGOT: text_before_spinner = tr.custom_study_review_cards_forgotten_in_last() - text_after_spinner = tr.scheduling_days() max_spinner_value = 30 elif idx == RADIO_AHEAD: text_before_spinner = tr.custom_study_review_ahead_by() - text_after_spinner = tr.scheduling_days() elif idx == RADIO_PREVIEW: text_before_spinner = tr.custom_study_preview_new_cards_added_in_the() - text_after_spinner = tr.scheduling_days() current_spinner_value = 1 elif idx == RADIO_CRAM: text_before_spinner = tr.custom_study_select() - text_after_spinner = tr.custom_study_cards_from_the_deck() ok = tr.custom_study_choose_tags() current_spinner_value = 100 show_cram_type = True @@ -144,13 +149,31 @@ def onRadioChange(self, idx: int) -> None: form.spin.setEnabled(False) form.spin.setValue(current_spinner_value) form.preSpin.setText(text_before_spinner) - form.postSpin.setText(text_after_spinner) + self.setTextAfterSpinner(current_spinner_value) ok_button = form.buttonBox.button(QDialogButtonBox.StandardButton.Ok) assert ok_button is not None 
ok_button.setText(ok) + ok_button.setEnabled(enable_ok_button) - self.radioIdx = idx + def setTextAfterSpinner(self, newSpinValue) -> None: + form = self.form + text_after_spinner = "" + if self.radioIdx == RADIO_NEW: + text_after_spinner = tr.custom_study_cards(count=newSpinValue) + elif self.radioIdx == RADIO_REV: + text_after_spinner = tr.custom_study_cards(count=newSpinValue) + elif self.radioIdx == RADIO_FORGOT: + text_after_spinner = tr.custom_study_days(count=newSpinValue) + elif self.radioIdx == RADIO_AHEAD: + text_after_spinner = tr.custom_study_days(count=newSpinValue) + elif self.radioIdx == RADIO_PREVIEW: + text_after_spinner = tr.custom_study_days(count=newSpinValue) + elif self.radioIdx == RADIO_CRAM: + text_after_spinner = tr.custom_study_cards_from_the_deck(count=newSpinValue) + else: + assert 0 + form.postSpin.setText(text_after_spinner) def accept(self) -> None: request = CustomStudyRequest(deck_id=self.deck_id) diff --git a/qt/aqt/deckbrowser.py b/qt/aqt/deckbrowser.py index 5dc6881556f..ca754e7838d 100644 --- a/qt/aqt/deckbrowser.py +++ b/qt/aqt/deckbrowser.py @@ -234,7 +234,7 @@ def _render_deck_node(self, node: DeckTreeNode, ctx: RenderDeckNodeContext) -> s if node.collapsed: prefix = "+" else: - prefix = "-" + prefix = "−" def indent() -> str: return " " * 6 * (node.level - 1) diff --git a/qt/aqt/editor.py b/qt/aqt/editor.py index f2f26709790..e86f5fe96c0 100644 --- a/qt/aqt/editor.py +++ b/qt/aqt/editor.py @@ -151,6 +151,7 @@ def __init__( self.add_webview() self.setupWeb() self.setupShortcuts() + self.setupColourPalette() gui_hooks.editor_did_init(self) # Initial setup @@ -349,6 +350,14 @@ def setupShortcuts(self) -> None: keys, fn, _ = row QShortcut(QKeySequence(keys), self.widget, activated=fn) # type: ignore + def setupColourPalette(self) -> None: + if not (colors := self.mw.col.get_config("customColorPickerPalette")): + return + for i, colour in enumerate(colors[: QColorDialog.customCount()]): + if not QColor.isValidColorName(colour): + continue + QColorDialog.setCustomColor(i, QColor.fromString(colour)) + def _addFocusCheck(self, fn: Callable) -> Callable: def checkFocus() -> None: if self.currentField is None: diff --git a/qt/aqt/errors.py b/qt/aqt/errors.py index a6d9251e204..89e15246e1b 100644 --- a/qt/aqt/errors.py +++ b/qt/aqt/errors.py @@ -14,7 +14,7 @@ import aqt from anki.collection import HelpPage -from anki.errors import BackendError, Interrupted +from anki.errors import BackendError, CardTypeError, Interrupted from anki.utils import is_win from aqt.addons import AddonManager, AddonMeta from aqt.qt import * @@ -36,6 +36,14 @@ def show_exception(*, parent: QWidget, exception: Exception) -> None: global _mbox error_lines = [] help_page = HelpPage.TROUBLESHOOTING + + # default to PlainText + text_format = Qt.TextFormat.PlainText + + # set CardTypeError messages as rich text to allow HTML formatting + if isinstance(exception, CardTypeError): + text_format = Qt.TextFormat.RichText + if isinstance(exception, BackendError): if exception.context: error_lines.append(exception.context) @@ -51,7 +59,7 @@ def show_exception(*, parent: QWidget, exception: Exception) -> None: ) error_text = "\n".join(error_lines) print(error_lines) - _mbox = _init_message_box(str(exception), error_text, help_page) + _mbox = _init_message_box(str(exception), error_text, help_page, text_format) _mbox.show() @@ -171,7 +179,10 @@ def excepthook(etype, val, tb) -> None: # type: ignore def _init_message_box( - user_text: str, debug_text: str, help_page=HelpPage.TROUBLESHOOTING + 
user_text: str, + debug_text: str, + help_page=HelpPage.TROUBLESHOOTING, + text_format=Qt.TextFormat.PlainText, ): global _mbox @@ -179,7 +190,7 @@ def _init_message_box( _mbox.setWindowTitle("Anki") _mbox.setText(user_text) _mbox.setIcon(QMessageBox.Icon.Warning) - _mbox.setTextFormat(Qt.TextFormat.PlainText) + _mbox.setTextFormat(text_format) def show_help(): openHelp(help_page) diff --git a/qt/aqt/fields.py b/qt/aqt/fields.py index 16a3b27790c..f883b58b519 100644 --- a/qt/aqt/fields.py +++ b/qt/aqt/fields.py @@ -130,9 +130,7 @@ def onRowChange(self, idx: int) -> None: self.saveField() self.loadField(idx) - def _uniqueName( - self, prompt: str, ignoreOrd: int | None = None, old: str = "" - ) -> str | None: + def _uniqueName(self, prompt: str, old: str = "") -> str | None: txt = getOnlyText(prompt, default=old).replace('"', "").strip() if not txt: return None @@ -143,10 +141,10 @@ def _uniqueName( if letter in txt: show_warning(tr.fields_name_invalid_letter()) return None + if txt.casefold() == old.casefold(): + return None for f in self.model["flds"]: - if ignoreOrd is not None and f["ord"] == ignoreOrd: - continue - if f["name"] == txt: + if f["name"].casefold() == txt.casefold(): show_warning(tr.fields_that_field_name_is_already_used()) return None return txt @@ -157,7 +155,7 @@ def onRename(self) -> None: idx = self.currentIdx f = self.model["flds"][idx] - name = self._uniqueName(tr.actions_new_name(), self.currentIdx, f["name"]) + name = self._uniqueName(tr.actions_new_name(), f["name"]) if not name: return diff --git a/qt/aqt/forms/filtered_deck.ui b/qt/aqt/forms/filtered_deck.ui index 0a90c40e5f6..a64a3968a30 100644 --- a/qt/aqt/forms/filtered_deck.ui +++ b/qt/aqt/forms/filtered_deck.ui @@ -85,11 +85,11 @@ - - - 60 - 16777215 - + + + 0 + 0 + 1 @@ -168,11 +168,11 @@ - - - 60 - 16777215 - + + + 0 + 0 + 1 diff --git a/qt/aqt/forms/finddupes.ui b/qt/aqt/forms/finddupes.ui index 9a7c44c069b..9bc8be87b43 100644 --- a/qt/aqt/forms/finddupes.ui +++ b/qt/aqt/forms/finddupes.ui @@ -47,6 +47,9 @@ QComboBox::NoInsert + + QComboBox::SizeAdjustPolicy::AdjustToMinimumContentsLengthWithIcon + diff --git a/qt/aqt/mediasrv.py b/qt/aqt/mediasrv.py index 820e762d9b8..bedf23e5bc5 100644 --- a/qt/aqt/mediasrv.py +++ b/qt/aqt/mediasrv.py @@ -170,13 +170,42 @@ def favicon() -> Response: def _mime_for_path(path: str) -> str: "Mime type for provided path/filename." - if path.endswith(".css"): - # some users may have invalid mime type in the Windows registry - return "text/css" - elif path.endswith(".js") or path.endswith(".mjs"): - return "application/javascript" + + _, ext = os.path.splitext(path) + ext = ext.lower() + + # Badly-behaved apps on Windows can alter the standard mime types in the registry, which can completely + # break Anki's UI. So we hard-code the most common extensions. 
+ mime_types = { + ".css": "text/css", + ".js": "application/javascript", + ".mjs": "application/javascript", + ".html": "text/html", + ".htm": "text/html", + ".svg": "image/svg+xml", + ".png": "image/png", + ".jpg": "image/jpeg", + ".jpeg": "image/jpeg", + ".gif": "image/gif", + ".webp": "image/webp", + ".ico": "image/x-icon", + ".json": "application/json", + ".woff": "font/woff", + ".woff2": "font/woff2", + ".ttf": "font/ttf", + ".otf": "font/otf", + ".mp3": "audio/mpeg", + ".mp4": "video/mp4", + ".webm": "video/webm", + ".ogg": "audio/ogg", + ".pdf": "application/pdf", + ".txt": "text/plain", + } + + if mime := mime_types.get(ext): + return mime else: - # autodetect + # fallback to mimetypes, which may consult the registry mime, _encoding = mimetypes.guess_type(path) return mime or "application/octet-stream" @@ -599,6 +628,15 @@ def handle_on_main() -> None: return b"" +def save_custom_colours() -> bytes: + colors = [ + QColorDialog.customColor(i).name(QColor.NameFormat.HexRgb) + for i in range(QColorDialog.customCount()) + ] + aqt.mw.col.set_config("customColorPickerPalette", colors) + return b"" + + post_handler_list = [ congrats_info, get_deck_configs_for_update, @@ -614,12 +652,14 @@ def handle_on_main() -> None: search_in_browser, deck_options_require_close, deck_options_ready, + save_custom_colours, ] exposed_backend_list = [ # CollectionService "latest_progress", + "get_custom_colours", # DeckService "get_deck_names", # I18nService diff --git a/qt/aqt/mediasync.py b/qt/aqt/mediasync.py index 7cfb6d4a7d9..92ecbb0c9f3 100644 --- a/qt/aqt/mediasync.py +++ b/qt/aqt/mediasync.py @@ -93,6 +93,7 @@ def _handle_sync_error( elif is_periodic_sync: print(str(exc)) else: + self._update_progress(tr.sync_media_failed()) show_info(str(exc), modality=Qt.WindowModality.NonModal) def abort(self) -> None: diff --git a/qt/aqt/models.py b/qt/aqt/models.py index 5c7be6bdadb..6c33a3f0044 100644 --- a/qt/aqt/models.py +++ b/qt/aqt/models.py @@ -6,11 +6,10 @@ from collections.abc import Callable, Sequence from concurrent.futures import Future from operator import itemgetter -from typing import Any import aqt.clayout from anki import stdmodels -from anki.collection import Collection +from anki.collection import Collection, OpChangesWithId from anki.lang import without_unicode_isolation from anki.models import NotetypeDict, NotetypeId, NotetypeNameIdUseCount from anki.notes import Note @@ -74,12 +73,18 @@ def __init__( # Models ########################################################################## - def maybe_select_provided_notetype(self) -> None: - if not self.selected_notetype_id: - self.form.modelsList.setCurrentRow(0) + def maybe_select_provided_notetype( + self, selected_notetype_id: NotetypeId | None = None, row: int = 0 + ) -> None: + """Select the provided notetype ID, if any. 
+ Otherwise the one at `self.selected_notetype_id`, + otherwise the `row`-th element.""" + selected_notetype_id = selected_notetype_id or self.selected_notetype_id + if not selected_notetype_id: + self.form.modelsList.setCurrentRow(row) return for i, m in enumerate(self.models): - if m.id == self.selected_notetype_id: + if m.id == selected_notetype_id: self.form.modelsList.setCurrentRow(i) break @@ -117,24 +122,31 @@ def on_done(fut: Future) -> None: self.mw.taskman.with_progress(self.col.models.all_use_counts, on_done, self) maybeHideClose(box) - def refresh_list(self, *ignored_args: Any) -> None: + def refresh_list(self, selected_notetype_id: NotetypeId | None = None) -> None: QueryOp( parent=self, op=lambda col: col.models.all_use_counts(), - success=self.updateModelsList, + success=lambda notetypes: self.updateModelsList( + notetypes, selected_notetype_id + ), ).run_in_background() def onRename(self) -> None: nt = self.current_notetype() text, ok = getText(tr.actions_new_name(), default=nt["name"]) if ok and text.strip(): + selected_notetype_id = nt["id"] nt["name"] = text update_notetype_legacy(parent=self, notetype=nt).success( - self.refresh_list + lambda _: self.refresh_list(selected_notetype_id) ).run_in_background() - def updateModelsList(self, notetypes: Sequence[NotetypeNameIdUseCount]) -> None: + def updateModelsList( + self, + notetypes: Sequence[NotetypeNameIdUseCount], + selected_notetype_id: NotetypeId | None = None, + ) -> None: row = self.form.modelsList.currentRow() if row == -1: row = 0 @@ -145,7 +157,7 @@ def updateModelsList(self, notetypes: Sequence[NotetypeNameIdUseCount]) -> None: mUse = tr.browsing_note_count(count=m.use_count) item = QListWidgetItem(f"{m.name} [{mUse}]") self.form.modelsList.addItem(item) - self.form.modelsList.setCurrentRow(row) + self.maybe_select_provided_notetype(selected_notetype_id, row) def current_notetype(self) -> NotetypeDict: row = self.form.modelsList.currentRow() @@ -154,8 +166,9 @@ def current_notetype(self) -> NotetypeDict: def onAdd(self) -> None: def on_success(notetype: NotetypeDict) -> None: # if legacy add-ons already added the notetype, skip adding - if notetype["id"]: - self.refresh_list() + nid = notetype["id"] + if nid: + self.refresh_list(nid) return # prompt for name @@ -164,8 +177,11 @@ def on_success(notetype: NotetypeDict) -> None: return notetype["name"] = text + def refresh_list(op: OpChangesWithId) -> None: + self.refresh_list(NotetypeId(op.id)) + add_notetype_legacy(parent=self, notetype=notetype).success( - self.refresh_list + refresh_list ).run_in_background() AddModel(self.mw, on_success, self) @@ -188,7 +204,7 @@ def onDelete(self) -> None: nt = self.current_notetype() remove_notetype(parent=self, notetype_id=nt["id"]).success( - lambda _: self.refresh_list() + lambda _: self.refresh_list(None) ).run_in_background() def onAdvanced(self) -> None: @@ -212,7 +228,7 @@ def onAdvanced(self) -> None: nt["latexPre"] = str(frm.latexHeader.toPlainText()) nt["latexPost"] = str(frm.latexFooter.toPlainText()) update_notetype_legacy(parent=self, notetype=nt).success( - self.refresh_list + lambda _: self.refresh_list(nt["id"]) ).run_in_background() def _tmpNote(self) -> Note: diff --git a/qt/aqt/preferences.py b/qt/aqt/preferences.py index afce6d48902..939dd8c2c40 100644 --- a/qt/aqt/preferences.py +++ b/qt/aqt/preferences.py @@ -260,6 +260,7 @@ def on_success(): self.update_login_status() self.confirm_sync_after_login() + self.update_network() sync_login(self.mw, on_success) def sync_logout(self) -> None: diff --git 
a/qt/aqt/reviewer.py b/qt/aqt/reviewer.py index a8839c5982d..6d68f9e3ac7 100644 --- a/qt/aqt/reviewer.py +++ b/qt/aqt/reviewer.py @@ -17,6 +17,7 @@ import aqt.operations from anki.cards import Card, CardId from anki.collection import Config, OpChanges, OpChangesWithCount +from anki.lang import with_collapsed_whitespace from anki.scheduler.base import ScheduleCardsAsNew from anki.scheduler.v3 import ( CardAnswer, @@ -966,11 +967,15 @@ def check_timebox(self) -> bool: elapsed = self.mw.col.timeboxReached() if elapsed: assert not isinstance(elapsed, bool) - part1 = tr.studying_card_studied_in(count=elapsed[1]) - mins = int(round(elapsed[0] / 60)) - part2 = tr.studying_minute(count=mins) + cards_val = elapsed[1] + minutes_val = int(round(elapsed[0] / 60)) + message = with_collapsed_whitespace( + tr.studying_card_studied_in_minute( + cards=cards_val, minutes=str(minutes_val) + ) + ) fin = tr.studying_finish() - diag = askUserDialog(f"{part1} {part2}", [tr.studying_continue(), fin]) + diag = askUserDialog(message, [tr.studying_continue(), fin]) diag.setIcon(QMessageBox.Icon.Information) if diag.run() == fin: self.mw.moveToState("deckBrowser") diff --git a/qt/aqt/stylesheets.py b/qt/aqt/stylesheets.py index 6b4eff1f55d..6817b706310 100644 --- a/qt/aqt/stylesheets.py +++ b/qt/aqt/stylesheets.py @@ -180,7 +180,7 @@ def button(self, tm: ThemeManager) -> str: QPushButton {{ margin: 1px; }} - QPushButton:focus {{ + QPushButton:focus, QPushButton:default:hover {{ border: 2px solid {tm.var(colors.BORDER_FOCUS)}; outline: none; margin: 0px; @@ -199,9 +199,6 @@ def button(self, tm: ThemeManager) -> str: ) }; }} - QPushButton:default:hover {{ - border-width: 2px; - }} QPushButton:pressed, QPushButton:checked, QSpinBox::up-button:pressed, diff --git a/qt/aqt/sync.py b/qt/aqt/sync.py index 9b29ada209e..75bdeca898d 100644 --- a/qt/aqt/sync.py +++ b/qt/aqt/sync.py @@ -209,11 +209,20 @@ def on_full_sync_timer(mw: aqt.main.AnkiQt, label: str) -> None: return sync_progress = progress.full_sync + # If we've reached total, show the "checking" label if sync_progress.transferred == sync_progress.total: label = tr.sync_checking() + + total = sync_progress.total + transferred = sync_progress.transferred + + # Scale both to kilobytes with floor division + max_for_bar = total // 1024 + value_for_bar = transferred // 1024 + mw.progress.update( - value=sync_progress.transferred, - max=sync_progress.total, + value=value_for_bar, + max=max_for_bar, process=False, label=label, ) diff --git a/qt/aqt/tts.py b/qt/aqt/tts.py index d559fb41f71..f77e5c975d5 100644 --- a/qt/aqt/tts.py +++ b/qt/aqt/tts.py @@ -94,8 +94,15 @@ def voice_for_tag(self, tag: TTSTag) -> TTSVoiceMatch | None: rank -= 1 - # if no preferred voices match, we fall back on language - # with a rank of -100 + # if no requested voices match, use a preferred fallback voice + # (for example, Apple Samantha) with rank of -50 + for avail in avail_voices: + if avail.lang == tag.lang: + if avail.lang == "en_US" and avail.name.startswith("Apple_Samantha"): + return TTSVoiceMatch(voice=avail, rank=-50) + + # if no requested or preferred voices match, we fall back on + # the first available voice for the language, with a rank of -100 for avail in avail_voices: if avail.lang == tag.lang: return TTSVoiceMatch(voice=avail, rank=-100) diff --git a/qt/aqt/utils.py b/qt/aqt/utils.py index 43efc513f9f..ae88dadcbdd 100644 --- a/qt/aqt/utils.py +++ b/qt/aqt/utils.py @@ -809,7 +809,7 @@ def ensureWidgetInScreenBoundaries(widget: QWidget) -> None: wsize = widget.size() cappedWidth = 
min(geom.width(), wsize.width()) cappedHeight = min(geom.height(), wsize.height()) - if cappedWidth > wsize.width() or cappedHeight > wsize.height(): + if cappedWidth < wsize.width() or cappedHeight < wsize.height(): widget.resize(QSize(cappedWidth, cappedHeight)) # ensure widget is inside top left diff --git a/qt/aqt/webview.py b/qt/aqt/webview.py index 95d84c00ea6..8853558b09e 100644 --- a/qt/aqt/webview.py +++ b/qt/aqt/webview.py @@ -919,14 +919,6 @@ def cleanup(self) -> None: def on_theme_did_change(self) -> None: # avoid flashes if page reloaded self.page().setBackgroundColor(theme_manager.qcolor(colors.CANVAS)) - if hasattr(QWebEngineSettings.WebAttribute, "ForceDarkMode"): - force_dark_mode = getattr(QWebEngineSettings.WebAttribute, "ForceDarkMode") - page_settings = self.page().settings() - if page_settings is not None: - page_settings.setAttribute( - force_dark_mode, - theme_manager.get_night_mode(), - ) # update night-mode class, and legacy nightMode/night-mode body classes self.eval( f""" diff --git a/qt/launcher/Cargo.toml b/qt/launcher/Cargo.toml index 7de321a29f7..5fd1c990096 100644 --- a/qt/launcher/Cargo.toml +++ b/qt/launcher/Cargo.toml @@ -8,11 +8,13 @@ publish = false rust-version.workspace = true [dependencies] +anki_i18n.workspace = true anki_io.workspace = true anki_process.workspace = true anyhow.workspace = true camino.workspace = true dirs.workspace = true +locale_config.workspace = true serde_json.workspace = true [target.'cfg(all(unix, not(target_os = "macos")))'.dependencies] diff --git a/qt/launcher/build.rs b/qt/launcher/build.rs index 3ba75b0e13e..bc30f8dffc7 100644 --- a/qt/launcher/build.rs +++ b/qt/launcher/build.rs @@ -7,4 +7,7 @@ fn main() { .manifest_required() .unwrap(); } + println!("cargo:rerun-if-changed=../../out/buildhash"); + let buildhash = std::fs::read_to_string("../../out/buildhash").unwrap_or_default(); + println!("cargo:rustc-env=BUILDHASH={buildhash}"); } diff --git a/qt/launcher/src/main.rs b/qt/launcher/src/main.rs index 8996f98206d..dab9435ea68 100644 --- a/qt/launcher/src/main.rs +++ b/qt/launcher/src/main.rs @@ -10,6 +10,7 @@ use std::process::Command; use std::time::SystemTime; use std::time::UNIX_EPOCH; +use anki_i18n::I18n; use anki_io::copy_file; use anki_io::create_dir_all; use anki_io::modified_time; @@ -31,6 +32,7 @@ use crate::platform::respawn_launcher; mod platform; struct State { + tr: I18n, current_version: Option, prerelease_marker: std::path::PathBuf, uv_install_root: std::path::PathBuf, @@ -100,7 +102,14 @@ fn run() -> Result<()> { let (exe_dir, resources_dir) = get_exe_and_resources_dirs()?; + let locale = locale_config::Locale::user_default().to_string(); + let mut state = State { + tr: I18n::new(&[if !locale.is_empty() { + locale + } else { + "en".to_owned() + }]), current_version: None, prerelease_marker: uv_install_root.join("prerelease"), uv_install_root: uv_install_root.clone(), @@ -143,7 +152,9 @@ fn run() -> Result<()> { let sync_time = file_timestamp_secs(&state.sync_complete_marker); state.pyproject_modified_by_user = pyproject_time > sync_time; let pyproject_has_changed = state.pyproject_modified_by_user; - if !launcher_requested && !pyproject_has_changed { + let different_launcher = diff_launcher_was_installed(&state)?; + + if !launcher_requested && !pyproject_has_changed && !different_launcher { // If no launcher request and venv is already up to date, launch Anki normally let args: Vec = std::env::args().skip(1).collect(); let cmd = build_python_command(&state, &args)?; @@ -160,10 +171,12 @@ fn run() -> 
Result<()> { } print!("\x1B[2J\x1B[H"); // Clear screen and move cursor to top - println!("\x1B[1mAnki Launcher\x1B[0m\n"); + println!("\x1B[1m{}\x1B[0m\n", state.tr.launcher_title()); ensure_os_supported()?; + println!("{}\n", state.tr.launcher_press_enter_to_install()); + check_versions(&mut state); main_menu_loop(&state)?; @@ -178,15 +191,18 @@ fn run() -> Result<()> { } if cfg!(unix) && !cfg!(target_os = "macos") { - println!("\nPress enter to start Anki."); + println!("\n{}", state.tr.launcher_press_enter_to_start()); let mut input = String::new(); let _ = stdin().read_line(&mut input); } else { // on Windows/macOS, the user needs to close the terminal/console // currently, but ideas on how we can avoid this would be good! println!(); - println!("Anki will start shortly."); - println!("\x1B[1mYou can close this window.\x1B[0m\n"); + println!("{}", state.tr.launcher_anki_will_start_shortly()); + println!( + "\x1B[1m{}\x1B[0m\n", + state.tr.launcher_you_can_close_this_window() + ); } // respawn the launcher as a disconnected subprocess for normal startup @@ -258,7 +274,7 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re // Remove sync marker before attempting sync let _ = remove_file(&state.sync_complete_marker); - println!("Updating Anki...\n"); + println!("{}\n", state.tr.launcher_updating_anki()); let python_version_trimmed = if state.user_python_version_path.exists() { let python_version = read_file(&state.user_python_version_path)?; @@ -311,7 +327,6 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re } command - .env("UV_CACHE_DIR", &state.uv_cache_dir) .env("UV_PYTHON_INSTALL_DIR", &state.uv_python_install_dir) .env( "UV_HTTP_TIMEOUT", @@ -330,10 +345,6 @@ fn handle_version_install_or_update(state: &State, choice: MainMenuChoice) -> Re } } - if state.no_cache_marker.exists() { - command.env("UV_NO_CACHE", "1"); - } - match command.ensure_success() { Ok(_) => { // Sync succeeded @@ -378,10 +389,10 @@ fn main_menu_loop(state: &State) -> Result<()> { // Toggle beta prerelease file if state.prerelease_marker.exists() { let _ = remove_file(&state.prerelease_marker); - println!("Beta releases disabled."); + println!("{}", state.tr.launcher_beta_releases_disabled()); } else { write_file(&state.prerelease_marker, "")?; - println!("Beta releases enabled."); + println!("{}", state.tr.launcher_beta_releases_enabled()); } println!(); continue; @@ -390,14 +401,14 @@ fn main_menu_loop(state: &State) -> Result<()> { // Toggle cache disable file if state.no_cache_marker.exists() { let _ = remove_file(&state.no_cache_marker); - println!("Download caching enabled."); + println!("{}", state.tr.launcher_download_caching_enabled()); } else { write_file(&state.no_cache_marker, "")?; // Delete the cache directory and everything in it if state.uv_cache_dir.exists() { let _ = anki_io::remove_dir_all(&state.uv_cache_dir); } - println!("Download caching disabled and cache cleared."); + println!("{}", state.tr.launcher_download_caching_disabled()); } println!(); continue; @@ -440,44 +451,62 @@ fn file_timestamp_secs(path: &std::path::Path) -> i64 { fn get_main_menu_choice(state: &State) -> Result { loop { - println!("1) Latest Anki (press Enter)"); - println!("2) Choose a version"); + println!("1) {}", state.tr.launcher_latest_anki()); + println!("2) {}", state.tr.launcher_choose_a_version()); if let Some(current_version) = &state.current_version { let normalized_current = normalize_version(current_version); if state.pyproject_modified_by_user 
{ - println!("3) Sync project changes"); + println!("3) {}", state.tr.launcher_sync_project_changes()); } else { - println!("3) Keep existing version ({normalized_current})"); + println!( + "3) {}", + state.tr.launcher_keep_existing_version(normalized_current) + ); } } if let Some(prev_version) = &state.previous_version { if state.current_version.as_ref() != Some(prev_version) { let normalized_prev = normalize_version(prev_version); - println!("4) Revert to previous version ({normalized_prev})"); + println!( + "4) {}", + state.tr.launcher_revert_to_previous(normalized_prev) + ); } } println!(); let betas_enabled = state.prerelease_marker.exists(); println!( - "5) Allow betas: {}", - if betas_enabled { "on" } else { "off" } + "5) {}", + state.tr.launcher_allow_betas(if betas_enabled { + state.tr.launcher_on() + } else { + state.tr.launcher_off() + }) ); let cache_enabled = !state.no_cache_marker.exists(); println!( - "6) Cache downloads: {}", - if cache_enabled { "on" } else { "off" } + "6) {}", + state.tr.launcher_cache_downloads(if cache_enabled { + state.tr.launcher_on() + } else { + state.tr.launcher_off() + }) ); let mirror_enabled = is_mirror_enabled(state); println!( - "7) Download mirror: {}", - if mirror_enabled { "on" } else { "off" } + "7) {}", + state.tr.launcher_download_mirror(if mirror_enabled { + state.tr.launcher_on() + } else { + state.tr.launcher_off() + }) ); println!(); - println!("8) Uninstall"); + println!("8) {}", state.tr.launcher_uninstall()); print!("> "); let _ = stdout().flush(); @@ -499,7 +528,7 @@ fn get_main_menu_choice(state: &State) -> Result { if state.current_version.is_some() { MainMenuChoice::KeepExisting } else { - println!("Invalid input. Please try again.\n"); + println!("{}\n", state.tr.launcher_invalid_input()); continue; } } @@ -511,7 +540,7 @@ fn get_main_menu_choice(state: &State) -> Result { } } } - println!("Invalid input. Please try again.\n"); + println!("{}\n", state.tr.launcher_invalid_input()); continue; } "5" => MainMenuChoice::ToggleBetas, @@ -519,7 +548,7 @@ fn get_main_menu_choice(state: &State) -> Result { "7" => MainMenuChoice::DownloadMirror, "8" => MainMenuChoice::Uninstall, _ => { - println!("Invalid input. 
Please try again."); + println!("{}\n", state.tr.launcher_invalid_input()); continue; } }); @@ -534,9 +563,9 @@ fn get_version_kind(state: &State) -> Result> { .map(|v| v.as_str()) .collect::>() .join(", "); - println!("Latest releases: {releases_str}"); + println!("{}", state.tr.launcher_latest_releases(releases_str)); - println!("Enter the version you want to install:"); + println!("{}", state.tr.launcher_enter_the_version_you_want()); print!("> "); let _ = stdout().flush(); @@ -560,29 +589,38 @@ fn get_version_kind(state: &State) -> Result> { Ok(Some(version_kind)) } (None, true) => { - println!("Versions before 2.1.50 can't be installed."); + println!("{}", state.tr.launcher_versions_before_cant_be_installed()); Ok(None) } _ => { - println!("Invalid version.\n"); + println!("{}\n", state.tr.launcher_invalid_version()); Ok(None) } } } fn with_only_latest_patch(versions: &[String]) -> Vec { - // Only show the latest patch release for a given (major, minor) + // Assumes versions are sorted in descending order (newest first) + // Only show the latest patch release for a given (major, minor), + // and exclude pre-releases if a newer major_minor exists let mut seen_major_minor = std::collections::HashSet::new(); versions .iter() .filter(|v| { - let (major, minor, _, _) = parse_version_for_filtering(v); + let (major, minor, _, is_prerelease) = parse_version_for_filtering(v); if major == 2 { return true; } let major_minor = (major, minor); if seen_major_minor.contains(&major_minor) { false + } else if is_prerelease + && seen_major_minor + .iter() + .any(|&(seen_major, seen_minor)| (seen_major, seen_minor) > (major, minor)) + { + // Exclude pre-release if a newer major_minor exists + false } else { seen_major_minor.insert(major_minor); true @@ -700,7 +738,7 @@ fn fetch_versions(state: &State) -> Result> { let output = match cmd.utf8_output() { Ok(output) => output, Err(e) => { - print!("Unable to check for Anki versions. Please check your internet connection.\n\n"); + print!("{}\n\n", state.tr.launcher_unable_to_check_for_versions()); return Err(e.into()); } }; @@ -709,7 +747,7 @@ fn fetch_versions(state: &State) -> Result> { } fn get_releases(state: &State) -> Result { - println!("Checking for updates..."); + println!("{}", state.tr.launcher_checking_for_updates()); let include_prereleases = state.prerelease_marker.exists(); let all_versions = fetch_versions(state)?; let all_versions = filter_and_normalize_versions(all_versions, include_prereleases); @@ -911,7 +949,7 @@ fn get_anki_addons21_path() -> Result { } fn handle_uninstall(state: &State) -> Result { - println!("Uninstall Anki's program files? (y/n)"); + println!("{}", state.tr.launcher_uninstall_confirm()); print!("> "); let _ = stdout().flush(); @@ -920,7 +958,7 @@ fn handle_uninstall(state: &State) -> Result { let input = input.trim().to_lowercase(); if input != "y" { - println!("Uninstall cancelled."); + println!("{}", state.tr.launcher_uninstall_cancelled()); println!(); return Ok(false); } @@ -928,11 +966,11 @@ fn handle_uninstall(state: &State) -> Result { // Remove program files if state.uv_install_root.exists() { anki_io::remove_dir_all(&state.uv_install_root)?; - println!("Program files removed."); + println!("{}", state.tr.launcher_program_files_removed()); } println!(); - println!("Remove all profiles/cards? 
(y/n)"); + println!("{}", state.tr.launcher_remove_all_profiles_confirm()); print!("> "); let _ = stdout().flush(); @@ -942,7 +980,7 @@ fn handle_uninstall(state: &State) -> Result { if input == "y" && state.anki_base_folder.exists() { anki_io::remove_dir_all(&state.anki_base_folder)?; - println!("User data removed."); + println!("{}", state.tr.launcher_user_data_removed()); } println!(); @@ -981,6 +1019,15 @@ fn uv_command(state: &State) -> Result { .env("UV_DEFAULT_INDEX", &pypi_mirror); } + if state.no_cache_marker.exists() { + command.env("UV_NO_CACHE", "1"); + } else { + command.env("UV_CACHE_DIR", &state.uv_cache_dir); + } + + // have uv use the system certstore instead of webpki-roots' + command.env("UV_NATIVE_TLS", "1"); + Ok(command) } @@ -1036,9 +1083,9 @@ fn get_mirror_urls(state: &State) -> Result> { fn show_mirror_submenu(state: &State) -> Result<()> { loop { - println!("Download mirror options:"); - println!("1) No mirror"); - println!("2) China"); + println!("{}", state.tr.launcher_download_mirror_options()); + println!("1) {}", state.tr.launcher_mirror_no_mirror()); + println!("2) {}", state.tr.launcher_mirror_china()); print!("> "); let _ = stdout().flush(); @@ -1052,14 +1099,14 @@ fn show_mirror_submenu(state: &State) -> Result<()> { if state.mirror_path.exists() { let _ = remove_file(&state.mirror_path); } - println!("Mirror disabled."); + println!("{}", state.tr.launcher_mirror_disabled()); break; } "2" => { // Write China mirror URLs let china_mirrors = "https://registry.npmmirror.com/-/binary/python-build-standalone/\nhttps://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple/"; write_file(&state.mirror_path, china_mirrors)?; - println!("China mirror enabled."); + println!("{}", state.tr.launcher_mirror_china_enabled()); break; } "" => { @@ -1067,7 +1114,7 @@ fn show_mirror_submenu(state: &State) -> Result<()> { break; } _ => { - println!("Invalid input. 
Please try again."); + println!("{}", state.tr.launcher_invalid_input()); continue; } } @@ -1075,6 +1122,20 @@ fn show_mirror_submenu(state: &State) -> Result<()> { Ok(()) } +fn diff_launcher_was_installed(state: &State) -> Result { + let launcher_version = option_env!("BUILDHASH").unwrap_or("dev").trim(); + let launcher_version_path = state.uv_install_root.join("launcher-version"); + if let Ok(content) = read_file(&launcher_version_path) { + if let Ok(version_str) = String::from_utf8(content) { + if version_str.trim() == launcher_version { + return Ok(false); + } + } + } + write_file(launcher_version_path, launcher_version)?; + Ok(true) +} + #[cfg(test)] mod tests { use super::*; diff --git a/qt/launcher/src/platform/mod.rs b/qt/launcher/src/platform/mod.rs index 6a582f1aa55..eec7634f189 100644 --- a/qt/launcher/src/platform/mod.rs +++ b/qt/launcher/src/platform/mod.rs @@ -134,5 +134,8 @@ pub fn ensure_os_supported() -> Result<()> { #[cfg(all(unix, not(target_os = "macos")))] unix::ensure_glibc_supported()?; + #[cfg(target_os = "windows")] + windows::ensure_windows_version_supported()?; + Ok(()) } diff --git a/qt/launcher/src/platform/windows.rs b/qt/launcher/src/platform/windows.rs index ebdff626147..d20c9a8b46b 100644 --- a/qt/launcher/src/platform/windows.rs +++ b/qt/launcher/src/platform/windows.rs @@ -38,6 +38,26 @@ fn is_windows_10() -> bool { } } +/// Ensures Windows 10 version 1809 or later +pub fn ensure_windows_version_supported() -> Result<()> { + unsafe { + let mut info = OSVERSIONINFOW { + dwOSVersionInfoSize: std::mem::size_of::() as u32, + ..Default::default() + }; + + if RtlGetVersion(&mut info).is_err() { + anyhow::bail!("Failed to get Windows version information"); + } + + if info.dwBuildNumber >= 17763 { + return Ok(()); + } + + anyhow::bail!("Windows 10 version 1809 or later is required.") + } +} + pub fn ensure_terminal_shown() -> Result<()> { unsafe { if !GetConsoleWindow().is_invalid() { diff --git a/rslib/i18n/build.rs b/rslib/i18n/build.rs index 4baa6a709d0..75bc387872f 100644 --- a/rslib/i18n/build.rs +++ b/rslib/i18n/build.rs @@ -8,7 +8,6 @@ mod python; mod typescript; mod write_strings; -use std::env; use std::path::PathBuf; use anki_io::create_dir_all; @@ -23,17 +22,16 @@ use write_strings::write_strings; fn main() -> Result<()> { // generate our own requirements - let map = get_ftl_data(); + let mut map = get_ftl_data(); check(&map); - let modules = get_modules(&map); - write_strings(&map, &modules); + let mut modules = get_modules(&map); + write_strings(&map, &modules, "strings.rs", "All"); typescript::write_ts_interface(&modules)?; python::write_py_interface(&modules)?; // write strings.json file to requested path - println!("cargo:rerun-if-env-changed=STRINGS_JSON"); - if let Ok(path) = env::var("STRINGS_JSON") { + if let Some(path) = option_env!("STRINGS_JSON") { if !path.is_empty() { let path = PathBuf::from(path); let meta_json = serde_json::to_string_pretty(&modules).unwrap(); @@ -41,5 +39,12 @@ fn main() -> Result<()> { write_file_if_changed(path, meta_json)?; } } + + // generate strings for the launcher + map.iter_mut() + .for_each(|(_, modules)| modules.retain(|module, _| module == "launcher")); + modules.retain(|module| module.name == "launcher"); + write_strings(&map, &modules, "strings_launcher.rs", "Launcher"); + Ok(()) } diff --git a/rslib/i18n/python.rs b/rslib/i18n/python.rs index ca780c0410a..a564de48d95 100644 --- a/rslib/i18n/python.rs +++ b/rslib/i18n/python.rs @@ -1,7 +1,6 @@ // Copyright: Ankitects Pty Ltd and contributors // License: 
GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use std::env; use std::fmt::Write; use std::path::PathBuf; @@ -21,7 +20,7 @@ pub fn write_py_interface(modules: &[Module]) -> Result<()> { render_methods(modules, &mut out); render_legacy_enum(modules, &mut out); - if let Ok(path) = env::var("STRINGS_PY") { + if let Some(path) = option_env!("STRINGS_PY") { let path = PathBuf::from(path); create_dir_all(path.parent().unwrap())?; write_file_if_changed(path, out)?; diff --git a/rslib/i18n/src/generated.rs b/rslib/i18n/src/generated.rs index f3fa71ce8d5..7463a594e0f 100644 --- a/rslib/i18n/src/generated.rs +++ b/rslib/i18n/src/generated.rs @@ -1,8 +1,15 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -// Include auto-generated content - #![allow(clippy::all)] +#[derive(Clone)] +pub struct All; + +// Include auto-generated content include!(concat!(env!("OUT_DIR"), "/strings.rs")); + +impl Translations for All { + const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>> = &_STRINGS; + const KEYS_BY_MODULE: &[&[&str]] = &_KEYS_BY_MODULE; +} diff --git a/rslib/i18n/src/generated_launcher.rs b/rslib/i18n/src/generated_launcher.rs new file mode 100644 index 00000000000..35dc3f28bca --- /dev/null +++ b/rslib/i18n/src/generated_launcher.rs @@ -0,0 +1,15 @@ +// Copyright: Ankitects Pty Ltd and contributors +// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +#![allow(clippy::all)] + +#[derive(Clone)] +pub struct Launcher; + +// Include auto-generated content +include!(concat!(env!("OUT_DIR"), "/strings_launcher.rs")); + +impl Translations for Launcher { + const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>> = &_STRINGS; + const KEYS_BY_MODULE: &[&[&str]] = &_KEYS_BY_MODULE; +} diff --git a/rslib/i18n/src/lib.rs b/rslib/i18n/src/lib.rs index bfd6f5ba28e..55ff94e44ce 100644 --- a/rslib/i18n/src/lib.rs +++ b/rslib/i18n/src/lib.rs @@ -2,8 +2,10 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html mod generated; +mod generated_launcher; use std::borrow::Cow; +use std::marker::PhantomData; use std::sync::Arc; use std::sync::Mutex; @@ -12,8 +14,6 @@ use fluent::FluentArgs; use fluent::FluentResource; use fluent::FluentValue; use fluent_bundle::bundle::FluentBundle as FluentBundleOrig; -use generated::KEYS_BY_MODULE; -use generated::STRINGS; use num_format::Locale; use serde::Serialize; use unic_langid::LanguageIdentifier; @@ -22,6 +22,9 @@ type FluentBundle = FluentBundleOrig { fn round(self) -> Self; } @@ -187,20 +190,66 @@ fn get_bundle_with_extra( get_bundle(text, extra_text, &locales) } +pub trait Translations { + const STRINGS: &phf::Map<&str, &phf::Map<&str, &str>>; + const KEYS_BY_MODULE: &[&[&str]]; +} + #[derive(Clone)] -pub struct I18n { +pub struct I18n { inner: Arc>, + _translations_type: std::marker::PhantomData
<P>, } -fn get_key(module_idx: usize, translation_idx: usize) -> &'static str { - KEYS_BY_MODULE - .get(module_idx) - .and_then(|translations| translations.get(translation_idx)) - .cloned() - .unwrap_or("invalid-module-or-translation-index") -} +impl<P: Translations> I18n<P>
{ + fn get_key(module_idx: usize, translation_idx: usize) -> &'static str { + P::KEYS_BY_MODULE + .get(module_idx) + .and_then(|translations| translations.get(translation_idx)) + .cloned() + .unwrap_or("invalid-module-or-translation-index") + } + + fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec { + langs + .iter() + .map(|lang| { + let mut buf = String::new(); + let lang_name = remapped_lang_name(lang); + if let Some(strings) = P::STRINGS.get(lang_name) { + if desired_modules.is_empty() { + // empty list, provide all modules + for value in strings.values() { + buf.push_str(value) + } + } else { + for module_name in desired_modules { + if let Some(text) = strings.get(module_name.as_str()) { + buf.push_str(text); + } + } + } + } + buf + }) + .collect() + } + + /// This temporarily behaves like the older code; in the future we could + /// either access each &str separately, or load them on demand. + fn ftl_localized_text(lang: &LanguageIdentifier) -> Option { + let lang = remapped_lang_name(lang); + if let Some(module) = P::STRINGS.get(lang) { + let mut text = String::new(); + for module_text in module.values() { + text.push_str(module_text) + } + Some(text) + } else { + None + } + } -impl I18n { pub fn template_only() -> Self { Self::new::<&str>(&[]) } @@ -225,7 +274,7 @@ impl I18n { let mut output_langs = vec![]; for lang in input_langs { // if the language is bundled in the binary - if let Some(text) = ftl_localized_text(&lang).or_else(|| { + if let Some(text) = Self::ftl_localized_text(&lang).or_else(|| { // when testing, allow missing translations if cfg!(test) { Some(String::new()) @@ -244,7 +293,7 @@ impl I18n { // add English templates let template_lang = "en-US".parse().unwrap(); - let template_text = ftl_localized_text(&template_lang).unwrap(); + let template_text = Self::ftl_localized_text(&template_lang).unwrap(); let template_bundle = get_bundle_with_extra(&template_text, None).unwrap(); bundles.push(template_bundle); output_langs.push(template_lang); @@ -261,6 +310,7 @@ impl I18n { bundles, langs: output_langs, })), + _translations_type: PhantomData, } } @@ -270,7 +320,7 @@ impl I18n { message_index: usize, args: FluentArgs, ) -> String { - let key = get_key(module_index, message_index); + let key = Self::get_key(module_index, message_index); self.translate(key, Some(args)).into() } @@ -305,7 +355,7 @@ impl I18n { /// implementation. pub fn resources_for_js(&self, desired_modules: &[String]) -> ResourcesForJavascript { let inner = self.inner.lock().unwrap(); - let resources = get_modules(&inner.langs, desired_modules); + let resources = Self::get_modules(&inner.langs, desired_modules); ResourcesForJavascript { langs: inner.langs.iter().map(ToString::to_string).collect(), resources, @@ -313,47 +363,6 @@ impl I18n { } } -fn get_modules(langs: &[LanguageIdentifier], desired_modules: &[String]) -> Vec { - langs - .iter() - .cloned() - .map(|lang| { - let mut buf = String::new(); - let lang_name = remapped_lang_name(&lang); - if let Some(strings) = STRINGS.get(lang_name) { - if desired_modules.is_empty() { - // empty list, provide all modules - for value in strings.values() { - buf.push_str(value) - } - } else { - for module_name in desired_modules { - if let Some(text) = strings.get(module_name.as_str()) { - buf.push_str(text); - } - } - } - } - buf - }) - .collect() -} - -/// This temporarily behaves like the older code; in the future we could either -/// access each &str separately, or load them on demand. 
-fn ftl_localized_text(lang: &LanguageIdentifier) -> Option { - let lang = remapped_lang_name(lang); - if let Some(module) = STRINGS.get(lang) { - let mut text = String::new(); - for module_text in module.values() { - text.push_str(module_text) - } - Some(text) - } else { - None - } -} - struct I18nInner { // bundles in preferred language order, with template English as the // last element @@ -490,7 +499,7 @@ mod test { #[test] fn i18n() { // English template - let tr = I18n::new(&["zz"]); + let tr = I18n::::new(&["zz"]); assert_eq!(tr.translate("valid-key", None), "a valid key"); assert_eq!(tr.translate("invalid-key", None), "invalid-key"); @@ -513,7 +522,7 @@ mod test { ); // Another language - let tr = I18n::new(&["ja_JP"]); + let tr = I18n::::new(&["ja_JP"]); assert_eq!(tr.translate("valid-key", None), "キー"); assert_eq!(tr.translate("only-in-english", None), "not translated"); assert_eq!(tr.translate("invalid-key", None), "invalid-key"); @@ -524,7 +533,7 @@ mod test { ); // Decimal separator - let tr = I18n::new(&["pl-PL"]); + let tr = I18n::::new(&["pl-PL"]); // Polish will use a comma if the string is translated assert_eq!( tr.translate("one-arg-key", Some(tr_args!["one"=>2.07])), diff --git a/rslib/i18n/typescript.rs b/rslib/i18n/typescript.rs index ce30048e24e..0f483cb586a 100644 --- a/rslib/i18n/typescript.rs +++ b/rslib/i18n/typescript.rs @@ -1,7 +1,6 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use std::env; use std::fmt::Write; use std::path::PathBuf; @@ -22,7 +21,7 @@ pub fn write_ts_interface(modules: &[Module]) -> Result<()> { render_module_map(modules, &mut ts_out); render_methods(modules, &mut ts_out); - if let Ok(path) = env::var("STRINGS_TS") { + if let Some(path) = option_env!("STRINGS_TS") { let path = PathBuf::from(path); create_dir_all(path.parent().unwrap())?; write_file_if_changed(path, ts_out)?; diff --git a/rslib/i18n/write_strings.rs b/rslib/i18n/write_strings.rs index 33905d98fd8..db31be2b78d 100644 --- a/rslib/i18n/write_strings.rs +++ b/rslib/i18n/write_strings.rs @@ -15,7 +15,7 @@ use crate::extract::VariableKind; use crate::gather::TranslationsByFile; use crate::gather::TranslationsByLang; -pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) { +pub fn write_strings(map: &TranslationsByLang, modules: &[Module], out_fn: &str, tag: &str) { let mut buf = String::new(); // lang->module map @@ -25,23 +25,25 @@ pub fn write_strings(map: &TranslationsByLang, modules: &[Module]) { // ordered list of translations by module write_translation_key_index(modules, &mut buf); // methods to generate messages - write_methods(modules, &mut buf); + write_methods(modules, &mut buf, tag); let dir = PathBuf::from(std::env::var("OUT_DIR").unwrap()); - let path = dir.join("strings.rs"); + let path = dir.join(out_fn); fs::write(path, buf).unwrap(); } -fn write_methods(modules: &[Module], buf: &mut String) { +fn write_methods(modules: &[Module], buf: &mut String, tag: &str) { buf.push_str( r#" -use crate::{I18n,Number}; +#[allow(unused_imports)] +use crate::{I18n,Number,Translations}; +#[allow(unused_imports)] use fluent::{FluentValue, FluentArgs}; use std::borrow::Cow; -impl I18n { "#, ); + writeln!(buf, "impl I18n<{tag}> {{").unwrap(); for module in modules { for translation in &module.translations { let func = translation.key.to_snake_case(); @@ -142,7 +144,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) { writeln!( buf, - "pub(crate) const 
KEYS_BY_MODULE: [&[&str]; {count}] = [", + "pub(crate) const _KEYS_BY_MODULE: [&[&str]; {count}] = [", count = modules.len(), ) .unwrap(); @@ -162,7 +164,7 @@ fn write_translation_key_index(modules: &[Module], buf: &mut String) { fn write_lang_map(map: &TranslationsByLang, buf: &mut String) { buf.push_str( " -pub(crate) const STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! { +pub(crate) const _STRINGS: phf::Map<&str, &phf::Map<&str, &str>> = phf::phf_map! { ", ); diff --git a/rslib/io/src/lib.rs b/rslib/io/src/lib.rs index cb44467e613..0fd85e49008 100644 --- a/rslib/io/src/lib.rs +++ b/rslib/io/src/lib.rs @@ -335,6 +335,15 @@ pub fn write_file_if_changed(path: impl AsRef, contents: impl AsRef<[u8]>) .map(|existing| existing != contents) .unwrap_or(true) }; + + match std::env::var("CARGO_PKG_NAME") { + Ok(pkg) if pkg == "anki_proto" || pkg == "anki_i18n" => { + // at comptime for the proto/i18n crates, register implicit output as input + println!("cargo:rerun-if-changed={}", path.to_str().unwrap()); + } + _ => {} + } + if changed { write_file(path, contents)?; Ok(true) diff --git a/rslib/proto/python.rs b/rslib/proto/python.rs index a5adb417956..5c245de1d0d 100644 --- a/rslib/proto/python.rs +++ b/rslib/proto/python.rs @@ -22,7 +22,7 @@ pub(crate) fn write_python_interface(services: &[BackendService]) -> Result<()> write_header(&mut out)?; for service in services { - if service.name == "BackendAnkidroidService" { + if ["BackendAnkidroidService", "BackendFrontendService"].contains(&service.name.as_str()) { continue; } for method in service.all_methods() { diff --git a/rslib/src/cloze.rs b/rslib/src/cloze.rs index 027c14c0c04..70a5d1703a8 100644 --- a/rslib/src/cloze.rs +++ b/rslib/src/cloze.rs @@ -10,6 +10,7 @@ use std::sync::LazyLock; use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion; use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape; use htmlescape::encode_attribute; +use itertools::Itertools; use nom::branch::alt; use nom::bytes::complete::tag; use nom::bytes::complete::take_while; @@ -26,7 +27,7 @@ use crate::template::RenderContext; use crate::text::strip_html_preserving_entities; static CLOZE: LazyLock = - LazyLock::new(|| Regex::new(r"(?s)\{\{c\d+::(.*?)(::.*?)?\}\}").unwrap()); + LazyLock::new(|| Regex::new(r"(?s)\{\{c[\d,]+::(.*?)(::.*?)?\}\}").unwrap()); static MATHJAX: LazyLock = LazyLock::new(|| { Regex::new( @@ -48,7 +49,7 @@ mod mathjax_caps { #[derive(Debug)] enum Token<'a> { // The parameter is the cloze number as is appears in the field content. 
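Editor's note: the hunks below switch cloze tokens from a single ordinal to a comma-separated list that is de-duplicated and sorted. As a rough standalone sketch of that parsing step (illustration only: a BTreeSet stands in for the HashSet + itertools::sorted combination used in the patch, and the helper name parse_ordinals is hypothetical):

use std::collections::BTreeSet;

// Hypothetical helper mirroring the parsing inside open_cloze(): split "1,2,2,3"
// into a sorted, de-duplicated list of ordinals; an empty result marks the token invalid.
fn parse_ordinals(digits: &str) -> Option<Vec<u16>> {
    let ordinals: Vec<u16> = digits
        .split(',')
        .filter_map(|s| s.parse().ok())
        .collect::<BTreeSet<u16>>() // de-duplicates and sorts in one pass
        .into_iter()
        .collect();
    if ordinals.is_empty() {
        None
    } else {
        Some(ordinals)
    }
}

fn main() {
    assert_eq!(parse_ordinals("1,2,2,3"), Some(vec![1, 2, 3]));
    assert_eq!(parse_ordinals("1,,3"), Some(vec![1, 3]));
    assert_eq!(parse_ordinals(",,"), None);
}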
- OpenCloze(u16), + OpenCloze(Vec), Text(&'a str), CloseCloze, } @@ -58,21 +59,24 @@ fn tokenize(mut text: &str) -> impl Iterator> { fn open_cloze(text: &str) -> IResult<&str, Token<'_>> { // opening brackets and 'c' let (text, _opening_brackets_and_c) = tag("{{c")(text)?; - // following number - let (text, digits) = take_while(|c: char| c.is_ascii_digit())(text)?; - let digits: u16 = match digits.parse() { - Ok(digits) => digits, - Err(_) => { - // not a valid number; fail to recognize - return Err(nom::Err::Error(nom::error::make_error( - text, - nom::error::ErrorKind::Digit, - ))); - } - }; + // following comma-seperated numbers + let (text, ordinals) = take_while(|c: char| c.is_ascii_digit() || c == ',')(text)?; + let ordinals: Vec = ordinals + .split(',') + .filter_map(|s| s.parse().ok()) + .collect::>() // deduplicate + .into_iter() + .sorted() // set conversion can de-order + .collect(); + if ordinals.is_empty() { + return Err(nom::Err::Error(nom::error::make_error( + text, + nom::error::ErrorKind::Digit, + ))); + } // :: let (text, _colons) = tag("::")(text)?; - Ok((text, Token::OpenCloze(digits))) + Ok((text, Token::OpenCloze(ordinals))) } fn close_cloze(text: &str) -> IResult<&str, Token<'_>> { @@ -121,11 +125,20 @@ enum TextOrCloze<'a> { #[derive(Debug)] struct ExtractedCloze<'a> { // `ordinal` is the cloze number as is appears in the field content. - ordinal: u16, + ordinals: Vec, nodes: Vec>, hint: Option<&'a str>, } +/// Generate a string representation of the ordinals for HTML +fn ordinals_str(ordinals: &[u16]) -> String { + ordinals + .iter() + .map(|o| o.to_string()) + .collect::>() + .join(",") +} + impl ExtractedCloze<'_> { /// Return the cloze's hint, or "..." if none was provided. fn hint(&self) -> &str { @@ -151,6 +164,11 @@ impl ExtractedCloze<'_> { buf.into() } + /// Checks if this cloze is active for a given ordinal + fn contains_ordinal(&self, ordinal: u16) -> bool { + self.ordinals.contains(&ordinal) + } + /// If cloze starts with image-occlusion:, return the text following that. fn image_occlusion(&self) -> Option<&str> { let TextOrCloze::Text(text) = self.nodes.first()? 
else { @@ -165,10 +183,10 @@ fn parse_text_with_clozes(text: &str) -> Vec> { let mut output = vec![]; for token in tokenize(text) { match token { - Token::OpenCloze(ordinal) => { + Token::OpenCloze(ordinals) => { if open_clozes.len() < 10 { open_clozes.push(ExtractedCloze { - ordinal, + ordinals, nodes: Vec::with_capacity(1), // common case hint: None, }) @@ -214,7 +232,7 @@ fn reveal_cloze_text_in_nodes( output: &mut Vec, ) { if let TextOrCloze::Cloze(cloze) = node { - if cloze.ordinal == cloze_ord { + if cloze.contains_ordinal(cloze_ord) { if question { output.push(cloze.hint().into()) } else { @@ -234,14 +252,15 @@ fn reveal_cloze( active_cloze_found_in_text: &mut bool, buf: &mut String, ) { - let active = cloze.ordinal == cloze_ord; + let active = cloze.contains_ordinal(cloze_ord); *active_cloze_found_in_text |= active; + if let Some(image_occlusion_text) = cloze.image_occlusion() { buf.push_str(&render_image_occlusion( image_occlusion_text, question, active, - cloze.ordinal, + &cloze.ordinals, )); return; } @@ -265,7 +284,7 @@ fn reveal_cloze( buf, r#"[{}]"#, encode_attribute(&content_buf), - cloze.ordinal, + ordinals_str(&cloze.ordinals), cloze.hint() ) .unwrap(); @@ -274,7 +293,7 @@ fn reveal_cloze( write!( buf, r#""#, - cloze.ordinal + ordinals_str(&cloze.ordinals) ) .unwrap(); for node in &cloze.nodes { @@ -292,7 +311,7 @@ fn reveal_cloze( write!( buf, r#""#, - cloze.ordinal + ordinals_str(&cloze.ordinals) ) .unwrap(); for node in &cloze.nodes { @@ -308,23 +327,28 @@ fn reveal_cloze( } } -fn render_image_occlusion(text: &str, question_side: bool, active: bool, ordinal: u16) -> String { - if (question_side && active) || ordinal == 0 { +fn render_image_occlusion( + text: &str, + question_side: bool, + active: bool, + ordinals: &[u16], +) -> String { + if (question_side && active) || ordinals.contains(&0) { format!( r#"

"#, - ordinal, + ordinals_str(ordinals), &get_image_cloze_data(text) ) } else if !active { format!( r#"
"#, - ordinal, + ordinals_str(ordinals), &get_image_cloze_data(text) ) } else if !question_side && active { format!( r#"
"#, - ordinal, + ordinals_str(ordinals), &get_image_cloze_data(text) ) } else { @@ -338,7 +362,10 @@ pub fn parse_image_occlusions(text: &str) -> Vec { if let TextOrCloze::Cloze(cloze) = node { if cloze.image_occlusion().is_some() { if let Some(shape) = parse_image_cloze(cloze.image_occlusion().unwrap()) { - occlusions.entry(cloze.ordinal).or_default().push(shape); + // Associate this occlusion with all ordinals in this cloze + for &ordinal in &cloze.ordinals { + occlusions.entry(ordinal).or_default().push(shape.clone()); + } } } } @@ -420,7 +447,7 @@ pub fn expand_clozes_to_reveal_latex(text: &str) -> String { pub(crate) fn contains_cloze(text: &str) -> bool { parse_text_with_clozes(text) .iter() - .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinal != 0)) + .any(|node| matches!(node, TextOrCloze::Cloze(e) if e.ordinals.iter().any(|&o| o != 0))) } /// Returns the set of cloze number as they appear in the fields's content. @@ -433,10 +460,12 @@ pub fn cloze_numbers_in_string(html: &str) -> HashSet { fn add_cloze_numbers_in_text_with_clozes(nodes: &[TextOrCloze], set: &mut HashSet) { for node in nodes { if let TextOrCloze::Cloze(cloze) = node { - if cloze.ordinal != 0 { - set.insert(cloze.ordinal); - add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set); + for &ordinal in &cloze.ordinals { + if ordinal != 0 { + set.insert(ordinal); + } } + add_cloze_numbers_in_text_with_clozes(&cloze.nodes, set); } } } @@ -654,4 +683,160 @@ mod test { ) ); } + + #[test] + fn multi_card_card_generation() { + let text = "{{c1,2,3::multi}}"; + assert_eq!( + cloze_number_in_fields(vec![text]), + vec![1, 2, 3].into_iter().collect::>() + ); + } + + #[test] + fn multi_card_cloze_basic() { + let text = "{{c1,2::shared}} word and {{c1::first}} vs {{c2::second}}"; + + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, true)).as_ref(), + "[...] word and [...] vs second" + ); + assert_eq!( + strip_html(&reveal_cloze_text(text, 2, true)).as_ref(), + "[...] 
word and first vs [...]" + ); + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, false)).as_ref(), + "shared word and first vs second" + ); + assert_eq!( + strip_html(&reveal_cloze_text(text, 2, false)).as_ref(), + "shared word and first vs second" + ); + assert_eq!( + cloze_numbers_in_string(text), + vec![1, 2].into_iter().collect::>() + ); + } + + #[test] + fn multi_card_cloze_html_attributes() { + let text = "{{c1,2,3::multi}}"; + + let card1_html = reveal_cloze_text(text, 1, true); + assert!(card1_html.contains(r#"data-ordinal="1,2,3""#)); + + let card2_html = reveal_cloze_text(text, 2, true); + assert!(card2_html.contains(r#"data-ordinal="1,2,3""#)); + + let card3_html = reveal_cloze_text(text, 3, true); + assert!(card3_html.contains(r#"data-ordinal="1,2,3""#)); + } + + #[test] + fn multi_card_cloze_with_hints() { + let text = "{{c1,2::answer::hint}}"; + + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, true)).as_ref(), + "[hint]" + ); + assert_eq!( + strip_html(&reveal_cloze_text(text, 2, true)).as_ref(), + "[hint]" + ); + + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, false)).as_ref(), + "answer" + ); + assert_eq!( + strip_html(&reveal_cloze_text(text, 2, false)).as_ref(), + "answer" + ); + } + + #[test] + fn multi_card_cloze_edge_cases() { + assert_eq!( + cloze_numbers_in_string("{{c1,1,2::test}}"), + vec![1, 2].into_iter().collect::>() + ); + + assert_eq!( + cloze_numbers_in_string("{{c0,1,2::test}}"), + vec![1, 2].into_iter().collect::>() + ); + + assert_eq!( + cloze_numbers_in_string("{{c1,,3::test}}"), + vec![1, 3].into_iter().collect::>() + ); + } + + #[test] + fn multi_card_cloze_only_filter() { + let text = "{{c1,2::shared}} and {{c1::first}} vs {{c2::second}}"; + + assert_eq!(reveal_cloze_text_only(text, 1, true), "..., ..."); + assert_eq!(reveal_cloze_text_only(text, 2, true), "..., ..."); + assert_eq!(reveal_cloze_text_only(text, 1, false), "shared, first"); + assert_eq!(reveal_cloze_text_only(text, 2, false), "shared, second"); + } + + #[test] + fn multi_card_nested_cloze() { + let text = "{{c1,2::outer {{c3::inner}}}}"; + + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, true)).as_ref(), + "[...]" + ); + + assert_eq!( + strip_html(&reveal_cloze_text(text, 2, true)).as_ref(), + "[...]" + ); + + assert_eq!( + strip_html(&reveal_cloze_text(text, 3, true)).as_ref(), + "outer [...]" + ); + + assert_eq!( + cloze_numbers_in_string(text), + vec![1, 2, 3].into_iter().collect::>() + ); + } + + #[test] + fn nested_parent_child_card_same_cloze() { + let text = "{{c1::outer {{c1::inner}}}}"; + + assert_eq!( + strip_html(&reveal_cloze_text(text, 1, true)).as_ref(), + "[...]" + ); + + assert_eq!( + cloze_numbers_in_string(text), + vec![1].into_iter().collect::>() + ); + } + + #[test] + fn multi_card_image_occlusion() { + let text = "{{c1,2::image-occlusion:rect:left=10:top=20:width=30:height=40}}"; + + let occlusions = parse_image_occlusions(text); + assert_eq!(occlusions.len(), 2); + assert!(occlusions.iter().any(|o| o.ordinal == 1)); + assert!(occlusions.iter().any(|o| o.ordinal == 2)); + + let card1_html = reveal_cloze_text(text, 1, true); + assert!(card1_html.contains(r#"data-ordinal="1,2""#)); + + let card2_html = reveal_cloze_text(text, 2, true); + assert!(card2_html.contains(r#"data-ordinal="1,2""#)); + } } diff --git a/rslib/src/collection/service.rs b/rslib/src/collection/service.rs index 2050a6897c2..a37360782ec 100644 --- a/rslib/src/collection/service.rs +++ b/rslib/src/collection/service.rs @@ -1,8 +1,10 @@ // Copyright: Ankitects Pty Ltd and 
contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use anki_proto::collection::GetCustomColoursResponse; use anki_proto::generic; use crate::collection::Collection; +use crate::config::ConfigKey; use crate::error; use crate::prelude::BoolKey; use crate::prelude::Op; @@ -62,4 +64,13 @@ impl crate::services::CollectionService for Collection { }) .map(Into::into) } + + fn get_custom_colours( + &mut self, + ) -> error::Result { + let colours = self + .get_config_optional(ConfigKey::CustomColorPickerPalette) + .unwrap_or_default(); + Ok(GetCustomColoursResponse { colours }) + } } diff --git a/rslib/src/config/bool.rs b/rslib/src/config/bool.rs index c76787cb081..1be9b255698 100644 --- a/rslib/src/config/bool.rs +++ b/rslib/src/config/bool.rs @@ -1,8 +1,6 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use serde::Deserialize; -use serde_aux::field_attributes::deserialize_bool_from_anything; use strum::IntoStaticStr; use crate::prelude::*; @@ -56,15 +54,6 @@ pub enum BoolKey { AddingDefaultsToCurrentDeck, } -/// This is a workaround for old clients that used ints to represent boolean -/// values. For new config items, prefer using a bool directly. -#[derive(Deserialize, Default)] -struct BoolLike( - #[serde(deserialize_with = "deserialize_bool_from_anything")] - #[allow(dead_code)] - bool, -); - impl Collection { pub fn get_config_bool(&self, key: BoolKey) -> bool { match key { diff --git a/rslib/src/config/mod.rs b/rslib/src/config/mod.rs index 5ece5b7e1e6..1e507281ac8 100644 --- a/rslib/src/config/mod.rs +++ b/rslib/src/config/mod.rs @@ -71,6 +71,7 @@ pub(crate) enum ConfigKey { NextNewCardPosition, #[strum(to_string = "schedVer")] SchedulerVersion, + CustomColorPickerPalette, } #[derive(PartialEq, Eq, Serialize_repr, Deserialize_repr, Clone, Copy, Debug)] diff --git a/rslib/src/deckconfig/schema11.rs b/rslib/src/deckconfig/schema11.rs index 2d862a3a0c0..b7ed0acaa80 100644 --- a/rslib/src/deckconfig/schema11.rs +++ b/rslib/src/deckconfig/schema11.rs @@ -184,17 +184,13 @@ where #[derive(Serialize_repr, Deserialize_repr, Debug, PartialEq, Eq, Clone)] #[repr(u8)] +#[derive(Default)] pub enum NewCardOrderSchema11 { Random = 0, + #[default] Due = 1, } -impl Default for NewCardOrderSchema11 { - fn default() -> Self { - Self::Due - } -} - fn hard_factor_default() -> f32 { 1.2 } diff --git a/rslib/src/import_export/package/apkg/import/media.rs b/rslib/src/import_export/package/apkg/import/media.rs index 32bf7c807ae..20543e07498 100644 --- a/rslib/src/import_export/package/apkg/import/media.rs +++ b/rslib/src/import_export/package/apkg/import/media.rs @@ -17,6 +17,7 @@ use crate::import_export::package::media::SafeMediaEntry; use crate::import_export::ImportProgress; use crate::media::files::add_hash_suffix_to_file_stem; use crate::media::files::sha1_of_reader; +use crate::media::Checksums; use crate::prelude::*; use crate::progress::ThrottlingProgressHandler; @@ -75,7 +76,7 @@ impl Context<'_> { fn prepare_media( media_entries: Vec, archive: &mut ZipArchive, - existing_sha1s: &HashMap, + existing_sha1s: &Checksums, progress: &mut ThrottlingProgressHandler, ) -> Result { let mut media_map = MediaUseMap::default(); diff --git a/rslib/src/import_export/text/csv/import.rs b/rslib/src/import_export/text/csv/import.rs index e45bbca1b84..31dee84e498 100644 --- a/rslib/src/import_export/text/csv/import.rs +++ b/rslib/src/import_export/text/csv/import.rs @@ -1,6 +1,7 @@ // Copyright: 
Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use std::collections::HashSet; use std::io::BufRead; use std::io::BufReader; use std::io::Read; @@ -106,6 +107,8 @@ struct ColumnContext { notetype_column: Option, /// Source column indices for the fields of a notetype field_source_columns: FieldSourceColumns, + /// Metadata column indices (1-based) + meta_columns: HashSet, /// How fields are converted to strings. Used for escaping HTML if /// appropriate. stringify: fn(&str) -> String, @@ -119,6 +122,7 @@ impl ColumnContext { deck_column: metadata.deck()?.column(), notetype_column: metadata.notetype()?.column(), field_source_columns: metadata.field_source_columns()?, + meta_columns: metadata.meta_columns(), stringify: stringify_fn(metadata.is_html), }) } @@ -166,11 +170,19 @@ impl ColumnContext { } fn gather_note_fields(&self, record: &csv::StringRecord) -> Vec> { - let stringify = self.stringify; - self.field_source_columns - .iter() - .map(|opt| opt.and_then(|idx| record.get(idx - 1)).map(stringify)) - .collect() + let op = |i| record.get(i - 1).map(self.stringify); + if !self.field_source_columns.is_empty() { + self.field_source_columns + .iter() + .map(|opt| opt.and_then(op)) + .collect() + } else { + // notetype column provided, assume all non-metadata columns are notetype fields + (1..=record.len()) + .filter(|i| !self.meta_columns.contains(i)) + .map(op) + .collect() + } } } diff --git a/rslib/src/import_export/text/csv/metadata.rs b/rslib/src/import_export/text/csv/metadata.rs index d505c60d2f9..cd41508136a 100644 --- a/rslib/src/import_export/text/csv/metadata.rs +++ b/rslib/src/import_export/text/csv/metadata.rs @@ -291,11 +291,8 @@ impl CsvMetadataHelpers for CsvMetadata { .map(|&i| (i > 0).then_some(i as usize)) .collect(), CsvNotetype::NotetypeColumn(_) => { - let meta_columns = self.meta_columns(); - (1..self.column_labels.len() + 1) - .filter(|idx| !meta_columns.contains(idx)) - .map(Some) - .collect() + // each row's notetype could have varying number of fields + vec![] } }) } diff --git a/rslib/src/media/check.rs b/rslib/src/media/check.rs index 20d684826b4..9545441c174 100644 --- a/rslib/src/media/check.rs +++ b/rslib/src/media/check.rs @@ -552,6 +552,8 @@ pub(crate) mod test { use super::*; use crate::collection::CollectionBuilder; + use crate::sync::media::MAX_MEDIA_FILENAME_LENGTH; + use crate::tests::NoteAdder; fn common_setup() -> Result<(TempDir, MediaManager, Collection)> { let dir = tempdir()?; @@ -869,4 +871,30 @@ Unused: unused.jpg Ok(()) } + + #[test] + fn long_filename_rename_not_reported_as_unused() -> Result<()> { + let (_dir, mgr, mut col) = common_setup()?; + + let long_filename = format!("{}.mp3", "a".repeat(MAX_MEDIA_FILENAME_LENGTH + 1)); + + NoteAdder::basic(&mut col) + .fields(&["test", &format!("[sound:{}]", long_filename)]) + .add(&mut col); + + write_file(mgr.media_folder.join(&long_filename), "audio data")?; + + let output = { + let mut checker = col.media_checker()?; + checker.check()? 
+ }; + + assert!(output.renamed.contains_key(&long_filename)); + let new_filename = output.renamed.get(&long_filename).unwrap(); + assert!(new_filename.len() <= MAX_MEDIA_FILENAME_LENGTH); + assert!(!output.unused.contains(new_filename)); + assert!(!output.missing.contains(new_filename)); + + Ok(()) + } } diff --git a/rslib/src/media/files.rs b/rslib/src/media/files.rs index ce17b40bb94..b098eb19e71 100644 --- a/rslib/src/media/files.rs +++ b/rslib/src/media/files.rs @@ -173,7 +173,9 @@ pub fn add_data_to_folder_uniquely<'a, P>( where P: AsRef, { - let normalized_name = normalize_filename(desired_name); + // force lowercase to account for case-insensitive filesystems + // but not within normalize_filename, for existing media refs + let normalized_name: Cow<_> = normalize_filename(desired_name).to_lowercase().into(); let mut target_path = folder.as_ref().join(normalized_name.as_ref()); @@ -496,8 +498,14 @@ mod test { "test.mp3" ); - // different contents + // different contents, filenames differ only by case let h2 = sha1_of_data(b"hello1"); + assert_eq!( + add_data_to_folder_uniquely(dpath, "Test.mp3", b"hello1", h2).unwrap(), + "test-88fdd585121a4ccb3d1540527aee53a77c77abb8.mp3" + ); + + // same contents, filenames differ only by case assert_eq!( add_data_to_folder_uniquely(dpath, "test.mp3", b"hello1", h2).unwrap(), "test-88fdd585121a4ccb3d1540527aee53a77c77abb8.mp3" diff --git a/rslib/src/media/mod.rs b/rslib/src/media/mod.rs index 259dd52f8b4..8a599fbecc4 100644 --- a/rslib/src/media/mod.rs +++ b/rslib/src/media/mod.rs @@ -6,7 +6,6 @@ pub mod files; mod service; use std::borrow::Cow; -use std::collections::HashMap; use std::path::Path; use std::path::PathBuf; @@ -22,6 +21,7 @@ use crate::progress::ThrottlingProgressHandler; use crate::sync::http_client::HttpSyncClient; use crate::sync::login::SyncAuth; use crate::sync::media::database::client::changetracker::ChangeTracker; +pub use crate::sync::media::database::client::Checksums; use crate::sync::media::database::client::MediaDatabase; use crate::sync::media::database::client::MediaEntry; use crate::sync::media::progress::MediaSyncProgress; @@ -157,7 +157,7 @@ impl MediaManager { pub fn all_checksums_after_checking( &self, progress: impl FnMut(usize) -> bool, - ) -> Result> { + ) -> Result { ChangeTracker::new(&self.media_folder, progress).register_changes(&self.db)?; self.db.all_registered_checksums() } @@ -176,7 +176,7 @@ impl MediaManager { /// All checksums without registering changes first. 
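Editor's note: the Checksums wrapper referenced here (defined later in this diff, under rslib/src/sync/media/database/client/mod.rs) folds lookups to lowercase so imports behave the same on case-insensitive filesystems. A minimal self-contained sketch of the idea, using a hypothetical type in place of the real HashMap<String, Sha1Hash> wrapper:

use std::collections::HashMap;

// Hypothetical stand-in: keys are stored lowercased and get() lowercases the query,
// so "Test.mp3" and "test.mp3" resolve to the same checksum entry.
struct CaseFoldedChecksums(HashMap<String, [u8; 20]>);

impl CaseFoldedChecksums {
    fn insert(&mut self, fname: &str, csum: [u8; 20]) {
        self.0.insert(fname.to_lowercase(), csum);
    }
    fn get(&self, fname: &str) -> Option<&[u8; 20]> {
        self.0.get(fname.to_lowercase().as_str())
    }
    fn contains_key(&self, fname: &str) -> bool {
        self.get(fname).is_some()
    }
}

fn main() {
    let mut map = CaseFoldedChecksums(HashMap::new());
    map.insert("Test.mp3", [0u8; 20]);
    assert!(map.contains_key("test.MP3"));
}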
#[cfg(test)] - pub(crate) fn all_checksums_as_is(&self) -> HashMap { + pub(crate) fn all_checksums_as_is(&self) -> Checksums { self.db.all_registered_checksums().unwrap() } } diff --git a/rslib/src/notetype/stock.rs b/rslib/src/notetype/stock.rs index 9b5df66d56b..b27881809fa 100644 --- a/rslib/src/notetype/stock.rs +++ b/rslib/src/notetype/stock.rs @@ -122,7 +122,7 @@ pub(crate) fn basic(tr: &I18n) -> Notetype { pub(crate) fn basic_typing(tr: &I18n) -> Notetype { let mut nt = basic(tr); - nt.config.original_stock_kind = StockKind::BasicTyping as i32; + nt.config.original_stock_kind = OriginalStockKind::BasicTyping as i32; nt.name = tr.notetypes_basic_type_answer_name().into(); let front = tr.notetypes_front_field(); let back = tr.notetypes_back_field(); @@ -138,7 +138,7 @@ pub(crate) fn basic_typing(tr: &I18n) -> Notetype { pub(crate) fn basic_forward_reverse(tr: &I18n) -> Notetype { let mut nt = basic(tr); - nt.config.original_stock_kind = StockKind::BasicAndReversed as i32; + nt.config.original_stock_kind = OriginalStockKind::BasicAndReversed as i32; nt.name = tr.notetypes_basic_reversed_name().into(); let front = tr.notetypes_front_field(); let back = tr.notetypes_back_field(); @@ -156,7 +156,7 @@ pub(crate) fn basic_forward_reverse(tr: &I18n) -> Notetype { pub(crate) fn basic_optional_reverse(tr: &I18n) -> Notetype { let mut nt = basic_forward_reverse(tr); - nt.config.original_stock_kind = StockKind::BasicOptionalReversed as i32; + nt.config.original_stock_kind = OriginalStockKind::BasicOptionalReversed as i32; nt.name = tr.notetypes_basic_optional_reversed_name().into(); let addrev = tr.notetypes_add_reverse_field(); nt.add_field(addrev.as_ref()); diff --git a/rslib/src/revlog/mod.rs b/rslib/src/revlog/mod.rs index f5269838809..3e22890b10c 100644 --- a/rslib/src/revlog/mod.rs +++ b/rslib/src/revlog/mod.rs @@ -85,6 +85,15 @@ impl RevlogEntry { .unwrap() } + pub(crate) fn last_interval_secs(&self) -> u32 { + u32::try_from(if self.last_interval > 0 { + self.last_interval.saturating_mul(86_400) + } else { + self.last_interval.saturating_mul(-1) + }) + .unwrap() + } + /// Returns true if this entry represents a reset operation. /// These entries are created when a card is reset using /// [`Collection::reschedule_cards_as_new`]. @@ -152,7 +161,7 @@ impl Collection { ) -> Result<()> { let ease_factor = u32::from( card.memory_state - .map(|s| ((s.difficulty_shifted() * 1000.) as u16)) + .map(|s| (s.difficulty_shifted() * 1000.) as u16) .unwrap_or(card.ease_factor), ); let entry = RevlogEntry { diff --git a/rslib/src/scheduler/answering/mod.rs b/rslib/src/scheduler/answering/mod.rs index 6ff8c6e2d70..a71c6330fd7 100644 --- a/rslib/src/scheduler/answering/mod.rs +++ b/rslib/src/scheduler/answering/mod.rs @@ -443,9 +443,20 @@ impl Collection { .storage .get_deck(card.deck_id)? .or_not_found(card.deck_id)?; - let config = self.home_deck_config(deck.config_id(), card.original_deck_id)?; + let home_deck = if card.original_deck_id.0 == 0 { + &deck + } else { + &self + .storage + .get_deck(card.original_deck_id)? + .or_not_found(card.original_deck_id)? + }; + let config = self + .storage + .get_deck_config(home_deck.config_id().or_invalid("home deck is filtered")?)? 
+ .unwrap_or_default(); - let desired_retention = deck.effective_desired_retention(&config); + let desired_retention = home_deck.effective_desired_retention(&config); let fsrs_enabled = self.get_config_bool(BoolKey::Fsrs); let fsrs_next_states = if fsrs_enabled { let params = config.fsrs_params(); diff --git a/rslib/src/scheduler/fsrs/error.rs b/rslib/src/scheduler/fsrs/error.rs index d5b596a3615..404ee36054a 100644 --- a/rslib/src/scheduler/fsrs/error.rs +++ b/rslib/src/scheduler/fsrs/error.rs @@ -13,13 +13,7 @@ impl From for AnkiError { FSRSError::OptimalNotFound => AnkiError::FsrsUnableToDetermineDesiredRetention, FSRSError::Interrupted => AnkiError::Interrupted, FSRSError::InvalidParameters => AnkiError::FsrsParamsInvalid, - FSRSError::InvalidInput => AnkiError::InvalidInput { - source: InvalidInputError { - message: "invalid params provided".to_string(), - source: None, - backtrace: None, - }, - }, + FSRSError::InvalidInput => AnkiError::FsrsParamsInvalid, FSRSError::InvalidDeckSize => AnkiError::InvalidInput { source: InvalidInputError { message: "no cards to simulate".to_string(), diff --git a/rslib/src/scheduler/fsrs/memory_state.rs b/rslib/src/scheduler/fsrs/memory_state.rs index 420ead5a334..303bbfd91a9 100644 --- a/rslib/src/scheduler/fsrs/memory_state.rs +++ b/rslib/src/scheduler/fsrs/memory_state.rs @@ -136,6 +136,19 @@ impl Collection { let deckconfig_id = deck.config_id().unwrap(); // reschedule it let original_interval = card.interval; + let min_interval = |interval: u32| { + let previous_interval = + last_info.previous_interval.unwrap_or(0); + if interval > previous_interval { + // interval grew; don't allow fuzzed interval to + // be less than previous+1 + previous_interval + 1 + } else { + // interval shrunk; don't restrict negative fuzz + 0 + } + .max(1) + }; let interval = fsrs.next_interval( Some(state.stability), desired_retention, @@ -146,7 +159,7 @@ impl Collection { .and_then(|r| { r.find_interval( interval, - 1, + min_interval(interval as u32), req.max_interval, days_elapsed as u32, deckconfig_id, @@ -157,7 +170,7 @@ impl Collection { with_review_fuzz( card.get_fuzz_factor(true), interval, - 1, + min_interval(interval as u32), req.max_interval, ) }); @@ -310,6 +323,9 @@ pub(crate) struct LastRevlogInfo { /// reviewed the card and now, so that we can determine an accurate period /// when the card has subsequently been rescheduled to a different day. pub(crate) last_reviewed_at: Option, + /// The interval before the latest review. Used to prevent fuzz from going + /// backwards when rescheduling the card + pub(crate) previous_interval: Option, } /// Return a map of cards to info about last review. 
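Editor's note: to spell out the interaction between the min_interval closure above and the previous_interval recorded by the hunk that follows, here is the same rule as a standalone function (illustrative sketch only, function name hypothetical):

// If the interval grew, the fuzzed result may not drop below previous + 1, so
// rescheduling with fuzz cannot move a card backwards; if it shrank, the normal
// lower bound of one day applies.
fn minimum_allowed_interval(new_interval: u32, previous_interval: Option<u32>) -> u32 {
    let previous = previous_interval.unwrap_or(0);
    if new_interval > previous {
        previous + 1
    } else {
        1
    }
}

fn main() {
    assert_eq!(minimum_allowed_interval(10, Some(7)), 8); // grew: fuzz floor is 8
    assert_eq!(minimum_allowed_interval(5, Some(7)), 1); // shrank: full fuzz range
    assert_eq!(minimum_allowed_interval(3, None), 1); // no previous review recorded
}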
@@ -321,14 +337,27 @@ pub(crate) fn get_last_revlog_info(revlogs: &[RevlogEntry]) -> HashMap= 0 && e.button_chosen > 1 { + Some(e.last_interval as u32) + } else { + None + }; } else if e.is_reset() { last_reviewed_at = None; + previous_interval = None; } } - out.insert(card_id, LastRevlogInfo { last_reviewed_at }); + out.insert( + card_id, + LastRevlogInfo { + last_reviewed_at, + previous_interval, + }, + ); }); out } diff --git a/rslib/src/scheduler/fsrs/params.rs b/rslib/src/scheduler/fsrs/params.rs index 726870fe13f..6f438a5d2bc 100644 --- a/rslib/src/scheduler/fsrs/params.rs +++ b/rslib/src/scheduler/fsrs/params.rs @@ -174,7 +174,7 @@ impl Collection { } } - let health_check_passed = if health_check { + let health_check_passed = if health_check && input.train_set.len() > 300 { let fsrs = FSRS::new(None)?; fsrs.evaluate_with_time_series_splits(input, |_| true) .ok() @@ -478,27 +478,42 @@ pub(crate) fn reviews_for_fsrs( })) .collect_vec(); - let skip = if training { 1 } else { 0 }; - // Convert the remaining entries into separate FSRSItems, where each item - // contains all reviews done until then. - let items: Vec<(RevlogId, FSRSItem)> = entries - .iter() - .enumerate() - .skip(skip) - .map(|(outer_idx, entry)| { - let reviews = entries - .iter() - .take(outer_idx + 1) - .enumerate() - .map(|(inner_idx, r)| FSRSReview { - rating: r.button_chosen as u32, - delta_t: delta_ts[inner_idx], - }) - .collect(); - (entry.id, FSRSItem { reviews }) - }) - .filter(|(_, item)| !training || item.reviews.last().unwrap().delta_t > 0) - .collect_vec(); + let items = if training { + // Convert the remaining entries into separate FSRSItems, where each item + // contains all reviews done until then. + let mut items = Vec::with_capacity(entries.len()); + let mut current_reviews = Vec::with_capacity(entries.len()); + for (idx, (entry, &delta_t)) in entries.iter().zip(delta_ts.iter()).enumerate() { + current_reviews.push(FSRSReview { + rating: entry.button_chosen as u32, + delta_t, + }); + if idx >= 1 && delta_t > 0 { + items.push(( + entry.id, + FSRSItem { + reviews: current_reviews.clone(), + }, + )); + } + } + items + } else { + // When not training, we only need the final FSRS item, which represents + // the complete history of the card. This avoids expensive clones in a loop. 
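Editor's note: a compact illustration of the training/inference split described in the comments above, using plain stand-in types rather than the fsrs crate's FSRSItem/FSRSReview (sketch only, not part of the patch):

#[derive(Clone, Debug, PartialEq)]
struct Review {
    rating: u32,
    delta_t: u32,
}

// Training: every review after the first with delta_t > 0 yields an item holding the
// full prefix of the history. Inference: only the single, complete history is needed,
// which avoids cloning the growing prefix on every iteration.
fn training_items(reviews: &[Review]) -> Vec<Vec<Review>> {
    let mut items = Vec::new();
    let mut prefix = Vec::with_capacity(reviews.len());
    for (idx, review) in reviews.iter().enumerate() {
        prefix.push(review.clone());
        if idx >= 1 && review.delta_t > 0 {
            items.push(prefix.clone());
        }
    }
    items
}

fn inference_item(reviews: &[Review]) -> Vec<Review> {
    reviews.to_vec()
}

fn main() {
    let history = vec![
        Review { rating: 3, delta_t: 0 },
        Review { rating: 3, delta_t: 1 },
        Review { rating: 4, delta_t: 5 },
    ];
    assert_eq!(training_items(&history).len(), 2);
    assert_eq!(inference_item(&history).len(), 3);
}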
+ let reviews = entries + .iter() + .zip(delta_ts.iter()) + .map(|(entry, &delta_t)| FSRSReview { + rating: entry.button_chosen as u32, + delta_t, + }) + .collect(); + let last_entry = entries.last().unwrap(); + + vec![(last_entry.id, FSRSItem { reviews })] + }; + if items.is_empty() { None } else { @@ -591,8 +606,6 @@ pub(crate) mod tests { }; } - pub(crate) use fsrs_items; - #[test] fn delta_t_is_correct() -> Result<()> { assert_eq!( @@ -738,7 +751,7 @@ pub(crate) mod tests { ], false, ), - fsrs_items!([review(0)], [review(0), review(1)]) + fsrs_items!([review(0), review(1)]) ); } @@ -809,7 +822,7 @@ pub(crate) mod tests { // R | A X R assert_eq!( convert_ignore_before(revlogs, false, days_ago_ms(9)), - fsrs_items!([review(0)], [review(0), review(2)]) + fsrs_items!([review(0), review(2)]) ); } @@ -828,6 +841,9 @@ pub(crate) mod tests { assert_eq!( convert_ignore_before(revlogs, false, days_ago_ms(9)) .unwrap() + .last() + .unwrap() + .reviews .len(), 2 ); @@ -849,6 +865,9 @@ pub(crate) mod tests { assert_eq!( convert_ignore_before(revlogs, false, days_ago_ms(9)) .unwrap() + .last() + .unwrap() + .reviews .len(), 2 ); diff --git a/rslib/src/scheduler/fsrs/rescheduler.rs b/rslib/src/scheduler/fsrs/rescheduler.rs index db490b3e4c9..37c824230eb 100644 --- a/rslib/src/scheduler/fsrs/rescheduler.rs +++ b/rslib/src/scheduler/fsrs/rescheduler.rs @@ -115,13 +115,14 @@ impl Rescheduler { pub fn find_interval( &self, interval: f32, - minimum: u32, - maximum: u32, + minimum_interval: u32, + maximum_interval: u32, days_elapsed: u32, deckconfig_id: DeckConfigId, fuzz_seed: Option, ) -> Option { - let (before_days, after_days) = constrained_fuzz_bounds(interval, minimum, maximum); + let (before_days, after_days) = + constrained_fuzz_bounds(interval, minimum_interval, maximum_interval); // Don't reschedule the card when it's overdue if after_days < days_elapsed { diff --git a/rslib/src/search/parser.rs b/rslib/src/search/parser.rs index cbdba3d9f67..5928bf486bc 100644 --- a/rslib/src/search/parser.rs +++ b/rslib/src/search/parser.rs @@ -392,6 +392,11 @@ fn parse_tag(s: &str) -> ParseResult<'_, SearchNode> { tag: unescape_quotes(re), mode: FieldSearchMode::Regex, } + } else if let Some(nc) = s.strip_prefix("nc:") { + SearchNode::Tag { + tag: unescape(nc)?, + mode: FieldSearchMode::NoCombining, + } } else { SearchNode::Tag { tag: unescape(s)?, diff --git a/rslib/src/search/sqlwriter.rs b/rslib/src/search/sqlwriter.rs index 95249276c7e..2479b87aeef 100644 --- a/rslib/src/search/sqlwriter.rs +++ b/rslib/src/search/sqlwriter.rs @@ -311,8 +311,19 @@ impl SqlWriter<'_> { } s if s.contains(' ') => write!(self.sql, "false").unwrap(), text => { - write!(self.sql, "n.tags regexp ?").unwrap(); - let re = &to_custom_re(text, r"\S"); + let text = if mode == FieldSearchMode::Normal { + write!(self.sql, "n.tags regexp ?").unwrap(); + Cow::from(text) + } else { + write!( + self.sql, + "coalesce(process_text(n.tags, {}), n.tags) regexp ?", + ProcessTextFlags::NoCombining.bits() + ) + .unwrap(); + without_combining(text) + }; + let re = &to_custom_re(&text, r"\S"); self.args.push(format!("(?i).* {re}(::| ).*")); } } @@ -423,9 +434,10 @@ impl SqlWriter<'_> { let timing = self.col.timing_today()?; (timing.days_elapsed, timing.next_day_at, timing.now) }; + const NEW_TYPE: i8 = CardType::New as i8; write!( self.sql, - "extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {elap}, {next_day_at}, {now}) {op} {r}" + "case when c.type = {NEW_TYPE} then false else 
(extract_fsrs_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, c.ivl, {elap}, {next_day_at}, {now}) {op} {r}) end" ) .unwrap() } diff --git a/rslib/src/stats/card.rs b/rslib/src/stats/card.rs index 008977fe967..0dabff5e52f 100644 --- a/rslib/src/stats/card.rs +++ b/rslib/src/stats/card.rs @@ -76,8 +76,15 @@ impl Collection { note_id: card.note_id.into(), deck: deck.human_name(), added: card.id.as_secs().0, - first_review: revlog.first().map(|entry| entry.id.as_secs().0), - latest_review: revlog.last().map(|entry| entry.id.as_secs().0), + first_review: revlog + .iter() + .find(|entry| entry.has_rating()) + .map(|entry| entry.id.as_secs().0), + // last_review_time is not used to ensure cram revlogs are included. + latest_review: revlog + .iter() + .rfind(|entry| entry.has_rating()) + .map(|entry| entry.id.as_secs().0), due_date: self.due_date(&card)?, due_position: self.position(&card), interval: card.interval, @@ -220,6 +227,7 @@ fn stats_revlog_entry( ease: entry.ease_factor, taken_secs: entry.taken_millis as f32 / 1000., memory_state: None, + last_interval: entry.last_interval_secs(), } } diff --git a/rslib/src/storage/card/filtered.rs b/rslib/src/storage/card/filtered.rs index ef436f6e876..03f845f4e44 100644 --- a/rslib/src/storage/card/filtered.rs +++ b/rslib/src/storage/card/filtered.rs @@ -54,7 +54,7 @@ fn build_retrievability_query( ) -> String { if fsrs { format!( - "extract_fsrs_relative_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, {today}, ivl, {next_day_at}, {now}) {order}" + "extract_fsrs_relative_retrievability(c.data, case when c.odue !=0 then c.odue else c.due end, ivl, {today}, {next_day_at}, {now}) {order}" ) } else { format!( diff --git a/rslib/src/storage/card/mod.rs b/rslib/src/storage/card/mod.rs index 3a5066ff407..9e06edf07f2 100644 --- a/rslib/src/storage/card/mod.rs +++ b/rslib/src/storage/card/mod.rs @@ -837,7 +837,7 @@ impl fmt::Display for ReviewOrderSubclause { let next_day_at = timing.next_day_at.0; let now = timing.now.0; temp_string = - format!("extract_fsrs_relative_retrievability(data, case when odue !=0 then odue else due end, {today}, ivl, {next_day_at}, {now}) {order}"); + format!("extract_fsrs_relative_retrievability(data, case when odue !=0 then odue else due end, ivl, {today}, {next_day_at}, {now}) {order}"); &temp_string } ReviewOrderSubclause::Added => "nid asc, ord asc", diff --git a/rslib/src/storage/sqlite.rs b/rslib/src/storage/sqlite.rs index 3ce1baff016..95853afc90f 100644 --- a/rslib/src/storage/sqlite.rs +++ b/rslib/src/storage/sqlite.rs @@ -332,23 +332,30 @@ fn add_extract_fsrs_retrievability(db: &Connection) -> rusqlite::Result<()> { return Ok(None); }; let seconds_elapsed = if let Some(last_review_time) = card_data.last_review_time { - now.saturating_sub(last_review_time.0) as u32 + // This and any following + // (x as u32).saturating_sub(y as u32) + // must not be changed to + // x.saturating_sub(y) as u32 + // as x and y are i64's and saturating_sub will therfore allow negative numbers + // before converting to u32 in the latter example. 
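Editor's note: a short demonstration of why the cast order discussed in the comment above matters (illustration only, not part of the patch):

fn main() {
    // A last-review timestamp recorded later than `now`, e.g. due to clock skew:
    let now: i64 = 100;
    let last_review_time: i64 = 160;
    // i64 subtraction first: the negative result wraps when cast to u32.
    assert_eq!(now.saturating_sub(last_review_time) as u32, 4_294_967_236);
    // casting to u32 first: the subtraction saturates at zero instead.
    assert_eq!((now as u32).saturating_sub(last_review_time as u32), 0);
}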
+ (now as u32).saturating_sub(last_review_time.0 as u32) } else if due > 365_000 { // (re)learning card in seconds let Ok(ivl) = ctx.get_raw(2).as_i64() else { return Ok(None); }; - let last_review_time = due.saturating_sub(ivl); - now.saturating_sub(last_review_time) as u32 + let last_review_time = (due as u32).saturating_sub(ivl as u32); + (now as u32).saturating_sub(last_review_time) } else { let Ok(ivl) = ctx.get_raw(2).as_i64() else { return Ok(None); }; - let Ok(days_elapsed) = ctx.get_raw(3).as_i64() else { + // timing.days_elapsed + let Ok(today) = ctx.get_raw(3).as_i64() else { return Ok(None); }; - let review_day = due.saturating_sub(ivl); - days_elapsed.saturating_sub(review_day) as u32 * 86_400 + let review_day = (due as u32).saturating_sub(ivl as u32); + (today as u32).saturating_sub(review_day) * 86_400 }; let decay = card_data.decay.unwrap_or(FSRS5_DEFAULT_DECAY); let retrievability = card_data.memory_state().map(|state| { @@ -364,7 +371,7 @@ fn add_extract_fsrs_retrievability(db: &Connection) -> rusqlite::Result<()> { } /// eg. extract_fsrs_relative_retrievability(card.data, card.due, -/// timing.days_elapsed, card.ivl, timing.next_day_at, timing.now) -> float | +/// card.ivl, timing.days_elapsed, timing.next_day_at, timing.now) -> float | /// null. The higher the number, the higher the card's retrievability relative /// to the configured desired retention. fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result<()> { @@ -378,25 +385,32 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result let Ok(due) = ctx.get_raw(1).as_i64() else { return Ok(None); }; - let Ok(interval) = ctx.get_raw(3).as_i64() else { + let Ok(interval) = ctx.get_raw(2).as_i64() else { return Ok(None); }; + /* + // Unused let Ok(next_day_at) = ctx.get_raw(4).as_i64() else { return Ok(None); }; + */ let Ok(now) = ctx.get_raw(5).as_i64() else { return Ok(None); }; - let days_elapsed = if due > 365_000 { - // (re)learning - (next_day_at as u32).saturating_sub(due as u32) / 86_400 + let secs_elapsed = if due > 365_000 { + // (re)learning card with due in seconds + + // Don't change this to now.saturating_sub(due) as u32 + // for the same reasons listed in the comment + // in add_extract_fsrs_retrievability + (now as u32).saturating_sub(due as u32) } else { - let Ok(days_elapsed) = ctx.get_raw(2).as_i64() else { + // timing.days_elapsed + let Ok(today) = ctx.get_raw(3).as_i64() else { return Ok(None); }; let review_day = due.saturating_sub(interval); - - (days_elapsed as u32).saturating_sub(review_day as u32) + (today as u32).saturating_sub(review_day as u32) * 86_400 }; if let Ok(card_data) = ctx.get_raw(0).as_str() { if !card_data.is_empty() { @@ -410,23 +424,12 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result let seconds_elapsed = if let Some(last_review_time) = card_data.last_review_time { - now.saturating_sub(last_review_time.0) as u32 - } else if due > 365_000 { - // (re)learning card in seconds - let Ok(ivl) = ctx.get_raw(2).as_i64() else { - return Ok(None); - }; - let last_review_time = due.saturating_sub(ivl); - now.saturating_sub(last_review_time) as u32 + // Don't change this to now.saturating_sub(last_review_time.0) as u32 + // for the same reasons listed in the comment + // in add_extract_fsrs_retrievability + (now as u32).saturating_sub(last_review_time.0 as u32) } else { - let Ok(ivl) = ctx.get_raw(2).as_i64() else { - return Ok(None); -
}; - let review_day = due.saturating_sub(ivl); - days_elapsed.saturating_sub(review_day) as u32 * 86_400 + secs_elapsed }; let current_retrievability = FSRS::new(None) @@ -441,7 +444,7 @@ fn add_extract_fsrs_relative_retrievability(db: &Connection) -> rusqlite::Result } } } - + let days_elapsed = secs_elapsed / 86_400; // FSRS data missing; fall back to SM2 ordering Ok(Some( -((days_elapsed as f32) + 0.001) / (interval as f32).max(1.0), diff --git a/rslib/src/sync/media/database/client/mod.rs b/rslib/src/sync/media/database/client/mod.rs index f9c6e5ed12d..fe3e7c84014 100644 --- a/rslib/src/sync/media/database/client/mod.rs +++ b/rslib/src/sync/media/database/client/mod.rs @@ -18,6 +18,20 @@ use crate::prelude::*; pub mod changetracker; +pub struct Checksums(HashMap); + +impl Checksums { + // case-fold filenames when checking files to be imported + // to account for case-insensitive filesystems + pub fn get(&self, key: impl AsRef) -> Option<&Sha1Hash> { + self.0.get(key.as_ref().to_lowercase().as_str()) + } + + pub fn contains_key(&self, key: impl AsRef) -> bool { + self.get(key).is_some() + } +} + #[derive(Debug, PartialEq, Eq)] pub struct MediaEntry { pub fname: String, @@ -175,11 +189,12 @@ delete from media where fname=?", } /// Returns all filenames and checksums, where the checksum is not null. - pub(crate) fn all_registered_checksums(&self) -> error::Result> { + pub(crate) fn all_registered_checksums(&self) -> error::Result { self.db .prepare("SELECT fname, csum FROM media WHERE csum IS NOT NULL")? .query_and_then([], row_to_name_and_checksum)? - .collect() + .collect::>() + .map(Checksums) } pub(crate) fn force_resync(&self) -> error::Result<()> { diff --git a/rslib/src/typeanswer.rs b/rslib/src/typeanswer.rs index 08c638e1275..9bf3dc47c0b 100644 --- a/rslib/src/typeanswer.rs +++ b/rslib/src/typeanswer.rs @@ -58,7 +58,7 @@ trait DiffTrait { if self.get_typed() == self.get_expected() { format_typeans!(format!( "{}", - self.get_expected_original() + htmlescape::encode_minimal(&self.get_expected_original()) )) } else { let output = self.to_tokens(); @@ -391,6 +391,15 @@ mod test { assert_eq!(ctx, "123"); } + #[test] + fn correct_input_is_escaped() { + let ctx = Diff::new("source /bin/activate", "source /bin/activate"); + assert_eq!( + ctx.to_html(), + "source <dir>/bin/activate" + ); + } + #[test] fn correct_input_is_collapsed() { let ctx = Diff::new("123", "123"); diff --git a/rslib/src/undo/mod.rs b/rslib/src/undo/mod.rs index c3f81e84eed..7c2707e57d6 100644 --- a/rslib/src/undo/mod.rs +++ b/rslib/src/undo/mod.rs @@ -29,19 +29,14 @@ impl UndoableOp { } } -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Default)] enum UndoMode { + #[default] NormalOp, Undoing, Redoing, } -impl Default for UndoMode { - fn default() -> Self { - Self::NormalOp - } -} - pub struct UndoStatus { pub undo: Option, pub redo: Option, diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 452c65213b4..b6be5554380 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] # older versions may fail to compile; newer versions may fail the clippy tests -channel = "1.89.0" +channel = "1.92.0" diff --git a/tools/minilints/src/main.rs b/tools/minilints/src/main.rs index c99fbe06e65..b72d392947a 100644 --- a/tools/minilints/src/main.rs +++ b/tools/minilints/src/main.rs @@ -202,7 +202,8 @@ fn sveltekit_temp_file(path: &str) -> bool { } fn check_cargo_deny() -> Result<()> { - Command::run("cargo install cargo-deny@0.18.3")?; + // WARNING: make sure to update version in 
.buildkite/linux as well + Command::run("cargo install cargo-deny@0.19.0")?; Command::run("cargo deny check")?; Ok(()) } @@ -255,9 +256,7 @@ fn check_for_unstaged_changes() { } fn generate_licences() -> Result { - if which::which("cargo-license").is_err() { - Command::run("cargo install cargo-license@0.5.1")?; - } + Command::run("cargo install cargo-license@0.7.0")?; let output = Command::run_with_output([ "cargo-license", "--features", diff --git a/tools/run.py b/tools/run.py index da0baa2c439..e17e22a979d 100644 --- a/tools/run.py +++ b/tools/run.py @@ -5,8 +5,6 @@ import sys sys.path.extend(["pylib", "qt", "out/pylib", "out/qt"]) -if sys.platform == "win32": - os.environ["PATH"] += ";out\\extracted\\win_amd64_audio" import aqt diff --git a/ts/editable/ContentEditable.svelte b/ts/editable/ContentEditable.svelte index d0cfe89df7a..116eb2ffde1 100644 --- a/ts/editable/ContentEditable.svelte +++ b/ts/editable/ContentEditable.svelte @@ -20,6 +20,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html preventBuiltinShortcuts, useFocusHandler, } from "./content-editable"; + import { pageTheme } from "$lib/sveltelib/theme"; export let resolve: (editable: HTMLElement) => void; @@ -41,6 +42,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - + saveCustomColours({})} +/> {#if keyCombination} inputRef.click()} /> diff --git a/ts/editor/editor-toolbar/HighlightColorButton.svelte b/ts/editor/editor-toolbar/HighlightColorButton.svelte index 865ec5668c6..f89f7a99adf 100644 --- a/ts/editor/editor-toolbar/HighlightColorButton.svelte +++ b/ts/editor/editor-toolbar/HighlightColorButton.svelte @@ -19,6 +19,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import ColorPicker from "./ColorPicker.svelte"; import { context as editorToolbarContext } from "./EditorToolbar.svelte"; import WithColorHelper from "./WithColorHelper.svelte"; + import { saveCustomColours } from "@generated/backend"; export let color: string; @@ -134,7 +135,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html color = setColor(event); bridgeCommand(`lastHighlightColor:${color}`); }} - on:change={() => setTextColor()} + on:change={() => { + setTextColor(); + saveCustomColours({}); + }} /> diff --git a/ts/editor/editor-toolbar/TextColorButton.svelte b/ts/editor/editor-toolbar/TextColorButton.svelte index 16595318046..ce80aae49bd 100644 --- a/ts/editor/editor-toolbar/TextColorButton.svelte +++ b/ts/editor/editor-toolbar/TextColorButton.svelte @@ -22,6 +22,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html import ColorPicker from "./ColorPicker.svelte"; import { context as editorToolbarContext } from "./EditorToolbar.svelte"; import WithColorHelper from "./WithColorHelper.svelte"; + import { saveCustomColours } from "@generated/backend"; export let color: string; @@ -158,6 +159,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html setTimeout(() => { setTextColor(); }, 200); + saveCustomColours({}); }} /> diff --git a/ts/lib/components/HelpModal.svelte b/ts/lib/components/HelpModal.svelte index cf629253728..7ee42595010 100644 --- a/ts/lib/components/HelpModal.svelte +++ b/ts/lib/components/HelpModal.svelte @@ -23,6 +23,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html export let title: string; export let url: string; + export let linkLabel: string | undefined = undefined; export let startIndex = 0; export let helpSections: HelpItem[];
export let fsrs = false; @@ -106,11 +107,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
{@html renderMarkdown( tr.helpForMoreInfo({ - link: `${title}`, + link: `${linkLabel ?? title}`, }), )}
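The new optional linkLabel prop lets a caller keep title as the modal heading while overriding the text of the "for more info" link; when omitted it falls back to title. A minimal TypeScript usage sketch follows; the props object, the label strings, and the import path are illustrative assumptions, while desiredRetention is the HelpPage URL added in the next hunk.

// Hypothetical HelpModal call site: the heading stays "Desired retention",
// but the docs link is labelled after the manual section it opens.
// linkLabel falls back to title when omitted (per HelpModal.svelte above).
import { HelpPage } from "$lib/tslib/help-page";

const desiredRetentionHelp = {
    title: "Desired retention",
    url: HelpPage.DeckOptions.desiredRetention,
    linkLabel: "Deck options: FSRS",
};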
diff --git a/ts/lib/tslib/help-page.ts b/ts/lib/tslib/help-page.ts index e3f209c6ae0..e2b2e3da4ef 100644 --- a/ts/lib/tslib/help-page.ts +++ b/ts/lib/tslib/help-page.ts @@ -27,7 +27,8 @@ export const HelpPage = { limitsFromTop: "https://docs.ankiweb.net/deck-options.html#limits-start-from-top", dailyLimits: "https://docs.ankiweb.net/deck-options.html#daily-limits", audio: "https://docs.ankiweb.net/deck-options.html#audio", - fsrs: "http://docs.ankiweb.net/deck-options.html#fsrs", + fsrs: "https://docs.ankiweb.net/deck-options.html#fsrs", + desiredRetention: "https://docs.ankiweb.net/deck-options.html#desired-retention", }, Leeches: { leeches: "https://docs.ankiweb.net/leeches.html#leeches", diff --git a/ts/lib/tslib/uuid.ts b/ts/lib/tslib/uuid.ts deleted file mode 100644 index 8598261b0a8..00000000000 --- a/ts/lib/tslib/uuid.ts +++ /dev/null @@ -1,16 +0,0 @@ -// Copyright: Ankitects Pty Ltd and contributors -// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - -/** - * TODO replace with crypto.randomUUID - */ -export function randomUUID(): string { - const value = `${1e7}-${1e3}-${4e3}-${8e3}-${1e11}`; - - return value.replace(/[018]/g, (character: string): string => - ( - Number(character) - ^ (crypto.getRandomValues(new Uint8Array(1))[0] - & (15 >> (Number(character) / 4))) - ).toString(16)); -} diff --git a/ts/lib/tslib/wrap.ts b/ts/lib/tslib/wrap.ts index 39b10e9d14a..e22c3e6d85f 100644 --- a/ts/lib/tslib/wrap.ts +++ b/ts/lib/tslib/wrap.ts @@ -4,7 +4,12 @@ import { getRange, getSelection } from "./cross-browser"; function wrappedExceptForWhitespace(text: string, front: string, back: string): string { - const match = text.match(/^(\s*)([^]*?)(\s*)$/)!; + const normalizedText = text + .replace(/ /g, " ") + .replace(/ /g, " ") + .replace(/\u00A0/g, " "); + + const match = normalizedText.match(/^(\s*)([^]*?)(\s*)$/)!; return match[1] + front + match[2] + back + match[3]; } diff --git a/ts/reviewer/images.ts b/ts/reviewer/images.ts index 05de2415847..35a2e269fe3 100644 --- a/ts/reviewer/images.ts +++ b/ts/reviewer/images.ts @@ -28,6 +28,8 @@ function extractImageSrcs(fragment: DocumentFragment): string[] { function createImage(src: string): HTMLImageElement { const img = new Image(); img.src = src; + img.decoding = "async"; + img.decode(); return img; } diff --git a/ts/routes/deck-options/DeckOptionsPage.svelte b/ts/routes/deck-options/DeckOptionsPage.svelte index 599ee7f2c25..4b4343961c1 100644 --- a/ts/routes/deck-options/DeckOptionsPage.svelte +++ b/ts/routes/deck-options/DeckOptionsPage.svelte @@ -132,7 +132,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html @use "$lib/sass/breakpoints" as bp; .deck-options-page { - overflow-x: hidden; + overflow-x: auto; :global(.container-columns) { display: grid; diff --git a/ts/routes/graphs/TrueRetention.svelte b/ts/routes/graphs/TrueRetention.svelte index 4a97388310f..12d17079b5a 100644 --- a/ts/routes/graphs/TrueRetention.svelte +++ b/ts/routes/graphs/TrueRetention.svelte @@ -72,7 +72,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html > { modal = e.detail.modal; diff --git a/ts/routes/image-occlusion/Toolbar.svelte b/ts/routes/image-occlusion/Toolbar.svelte index 8775de936b4..b00e420876b 100644 --- a/ts/routes/image-occlusion/Toolbar.svelte +++ b/ts/routes/image-occlusion/Toolbar.svelte @@ -32,6 +32,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html saveNeededStore, opacityStateStore, } from "./store"; + import { get } from 
"svelte/store"; import { drawEllipse, drawPolygon, drawRectangle, drawText } from "./tools/index"; import { makeMaskTransparent, SHAPE_MASK_COLOR } from "./tools/lib"; import { enableSelectable, stopDraw } from "./tools/lib"; @@ -55,6 +56,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html onWheelDragX, } from "./tools/tool-zoom"; import { fillMask } from "./tools/tool-fill"; + import { getCustomColours, saveCustomColours } from "@generated/backend"; export let canvas; export let iconSize; @@ -76,6 +78,16 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html let colourRef: HTMLInputElement | undefined; const colour = writable(SHAPE_MASK_COLOR); + const customColorPickerPalette = writable([]); + + async function loadCustomColours() { + customColorPickerPalette.set( + (await getCustomColours({})).colours.filter( + (hex) => !hex.startsWith("#ffffff"), + ), + ); + } + function onClick(event: MouseEvent) { const upperCanvas = document.querySelector(".upper-canvas"); if (event.target == upperCanvas) { @@ -222,7 +234,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html } onMount(() => { - opacityStateStore.set(maskOpacity); + maskOpacity = get(opacityStateStore); removeHandlers = singleCallback( on(document, "click", onClick), on(window, "mousemove", onMousemove), @@ -233,6 +245,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html on(document, "touchstart", onTouchstart), on(document, "mousemove", onMousemoveDocument), ); + loadCustomColours(); }); onDestroy(() => { @@ -241,7 +254,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html - + + {#each $customColorPickerPalette as colour} + + {/each} ($colour = e.currentTarget!.value)} + on:change={() => saveCustomColours({})} />
diff --git a/ts/routes/image-occlusion/add-or-update-note.svelte.ts b/ts/routes/image-occlusion/add-or-update-note.svelte.ts index ce31eaaaf48..8494563b4a2 100644 --- a/ts/routes/image-occlusion/add-or-update-note.svelte.ts +++ b/ts/routes/image-occlusion/add-or-update-note.svelte.ts @@ -37,7 +37,9 @@ export const addOrUpdateNote = async function( backExtra, tags, }); - showResult(mode.noteId, result, noteCount); + if (result.note) { + showResult(mode.noteId, result, noteCount); + } } else { const result = await addImageOcclusionNote({ // IOCloningMode is not used on mobile @@ -55,23 +57,12 @@ export const addOrUpdateNote = async function( // show toast message const showResult = (noteId: number | null, result: OpChanges, count: number) => { const props = $state({ - message: "", - type: "error" as "error" | "success", + message: noteId ? tr.browsingCardsUpdated({ count: count }) : tr.importingCardsAdded({ count: count }), + type: "success" as "error" | "success", showToast: true, }); mount(Toast, { target: document.body, props, }); - - if (result.note) { - const msg = noteId ? tr.browsingCardsUpdated({ count: count }) : tr.importingCardsAdded({ count: count }); - props.message = msg; - props.type = "success"; - props.showToast = true; - } else { - const msg = tr.notetypesErrorGeneratingCloze(); - props.message = msg; - props.showToast = true; - } }; diff --git a/ts/routes/image-occlusion/mask-editor.ts b/ts/routes/image-occlusion/mask-editor.ts index 6d4d0d28430..41adbe42328 100644 --- a/ts/routes/image-occlusion/mask-editor.ts +++ b/ts/routes/image-occlusion/mask-editor.ts @@ -8,10 +8,22 @@ import { fabric } from "fabric"; import { get } from "svelte/store"; import { optimumCssSizeForCanvas } from "./canvas-scale"; -import { hideAllGuessOne, notesDataStore, saveNeededStore, tagsWritable, textEditingState } from "./store"; +import { + hideAllGuessOne, + notesDataStore, + opacityStateStore, + saveNeededStore, + tagsWritable, + textEditingState, +} from "./store"; import Toast from "./Toast.svelte"; import { addShapesToCanvasFromCloze } from "./tools/add-from-cloze"; -import { enableSelectable, makeShapesRemainInCanvas, moveShapeToCanvasBoundaries } from "./tools/lib"; +import { + enableSelectable, + makeMaskTransparent, + makeShapesRemainInCanvas, + moveShapeToCanvasBoundaries, +} from "./tools/lib"; import { modifiedPolygon } from "./tools/tool-polygon"; import { undoStack } from "./tools/tool-undo-redo"; import { enablePinchZoom, onResize, setCanvasSize } from "./tools/tool-zoom"; @@ -83,6 +95,7 @@ export const setupMaskEditorForEdit = async ( window.requestAnimationFrame(() => { onImageLoaded({ noteId: BigInt(noteId) }); }); + if (get(opacityStateStore)) { makeMaskTransparent(canvas, true); } }; return canvas; diff --git a/yarn.lock b/yarn.lock index 84bd46e3147..bc1640152a3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6939,8 +6939,8 @@ __metadata: linkType: hard "vite@npm:6": - version: 6.3.5 - resolution: "vite@npm:6.3.5" + version: 6.4.1 + resolution: "vite@npm:6.4.1" dependencies: esbuild: "npm:^0.25.0" fdir: "npm:^6.4.4" @@ -6989,7 +6989,7 @@ __metadata: optional: true bin: vite: bin/vite.js - checksum: 10c0/df70201659085133abffc6b88dcdb8a57ef35f742a01311fc56a4cfcda6a404202860729cc65a2c401a724f6e25f9ab40ce4339ed4946f550541531ced6fe41c + checksum: 10c0/77bb4c5b10f2a185e7859cc9a81c789021bc18009b02900347d1583b453b58e4b19ff07a5e5a5b522b68fc88728460bb45a63b104d969e8c6a6152aea3b849f7 languageName: node linkType: hard
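With the error branch removed from add-or-update-note.svelte.ts, the update path only mounts a toast when the backend actually returns a note, and showResult always reports success; an update result without a note now produces no toast instead of the cloze-generation error. A condensed TypeScript sketch of the resulting flow; Toast, the tr.* strings, and mount() come from the hunk above, while the simplified signature and the "@generated/ftl" import path are assumptions.

import { mount } from "svelte";
import * as tr from "@generated/ftl";
import Toast from "./Toast.svelte";

// Success-only toast: noteId distinguishes "updated" from "added" wording.
function showResult(noteId: number | null, count: number): void {
    const message = noteId
        ? tr.browsingCardsUpdated({ count })
        : tr.importingCardsAdded({ count });
    mount(Toast, {
        target: document.body,
        props: { message, type: "success" as "error" | "success", showToast: true },
    });
}

// Caller side (update path), matching the guard added in the diff:
// if (result.note) { showResult(mode.noteId, noteCount); }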