From 039d26feeb7edfc8d0e06803d8ad6b81bae7b477 Mon Sep 17 00:00:00 2001 From: Geometrically <18202329+Geometrically@users.noreply.github.com> Date: Wed, 2 Aug 2023 14:43:04 -0700 Subject: [PATCH] Add launcher analytics (#661) * Add more analytics * finish hydra move * Finish websocket flow * add minecraft account flow * Finish playtime vals + payout automation --- .env | 18 +- COPYING.md | 14 + Cargo.lock | 233 ++++++- Cargo.toml | 10 + assets/auth/style.css | 59 ++ assets/favicon.ico | Bin 0 -> 24838 bytes assets/logo.svg | 4 + docker-compose.yml | 4 + sqlx-data.json | 156 ++--- src/auth/flows.rs | 665 +++++++++++++------- src/auth/minecraft/auth.rs | 154 +++++ src/auth/minecraft/login.rs | 35 ++ src/auth/minecraft/mod.rs | 60 ++ src/auth/minecraft/refresh.rs | 72 +++ src/auth/minecraft/socket.rs | 40 ++ src/auth/minecraft/stages/access_token.rs | 65 ++ src/auth/minecraft/stages/bearer_token.rs | 27 + src/auth/minecraft/stages/login_redirect.rs | 8 + src/auth/minecraft/stages/mod.rs | 27 + src/auth/minecraft/stages/player_info.rs | 33 + src/auth/minecraft/stages/xbl_signin.rs | 55 ++ src/auth/minecraft/stages/xsts_token.rs | 57 ++ src/auth/mod.rs | 10 + src/auth/pats.rs | 12 + src/auth/templates/error.html | 24 + src/auth/templates/mod.rs | 66 ++ src/auth/templates/success.html | 16 + src/clickhouse/mod.rs | 102 +++ src/database/models/flow_item.rs | 6 +- src/main.rs | 114 +++- src/models/analytics.rs | 111 ++++ src/models/ids.rs | 1 - src/models/mod.rs | 1 + src/models/pats.rs | 8 +- src/queue/analytics.rs | 70 +++ src/queue/maxmind.rs | 82 +++ src/queue/mod.rs | 3 + src/queue/payouts.rs | 212 ++++++- src/queue/socket.rs | 15 + src/ratelimit/memory.rs | 11 - src/routes/index.rs | 3 +- src/routes/mod.rs | 27 +- src/routes/v2/admin.rs | 392 ++---------- src/routes/v2/analytics.rs | 248 ++++++++ src/routes/v2/mod.rs | 5 + src/routes/v3/mod.rs | 7 +- src/search/indexing/mod.rs | 26 +- src/util/cors.rs | 10 + src/util/mod.rs | 1 + 49 files changed, 2636 insertions(+), 743 deletions(-) create mode 100644 COPYING.md create mode 100644 assets/auth/style.css create mode 100644 assets/favicon.ico create mode 100644 assets/logo.svg create mode 100644 src/auth/minecraft/auth.rs create mode 100644 src/auth/minecraft/login.rs create mode 100644 src/auth/minecraft/mod.rs create mode 100644 src/auth/minecraft/refresh.rs create mode 100644 src/auth/minecraft/socket.rs create mode 100644 src/auth/minecraft/stages/access_token.rs create mode 100644 src/auth/minecraft/stages/bearer_token.rs create mode 100644 src/auth/minecraft/stages/login_redirect.rs create mode 100644 src/auth/minecraft/stages/mod.rs create mode 100644 src/auth/minecraft/stages/player_info.rs create mode 100644 src/auth/minecraft/stages/xbl_signin.rs create mode 100644 src/auth/minecraft/stages/xsts_token.rs create mode 100644 src/auth/templates/error.html create mode 100644 src/auth/templates/mod.rs create mode 100644 src/auth/templates/success.html create mode 100644 src/clickhouse/mod.rs create mode 100644 src/models/analytics.rs create mode 100644 src/queue/analytics.rs create mode 100644 src/queue/maxmind.rs create mode 100644 src/queue/socket.rs create mode 100644 src/routes/v2/analytics.rs create mode 100644 src/util/cors.rs diff --git a/.env b/.env index a8a148596..16ee17199 100644 --- a/.env +++ b/.env @@ -44,13 +44,10 @@ VERSION_INDEX_INTERVAL=1800 RATE_LIMIT_IGNORE_IPS='["127.0.0.1"]' -WHITELISTED_MODPACK_DOMAINS='["cdn.modrinth.com", "edge.forgecdn.net", "github.com", "raw.githubusercontent.com"]' 
+WHITELISTED_MODPACK_DOMAINS='["cdn.modrinth.com", "github.com", "raw.githubusercontent.com"]' ALLOWED_CALLBACK_URLS='["localhost", ".modrinth.com", "127.0.0.1"]' -ARIADNE_ADMIN_KEY=feedbeef -ARIADNE_URL=https://staging-ariadne.modrinth.com/v1/ - PAYPAL_API_URL=https://api-m.sandbox.paypal.com/v1/ PAYPAL_CLIENT_ID=none PAYPAL_CLIENT_SECRET=none @@ -82,4 +79,15 @@ SITE_VERIFY_EMAIL_PATH=none SITE_RESET_PASSWORD_PATH=none BEEHIIV_PUBLICATION_ID=none -BEEHIIV_API_KEY=none \ No newline at end of file +BEEHIIV_API_KEY=none + +ANALYTICS_ALLOWED_ORIGINS='["http://127.0.0.1:3000", "http://localhost:3000", "https://modrinth.com", "https://www.modrinth.com", "*"]' + +CLICKHOUSE_URL=http:/localhost:8123 +CLICKHOUSE_USER=default +CLICKHOUSE_PASSWORD= +CLICKHOUSE_DATABASE=staging_ariadne + +MAXMIND_LICENSE_KEY=none + +PAYOUTS_BUDGET=100 diff --git a/COPYING.md b/COPYING.md new file mode 100644 index 000000000..8a720200e --- /dev/null +++ b/COPYING.md @@ -0,0 +1,14 @@ +# Copying + +The source code of the labrinth repository is licensed under the GNU Affero General Public License, Version 3 only, which is provided in the file [LICENSE](./LICENSE). However, some files listed below are licensed under a different license. + +## Modrinth logo + +Any files depicting the Modrinth branding, including the wrench-in-labyrinth logo, the landing image, and variations thereof, are licensed as follows: + +> All rights reserved. © 2020-2023 Rinth, Inc. + +This includes, but may not be limited to, the following files: + +- assets/logo.svg +- assets/favicon.ico \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index d0430a620..cb7802787 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -58,6 +58,29 @@ dependencies = [ "smallvec", ] +[[package]] +name = "actix-files" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d832782fac6ca7369a70c9ee9a20554623c5e51c76e190ad151780ebea1cf689" +dependencies = [ + "actix-http", + "actix-service", + "actix-utils", + "actix-web", + "askama_escape", + "bitflags 1.3.2", + "bytes", + "derive_more", + "futures-core", + "http-range", + "log", + "mime", + "mime_guess", + "percent-encoding", + "pin-project-lite", +] + [[package]] name = "actix-http" version = "3.3.1" @@ -261,6 +284,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "actix-ws" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "535aec173810be3ca6f25dd5b4d431ae7125d62000aa3cbae1ec739921b02cf3" +dependencies = [ + "actix-codec", + "actix-http", + "actix-web", + "futures-core", + "tokio", +] + [[package]] name = "actix_derive" version = "0.6.0" @@ -401,6 +437,12 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "askama_escape" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "619743e34b5ba4e9703bba34deac3427c72507c7159f5fd030aea8cac0cfe341" + [[package]] name = "async-channel" version = "1.8.0" @@ -664,6 +706,15 @@ dependencies = [ "alloc-stdlib", ] +[[package]] +name = "bstr" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" +dependencies = [ + "memchr", +] + [[package]] name = "build_id" version = "0.2.1" @@ -815,6 +866,51 @@ dependencies = [ "inout", ] +[[package]] +name = "clickhouse" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"33816ee1fea4f60d97abfeb773b9b566ae85f8bfa891758d00a1fb1e5a606591" +dependencies = [ + "bstr", + "bytes", + "clickhouse-derive", + "clickhouse-rs-cityhash-sys", + "futures", + "hyper", + "hyper-tls", + "lz4", + "sealed", + "serde", + "static_assertions", + "thiserror", + "time 0.3.22", + "tokio", + "url", + "uuid 1.4.0", +] + +[[package]] +name = "clickhouse-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18af5425854858c507eec70f7deb4d5d8cec4216fcb086283a78872387281ea5" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 1.0.109", +] + +[[package]] +name = "clickhouse-rs-cityhash-sys" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4baf9d4700a28d6cb600e17ed6ae2b43298a5245f1f76b4eab63027ebfd592b9" +dependencies = [ + "cc", +] + [[package]] name = "color-thief" version = "0.2.2" @@ -1389,6 +1485,18 @@ dependencies = [ "simd-adler32", ] +[[package]] +name = "filetime" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall 0.2.16", + "windows-sys", +] + [[package]] name = "findshlibs" version = "0.10.2" @@ -1724,6 +1832,15 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "heck" version = "0.4.1" @@ -1806,6 +1923,12 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "http-range" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21dec9db110f5f872ed9699c3ecf50cf16f423502706ba5c72462e28d3157573" + [[package]] name = "httparse" version = "1.8.0" @@ -1981,6 +2104,15 @@ version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" +[[package]] +name = "ipnetwork" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4088d739b183546b239688ddbc79891831df421773df95e236daf7867866d355" +dependencies = [ + "serde", +] + [[package]] name = "is-terminal" version = "0.4.9" @@ -2097,9 +2229,11 @@ version = "2.7.0" dependencies = [ "actix", "actix-cors", + "actix-files", "actix-multipart", "actix-rt", "actix-web", + "actix-ws", "argon2", "async-trait", "base64 0.21.2", @@ -2107,20 +2241,25 @@ dependencies = [ "bytes", "censor", "chrono", + "clickhouse", "color-thief", "dashmap", "deadpool-redis", "dotenvy", "env_logger", + "flate2", "futures", "futures-timer", "hex", "hmac 0.11.0", + "hyper", + "hyper-tls", "image", "itertools 0.11.0", "lazy_static", "lettre", "log", + "maxminddb", "meilisearch-sdk", "rand", "rand_chacha", @@ -2138,12 +2277,14 @@ dependencies = [ "sha2 0.9.9", "spdx", "sqlx", + "tar", "thiserror", "tokio", "tokio-stream", "totp-rs", "url", "urlencoding", + "uuid 1.4.0", "validator", "woothee", "xml-rs", @@ -2296,6 +2437,26 @@ dependencies = [ "linked-hash-map", ] +[[package]] +name = "lz4" +version = "1.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e9e2dd86df36ce760a60f6ff6ad526f7ba1f14ba0356f8254fb6905e6494df1" +dependencies = [ + "libc", + "lz4-sys", +] + +[[package]] +name = "lz4-sys" +version = 
"1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57d27b317e207b10f69f5e75494119e391a96f48861ae870d1da6edac98ca900" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "mach" version = "0.3.2" @@ -2311,6 +2472,18 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" +[[package]] +name = "maxminddb" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe2ba61113f9f7a9f0e87c519682d39c43a6f3f79c2cc42c3ba3dda83b1fa334" +dependencies = [ + "ipnetwork", + "log", + "memchr", + "serde", +] + [[package]] name = "maybe-async" version = "0.2.7" @@ -3514,6 +3687,18 @@ version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" +[[package]] +name = "sealed" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b5e421024b5e5edfbaa8e60ecf90bda9dbffc602dbb230e6028763f85f0c68c" +dependencies = [ + "heck 0.3.3", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "security-framework" version = "2.9.1" @@ -3705,6 +3890,17 @@ dependencies = [ "syn 2.0.23", ] +[[package]] +name = "serde_derive_internals" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85bf8229e7920a9f636479437026331ce11aa132b4dde37d121944a44d6e5f3c" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "serde_json" version = "1.0.100" @@ -3992,7 +4188,7 @@ checksum = "9966e64ae989e7e575b19d7265cb79d7fc3cbbdf179835cb0d716f294c2049c9" dependencies = [ "dotenvy", "either", - "heck", + "heck 0.4.1", "hex", "once_cell", "proc-macro2", @@ -4111,6 +4307,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "tar" +version = "0.4.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec96d2ffad078296368d46ff1cb309be1c23c513b4ab0e22a45de0185275ac96" +dependencies = [ + "filetime", + "libc", + "xattr", +] + [[package]] name = "tempfile" version = "3.6.0" @@ -4234,9 +4441,21 @@ dependencies = [ "pin-project-lite", "signal-hook-registry", "socket2", + "tokio-macros", "windows-sys", ] +[[package]] +name = "tokio-macros" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.23", +] + [[package]] name = "tokio-native-tls" version = "0.3.1" @@ -4488,6 +4707,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" dependencies = [ "getrandom", + "rand", "serde", ] @@ -4843,6 +5063,15 @@ dependencies = [ "tap", ] +[[package]] +name = "xattr" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d1526bbe5aaeb5eb06885f4d987bcdfa5e23187055de9b83fe00156a821fabc" +dependencies = [ + "libc", +] + [[package]] name = "xml-rs" version = "0.8.15" @@ -4865,7 +5094,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ab8bd5c76eebb8380b26833d30abddbdd885b00dd06178412e0d51d5bfc221f" dependencies = [ - "heck", + "heck 0.4.1", "log", 
"proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index c22947347..755adc38f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,6 +16,8 @@ actix-web = "4.3.1" actix-rt = "2.8.0" actix-multipart = "0.6.0" actix-cors = "0.6.4" +actix-ws = "0.2.5" +actix-files = "0.6.2" tokio = { version = "1.29.1", features = ["sync"] } tokio-stream = "0.1.14" @@ -29,6 +31,8 @@ lazy_static = "1.4.0" meilisearch-sdk = "0.22.0" rust-s3 = "0.33.0" reqwest = { version = "0.11.18", features = ["json", "multipart"] } +hyper = { version = "0.14", features = ["full"] } +hyper-tls = "0.5.0" serde_json = "1.0" serde = { version = "1.0", features = ["derive"] } @@ -72,6 +76,12 @@ sqlx = { version = "0.6.3", features = ["offline", "runtime-tokio-rustls", "post rust_decimal = { version = "1.30.0", features = ["serde-with-float", "serde-with-str"] } redis = { version = "0.23.0", features = ["tokio-comp", "ahash", "r2d2"]} deadpool-redis = "0.12.0" +clickhouse = { version = "0.11.2", features = ["uuid", "time"] } +uuid = { version = "1.2.2", features = ["v4", "fast-rng", "serde"] } + +maxminddb = "0.23.0" +flate2 = "1.0.25" +tar = "0.4.38" sentry = { version = "0.31.5", features = ["profiling"] } sentry-actix = "0.31.5" diff --git a/assets/auth/style.css b/assets/auth/style.css new file mode 100644 index 000000000..328335885 --- /dev/null +++ b/assets/auth/style.css @@ -0,0 +1,59 @@ +:root { + --color-bg: #16181c; + --color-fg: #b0bac5; + --color-section-bg: #26292f; + + --content-width: 30%; + --content-max-width: 300px; + --content-padding: 1.5rem; + --edge-rounding: 1rem; +} + +html, body { + height: 100%; + overflow: hidden; +} + +body { + color: var(--color-fg); + background-color: var(--color-bg); + display: flex; + justify-content: center; + align-items: center; + font-family: Inter, -apple-system, BlinkMacSystemFont, 'Segoe UI', Helvetica Neue, Helvetica, + Oxygen, Ubuntu, Roboto, Cantarell, Fira Sans, Droid Sans, 'Apple Color Emoji', 'Segoe UI Emoji', + Arial, sans-serif; +} + +.content { + background-color: var(--color-section-bg); + width: var(--content-width); + max-width: var(--content-max-width); + border-radius: var(--edge-rounding); + padding: var(--content-padding); + justify-content: center; + align-items: center; + box-sizing: border-box; +} + +.content h2 { + margin-bottom: 0; +} + +.logo { + display: block; + width: 100%; + margin-left: auto; + margin-right: auto; + margin-bottom: 2rem; + border-radius: 1.5rem; +} + +a { + color: #4f9cff; + text-decoration: underline; +} + +a:visited { + color: #4f9cff +} \ No newline at end of file diff --git a/assets/favicon.ico b/assets/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..c2ccbab9054b4efd64d26d8d5439fd980ac5e3b4 GIT binary patch literal 24838 zcmc(n2e@5Tm4HtO5K1I;qLde;h*(i6%1EMq6p^ukh=LkKRIm@qFpi%I7NiJ>qBsg; z)L}poQ4t*#@ZkV5fG8qWfroU0(u)L=`Tv` zR@J0x`D*pmE98voy~|hCPE}P+pT4+!_{vrFed=b+SX@38yKPphs#&uZm$!IjRo$>r zRc%2Zn((l=9#P?T+~0xMu3r^wMEL`723!t5g0)~OJOfJmC@)XjKJYtO2#adYLQaO) z!X>abXdC^MmOl=Ufc>t3qhNp78nihCYQ9F+zyi?6Tfln9 zYtH(<6W$EwyD1z7?}Hs+C0GHD18pyZ4ZwDNJ^|K01gFBU>T=Y5k@BiwpDRK8_rmF* z%_(p^+*6mWy8+Gz^R;{onA@>+`84FEwf-)o<8dyW$GhPbunO!4{|ny+`@aYDcQIU2 z>+eR|-}SNWPcQ=3iMhKE)Xh8kf3Mcf$+n$feqDDaa#vUm!jCfM(ERn~TwDdNh3)2h zKN#Zym<`e2oUVbGKj-r|pzW7n3QU6C;LC6k90>Mb39QrBamPN{3*B{l99)CQ!#-2N zy=GkdYV#oM36Ya?`Z3UlzOM$?dtZ1OOot1>Hv1j~_KE&V%kDk%GKRLU`}Qymtas17 z3Ch0Nk7IHzPX%MT9?r)saIUiLxbtA56`nI3P1Os*^Q!a58P)Hv%z?6d^kwi-m$Yu2BgKL-l`Aob1^Wbne034rw-0KS<>^(D{3r&oqn|Jo@Je&=$ 
[... base85-encoded binary patch data for assets/favicon.ico omitted ...]
z^7_R(hV=8pUeIg&uJQihdros1xg2e-p=XBA{KoK1@N<0E`1KierHpvXLM~b~R!v&upFbpk;Ku_d z&7HYu(WA-{oB8#DDT@}3Dkm>mG*?Lvg7b?6ixwjJWu7^7Q_iHDa)jR@sQACKr}=vX z*!VjLGm-ojLZ>fs^5{(dnOjYpJLz5}zi}{~ko2pj*1wYwKlu|sebD@RLdQzW9)C0H z`XpDf>V=)8MwQd5Nn^FVxB76Wyl}Cs7L8HnXBTEVc;u8?PF_42j9}DBVDv~co&%H6 zXCkXHB%+#KRXZs0g`8V$S5^O{^#&Cx)(mboli<%bbpELxXLwV_=*?XEI}TGGB`c&o zN6`KK?-A!2>0B!3xDLq9x@_Rwjyip;)0lEvXC0?jtRo$!Vy@xT&Vr9stFf&A59o>@ A + + + \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index c88a72bc6..1fc25bca1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,6 +27,10 @@ services: - '6379:6379' volumes: - redis-data:/data + clickhouse: + image: clickhouse/clickhouse-server + ports: + - "8123:8123" volumes: meilisearch-data: db-data: diff --git a/sqlx-data.json b/sqlx-data.json index ebd22959b..cdf3feb57 100644 --- a/sqlx-data.json +++ b/sqlx-data.json @@ -960,6 +960,26 @@ }, "query": "\n SELECT m.id FROM mods m\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND tm.accepted = TRUE\n WHERE tm.user_id = $1\n ORDER BY m.downloads DESC\n " }, + "2265be690ec4c6b03fd142bb8b81a5ebec67d09a08c05e9dba122f5acf2fc98a": { + "describe": { + "columns": [ + { + "name": "exists", + "ordinal": 0, + "type_info": "Bool" + } + ], + "nullable": [ + null + ], + "parameters": { + "Left": [ + "Timestamptz" + ] + } + }, + "query": "SELECT EXISTS(SELECT 1 FROM payouts_values WHERE created = $1)" + }, "232d7d0319c20dd5fff29331b067d6c6373bcff761a77958a2bb5f59068a83a5": { "describe": { "columns": [], @@ -994,38 +1014,6 @@ }, "query": "\n SELECT id FROM loaders\n WHERE loader = $1\n " }, - "2534464b06d567078bcfaa94e0c5e37729db111f5b46c4035cabe72634104b2e": { - "describe": { - "columns": [ - { - "name": "id", - "ordinal": 0, - "type_info": "Int8" - }, - { - "name": "user_id", - "ordinal": 1, - "type_info": "Int8" - }, - { - "name": "payouts_split", - "ordinal": 2, - "type_info": "Numeric" - } - ], - "nullable": [ - false, - false, - false - ], - "parameters": { - "Left": [ - "Int8Array" - ] - } - }, - "query": "\n SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split\n FROM mods m\n INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE\n WHERE m.id = ANY($1)\n " - }, "294f264382ad55475b51776cd5d306c4867e8e6966ab79921bba69dc023f8337": { "describe": { "columns": [], @@ -3787,38 +3775,6 @@ }, "query": "\n SELECT tm.id, tm.team_id, tm.user_id, tm.role, tm.permissions, tm.accepted, tm.payouts_split, tm.ordering FROM mods m\n INNER JOIN team_members tm ON tm.team_id = m.team_id AND user_id = $2 AND accepted = TRUE\n WHERE m.id = $1\n " }, - "a3e27b758ca441fa82f6bcd42915b92fb23a7db19a7eb27db7ed92eeba4b566e": { - "describe": { - "columns": [ - { - "name": "mod_id", - "ordinal": 0, - "type_info": "Int8" - }, - { - "name": "id", - "ordinal": 1, - "type_info": "Int8" - }, - { - "name": "times_depended", - "ordinal": 2, - "type_info": "Int8" - } - ], - "nullable": [ - false, - false, - null - ], - "parameters": { - "Left": [ - "Int8Array" - ] - } - }, - "query": "\n SELECT mv.mod_id, m.id, COUNT(m.id) times_depended FROM versions mv\n INNER JOIN dependencies d ON d.dependent_id = mv.id\n INNER JOIN versions v ON d.dependency_id = v.id\n INNER JOIN mods m ON v.mod_id = m.id OR d.mod_dependency_id = m.id\n WHERE mv.mod_id = ANY($1)\n group by mv.mod_id, m.id;\n " - }, "a440cb2567825c3cc540c9b0831ee840f6e2a6394e89a851b83fc78220594cf2": { "describe": { "columns": [], @@ -4163,6 +4119,39 @@ }, "query": "\n INSERT INTO mods (\n id, team_id, title, description, body,\n 
published, downloads, icon_url, issues_url,\n source_url, wiki_url, status, requested_status, discord_url,\n client_side, server_side, license_url, license,\n slug, project_type, color, monetization_status\n )\n VALUES (\n $1, $2, $3, $4, $5,\n $6, $7, $8, $9,\n $10, $11, $12, $13, $14,\n $15, $16, $17, $18,\n LOWER($19), $20, $21, $22\n )\n " }, + "b768d9db6c785d6a701324ea746794d33e94121403163a774b6ef775640fd3d3": { + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Int8" + }, + { + "name": "user_id", + "ordinal": 1, + "type_info": "Int8" + }, + { + "name": "payouts_split", + "ordinal": 2, + "type_info": "Numeric" + } + ], + "nullable": [ + false, + false, + false + ], + "parameters": { + "Left": [ + "Int8Array", + "Text" + ] + } + }, + "query": "\n SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split\n FROM mods m\n INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE\n WHERE m.id = ANY($1) AND m.monetization_status = $2\n " + }, "b903ac4e686ef85ba28d698c668da07860e7f276b261d8f2cebb74e73b094970": { "describe": { "columns": [], @@ -5550,45 +5539,6 @@ }, "query": "\n UPDATE versions\n SET featured = $1\n WHERE (id = $2)\n " }, - "e6f1fe134d6af8f0ff888459275fdefd250b2d776961ad3452ed0684f4fdc1b9": { - "describe": { - "columns": [ - { - "name": "id", - "ordinal": 0, - "type_info": "Int8" - }, - { - "name": "user_id", - "ordinal": 1, - "type_info": "Int8" - }, - { - "name": "payouts_split", - "ordinal": 2, - "type_info": "Numeric" - }, - { - "name": "project_type", - "ordinal": 3, - "type_info": "Varchar" - } - ], - "nullable": [ - false, - false, - false, - false - ], - "parameters": { - "Left": [ - "Int8Array", - "Text" - ] - } - }, - "query": "\n SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split, pt.name project_type\n FROM mods m\n INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE\n INNER JOIN project_types pt ON pt.id = m.project_type\n WHERE m.id = ANY($1) AND m.monetization_status = $2\n " - }, "e6f5a150cbd3bd6b9bde9e5cdad224a45c96d678b69ec12508e81246710e3f6d": { "describe": { "columns": [ diff --git a/src/auth/flows.rs b/src/auth/flows.rs index e908faa38..44948c47d 100644 --- a/src/auth/flows.rs +++ b/src/auth/flows.rs @@ -10,12 +10,14 @@ use crate::models::pats::Scopes; use crate::models::users::{Badges, Role}; use crate::parse_strings_from_var; use crate::queue::session::AuthQueue; +use crate::queue::socket::ActiveSockets; use crate::routes::ApiError; use crate::util::captcha::check_turnstile_captcha; use crate::util::ext::{get_image_content_type, get_image_ext}; use crate::util::validate::{validation_errors_to_string, RE_URL_SAFE}; -use actix_web::web::{scope, Data, Query, ServiceConfig}; +use actix_web::web::{scope, Data, Payload, Query, ServiceConfig}; use actix_web::{delete, get, patch, post, web, HttpRequest, HttpResponse}; +use actix_ws::Closed; use argon2::password_hash::SaltString; use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier}; use chrono::{Duration, Utc}; @@ -27,11 +29,13 @@ use serde::{Deserialize, Serialize}; use sqlx::postgres::PgPool; use std::collections::HashMap; use std::sync::Arc; +use tokio::sync::RwLock; use validator::Validate; pub fn config(cfg: &mut ServiceConfig) { cfg.service( scope("auth") + .service(ws_init) .service(init) .service(auth_callback) .service(delete_auth_provider) @@ -46,7 +50,9 @@ pub fn config(cfg: &mut ServiceConfig) { .service(resend_verify_email) .service(set_email) .service(verify_email) - 
.service(subscribe_newsletter), + .service(subscribe_newsletter) + .service(login_from_minecraft) + .configure(super::minecraft::config), ); } @@ -73,6 +79,167 @@ pub struct TempUser { pub name: Option, } +impl TempUser { + async fn create_account( + self, + provider: AuthProvider, + transaction: &mut sqlx::Transaction<'_, sqlx::Postgres>, + client: &PgPool, + file_host: &Arc, + redis: &deadpool_redis::Pool, + ) -> Result { + if let Some(email) = &self.email { + if crate::database::models::User::get_email(email, client) + .await? + .is_some() + { + return Err(AuthenticationError::DuplicateUser); + } + } + + let user_id = crate::database::models::generate_user_id(transaction).await?; + + let mut username_increment: i32 = 0; + let mut username = None; + + while username.is_none() { + let test_username = format!( + "{}{}", + self.username, + if username_increment > 0 { + username_increment.to_string() + } else { + "".to_string() + } + ); + + let new_id = crate::database::models::User::get(&test_username, client, redis).await?; + + if new_id.is_none() { + username = Some(test_username); + } else { + username_increment += 1; + } + } + + let avatar_url = if let Some(avatar_url) = self.avatar_url { + let cdn_url = dotenvy::var("CDN_URL")?; + + let res = reqwest::get(&avatar_url).await?; + let headers = res.headers().clone(); + + let img_data = if let Some(content_type) = headers + .get(reqwest::header::CONTENT_TYPE) + .and_then(|ct| ct.to_str().ok()) + { + get_image_ext(content_type).map(|ext| (ext, content_type)) + } else if let Some(ext) = avatar_url.rsplit('.').next() { + get_image_content_type(ext).map(|content_type| (ext, content_type)) + } else { + None + }; + + if let Some((ext, content_type)) = img_data { + let bytes = res.bytes().await?; + let hash = sha1::Sha1::from(&bytes).hexdigest(); + + let upload_data = file_host + .upload_file( + content_type, + &format!( + "user/{}/{}.{}", + crate::models::users::UserId::from(user_id), + hash, + ext + ), + bytes, + ) + .await?; + + Some(format!("{}/{}", cdn_url, upload_data.file_name)) + } else { + None + } + } else { + None + }; + + if let Some(username) = username { + crate::database::models::User { + id: user_id, + github_id: if provider == AuthProvider::GitHub { + Some( + self.id + .clone() + .parse() + .map_err(|_| AuthenticationError::InvalidCredentials)?, + ) + } else { + None + }, + discord_id: if provider == AuthProvider::Discord { + Some( + self.id + .parse() + .map_err(|_| AuthenticationError::InvalidCredentials)?, + ) + } else { + None + }, + gitlab_id: if provider == AuthProvider::GitLab { + Some( + self.id + .parse() + .map_err(|_| AuthenticationError::InvalidCredentials)?, + ) + } else { + None + }, + google_id: if provider == AuthProvider::Google { + Some(self.id.clone()) + } else { + None + }, + steam_id: if provider == AuthProvider::Steam { + Some( + self.id + .parse() + .map_err(|_| AuthenticationError::InvalidCredentials)?, + ) + } else { + None + }, + microsoft_id: if provider == AuthProvider::Microsoft { + Some(self.id) + } else { + None + }, + password: None, + totp_secret: None, + username, + name: self.name, + email: self.email, + email_verified: true, + avatar_url, + bio: self.bio, + created: Utc::now(), + role: Role::Developer.to_string(), + badges: Badges::default(), + balance: Decimal::ZERO, + payout_wallet: None, + payout_wallet_type: None, + payout_address: None, + } + .insert(transaction) + .await?; + + Ok(user_id) + } else { + Err(AuthenticationError::InvalidCredentials) + } + } +} + impl AuthProvider { pub fn 
get_redirect_url(&self, state: String) -> Result { let self_addr = dotenvy::var("SELF_ADDR")?; @@ -771,7 +938,7 @@ pub async fn init( let state = Flow::OAuth { user_id, - url: info.url, + url: Some(info.url), provider: info.provider, } .insert(Duration::minutes(30), &redis) @@ -783,262 +950,286 @@ pub async fn init( .json(serde_json::json!({ "url": url }))) } +#[derive(Serialize, Deserialize)] +pub struct WsInit { + pub provider: AuthProvider, +} + +#[get("ws")] +pub async fn ws_init( + req: HttpRequest, + Query(info): Query, + body: Payload, + db: Data>, + redis: Data, +) -> Result { + let (res, session, _msg_stream) = actix_ws::handle(&req, body)?; + + async fn sock( + mut ws_stream: actix_ws::Session, + info: WsInit, + db: Data>, + redis: Data, + ) -> Result<(), Closed> { + let flow = Flow::OAuth { + user_id: None, + url: None, + provider: info.provider, + } + .insert(Duration::minutes(30), &redis) + .await; + + if let Ok(state) = flow { + if let Ok(url) = info.provider.get_redirect_url(state.clone()) { + ws_stream + .text(serde_json::json!({ "url": url }).to_string()) + .await?; + + let db = db.write().await; + db.auth_sockets.insert(state, ws_stream); + } + } + + Ok(()) + } + + let _ = sock(session, info, db, redis).await; + + Ok(res) +} + #[get("callback")] pub async fn auth_callback( req: HttpRequest, Query(query): Query>, + sockets: Data>, client: Data, file_host: Data>, redis: Data, -) -> Result { - let state = query - .get("state") - .ok_or_else(|| AuthenticationError::InvalidCredentials)?; +) -> Result { + let res = async move { + let state = query + .get("state") + .ok_or_else(|| AuthenticationError::InvalidCredentials)?.clone(); - let flow = Flow::get(state, &redis).await?; + let flow = Flow::get(&state, &redis).await?; - // Extract cookie header from request - if let Some(Flow::OAuth { - user_id, - provider, - url, - }) = flow - { - Flow::remove(state, &redis).await?; - - let token = provider.get_token(query).await?; - let oauth_user = provider.get_user(&token).await?; - - let user_id_opt = provider.get_user_id(&oauth_user.id, &**client).await?; - - let mut transaction = client.begin().await?; - if let Some(id) = user_id { - if user_id_opt.is_some() { - return Err(AuthenticationError::DuplicateUser); - } - - provider - .update_user_id(id, Some(&oauth_user.id), &mut transaction) - .await?; - - let user = crate::database::models::User::get_id(id, &**client, &redis).await?; - if let Some(email) = user.and_then(|x| x.email) { - send_email( - email, - "Authentication method added", - &format!("When logging into Modrinth, you can now log in using the {} authentication provider.", provider.as_str()), - "If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).", - None, - )?; - } - - crate::database::models::User::clear_caches(&[(id, None)], &redis).await?; - transaction.commit().await?; - - Ok(HttpResponse::TemporaryRedirect() - .append_header(("Location", &*url)) - .json(serde_json::json!({ "url": url }))) - } else { - let user_id = if let Some(user_id) = user_id_opt { - let user = crate::database::models::User::get_id(user_id, &**client, &redis) - .await? 
- .ok_or_else(|| AuthenticationError::InvalidCredentials)?; - - if user.totp_secret.is_some() { - let flow = Flow::Login2FA { user_id: user.id } - .insert(Duration::minutes(30), &redis) - .await?; - - let redirect_url = format!( - "{}{}error=2fa_required&flow={}", + // Extract cookie header from request + if let Some(Flow::OAuth { + user_id, + provider, url, - if url.contains('?') { "&" } else { "?" }, - flow - ); + }) = flow + { + Flow::remove(&state, &redis).await?; - return Ok(HttpResponse::TemporaryRedirect() - .append_header(("Location", &*redirect_url)) - .json(serde_json::json!({ "url": redirect_url }))); + let token = provider.get_token(query).await?; + let oauth_user = provider.get_user(&token).await?; + + let user_id_opt = provider.get_user_id(&oauth_user.id, &**client).await?; + + let mut transaction = client.begin().await?; + if let Some(id) = user_id { + if user_id_opt.is_some() { + return Err(AuthenticationError::DuplicateUser); } - user_id - } else { - if let Some(email) = &oauth_user.email { - if crate::database::models::User::get_email(email, &**client) - .await? - .is_some() - { - return Err(AuthenticationError::DuplicateUser); - } - } - - let user_id = crate::database::models::generate_user_id(&mut transaction).await?; - - let mut username_increment: i32 = 0; - let mut username = None; - - while username.is_none() { - let test_username = format!( - "{}{}", - oauth_user.username, - if username_increment > 0 { - username_increment.to_string() - } else { - "".to_string() - } - ); - - let new_id = - crate::database::models::User::get(&test_username, &**client, &redis) - .await?; - - if new_id.is_none() { - username = Some(test_username); - } else { - username_increment += 1; - } - } - - let avatar_url = if let Some(avatar_url) = oauth_user.avatar_url { - let cdn_url = dotenvy::var("CDN_URL")?; - - let res = reqwest::get(&avatar_url).await?; - let headers = res.headers().clone(); - - let img_data = if let Some(content_type) = headers - .get(reqwest::header::CONTENT_TYPE) - .and_then(|ct| ct.to_str().ok()) - { - get_image_ext(content_type).map(|ext| (ext, content_type)) - } else if let Some(ext) = avatar_url.rsplit('.').next() { - get_image_content_type(ext).map(|content_type| (ext, content_type)) - } else { - None - }; - - if let Some((ext, content_type)) = img_data { - let bytes = res.bytes().await?; - let hash = sha1::Sha1::from(&bytes).hexdigest(); - - let upload_data = file_host - .upload_file( - content_type, - &format!( - "user/{}/{}.{}", - crate::models::users::UserId::from(user_id), - hash, - ext - ), - bytes, - ) - .await?; - - Some(format!("{}/{}", cdn_url, upload_data.file_name)) - } else { - None - } - } else { - None - }; - - if let Some(username) = username { - crate::database::models::User { - id: user_id, - github_id: if provider == AuthProvider::GitHub { - Some( - oauth_user - .id - .clone() - .parse() - .map_err(|_| AuthenticationError::InvalidCredentials)?, - ) - } else { - None - }, - discord_id: if provider == AuthProvider::Discord { - Some( - oauth_user - .id - .parse() - .map_err(|_| AuthenticationError::InvalidCredentials)?, - ) - } else { - None - }, - gitlab_id: if provider == AuthProvider::GitLab { - Some( - oauth_user - .id - .parse() - .map_err(|_| AuthenticationError::InvalidCredentials)?, - ) - } else { - None - }, - google_id: if provider == AuthProvider::Google { - Some(oauth_user.id.clone()) - } else { - None - }, - steam_id: if provider == AuthProvider::Steam { - Some( - oauth_user - .id - .parse() - .map_err(|_| 
AuthenticationError::InvalidCredentials)?, - ) - } else { - None - }, - microsoft_id: if provider == AuthProvider::Microsoft { - Some(oauth_user.id) - } else { - None - }, - password: None, - totp_secret: None, - username, - name: oauth_user.name, - email: oauth_user.email, - email_verified: true, - avatar_url, - bio: oauth_user.bio, - created: Utc::now(), - role: Role::Developer.to_string(), - badges: Badges::default(), - balance: Decimal::ZERO, - payout_wallet: None, - payout_wallet_type: None, - payout_address: None, - } - .insert(&mut transaction) + provider + .update_user_id(id, Some(&oauth_user.id), &mut transaction) .await?; + let user = crate::database::models::User::get_id(id, &**client, &redis).await?; + if let Some(email) = user.and_then(|x| x.email) { + send_email( + email, + "Authentication method added", + &format!("When logging into Modrinth, you can now log in using the {} authentication provider.", provider.as_str()), + "If you did not make this change, please contact us immediately through our support channels on Discord or via email (support@modrinth.com).", + None, + )?; + } + + crate::database::models::User::clear_caches(&[(id, None)], &redis).await?; + transaction.commit().await?; + + if let Some(url) = url { + Ok(HttpResponse::TemporaryRedirect() + .append_header(("Location", &*url)) + .json(serde_json::json!({ "url": url }))) + } else { + Err(AuthenticationError::InvalidCredentials) + } + } else { + let user_id = if let Some(user_id) = user_id_opt { + let user = crate::database::models::User::get_id(user_id, &**client, &redis) + .await? + .ok_or_else(|| AuthenticationError::InvalidCredentials)?; + + if user.totp_secret.is_some() { + let flow = Flow::Login2FA { user_id: user.id } + .insert(Duration::minutes(30), &redis) + .await?; + + if let Some(url) = url { + let redirect_url = format!( + "{}{}error=2fa_required&flow={}", + url, + if url.contains('?') { "&" } else { "?" }, + flow + ); + + return Ok(HttpResponse::TemporaryRedirect() + .append_header(("Location", &*redirect_url)) + .json(serde_json::json!({ "url": redirect_url }))); + } else { + let mut ws_conn = { + let db = sockets.read().await; + + let mut x = db + .auth_sockets + .get_mut(&state) + .ok_or_else(|| AuthenticationError::SocketError)?; + + x.value_mut().clone() + }; + + ws_conn + .text( + serde_json::json!({ + "error": "2fa_required", + "flow": flow, + }).to_string() + ) + .await.map_err(|_| AuthenticationError::SocketError)?; + + let _ = ws_conn.close(None).await; + + return Ok(super::templates::Success { + icon: user.avatar_url.as_deref().unwrap_or("https://cdn-raw.modrinth.com/placeholder.svg"), + name: &user.username, + }.render()); + } + } + user_id } else { - return Err(AuthenticationError::InvalidCredentials); - } - }; + oauth_user.create_account(provider, &mut transaction, &client, &file_host, &redis).await? + }; - let session = issue_session(req, user_id, &mut transaction, &redis).await?; - transaction.commit().await?; + let session = issue_session(req, user_id, &mut transaction, &redis).await?; + transaction.commit().await?; - let redirect_url = format!( - "{}{}code={}{}", - url, - if url.contains('?') { '&' } else { '?' }, - session.session, - if user_id_opt.is_none() { - "&new_account=true" + if let Some(url) = url { + let redirect_url = format!( + "{}{}code={}{}", + url, + if url.contains('?') { '&' } else { '?' 
}, + session.session, + if user_id_opt.is_none() { + "&new_account=true" + } else { + "" + } + ); + + Ok(HttpResponse::TemporaryRedirect() + .append_header(("Location", &*redirect_url)) + .json(serde_json::json!({ "url": redirect_url }))) } else { - "" - } - ); + let user = crate::database::models::user_item::User::get_id( + user_id, + &**client, + &redis, + ) + .await?.ok_or_else(|| AuthenticationError::InvalidCredentials)?; - Ok(HttpResponse::TemporaryRedirect() - .append_header(("Location", &*redirect_url)) - .json(serde_json::json!({ "url": redirect_url }))) + let mut ws_conn = { + let db = sockets.read().await; + + let mut x = db + .auth_sockets + .get_mut(&state) + .ok_or_else(|| AuthenticationError::SocketError)?; + + x.value_mut().clone() + }; + + ws_conn + .text( + serde_json::json!({ + "code": session.session, + }).to_string() + ) + .await.map_err(|_| AuthenticationError::SocketError)?; + let _ = ws_conn.close(None).await; + + return Ok(super::templates::Success { + icon: user.avatar_url.as_deref().unwrap_or("https://cdn-raw.modrinth.com/placeholder.svg"), + name: &user.username, + }.render()); + } + } + } else { + Err::(AuthenticationError::InvalidCredentials) } + }.await; + + Ok(res?) +} + +#[derive(Deserialize)] +pub struct MinecraftLogin { + pub flow: String, +} + +#[post("login/minecraft")] +pub async fn login_from_minecraft( + req: HttpRequest, + client: Data, + file_host: Data>, + redis: Data, + login: web::Json, +) -> Result { + let flow = Flow::get(&login.flow, &redis).await?; + + // Extract cookie header from request + if let Some(Flow::MicrosoftLogin { + access_token: token, + }) = flow + { + let provider = AuthProvider::Microsoft; + let oauth_user = provider.get_user(&token).await?; + let user_id_opt = provider.get_user_id(&oauth_user.id, &**client).await?; + + let mut transaction = client.begin().await?; + + let user_id = if let Some(user_id) = user_id_opt { + let user = crate::database::models::User::get_id(user_id, &**client, &redis) + .await? + .ok_or_else(|| AuthenticationError::InvalidCredentials)?; + + if user.totp_secret.is_some() { + let flow = Flow::Login2FA { user_id: user.id } + .insert(Duration::minutes(30), &redis) + .await?; + + return Ok(HttpResponse::Ok().json(serde_json::json!({ + "error": "2fa_required", + "flow": flow + }))); + } + + user_id + } else { + oauth_user + .create_account(provider, &mut transaction, &client, &file_host, &redis) + .await? + }; + + let session = issue_session(req, user_id, &mut transaction, &redis).await?; + Ok(HttpResponse::Ok().json(serde_json::json!({ + "code": session.session + }))) } else { Err(AuthenticationError::InvalidCredentials) } diff --git a/src/auth/minecraft/auth.rs b/src/auth/minecraft/auth.rs new file mode 100644 index 000000000..33101ba49 --- /dev/null +++ b/src/auth/minecraft/auth.rs @@ -0,0 +1,154 @@ +//! Main authentication flow for Hydra +use crate::{auth::minecraft::stages, auth::templates, parse_var}; + +// use crate::db::RuntimeState; +use crate::database::models::flow_item::Flow; +use crate::queue::socket::ActiveSockets; +use actix_web::http::StatusCode; +use actix_web::{get, web, HttpResponse}; +use chrono::Duration; +use serde::Deserialize; +use serde_json::json; +use tokio::sync::RwLock; + +macro_rules! 
ws_conn_try { + ($ctx:literal $status:path, $res:expr => $ws_conn:expr) => { + match $res { + Ok(res) => res, + Err(err) => { + let error = format!("In {}: {err}", $ctx); + let render = super::Error::render_string(&error); + let _ = $ws_conn.text(render.clone()).await; + let _ = $ws_conn.close(None).await; + return Err(templates::ErrorPage { + code: $status, + message: render, + }); + } + } + }; +} + +#[derive(Deserialize)] +pub struct Query { + pub code: String, + pub state: String, +} + +#[get("callback")] +pub async fn route( + db: web::Data>, + info: web::Query, + redis: web::Data, +) -> Result { + let public_url = parse_var::("SELF_ADDR").unwrap_or(format!( + "http://{}", + parse_var::("BIND_ADDR").unwrap() + )); + let client_id = parse_var::("MICROSOFT_CLIENT_ID").unwrap(); + let client_secret = parse_var::("MICROSOFT_CLIENT_SECRET").unwrap(); + + let code = &info.code; + + let mut ws_conn = { + let db = db.read().await; + + let mut x = db + .auth_sockets + .get_mut(&info.state) + .ok_or_else(|| templates::ErrorPage { + code: StatusCode::BAD_REQUEST, + message: "Invalid state sent, you probably need to get a new websocket".to_string(), + })?; + + x.value_mut().clone() + }; + + let access_token = ws_conn_try!( + "OAuth token exchange" StatusCode::INTERNAL_SERVER_ERROR, + stages::access_token::fetch_token( + public_url, + code, + &client_id, + &client_secret, + ).await + => ws_conn + ); + + let stages::xbl_signin::XBLLogin { + token: xbl_token, + uhs, + } = ws_conn_try!( + "XBox Live token exchange" StatusCode::INTERNAL_SERVER_ERROR, + stages::xbl_signin::login_xbl(&access_token.access_token).await + => ws_conn + ); + + let xsts_response = ws_conn_try!( + "XSTS token exchange" StatusCode::INTERNAL_SERVER_ERROR, + stages::xsts_token::fetch_token(&xbl_token).await + => ws_conn + ); + + match xsts_response { + stages::xsts_token::XSTSResponse::Unauthorized(err) => { + let _ = ws_conn + .text(super::Error::render_string(&format!( + "Error getting XBox Live token: {err}" + ))) + .await; + let _ = ws_conn.close(None).await; + + Err(templates::ErrorPage { + code: StatusCode::FORBIDDEN, + message: err, + }) + } + stages::xsts_token::XSTSResponse::Success { token: xsts_token } => { + let bearer_token = &ws_conn_try!( + "Bearer token flow" StatusCode::INTERNAL_SERVER_ERROR, + stages::bearer_token::fetch_bearer(&xsts_token, &uhs) + .await + => ws_conn + ); + + let player_info = &ws_conn_try!( + "No Minecraft account for profile. Make sure you own the game and have set a username through the official Minecraft launcher." StatusCode::BAD_REQUEST, + stages::player_info::fetch_info(bearer_token) + .await + => ws_conn + ); + + let flow = &ws_conn_try!( + "Error creating microsoft login request flow." StatusCode::INTERNAL_SERVER_ERROR, + Flow::MicrosoftLogin { + access_token: bearer_token.clone(), + } + .insert(Duration::hours(1), &redis) + .await + => ws_conn + ); + + ws_conn + .text( + json!({ + "token": bearer_token, + "refresh_token": &access_token.refresh_token, + "expires_after": 86400, + "flow": flow, + }).to_string() + ) + .await.map_err(|_| templates::ErrorPage { + code: StatusCode::BAD_REQUEST, + message: "Failed to send login details to launcher. 
Try restarting the login process!".to_string(), + })?; + let _ = ws_conn.close(None).await; + + Ok(templates::Success { + name: &player_info.name, + icon: &format!("https://mc-heads.net/avatar/{}/128", &player_info.id), + } + .render()) + } + } +} diff --git a/src/auth/minecraft/login.rs b/src/auth/minecraft/login.rs new file mode 100644 index 000000000..d61ea14e4 --- /dev/null +++ b/src/auth/minecraft/login.rs @@ -0,0 +1,35 @@ +//! Login route for Hydra, redirects to the Microsoft login page before going to the redirect route +use crate::{auth::minecraft::stages::login_redirect, auth::templates, parse_var}; +use actix_web::http::StatusCode; +use actix_web::{get, web, HttpResponse}; +use serde::{Deserialize, Serialize}; + +#[derive(Deserialize)] +pub struct Query { + pub id: Option, +} + +#[derive(Serialize)] +pub struct AuthorizationInit { + pub url: String, +} + +#[get("init")] +pub async fn route(info: web::Query) -> Result { + let conn_id = info.0.id.ok_or_else(|| templates::ErrorPage { + code: StatusCode::BAD_REQUEST, + message: "No socket ID provided (open a web socket at the / route for one)".to_string(), + })?; + + let public_url = parse_var::("SELF_ADDR").unwrap_or(format!( + "http://{}", + parse_var::("BIND_ADDR").unwrap() + )); + let client_id = parse_var::("MICROSOFT_CLIENT_ID").unwrap(); + + let url = login_redirect::get_url(&public_url, &conn_id, &client_id); + + Ok(HttpResponse::TemporaryRedirect() + .append_header(("Location", &*url)) + .json(AuthorizationInit { url })) +} diff --git a/src/auth/minecraft/mod.rs b/src/auth/minecraft/mod.rs new file mode 100644 index 000000000..98d11c998 --- /dev/null +++ b/src/auth/minecraft/mod.rs @@ -0,0 +1,60 @@ +mod auth; +mod login; +mod refresh; +mod socket; +mod stages; + +use actix_web::http::StatusCode; +use actix_web::web::{scope, ServiceConfig}; +use actix_web::HttpResponse; +use serde_json::json; +use std::fmt::{Display, Formatter}; + +/// Error message +#[derive(Debug)] +pub struct Error { + pub code: StatusCode, + pub reason: String, +} + +impl Error { + pub fn render_string(reason: &str) -> String { + json!({ "error": reason }).to_string() + } +} + +impl Display for Error { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + json!({ + "error": self.reason + }) + )?; + + Ok(()) + } +} + +impl actix_web::ResponseError for Error { + fn status_code(&self) -> StatusCode { + self.code + } + + fn error_response(&self) -> HttpResponse { + HttpResponse::build(self.code).json(json!({ + "error": self.reason + })) + } +} + +pub fn config(cfg: &mut ServiceConfig) { + cfg.service( + scope("minecraft") + .service(auth::route) + .service(login::route) + .service(refresh::route) + .service(socket::route), + ); +} diff --git a/src/auth/minecraft/refresh.rs b/src/auth/minecraft/refresh.rs new file mode 100644 index 000000000..6b269f94e --- /dev/null +++ b/src/auth/minecraft/refresh.rs @@ -0,0 +1,72 @@ +//! 
Refresh token route +use super::stages; +use crate::parse_var; +use actix_web::http::StatusCode; +use actix_web::{post, web, HttpResponse}; +use serde::Deserialize; +use serde_json::json; + +#[derive(Deserialize)] +pub struct Body { + refresh_token: String, +} + +#[post("refresh")] +pub async fn route(body: web::Json) -> Result { + let public_url = parse_var::("SELF_ADDR").unwrap_or(format!( + "http://{}", + parse_var::("BIND_ADDR").unwrap() + )); + let client_id = parse_var::("MICROSOFT_CLIENT_ID").unwrap(); + let client_secret = parse_var::("MICROSOFT_CLIENT_SECRET").unwrap(); + + let access_token = stages::access_token::refresh_token( + &public_url, + &body.refresh_token, + &client_id, + &client_secret, + ) + .await + .map_err(|_| super::Error { + code: StatusCode::INTERNAL_SERVER_ERROR, + reason: "Error with OAuth token exchange".to_string(), + })?; + + let stages::xbl_signin::XBLLogin { + token: xbl_token, + uhs, + } = stages::xbl_signin::login_xbl(&access_token.access_token) + .await + .map_err(|_| super::Error { + code: StatusCode::INTERNAL_SERVER_ERROR, + reason: "Error with XBox Live token exchange".to_string(), + })?; + + let xsts_response = stages::xsts_token::fetch_token(&xbl_token) + .await + .map_err(|_| super::Error { + code: StatusCode::INTERNAL_SERVER_ERROR, + reason: "Error with XSTS token exchange".to_string(), + })?; + + match xsts_response { + stages::xsts_token::XSTSResponse::Unauthorized(err) => Err(super::Error { + code: StatusCode::UNAUTHORIZED, + reason: format!("Error getting XBox Live token: {err}"), + }), + stages::xsts_token::XSTSResponse::Success { token: xsts_token } => { + let bearer_token = stages::bearer_token::fetch_bearer(&xsts_token, &uhs) + .await + .map_err(|_| super::Error { + code: StatusCode::INTERNAL_SERVER_ERROR, + reason: "Error with Bearer token flow".to_string(), + })?; + + Ok(HttpResponse::Ok().json(&json!({ + "token": bearer_token, + "refresh_token": &access_token.refresh_token, + "expires_after": 86400 + }))) + } + } +} diff --git a/src/auth/minecraft/socket.rs b/src/auth/minecraft/socket.rs new file mode 100644 index 000000000..2fd209b8a --- /dev/null +++ b/src/auth/minecraft/socket.rs @@ -0,0 +1,40 @@ +use crate::database::models::flow_item::Flow; +use crate::queue::socket::ActiveSockets; +use actix_web::web::Payload; +use actix_web::{get, web, HttpRequest, HttpResponse}; +use actix_ws::{Closed, Session}; +use chrono::Duration; +use tokio::sync::RwLock; + +#[get("ws")] +pub async fn route( + req: HttpRequest, + body: Payload, + db: web::Data>, + redis: web::Data, +) -> Result { + let (res, session, _msg_stream) = actix_ws::handle(&req, body)?; + let _ = sock(session, db, redis).await; + + Ok(res) +} + +async fn sock( + mut ws_stream: Session, + db: web::Data>, + redis: web::Data, +) -> Result<(), Closed> { + if let Ok(state) = Flow::MinecraftAuth + .insert(Duration::minutes(30), &redis) + .await + { + ws_stream + .text(serde_json::json!({ "login_code": state }).to_string()) + .await?; + + let db = db.write().await; + db.auth_sockets.insert(state, ws_stream); + } + + Ok(()) +} diff --git a/src/auth/minecraft/stages/access_token.rs b/src/auth/minecraft/stages/access_token.rs new file mode 100644 index 000000000..59989b67f --- /dev/null +++ b/src/auth/minecraft/stages/access_token.rs @@ -0,0 +1,65 @@ +//! 
Get access token from code +use serde::Deserialize; +use std::collections::HashMap; + +const OAUTH_TOKEN_URL: &str = "https://login.live.com/oauth20_token.srf"; + +#[derive(Deserialize)] +pub struct Tokens { + pub access_token: String, + pub refresh_token: String, +} + +pub async fn fetch_token( + public_uri: String, + code: &str, + client_id: &str, + client_secret: &str, +) -> Result { + let redirect_uri = format!("{}/v2/auth/minecraft/callback", public_uri); + + let mut params = HashMap::new(); + params.insert("client_id", client_id); + params.insert("client_secret", client_secret); + params.insert("code", code); + params.insert("grant_type", "authorization_code"); + params.insert("redirect_uri", redirect_uri.as_str()); + + let client = reqwest::Client::new(); + let result = client + .post(OAUTH_TOKEN_URL) + .form(¶ms) + .send() + .await? + .json::() + .await?; + + Ok(result) +} + +pub async fn refresh_token( + public_uri: &str, + refresh_token: &str, + client_id: &str, + client_secret: &str, +) -> Result { + let redirect_uri = format!("{}/v2/auth/minecraft/callback", public_uri); + + let mut params = HashMap::new(); + params.insert("client_id", client_id); + params.insert("client_secret", client_secret); + params.insert("refresh_token", refresh_token); + params.insert("grant_type", "refresh_token"); + params.insert("redirect_uri", &redirect_uri); + + let client = reqwest::Client::new(); + let result = client + .post(OAUTH_TOKEN_URL) + .form(¶ms) + .send() + .await? + .json::() + .await?; + + Ok(result) +} diff --git a/src/auth/minecraft/stages/bearer_token.rs b/src/auth/minecraft/stages/bearer_token.rs new file mode 100644 index 000000000..5f0c1aadd --- /dev/null +++ b/src/auth/minecraft/stages/bearer_token.rs @@ -0,0 +1,27 @@ +//! Minecraft bearer token +use crate::auth::AuthenticationError; +use serde_json::json; + +const MCSERVICES_AUTH_URL: &str = "https://api.minecraftservices.com/launcher/login"; + +pub async fn fetch_bearer(token: &str, uhs: &str) -> Result { + let client = reqwest::Client::new(); + let body = client + .post(MCSERVICES_AUTH_URL) + .json(&json!({ + "xtoken": format!("XBL3.0 x={};{}", uhs, token), + "platform": "PC_LAUNCHER" + })) + .send() + .await? + .text() + .await?; + + serde_json::from_str::(&body)? + .get("access_token") + .and_then(serde_json::Value::as_str) + .map(String::from) + .ok_or(AuthenticationError::Custom( + "Response didn't contain valid bearer token".to_string(), + )) +} diff --git a/src/auth/minecraft/stages/login_redirect.rs b/src/auth/minecraft/stages/login_redirect.rs new file mode 100644 index 000000000..357a4dd07 --- /dev/null +++ b/src/auth/minecraft/stages/login_redirect.rs @@ -0,0 +1,8 @@ +//! Login redirect step +pub fn get_url(public_uri: &str, conn_id: &str, client_id: &str) -> String { + format!( + "https://login.live.com/oauth20_authorize.srf?client_id={client_id}&response_type=code&redirect_uri={}&scope={}&state={conn_id}&prompt=select_account&cobrandid=8058f65d-ce06-4c30-9559-473c9275a65d", + urlencoding::encode(&format!("{}/v2/auth/minecraft/callback", public_uri)), + urlencoding::encode("XboxLive.signin offline_access") + ) +} diff --git a/src/auth/minecraft/stages/mod.rs b/src/auth/minecraft/stages/mod.rs new file mode 100644 index 000000000..6753b24c1 --- /dev/null +++ b/src/auth/minecraft/stages/mod.rs @@ -0,0 +1,27 @@ +//! 
MSA authentication stages + +use lazy_static::lazy_static; + +pub mod access_token; +pub mod bearer_token; +pub mod login_redirect; +pub mod player_info; +pub mod xbl_signin; +pub mod xsts_token; + +lazy_static! { + static ref REQWEST_CLIENT: reqwest::Client = { + let mut headers = reqwest::header::HeaderMap::new(); + let header = reqwest::header::HeaderValue::from_str(&format!( + "modrinth/labrinth/{} (support@modrinth.com)", + env!("CARGO_PKG_VERSION") + )) + .unwrap(); + headers.insert(reqwest::header::USER_AGENT, header); + reqwest::Client::builder() + .tcp_keepalive(Some(std::time::Duration::from_secs(10))) + .default_headers(headers) + .build() + .expect("Reqwest Client Building Failed") + }; +} diff --git a/src/auth/minecraft/stages/player_info.rs b/src/auth/minecraft/stages/player_info.rs new file mode 100644 index 000000000..8cc81f093 --- /dev/null +++ b/src/auth/minecraft/stages/player_info.rs @@ -0,0 +1,33 @@ +//! Fetch player info for display +use serde::Deserialize; + +const PROFILE_URL: &str = "https://api.minecraftservices.com/minecraft/profile"; + +#[derive(Deserialize)] +pub struct PlayerInfo { + pub id: String, + pub name: String, +} + +impl Default for PlayerInfo { + fn default() -> Self { + Self { + id: "606e2ff0ed7748429d6ce1d3321c7838".to_string(), + name: String::from("???"), + } + } +} + +pub async fn fetch_info(token: &str) -> Result { + let client = reqwest::Client::new(); + let resp = client + .get(PROFILE_URL) + .header(reqwest::header::AUTHORIZATION, format!("Bearer {token}")) + .send() + .await? + .error_for_status()? + .json() + .await?; + + Ok(resp) +} diff --git a/src/auth/minecraft/stages/xbl_signin.rs b/src/auth/minecraft/stages/xbl_signin.rs new file mode 100644 index 000000000..e84b340a7 --- /dev/null +++ b/src/auth/minecraft/stages/xbl_signin.rs @@ -0,0 +1,55 @@ +//! Signin for XBox Live + +use crate::auth::AuthenticationError; +use serde_json::json; + +const XBL_AUTH_URL: &str = "https://user.auth.xboxlive.com/user/authenticate"; + +// Deserialization +pub struct XBLLogin { + pub token: String, + pub uhs: String, +} + +// Impl +pub async fn login_xbl(token: &str) -> Result { + let client = reqwest::Client::new(); + let body = client + .post(XBL_AUTH_URL) + .header(reqwest::header::ACCEPT, "application/json") + .header("x-xbl-contract-version", "1") + .json(&json!({ + "Properties": { + "AuthMethod": "RPS", + "SiteName": "user.auth.xboxlive.com", + "RpsTicket": format!("d={token}") + }, + "RelyingParty": "http://auth.xboxlive.com", + "TokenType": "JWT" + })) + .send() + .await? + .text() + .await?; + + let json = serde_json::from_str::(&body)?; + let token = Some(&json) + .and_then(|it| it.get("Token")?.as_str().map(String::from)) + .ok_or(AuthenticationError::Custom( + "XBL response didn't contain valid token".to_string(), + ))?; + let uhs = Some(&json) + .and_then(|it| { + it.get("DisplayClaims")? + .get("xui")? + .get(0)? + .get("uhs")? 
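+                // `uhs` is the Xbox Live user hash from `DisplayClaims.xui[0]`; it is later
+                // combined with the XSTS token as `XBL3.0 x={uhs};{token}` when requesting the
+                // Minecraft bearer token (see stages/bearer_token.rs).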
+ .as_str() + .map(String::from) + }) + .ok_or(AuthenticationError::Custom( + "XBL response didn't contain valid user hash".to_string(), + ))?; + + Ok(XBLLogin { token, uhs }) +} diff --git a/src/auth/minecraft/stages/xsts_token.rs b/src/auth/minecraft/stages/xsts_token.rs new file mode 100644 index 000000000..0b92f091a --- /dev/null +++ b/src/auth/minecraft/stages/xsts_token.rs @@ -0,0 +1,57 @@ +use crate::auth::AuthenticationError; +use serde_json::json; + +const XSTS_AUTH_URL: &str = "https://xsts.auth.xboxlive.com/xsts/authorize"; + +pub enum XSTSResponse { + Unauthorized(String), + Success { token: String }, +} + +pub async fn fetch_token(token: &str) -> Result { + let client = reqwest::Client::new(); + let resp = client + .post(XSTS_AUTH_URL) + .header(reqwest::header::ACCEPT, "application/json") + .json(&json!({ + "Properties": { + "SandboxId": "RETAIL", + "UserTokens": [ + token + ] + }, + "RelyingParty": "rp://api.minecraftservices.com/", + "TokenType": "JWT" + })) + .send() + .await?; + let status = resp.status(); + + let body = resp.text().await?; + let json = serde_json::from_str::(&body)?; + + if status.is_success() { + Ok(json + .get("Token") + .and_then(|x| x.as_str().map(String::from)) + .map(|it| XSTSResponse::Success { token: it }) + .unwrap_or(XSTSResponse::Unauthorized( + "XSTS response didn't contain valid token!".to_string(), + ))) + } else { + Ok(XSTSResponse::Unauthorized( + #[allow(clippy::unreadable_literal)] + match json.get("XErr").and_then(|x| x.as_i64()) { + Some(2148916238) => { + String::from("This Microsoft account is underage and is not linked to a family.") + }, + Some(2148916235) => { + String::from("XBOX Live/Minecraft is not available in your country.") + }, + Some(2148916233) => String::from("This account does not have a valid XBOX Live profile. Please buy Minecraft and try again!"), + Some(2148916236) | Some(2148916237) => String::from("This account needs adult verification on Xbox page."), + _ => String::from("Unknown error code"), + }, + )) + } +} diff --git a/src/auth/mod.rs b/src/auth/mod.rs index 38de3a27a..0fb59a4cb 100644 --- a/src/auth/mod.rs +++ b/src/auth/mod.rs @@ -1,8 +1,10 @@ pub mod checks; pub mod email; pub mod flows; +pub mod minecraft; pub mod pats; pub mod session; +mod templates; pub mod validate; pub use checks::{ @@ -43,8 +45,12 @@ pub enum AuthenticationError { InvalidClientId, #[error("User email/account is already registered on Modrinth")] DuplicateUser, + #[error("Invalid state sent, you probably need to get a new websocket")] + SocketError, #[error("Invalid callback URL specified")] Url, + #[error("{0}")] + Custom(String), } impl actix_web::ResponseError for AuthenticationError { @@ -63,6 +69,8 @@ impl actix_web::ResponseError for AuthenticationError { AuthenticationError::Url => StatusCode::BAD_REQUEST, AuthenticationError::FileHosting(..) => StatusCode::INTERNAL_SERVER_ERROR, AuthenticationError::DuplicateUser => StatusCode::BAD_REQUEST, + AuthenticationError::Custom(..) => StatusCode::BAD_REQUEST, + AuthenticationError::SocketError => StatusCode::BAD_REQUEST, } } @@ -82,6 +90,8 @@ impl actix_web::ResponseError for AuthenticationError { AuthenticationError::Url => "url_error", AuthenticationError::FileHosting(..) => "file_hosting", AuthenticationError::DuplicateUser => "duplicate_user", + AuthenticationError::Custom(..) 
=> "custom", + AuthenticationError::SocketError => "socket", }, description: &self.to_string(), }) diff --git a/src/auth/pats.rs b/src/auth/pats.rs index aaecb43d0..c38f428b0 100644 --- a/src/auth/pats.rs +++ b/src/auth/pats.rs @@ -180,6 +180,12 @@ pub async fn edit_pat( let mut transaction = pool.begin().await?; if let Some(scopes) = &info.scopes { + if scopes.restricted() { + return Err(ApiError::InvalidInput( + "Invalid scopes requested!".to_string(), + )); + } + sqlx::query!( " UPDATE pats @@ -206,6 +212,12 @@ pub async fn edit_pat( .await?; } if let Some(expires) = &info.expires { + if expires < &Utc::now() { + return Err(ApiError::InvalidInput( + "Expire date must be in the future!".to_string(), + )); + } + sqlx::query!( " UPDATE pats diff --git a/src/auth/templates/error.html b/src/auth/templates/error.html new file mode 100644 index 000000000..e304fb055 --- /dev/null +++ b/src/auth/templates/error.html @@ -0,0 +1,24 @@ + + + + + + Error - Modrinth + + + +
+    {{ code }}
+
+    An error has occurred during the authentication process.
+
+    Try restarting the authentication flow within the launcher. If you are still facing issues,
+    join our Discord for support!
+
+    Debug info
+    {{ message }}
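+    <!-- {{ code }} and {{ message }} are plain string placeholders; ErrorPage::render in
+         src/auth/templates/mod.rs substitutes them before the page is served. -->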
+ + \ No newline at end of file diff --git a/src/auth/templates/mod.rs b/src/auth/templates/mod.rs new file mode 100644 index 000000000..6cb20174c --- /dev/null +++ b/src/auth/templates/mod.rs @@ -0,0 +1,66 @@ +use crate::auth::AuthenticationError; +use actix_web::http::StatusCode; +use actix_web::{HttpResponse, ResponseError}; +use std::fmt::{Debug, Display, Formatter}; + +pub struct Success<'a> { + pub icon: &'a str, + pub name: &'a str, +} + +impl<'a> Success<'a> { + pub fn render(self) -> HttpResponse { + let html = include_str!("success.html"); + + HttpResponse::Ok() + .append_header(("Content-Type", "text/html; charset=utf-8")) + .body( + html.replace("{{ icon }}", self.icon) + .replace("{{ name }}", self.name), + ) + } +} + +#[derive(Debug)] +pub struct ErrorPage { + pub code: StatusCode, + pub message: String, +} + +impl Display for ErrorPage { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let html = include_str!("error.html") + .replace("{{ code }}", &self.code.to_string()) + .replace("{{ message }}", &self.message); + write!(f, "{}", html)?; + + Ok(()) + } +} + +impl ErrorPage { + pub fn render(&self) -> HttpResponse { + HttpResponse::Ok() + .append_header(("Content-Type", "text/html; charset=utf-8")) + .body(self.to_string()) + } +} + +impl actix_web::ResponseError for ErrorPage { + fn status_code(&self) -> StatusCode { + self.code + } + + fn error_response(&self) -> HttpResponse { + self.render() + } +} + +impl From for ErrorPage { + fn from(item: AuthenticationError) -> Self { + ErrorPage { + code: item.status_code(), + message: item.to_string(), + } + } +} diff --git a/src/auth/templates/success.html b/src/auth/templates/success.html new file mode 100644 index 000000000..471811867 --- /dev/null +++ b/src/auth/templates/success.html @@ -0,0 +1,16 @@ + + + + + + Login - Modrinth + + + +
+    {{ icon }}
+
+    Login Successful
+
+    Hey, {{ name }}! You can now safely close this tab.
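+    <!-- {{ icon }} and {{ name }} are substituted by Success::render in src/auth/templates/mod.rs. -->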
+ + \ No newline at end of file diff --git a/src/clickhouse/mod.rs b/src/clickhouse/mod.rs new file mode 100644 index 000000000..1ac9480f8 --- /dev/null +++ b/src/clickhouse/mod.rs @@ -0,0 +1,102 @@ +use hyper::client::HttpConnector; +use hyper_tls::{native_tls, HttpsConnector}; + +pub async fn init_client() -> clickhouse::error::Result { + let database = dotenvy::var("CLICKHOUSE_DATABASE").unwrap(); + + let client = { + let mut http_connector = HttpConnector::new(); + http_connector.enforce_http(false); // allow https URLs + + let tls_connector = native_tls::TlsConnector::builder().build().unwrap().into(); + let https_connector = HttpsConnector::from((http_connector, tls_connector)); + let hyper_client = hyper::client::Client::builder().build(https_connector); + + clickhouse::Client::with_http_client(hyper_client) + .with_url(dotenvy::var("CLICKHOUSE_URL").unwrap()) + .with_user(dotenvy::var("CLICKHOUSE_USER").unwrap()) + .with_password(dotenvy::var("CLICKHOUSE_PASSWORD").unwrap()) + }; + + client + .query(&format!("CREATE DATABASE IF NOT EXISTS {database}")) + .execute() + .await?; + + client + .query(&format!( + " + CREATE TABLE IF NOT EXISTS {database}.views + ( + id UUID, + recorded DateTime64(4), + domain String, + site_path String, + + user_id UInt64, + project_id UInt64, + + ip IPv6, + country String, + user_agent String, + headers Array(Tuple(String, String)), + ) + ENGINE = MergeTree() + PRIMARY KEY (id, recorded) + " + )) + .execute() + .await?; + + client + .query(&format!( + " + CREATE TABLE IF NOT EXISTS {database}.downloads + ( + id UUID, + recorded DateTime64(4), + domain String, + site_path String, + + user_id UInt64, + project_id UInt64, + version_id UInt64, + + ip IPv6, + country String, + user_agent String, + headers Array(Tuple(String, String)), + ) + ENGINE = MergeTree() + PRIMARY KEY (id, recorded) + " + )) + .execute() + .await?; + + client + .query(&format!( + " + CREATE TABLE IF NOT EXISTS {database}.playtime + ( + id UUID, + recorded DateTime64(4), + seconds UInt64, + + user_id UInt64, + project_id UInt64, + version_id UInt64, + + loader String, + game_version String, + parent UInt64, + ) + ENGINE = MergeTree() + PRIMARY KEY (id, recorded) + " + )) + .execute() + .await?; + + Ok(client.with_database(database)) +} diff --git a/src/database/models/flow_item.rs b/src/database/models/flow_item.rs index 8cf44b0f8..8883957c8 100644 --- a/src/database/models/flow_item.rs +++ b/src/database/models/flow_item.rs @@ -16,7 +16,7 @@ const FLOWS_NAMESPACE: &str = "flows"; pub enum Flow { OAuth { user_id: Option, - url: String, + url: Option, provider: AuthProvider, }, Login2FA { @@ -33,6 +33,10 @@ pub enum Flow { user_id: UserId, confirm_email: String, }, + MinecraftAuth, + MicrosoftLogin { + access_token: String, + }, } impl Flow { diff --git a/src/main.rs b/src/main.rs index 32d5c4618..44dad5e5f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,23 +1,26 @@ use crate::file_hosting::S3Host; +use crate::queue::analytics::AnalyticsQueue; use crate::queue::download::DownloadQueue; -use crate::queue::payouts::PayoutsQueue; +use crate::queue::payouts::{process_payout, PayoutsQueue}; use crate::queue::session::AuthQueue; +use crate::queue::socket::ActiveSockets; use crate::ratelimit::errors::ARError; use crate::ratelimit::memory::{MemoryStore, MemoryStoreActor}; use crate::ratelimit::middleware::RateLimiter; +use crate::util::cors::default_cors; use crate::util::env::{parse_strings_from_var, parse_var}; -use actix_cors::Cors; +use actix_files::Files; use actix_web::{web, App, 
HttpServer}; use chrono::{DateTime, Utc}; use deadpool_redis::{Config, Runtime}; use env_logger::Env; use log::{error, info, warn}; use search::indexing::index_projects; -use search::indexing::IndexingSettings; use std::sync::Arc; -use tokio::sync::Mutex; +use tokio::sync::{Mutex, RwLock}; mod auth; +mod clickhouse; mod database; mod file_hosting; mod models; @@ -121,8 +124,7 @@ async fn main() -> std::io::Result<()> { let search_config_ref = search_config_ref.clone(); async move { info!("Indexing local database"); - let settings = IndexingSettings { index_local: true }; - let result = index_projects(pool_ref, settings, &search_config_ref).await; + let result = index_projects(pool_ref, &search_config_ref).await; if let Err(e) = result { warn!("Local project indexing failed: {:?}", e); } @@ -280,11 +282,75 @@ async fn main() -> std::io::Result<()> { } }); + info!("Initializing clickhouse connection"); + let clickhouse = clickhouse::init_client().await.unwrap(); + + let reader = Arc::new(queue::maxmind::MaxMindIndexer::new().await.unwrap()); + { + let reader_ref = reader.clone(); + scheduler.run(std::time::Duration::from_secs(60 * 60 * 24), move || { + let reader_ref = reader_ref.clone(); + + async move { + info!("Downloading MaxMind GeoLite2 country database"); + let result = reader_ref.index().await; + if let Err(e) = result { + warn!( + "Downloading MaxMind GeoLite2 country database failed: {:?}", + e + ); + } + info!("Done downloading MaxMind GeoLite2 country database"); + } + }); + } + info!("Downloading MaxMind GeoLite2 country database"); + + let analytics_queue = Arc::new(AnalyticsQueue::new()); + { + let client_ref = clickhouse.clone(); + let analytics_queue_ref = analytics_queue.clone(); + scheduler.run(std::time::Duration::from_secs(60 * 5), move || { + let client_ref = client_ref.clone(); + let analytics_queue_ref = analytics_queue_ref.clone(); + + async move { + info!("Indexing analytics queue"); + let result = analytics_queue_ref.index(client_ref).await; + if let Err(e) = result { + warn!("Indexing analytics queue failed: {:?}", e); + } + info!("Done indexing analytics queue"); + } + }); + } + + { + let pool_ref = pool.clone(); + let redis_ref = redis_pool.clone(); + let client_ref = clickhouse.clone(); + scheduler.run(std::time::Duration::from_secs(60 * 60 * 6), move || { + let pool_ref = pool_ref.clone(); + let redis_ref = redis_ref.clone(); + let client_ref = client_ref.clone(); + + async move { + info!("Done running payouts"); + let result = process_payout(&pool_ref, &redis_ref, &client_ref).await; + if let Err(e) = result { + warn!("Payouts run failed: {:?}", e); + } + info!("Done running payouts"); + } + }); + } + let ip_salt = Pepper { pepper: models::ids::Base62Id(models::ids::random_base62(11)).to_string(), }; let payouts_queue = web::Data::new(Mutex::new(PayoutsQueue::new())); + let active_sockets = web::Data::new(RwLock::new(ActiveSockets::default())); let store = MemoryStore::new(); @@ -294,14 +360,6 @@ async fn main() -> std::io::Result<()> { HttpServer::new(move || { App::new() .wrap(actix_web::middleware::Compress::default()) - .wrap( - Cors::default() - .allow_any_origin() - .allow_any_header() - .allow_any_method() - .max_age(3600) - .send_wildcard(), - ) .wrap( RateLimiter::new(MemoryStoreActor::from(store.clone()).start()) .with_identifier(|req| { @@ -323,6 +381,7 @@ async fn main() -> std::io::Result<()> { .with_max_requests(300) .with_ignore_key(dotenvy::var("RATE_LIMIT_IGNORE_KEY").ok()), ) + .wrap(sentry_actix::Sentry::new()) .app_data( 
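+            // Shared state for the analytics pipeline (queue, ClickHouse client, MaxMind reader)
+            // and the auth websocket map is registered through the `.app_data(...)` calls below.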
web::FormConfig::default().error_handler(|err, _req| { routes::ApiError::Validation(err.to_string()).into() @@ -351,11 +410,15 @@ async fn main() -> std::io::Result<()> { .app_data(session_queue.clone()) .app_data(payouts_queue.clone()) .app_data(web::Data::new(ip_salt.clone())) - .wrap(sentry_actix::Sentry::new()) + .app_data(web::Data::new(analytics_queue.clone())) + .app_data(web::Data::new(clickhouse.clone())) + .app_data(web::Data::new(reader.clone())) + .app_data(active_sockets.clone()) .configure(routes::root_config) .configure(routes::v2::config) .configure(routes::v3::config) - .default_service(web::get().to(routes::not_found)) + .service(Files::new("/", "assets/")) + .default_service(web::get().wrap(default_cors()).to(routes::not_found)) }) .bind(dotenvy::var("BIND_ADDR").unwrap())? .run() @@ -431,9 +494,6 @@ fn check_env_vars() -> bool { failed |= true; } - failed |= check_var::("ARIADNE_ADMIN_KEY"); - failed |= check_var::("ARIADNE_URL"); - failed |= check_var::("PAYPAL_API_URL"); failed |= check_var::("PAYPAL_CLIENT_ID"); failed |= check_var::("PAYPAL_CLIENT_SECRET"); @@ -462,5 +522,21 @@ fn check_env_vars() -> bool { failed |= check_var::("BEEHIIV_PUBLICATION_ID"); failed |= check_var::("BEEHIIV_API_KEY"); + if parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").is_none() { + warn!( + "Variable `ANALYTICS_ALLOWED_ORIGINS` missing in dotenv or not a json array of strings" + ); + failed |= true; + } + + failed |= check_var::("CLICKHOUSE_URL"); + failed |= check_var::("CLICKHOUSE_USER"); + failed |= check_var::("CLICKHOUSE_PASSWORD"); + failed |= check_var::("CLICKHOUSE_DATABASE"); + + failed |= check_var::("MAXMIND_LICENSE_KEY"); + + failed |= check_var::("PAYOUTS_BUDGET"); + failed } diff --git a/src/models/analytics.rs b/src/models/analytics.rs new file mode 100644 index 000000000..a401c4f5b --- /dev/null +++ b/src/models/analytics.rs @@ -0,0 +1,111 @@ +use clickhouse::Row; +use serde::Serialize; +use std::hash::{Hash, Hasher}; +use std::net::Ipv6Addr; +use uuid::Uuid; + +#[derive(Row, Serialize, Clone)] +pub struct Download { + #[serde(with = "uuid::serde::compact")] + pub id: Uuid, + pub recorded: i64, + pub domain: String, + pub site_path: String, + + // Modrinth User ID for logged in users, default 0 + pub user_id: u64, + // default is 0 if unknown + pub project_id: u64, + // default is 0 if unknown + pub version_id: u64, + + // The below information is used exclusively for data aggregation and fraud detection + // (ex: download botting). + pub ip: Ipv6Addr, + pub country: String, + pub user_agent: String, + pub headers: Vec<(String, String)>, +} + +impl PartialEq for Download { + fn eq(&self, other: &Self) -> bool { + self.id == other.id + } +} + +impl Eq for Download {} + +impl Hash for Download { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} + +#[derive(Row, Serialize, Clone)] +pub struct PageView { + #[serde(with = "uuid::serde::compact")] + pub id: Uuid, + pub recorded: i64, + pub domain: String, + pub site_path: String, + + // Modrinth User ID for logged in users + pub user_id: u64, + // Modrinth Project ID (used for payouts) + pub project_id: u64, + + // The below information is used exclusively for data aggregation and fraud detection + // (ex: page view botting). 
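+    // IPv4 clients are mapped into IPv6 (see convert_to_ip_v6 in routes/v2/analytics.rs)
+    // so the address fits the IPv6 column of the ClickHouse `views` table.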
+ pub ip: Ipv6Addr, + pub country: String, + pub user_agent: String, + pub headers: Vec<(String, String)>, +} + +impl PartialEq for PageView { + fn eq(&self, other: &Self) -> bool { + self.id == other.id + } +} + +impl Eq for PageView {} + +impl Hash for PageView { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} + +#[derive(Row, Serialize, Clone)] +pub struct Playtime { + #[serde(with = "uuid::serde::compact")] + pub id: Uuid, + pub recorded: i64, + pub seconds: u16, + + // Modrinth User ID for logged in users (unused atm) + pub user_id: u64, + // Modrinth Project ID + pub project_id: u64, + // Modrinth Version ID + pub version_id: u64, + + pub loader: String, + pub game_version: String, + /// Parent modpack this playtime was recorded in + pub parent: u64, +} + +impl PartialEq for Playtime { + fn eq(&self, other: &Self) -> bool { + self.id == other.id + } +} + +impl Eq for Playtime {} + +impl Hash for Playtime { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} diff --git a/src/models/ids.rs b/src/models/ids.rs index b58d5926e..181c08c33 100644 --- a/src/models/ids.rs +++ b/src/models/ids.rs @@ -19,7 +19,6 @@ pub use super::users::UserId; /// /// This method panics if `n` is 0 or greater than 11, since a `u64` /// can only represent up to 11 character base62 strings -#[allow(dead_code)] #[inline] pub fn random_base62(n: usize) -> u64 { random_base62_rng(&mut rand::thread_rng(), n) diff --git a/src/models/mod.rs b/src/models/mod.rs index dfe10c4b6..87a98f888 100644 --- a/src/models/mod.rs +++ b/src/models/mod.rs @@ -1,3 +1,4 @@ +pub mod analytics; pub mod error; pub mod ids; pub mod notifications; diff --git a/src/models/pats.rs b/src/models/pats.rs index f3c9e8f63..429a64f7f 100644 --- a/src/models/pats.rs +++ b/src/models/pats.rs @@ -82,7 +82,10 @@ bitflags::bitflags! 
{ // delete a session const SESSION_DELETE = 1 << 29; - const ALL = 0b111111111111111111111111111111; + // perform analytics action + const PERFORM_ANALYTICS = 1 << 30; + + const ALL = 0b1111111111111111111111111111111; const NOT_RESTRICTED = 0b00000011111111111111100111; const NONE = 0b0; } @@ -99,7 +102,8 @@ impl Scopes { | Scopes::SESSION_READ | Scopes::SESSION_DELETE | Scopes::USER_AUTH_WRITE - | Scopes::USER_DELETE, + | Scopes::USER_DELETE + | Scopes::PERFORM_ANALYTICS, ) } } diff --git a/src/queue/analytics.rs b/src/queue/analytics.rs new file mode 100644 index 000000000..6ff153a42 --- /dev/null +++ b/src/queue/analytics.rs @@ -0,0 +1,70 @@ +use crate::models::analytics::{Download, PageView, Playtime}; +use dashmap::DashSet; + +pub struct AnalyticsQueue { + views_queue: DashSet, + downloads_queue: DashSet, + playtime_queue: DashSet, +} + +// Batches analytics data points + transactions every few minutes +impl AnalyticsQueue { + pub fn new() -> Self { + AnalyticsQueue { + views_queue: DashSet::with_capacity(1000), + downloads_queue: DashSet::with_capacity(1000), + playtime_queue: DashSet::with_capacity(1000), + } + } + + pub async fn add_view(&self, page_view: PageView) { + self.views_queue.insert(page_view); + } + + pub async fn add_download(&self, download: Download) { + self.downloads_queue.insert(download); + } + + pub async fn add_playtime(&self, playtime: Playtime) { + self.playtime_queue.insert(playtime); + } + + pub async fn index(&self, client: clickhouse::Client) -> Result<(), clickhouse::error::Error> { + let views_queue = self.views_queue.clone(); + self.views_queue.clear(); + + let downloads_queue = self.downloads_queue.clone(); + self.downloads_queue.clear(); + + let playtime_queue = self.playtime_queue.clone(); + self.playtime_queue.clear(); + + if !views_queue.is_empty() || !downloads_queue.is_empty() || !playtime_queue.is_empty() { + let mut views = client.insert("views")?; + + for view in views_queue { + views.write(&view).await?; + } + + views.end().await?; + + let mut downloads = client.insert("downloads")?; + + for download in downloads_queue { + downloads.write(&download).await?; + } + + downloads.end().await?; + + let mut playtimes = client.insert("playtime")?; + + for playtime in playtime_queue { + playtimes.write(&playtime).await?; + } + + playtimes.end().await?; + } + + Ok(()) + } +} diff --git a/src/queue/maxmind.rs b/src/queue/maxmind.rs new file mode 100644 index 000000000..4846640a1 --- /dev/null +++ b/src/queue/maxmind.rs @@ -0,0 +1,82 @@ +use flate2::read::GzDecoder; +use log::warn; +use maxminddb::geoip2::Country; +use std::io::{Cursor, Read}; +use std::net::Ipv6Addr; +use tar::Archive; +use tokio::sync::RwLock; + +pub struct MaxMindIndexer { + pub reader: RwLock>>>, +} + +impl MaxMindIndexer { + pub async fn new() -> Result { + let reader = MaxMindIndexer::inner_index(false).await.ok().flatten(); + + Ok(MaxMindIndexer { + reader: RwLock::new(reader), + }) + } + + pub async fn index(&self) -> Result<(), reqwest::Error> { + let reader = MaxMindIndexer::inner_index(false).await?; + + if let Some(reader) = reader { + let mut reader_new = self.reader.write().await; + *reader_new = Some(reader); + } + + Ok(()) + } + + async fn inner_index( + should_panic: bool, + ) -> Result>>, reqwest::Error> { + let response = reqwest::get( + format!( + "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country&license_key={}&suffix=tar.gz", + dotenvy::var("MAXMIND_LICENSE_KEY").unwrap() + ) + ).await?.bytes().await.unwrap().to_vec(); + + let tarfile = 
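+        // The GeoLite2 download is a .tar.gz: gunzip it, then scan the archive for the
+        // bundled .mmdb file and load that database fully into memory.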
GzDecoder::new(Cursor::new(response)); + let mut archive = Archive::new(tarfile); + + if let Ok(entries) = archive.entries() { + for mut file in entries.flatten() { + if let Ok(path) = file.header().path() { + if path.extension().and_then(|x| x.to_str()) == Some("mmdb") { + let mut buf = Vec::new(); + file.read_to_end(&mut buf).unwrap(); + + let reader = maxminddb::Reader::from_source(buf).unwrap(); + + return Ok(Some(reader)); + } + } + } + } + + if should_panic { + panic!("Unable to download maxmind database- did you get a license key?") + } else { + warn!("Unable to download maxmind database."); + + Ok(None) + } + } + + pub async fn query(&self, ip: Ipv6Addr) -> Option { + let maxmind = self.reader.read().await; + + if let Some(ref maxmind) = *maxmind { + maxmind + .lookup::(ip.into()) + .ok() + .and_then(|x| x.country.and_then(|x| x.iso_code.map(|x| x.to_string()))) + } else { + None + } + } +} diff --git a/src/queue/mod.rs b/src/queue/mod.rs index 5c8237cf9..e8136b4cf 100644 --- a/src/queue/mod.rs +++ b/src/queue/mod.rs @@ -1,3 +1,6 @@ +pub mod analytics; pub mod download; +pub mod maxmind; pub mod payouts; pub mod session; +pub mod socket; diff --git a/src/queue/payouts.rs b/src/queue/payouts.rs index 3afc54a56..4f3f9ddd1 100644 --- a/src/queue/payouts.rs +++ b/src/queue/payouts.rs @@ -1,9 +1,12 @@ +use crate::models::projects::MonetizationStatus; use crate::routes::ApiError; +use crate::util::env::parse_var; use base64::Engine; -use chrono::{DateTime, Duration, Utc}; +use chrono::{DateTime, Datelike, Duration, Utc, Weekday}; use rust_decimal::Decimal; use serde::{Deserialize, Serialize}; use serde_json::json; +use sqlx::PgPool; use std::collections::HashMap; pub struct PayoutsQueue { @@ -197,3 +200,210 @@ impl PayoutsQueue { Ok(Decimal::ZERO) } } + +pub async fn process_payout( + pool: &PgPool, + redis: &deadpool_redis::Pool, + client: &clickhouse::Client, +) -> Result<(), ApiError> { + let start: DateTime = DateTime::from_utc( + Utc::now() + .date_naive() + .and_hms_nano_opt(0, 0, 0, 0) + .unwrap_or_default(), + Utc, + ); + + let results = sqlx::query!( + "SELECT EXISTS(SELECT 1 FROM payouts_values WHERE created = $1)", + start, + ) + .fetch_one(pool) + .await?; + + if results.exists.unwrap_or(false) { + return Ok(()); + } + + let end = start + Duration::days(1); + #[derive(Deserialize, clickhouse::Row)] + struct ProjectMultiplier { + pub page_views: u64, + pub project_id: u64, + } + + let (views_values, views_sum, downloads_values, downloads_sum) = futures::future::try_join4( + client + .query( + r#" + SELECT COUNT(id) page_views, project_id + FROM views + WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0) + GROUP BY project_id + ORDER BY page_views DESC + "#, + ) + .bind(start.timestamp()) + .bind(end.timestamp()) + .fetch_all::(), + client + .query("SELECT COUNT(id) FROM views WHERE (recorded BETWEEN ? AND ?) AND (project_id != 0)") + .bind(start.timestamp()) + .bind(end.timestamp()) + .fetch_one::(), + client + .query( + r#" + SELECT COUNT(id) page_views, project_id + FROM downloads + WHERE (recorded BETWEEN ? AND ?) AND (user_id != 0) + GROUP BY project_id + ORDER BY page_views DESC + "#, + ) + .bind(start.timestamp()) + .bind(end.timestamp()) + .fetch_all::(), + client + .query("SELECT COUNT(id) FROM downloads WHERE (recorded BETWEEN ? AND ?) 
AND (user_id != 0)") + .bind(start.timestamp()) + .bind(end.timestamp()) + .fetch_one::(), + ) + .await?; + + let mut transaction = pool.begin().await?; + + struct PayoutMultipliers { + sum: u64, + values: HashMap, + } + + let mut views_values = views_values + .into_iter() + .map(|x| (x.project_id, x.page_views)) + .collect::>(); + let downloads_values = downloads_values + .into_iter() + .map(|x| (x.project_id, x.page_views)) + .collect::>(); + views_values.extend(downloads_values); + let multipliers: PayoutMultipliers = PayoutMultipliers { + sum: downloads_sum + views_sum, + values: views_values, + }; + + struct Project { + // user_id, payouts_split + team_members: Vec<(i64, Decimal)>, + } + + let mut projects_map: HashMap = HashMap::new(); + + use futures::TryStreamExt; + + sqlx::query!( + " + SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split + FROM mods m + INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE + WHERE m.id = ANY($1) AND m.monetization_status = $2 + ", + &multipliers + .values + .keys() + .map(|x| *x as i64) + .collect::>(), + MonetizationStatus::Monetized.as_str(), + ) + .fetch_many(&mut *transaction) + .try_for_each(|e| { + if let Some(row) = e.right() { + if let Some(project) = projects_map.get_mut(&row.id) { + project.team_members.push((row.user_id, row.payouts_split)); + } else { + projects_map.insert( + row.id, + Project { + team_members: vec![(row.user_id, row.payouts_split)], + }, + ); + } + } + + futures::future::ready(Ok(())) + }) + .await?; + + let amount = Decimal::from(parse_var::("PAYOUTS_BUDGET").unwrap_or(0)); + + let days = Decimal::from(28); + let weekdays = Decimal::from(20); + let weekend_bonus = Decimal::from(5) / Decimal::from(4); + + let weekday_amount = amount / (weekdays + (weekend_bonus) * (days - weekdays)); + let weekend_amount = weekday_amount * weekend_bonus; + + let payout = match start.weekday() { + Weekday::Sat | Weekday::Sun => weekend_amount, + _ => weekday_amount, + }; + + for (id, project) in projects_map { + if let Some(value) = &multipliers.values.get(&(id as u64)) { + let project_multiplier: Decimal = + Decimal::from(**value) / Decimal::from(multipliers.sum); + + let sum_splits: Decimal = project.team_members.iter().map(|x| x.1).sum(); + + let mut clear_cache_users = Vec::new(); + + if sum_splits > Decimal::ZERO { + for (user_id, split) in project.team_members { + let payout: Decimal = payout * project_multiplier * (split / sum_splits); + + if payout > Decimal::ZERO { + sqlx::query!( + " + INSERT INTO payouts_values (user_id, mod_id, amount, created) + VALUES ($1, $2, $3, $4) + ", + user_id, + id, + payout, + start + ) + .execute(&mut *transaction) + .await?; + + sqlx::query!( + " + UPDATE users + SET balance = balance + $1 + WHERE id = $2 + ", + payout, + user_id + ) + .execute(&mut *transaction) + .await?; + clear_cache_users.push(user_id); + } + } + } + + crate::database::models::User::clear_caches( + &clear_cache_users + .into_iter() + .map(|x| (crate::database::models::UserId(x), None)) + .collect::>(), + redis, + ) + .await?; + } + } + + transaction.commit().await?; + + Ok(()) +} diff --git a/src/queue/socket.rs b/src/queue/socket.rs new file mode 100644 index 000000000..5105cda3c --- /dev/null +++ b/src/queue/socket.rs @@ -0,0 +1,15 @@ +//! 
"Database" for Hydra +use actix_ws::Session; +use dashmap::DashMap; + +pub struct ActiveSockets { + pub auth_sockets: DashMap, +} + +impl Default for ActiveSockets { + fn default() -> Self { + Self { + auth_sockets: DashMap::new(), + } + } +} diff --git a/src/ratelimit/memory.rs b/src/ratelimit/memory.rs index 9b8fcddac..60c4abf06 100644 --- a/src/ratelimit/memory.rs +++ b/src/ratelimit/memory.rs @@ -30,17 +30,6 @@ impl MemoryStore { inner: Arc::new(DashMap::::new()), } } - - #[allow(dead_code)] - /// Create a new hashmap with the provided capacity - pub fn with_capacity(capacity: usize) -> Self { - debug!("Creating new MemoryStore"); - MemoryStore { - inner: Arc::new(DashMap::::with_capacity( - capacity, - )), - } - } } /// Actor for memory store diff --git a/src/routes/index.rs b/src/routes/index.rs index 8e332fe33..9d90f02a0 100644 --- a/src/routes/index.rs +++ b/src/routes/index.rs @@ -1,7 +1,6 @@ -use actix_web::{get, HttpResponse}; +use actix_web::HttpResponse; use serde_json::json; -#[get("/")] pub async fn index_get() -> HttpResponse { let data = json!({ "name": "modrinth-labrinth", diff --git a/src/routes/mod.rs b/src/routes/mod.rs index bae5fe23e..87c046e34 100644 --- a/src/routes/mod.rs +++ b/src/routes/mod.rs @@ -1,4 +1,5 @@ use crate::file_hosting::FileHostingError; +use crate::util::cors::default_cors; use actix_web::http::StatusCode; use actix_web::{web, HttpResponse}; use futures::FutureExt; @@ -14,11 +15,21 @@ mod updates; pub use self::not_found::not_found; pub fn root_config(cfg: &mut web::ServiceConfig) { - cfg.service(index::index_get); - cfg.service(web::scope("maven").configure(maven::config)); - cfg.service(web::scope("updates").configure(updates::config)); + cfg.route("", web::get().wrap(default_cors()).to(index::index_get)); cfg.service( - web::scope("api/v1").wrap_fn(|req, _srv| { + web::scope("maven") + .wrap(default_cors()) + .configure(maven::config), + ); + cfg.service( + web::scope("updates") + .wrap(default_cors()) + .configure(updates::config), + ); + cfg.service( + web::scope("api/v1") + .wrap(default_cors()) + .wrap_fn(|req, _srv| { async { Ok(req.into_response( HttpResponse::Gone() @@ -40,6 +51,8 @@ pub enum ApiError { Database(#[from] crate::database::models::DatabaseError), #[error("Database Error: {0}")] SqlxDatabase(#[from] sqlx::Error), + #[error("Clickhouse Error: {0}")] + Clickhouse(#[from] clickhouse::error::Error), #[error("Internal server error: {0}")] Xml(String), #[error("Deserialization error: {0}")] @@ -56,8 +69,6 @@ pub enum ApiError { Search(#[from] meilisearch_sdk::errors::Error), #[error("Indexing Error: {0}")] Indexing(#[from] crate::search::indexing::IndexingError), - #[error("Ariadne Error: {0}")] - Analytics(String), #[error("Payments Error: {0}")] Payments(String), #[error("Discord Error: {0}")] @@ -82,6 +93,7 @@ impl actix_web::ResponseError for ApiError { ApiError::Env(..) => StatusCode::INTERNAL_SERVER_ERROR, ApiError::Database(..) => StatusCode::INTERNAL_SERVER_ERROR, ApiError::SqlxDatabase(..) => StatusCode::INTERNAL_SERVER_ERROR, + ApiError::Clickhouse(..) => StatusCode::INTERNAL_SERVER_ERROR, ApiError::Authentication(..) => StatusCode::UNAUTHORIZED, ApiError::CustomAuthentication(..) => StatusCode::UNAUTHORIZED, ApiError::Xml(..) => StatusCode::INTERNAL_SERVER_ERROR, @@ -91,7 +103,6 @@ impl actix_web::ResponseError for ApiError { ApiError::FileHosting(..) => StatusCode::INTERNAL_SERVER_ERROR, ApiError::InvalidInput(..) => StatusCode::BAD_REQUEST, ApiError::Validation(..) => StatusCode::BAD_REQUEST, - ApiError::Analytics(..) 
=> StatusCode::FAILED_DEPENDENCY, ApiError::Payments(..) => StatusCode::FAILED_DEPENDENCY, ApiError::Discord(..) => StatusCode::FAILED_DEPENDENCY, ApiError::Turnstile => StatusCode::BAD_REQUEST, @@ -118,7 +129,6 @@ impl actix_web::ResponseError for ApiError { ApiError::FileHosting(..) => "file_hosting_error", ApiError::InvalidInput(..) => "invalid_input", ApiError::Validation(..) => "invalid_input", - ApiError::Analytics(..) => "analytics_error", ApiError::Payments(..) => "payments_error", ApiError::Discord(..) => "discord_error", ApiError::Turnstile => "turnstile_error", @@ -127,6 +137,7 @@ impl actix_web::ResponseError for ApiError { ApiError::PasswordHashing(..) => "password_hashing_error", ApiError::PasswordStrengthCheck(..) => "strength_check_error", ApiError::Mail(..) => "mail_error", + ApiError::Clickhouse(..) => "clickhouse_error", }, description: &self.to_string(), }) diff --git a/src/routes/v2/admin.rs b/src/routes/v2/admin.rs index 73e1df991..f15fb8471 100644 --- a/src/routes/v2/admin.rs +++ b/src/routes/v2/admin.rs @@ -1,23 +1,24 @@ -use crate::database::models::{User, UserId}; +use crate::auth::validate::get_user_record_from_bearer_token; +use crate::models::analytics::Download; use crate::models::ids::ProjectId; -use crate::models::projects::MonetizationStatus; +use crate::models::pats::Scopes; +use crate::queue::analytics::AnalyticsQueue; +use crate::queue::maxmind::MaxMindIndexer; +use crate::queue::session::AuthQueue; use crate::routes::ApiError; use crate::util::guards::admin_key_guard; use crate::DownloadQueue; -use actix_web::{patch, post, web, HttpResponse}; -use chrono::{DateTime, SecondsFormat, Utc}; -use rust_decimal::Decimal; +use actix_web::{patch, web, HttpRequest, HttpResponse}; +use chrono::Utc; use serde::Deserialize; -use serde_json::json; use sqlx::PgPool; use std::collections::HashMap; +use std::net::Ipv4Addr; +use std::sync::Arc; +use uuid::Uuid; pub fn config(cfg: &mut web::ServiceConfig) { - cfg.service( - web::scope("admin") - .service(count_download) - .service(process_payout), - ); + cfg.service(web::scope("admin").service(count_download)); } #[derive(Deserialize)] @@ -32,11 +33,28 @@ pub struct DownloadBody { // This is an internal route, cannot be used without key #[patch("/_count-download", guard = "admin_key_guard")] +#[allow(clippy::too_many_arguments)] pub async fn count_download( + req: HttpRequest, pool: web::Data, + redis: web::Data, + maxmind: web::Data>, + analytics_queue: web::Data>, + session_queue: web::Data, download_body: web::Json, download_queue: web::Data, ) -> Result { + let token = download_body + .headers + .iter() + .find(|x| x.0.to_lowercase() == "authorization") + .map(|x| &**x.1); + + let user = get_user_record_from_bearer_token(&req, token, &**pool, &redis, &session_queue) + .await + .ok() + .flatten(); + let project_id: crate::database::models::ids::ProjectId = download_body.project_id.into(); let id_option = crate::models::ids::base62_impl::parse_base62(&download_body.version_name) @@ -83,322 +101,44 @@ pub async fn count_download( .await; } - let client = reqwest::Client::new(); + let url = url::Url::parse(&download_body.url) + .map_err(|_| ApiError::InvalidInput("invalid download URL specified!".to_string()))?; - client - .post(format!("{}download", dotenvy::var("ARIADNE_URL")?)) - .header("Modrinth-Admin", dotenvy::var("ARIADNE_ADMIN_KEY")?) 
- .json(&json!({ - "ip": download_body.ip, - "url": download_body.url, - "project_id": download_body.project_id, - "version_id": crate::models::projects::VersionId(version_id as u64).to_string(), - "headers": download_body.headers - })) - .send() - .await - .ok(); - - Ok(HttpResponse::NoContent().body("")) -} - -#[derive(Deserialize)] -pub struct PayoutData { - amount: Decimal, - date: DateTime, -} - -#[post("/_process_payout", guard = "admin_key_guard")] -pub async fn process_payout( - pool: web::Data, - redis: web::Data, - data: web::Json, -) -> Result { - let start: DateTime = DateTime::from_utc( - data.date - .date_naive() - .and_hms_nano_opt(0, 0, 0, 0) - .unwrap_or_default(), - Utc, - ); - - let client = reqwest::Client::new(); - let mut transaction = pool.begin().await?; - - #[derive(Deserialize)] - struct PayoutMultipliers { - sum: u64, - values: HashMap, - } - - let multipliers: PayoutMultipliers = client - .get(format!("{}multipliers", dotenvy::var("ARIADNE_URL")?,)) - .header("Modrinth-Admin", dotenvy::var("ARIADNE_ADMIN_KEY")?) - .query(&[( - "start_date", - start.to_rfc3339_opts(SecondsFormat::Nanos, true), - )]) - .send() - .await - .map_err(|_| ApiError::Analytics("Error while fetching payout multipliers!".to_string()))? - .json() - .await - .map_err(|_| { - ApiError::Analytics("Error while deserializing payout multipliers!".to_string()) - })?; - - struct Project { - project_type: String, - // user_id, payouts_split - team_members: Vec<(i64, Decimal)>, - // user_id, payouts_split, actual_project_id - split_team_members: Vec<(i64, Decimal, i64)>, - } - - let mut projects_map: HashMap = HashMap::new(); - - use futures::TryStreamExt; - - sqlx::query!( - " - SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split, pt.name project_type - FROM mods m - INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE - INNER JOIN project_types pt ON pt.id = m.project_type - WHERE m.id = ANY($1) AND m.monetization_status = $2 - ", - &multipliers - .values - .keys() - .flat_map(|x| x.parse::().ok()) - .collect::>(), - MonetizationStatus::Monetized.as_str(), - ) - .fetch_many(&mut *transaction) - .try_for_each(|e| { - if let Some(row) = e.right() { - if let Some(project) = projects_map.get_mut(&row.id) { - project.team_members.push((row.user_id, row.payouts_split)); - } else { - projects_map.insert( - row.id, - Project { - project_type: row.project_type, - team_members: vec![(row.user_id, row.payouts_split)], - split_team_members: Default::default(), - }, - ); - } - } - - futures::future::ready(Ok(())) - }) - .await?; - - // Specific Payout Conditions (ex: modpack payout split) - let mut projects_split_dependencies = Vec::new(); - - for (id, project) in &projects_map { - if project.project_type == "modpack" { - projects_split_dependencies.push(*id); - } - } - - if !projects_split_dependencies.is_empty() { - // (dependent_id, (dependency_id, times_depended)) - let mut project_dependencies: HashMap> = HashMap::new(); - // dependency_ids to fetch team members from - let mut fetch_team_members: Vec = Vec::new(); - - sqlx::query!( - " - SELECT mv.mod_id, m.id, COUNT(m.id) times_depended FROM versions mv - INNER JOIN dependencies d ON d.dependent_id = mv.id - INNER JOIN versions v ON d.dependency_id = v.id - INNER JOIN mods m ON v.mod_id = m.id OR d.mod_dependency_id = m.id - WHERE mv.mod_id = ANY($1) - group by mv.mod_id, m.id; - ", - &projects_split_dependencies - ) - .fetch_many(&mut *transaction) - .try_for_each(|e| { - if let Some(row) = e.right() { - 
fetch_team_members.push(row.id); - - if let Some(project) = project_dependencies.get_mut(&row.mod_id) { - project.push((row.id, row.times_depended.unwrap_or(0))); - } else { - project_dependencies - .insert(row.mod_id, vec![(row.id, row.times_depended.unwrap_or(0))]); - } - } - - futures::future::ready(Ok(())) - }) - .await?; - - // (project_id, (user_id, payouts_split)) - let mut team_members: HashMap> = HashMap::new(); - - sqlx::query!( - " - SELECT m.id id, tm.user_id user_id, tm.payouts_split payouts_split - FROM mods m - INNER JOIN team_members tm on m.team_id = tm.team_id AND tm.accepted = TRUE - WHERE m.id = ANY($1) - ", - &*fetch_team_members - ) - .fetch_many(&mut *transaction) - .try_for_each(|e| { - if let Some(row) = e.right() { - if let Some(project) = team_members.get_mut(&row.id) { - project.push((row.user_id, row.payouts_split)); - } else { - team_members.insert(row.id, vec![(row.user_id, row.payouts_split)]); - } - } - - futures::future::ready(Ok(())) - }) - .await?; - - for (project_id, dependencies) in project_dependencies { - let dep_sum: i64 = dependencies.iter().map(|x| x.1).sum(); - - let project = projects_map.get_mut(&project_id); - - if let Some(project) = project { - if dep_sum > 0 { - for dependency in dependencies { - let project_multiplier: Decimal = - Decimal::from(dependency.1) / Decimal::from(dep_sum); - - if let Some(members) = team_members.get(&dependency.0) { - let members_sum: Decimal = members.iter().map(|x| x.1).sum(); - - if members_sum > Decimal::ZERO { - for member in members { - let member_multiplier: Decimal = member.1 / members_sum; - project.split_team_members.push(( - member.0, - member_multiplier * project_multiplier, - project_id, - )); - } - } - } - } - } - } - } - } - - for (id, project) in projects_map { - if let Some(value) = &multipliers.values.get(&id.to_string()) { - let project_multiplier: Decimal = - Decimal::from(**value) / Decimal::from(multipliers.sum); - - let default_split_given = Decimal::ONE; - let split_given = Decimal::ONE / Decimal::from(5); - let split_retention = Decimal::from(4) / Decimal::from(5); - - let sum_splits: Decimal = project.team_members.iter().map(|x| x.1).sum(); - let sum_tm_splits: Decimal = project.split_team_members.iter().map(|x| x.1).sum(); - - let mut clear_cache_users = Vec::new(); - - if sum_splits > Decimal::ZERO { - for (user_id, split) in project.team_members { - let payout: Decimal = data.amount - * project_multiplier - * (split / sum_splits) - * (if !project.split_team_members.is_empty() { - &split_given - } else { - &default_split_given - }); - - if payout > Decimal::ZERO { - sqlx::query!( - " - INSERT INTO payouts_values (user_id, mod_id, amount, created) - VALUES ($1, $2, $3, $4) - ", - user_id, - id, - payout, - start - ) - .execute(&mut *transaction) - .await?; - - sqlx::query!( - " - UPDATE users - SET balance = balance + $1 - WHERE id = $2 - ", - payout, - user_id - ) - .execute(&mut *transaction) - .await?; - clear_cache_users.push(user_id); - } - } - } - - if sum_tm_splits > Decimal::ZERO { - for (user_id, split, project_id) in project.split_team_members { - let payout: Decimal = data.amount - * project_multiplier - * (split / sum_tm_splits) - * split_retention; - - if payout > Decimal::ZERO { - sqlx::query!( - " - INSERT INTO payouts_values (user_id, mod_id, amount, created) - VALUES ($1, $2, $3, $4) - ", - user_id, - project_id, - payout, - start - ) - .execute(&mut *transaction) - .await?; - - sqlx::query!( - " - UPDATE users - SET balance = balance + $1 - WHERE id = $2 - ", - 
payout, - user_id - ) - .execute(&mut *transaction) - .await?; - clear_cache_users.push(user_id); - } - } - } - - User::clear_caches( - &clear_cache_users - .into_iter() - .map(|x| (UserId(x), None)) - .collect::>(), - &redis, - ) - .await?; - } - } - - transaction.commit().await?; + let ip = super::analytics::convert_to_ip_v6(&download_body.ip) + .unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped()); + + analytics_queue + .add_download(Download { + id: Uuid::new_v4(), + recorded: Utc::now().timestamp_nanos() / 100_000, + domain: url.host_str().unwrap_or_default().to_string(), + site_path: url.path().to_string(), + user_id: user + .and_then(|(scopes, x)| { + if scopes.contains(Scopes::PERFORM_ANALYTICS) { + Some(x.id.0 as u64) + } else { + None + } + }) + .unwrap_or(0), + project_id: project_id as u64, + version_id: version_id as u64, + ip, + country: maxmind.query(ip).await.unwrap_or_default(), + user_agent: download_body + .headers + .get("user-agent") + .cloned() + .unwrap_or_default(), + headers: download_body + .headers + .clone() + .into_iter() + .filter(|x| !super::analytics::FILTERED_HEADERS.contains(&&*x.0.to_lowercase())) + .collect(), + }) + .await; Ok(HttpResponse::NoContent().body("")) } diff --git a/src/routes/v2/analytics.rs b/src/routes/v2/analytics.rs new file mode 100644 index 000000000..09bf112d8 --- /dev/null +++ b/src/routes/v2/analytics.rs @@ -0,0 +1,248 @@ +use crate::auth::get_user_from_headers; +use crate::models::analytics::{PageView, Playtime}; +use crate::models::pats::Scopes; +use crate::queue::maxmind::MaxMindIndexer; +use crate::queue::session::AuthQueue; +use crate::routes::ApiError; +use crate::util::env::parse_strings_from_var; +use crate::AnalyticsQueue; +use actix_cors::Cors; +use actix_web::{post, web}; +use actix_web::{HttpRequest, HttpResponse}; +use chrono::Utc; +use serde::Deserialize; +use sqlx::PgPool; +use std::collections::HashMap; +use std::net::{AddrParseError, IpAddr, Ipv4Addr, Ipv6Addr}; +use std::sync::Arc; +use url::Url; +use uuid::Uuid; + +pub fn config(cfg: &mut web::ServiceConfig) { + cfg.service( + web::scope("v2/analytics") + .wrap( + Cors::default() + .allowed_origin_fn(|origin, _req_head| { + let allowed_origins = + parse_strings_from_var("ANALYTICS_ALLOWED_ORIGINS").unwrap_or_default(); + + allowed_origins.contains(&"*".to_string()) + || allowed_origins + .contains(&origin.to_str().unwrap_or_default().to_string()) + }) + .allowed_methods(vec!["GET", "POST"]) + .allowed_headers(vec![ + actix_web::http::header::AUTHORIZATION, + actix_web::http::header::ACCEPT, + actix_web::http::header::CONTENT_TYPE, + ]) + .max_age(3600), + ) + .service(page_view_ingest) + .service(playtime_ingest), + ); +} + +pub const FILTERED_HEADERS: &[&str] = &[ + "authorization", + "cookie", + "modrinth-admin", + // we already retrieve/use these elsewhere- so they are unneeded + "user-agent", + "cf-connecting-ip", + "cf-ipcountry", + "x-forwarded-for", + "x-real-ip", + // We don't need the information vercel provides from its headers + "x-vercel-ip-city", + "x-vercel-ip-timezone", + "x-vercel-ip-longitude", + "x-vercel-proxy-signature", + "x-vercel-ip-country-region", + "x-vercel-forwarded-for", + "x-vercel-proxied-for", + "x-vercel-proxy-signature-ts", + "x-vercel-ip-latitude", + "x-vercel-ip-country", +]; + +pub fn convert_to_ip_v6(src: &str) -> Result { + let ip_addr: IpAddr = src.parse()?; + + Ok(match ip_addr { + IpAddr::V4(x) => x.to_ipv6_mapped(), + IpAddr::V6(x) => x, + }) +} + +#[derive(Deserialize)] +pub struct UrlInput { + url: String, 
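+    // Full URL of the viewed page; page_view_ingest only accepts modrinth.com hosts
+    // (or any host when CORS_ALLOWED_ORIGINS contains "*").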
+} + +//this route should be behind the cloudflare WAF to prevent non-browsers from calling it +#[post("view")] +pub async fn page_view_ingest( + req: HttpRequest, + maxmind: web::Data>, + analytics_queue: web::Data>, + session_queue: web::Data, + url_input: web::Json, + pool: web::Data, + redis: web::Data, +) -> Result { + let user = get_user_from_headers(&req, &**pool, &redis, &session_queue, None) + .await + .ok(); + let conn_info = req.connection_info().peer_addr().map(|x| x.to_string()); + + let url = Url::parse(&url_input.url) + .map_err(|_| ApiError::InvalidInput("invalid page view URL specified!".to_string()))?; + + let domain = url + .host_str() + .ok_or_else(|| ApiError::InvalidInput("invalid page view URL specified!".to_string()))?; + + let allowed_origins = parse_strings_from_var("CORS_ALLOWED_ORIGINS").unwrap_or_default(); + if !(domain.ends_with(".modrinth.com") + || domain == "modrinth.com" + || allowed_origins.contains(&"*".to_string())) + { + return Err(ApiError::InvalidInput( + "invalid page view URL specified!".to_string(), + )); + } + + let headers = req + .headers() + .into_iter() + .map(|(key, val)| { + ( + key.to_string().to_lowercase(), + val.to_str().unwrap_or_default().to_string(), + ) + }) + .collect::>(); + + let ip = convert_to_ip_v6(if let Some(header) = headers.get("cf-connecting-ip") { + header + } else { + conn_info.as_deref().unwrap_or_default() + }) + .unwrap_or_else(|_| Ipv4Addr::new(127, 0, 0, 1).to_ipv6_mapped()); + + let mut view = PageView { + id: Uuid::new_v4(), + recorded: Utc::now().timestamp_nanos() / 100_000, + domain: domain.to_string(), + site_path: url.path().to_string(), + user_id: 0, + project_id: 0, + ip, + country: maxmind.query(ip).await.unwrap_or_default(), + user_agent: headers.get("user-agent").cloned().unwrap_or_default(), + headers: headers + .into_iter() + .filter(|x| !FILTERED_HEADERS.contains(&&*x.0)) + .collect(), + }; + + if let Some(segments) = url.path_segments() { + let segments_vec = segments.collect::>(); + + if segments_vec.len() >= 2 { + const PROJECT_TYPES: &[&str] = &[ + "mod", + "modpack", + "plugin", + "resourcepack", + "shader", + "datapack", + ]; + + if PROJECT_TYPES.contains(&segments_vec[0]) { + let project = + crate::database::models::Project::get(segments_vec[1], &**pool, &redis).await?; + + if let Some(project) = project { + view.project_id = project.inner.id.0 as u64; + } + } + } + } + + if let Some((_, user)) = user { + view.user_id = user.id.0; + } + + analytics_queue.add_view(view).await; + + Ok(HttpResponse::NoContent().body("")) +} + +#[derive(Deserialize)] +pub struct PlaytimeInput { + seconds: u16, + loader: String, + game_version: String, + parent: Option, +} + +#[post("playtime")] +pub async fn playtime_ingest( + req: HttpRequest, + analytics_queue: web::Data>, + session_queue: web::Data, + playtime_input: web::Json>, + pool: web::Data, + redis: web::Data, +) -> Result { + let (_, user) = get_user_from_headers( + &req, + &**pool, + &redis, + &session_queue, + Some(&[Scopes::PERFORM_ANALYTICS]), + ) + .await?; + + let playtimes = playtime_input.0; + + if playtimes.len() > 2000 { + return Err(ApiError::InvalidInput( + "Too much playtime entered for version!".to_string(), + )); + } + + let versions = crate::database::models::Version::get_many( + &playtimes.iter().map(|x| (*x.0).into()).collect::>(), + &**pool, + &redis, + ) + .await?; + + for (id, playtime) in playtimes { + if playtime.seconds > 300 { + continue; + } + + if let Some(version) = versions.iter().find(|x| id == x.inner.id.into()) { + 
analytics_queue + .add_playtime(Playtime { + id: Default::default(), + recorded: Utc::now().timestamp_nanos() / 100_000, + seconds: playtime.seconds, + user_id: user.id.0, + project_id: version.inner.id.0 as u64, + version_id: version.inner.project_id.0 as u64, + loader: playtime.loader, + game_version: playtime.game_version, + parent: playtime.parent.map(|x| x.0).unwrap_or(0), + }) + .await; + } + } + + Ok(HttpResponse::NoContent().finish()) +} diff --git a/src/routes/v2/mod.rs b/src/routes/v2/mod.rs index cd28ee6c6..c4ecb82d7 100644 --- a/src/routes/v2/mod.rs +++ b/src/routes/v2/mod.rs @@ -1,4 +1,5 @@ mod admin; +mod analytics; mod moderation; mod notifications; pub(crate) mod project_creation; @@ -14,10 +15,12 @@ mod version_file; mod versions; pub use super::ApiError; +use crate::util::cors::default_cors; pub fn config(cfg: &mut actix_web::web::ServiceConfig) { cfg.service( actix_web::web::scope("v2") + .wrap(default_cors()) .configure(admin::config) .configure(crate::auth::session::config) .configure(crate::auth::flows::config) @@ -36,4 +39,6 @@ pub fn config(cfg: &mut actix_web::web::ServiceConfig) { .configure(version_file::config) .configure(versions::config), ); + + cfg.configure(analytics::config); } diff --git a/src/routes/v3/mod.rs b/src/routes/v3/mod.rs index cc7ca6d82..ddfb05e5f 100644 --- a/src/routes/v3/mod.rs +++ b/src/routes/v3/mod.rs @@ -1,9 +1,14 @@ pub use super::ApiError; +use crate::util::cors::default_cors; use actix_web::{web, HttpResponse}; use serde_json::json; pub fn config(cfg: &mut web::ServiceConfig) { - cfg.service(web::scope("v3").route("", web::get().to(hello_world))); + cfg.service( + web::scope("v3") + .wrap(default_cors()) + .route("", web::get().to(hello_world)), + ); } pub async fn hello_world() -> Result { diff --git a/src/search/indexing/mod.rs b/src/search/indexing/mod.rs index a4ae2bce8..9e29f9d45 100644 --- a/src/search/indexing/mod.rs +++ b/src/search/indexing/mod.rs @@ -30,31 +30,10 @@ pub enum IndexingError { // assumes a max average size of 1KiB per project to avoid this cap. const MEILISEARCH_CHUNK_SIZE: usize = 10000; -#[derive(Debug)] -pub struct IndexingSettings { - pub index_local: bool, -} - -impl IndexingSettings { - #[allow(dead_code)] - pub fn from_env() -> Self { - //FIXME: what? 
- let index_local = true; - - Self { index_local } - } -} - -pub async fn index_projects( - pool: PgPool, - settings: IndexingSettings, - config: &SearchConfig, -) -> Result<(), IndexingError> { +pub async fn index_projects(pool: PgPool, config: &SearchConfig) -> Result<(), IndexingError> { let mut docs_to_add: Vec = vec![]; - if settings.index_local { - docs_to_add.append(&mut index_local(pool.clone()).await?); - } + docs_to_add.append(&mut index_local(pool.clone()).await?); // Write Indices add_projects(docs_to_add, config).await?; @@ -74,7 +53,6 @@ async fn create_index( .await?; match client.get_index(name).await { - // TODO: update index settings on startup (or delete old indices on startup) Ok(index) => { index .set_settings(&default_settings()) diff --git a/src/util/cors.rs b/src/util/cors.rs new file mode 100644 index 000000000..5f35b2bc5 --- /dev/null +++ b/src/util/cors.rs @@ -0,0 +1,10 @@ +use actix_cors::Cors; + +pub fn default_cors() -> Cors { + Cors::default() + .allow_any_origin() + .allow_any_header() + .allow_any_method() + .max_age(3600) + .send_wildcard() +} diff --git a/src/util/mod.rs b/src/util/mod.rs index d34e5a894..fa514c59c 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -1,4 +1,5 @@ pub mod captcha; +pub mod cors; pub mod env; pub mod ext; pub mod guards;
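
For reference, the daily split that `process_payout` applies to the monthly `PAYOUTS_BUDGET` above can be reproduced in isolation. This is a minimal sketch rather than part of the patch: it assumes the same fixed 28-day month, 20 weekdays, and 5/4 weekend bonus used in the hunk, and the function name `daily_pool` is illustrative only.

use chrono::{Datelike, Utc, Weekday};
use rust_decimal::Decimal;

// Daily payout pool derived from the monthly budget; weekend days are weighted 5/4.
fn daily_pool(monthly_budget: u64, day: Weekday) -> Decimal {
    let amount = Decimal::from(monthly_budget);
    let days = Decimal::from(28);
    let weekdays = Decimal::from(20);
    let weekend_bonus = Decimal::from(5) / Decimal::from(4);

    // monthly budget = 20 * weekday_amount + 8 * (weekday_amount * 5/4)
    let weekday_amount = amount / (weekdays + weekend_bonus * (days - weekdays));
    match day {
        Weekday::Sat | Weekday::Sun => weekday_amount * weekend_bonus,
        _ => weekday_amount,
    }
}

fn main() {
    // With PAYOUTS_BUDGET=100 this prints roughly 3.33 on weekdays and 4.17 on weekends.
    println!("today's pool: {}", daily_pool(100, Utc::now().weekday()));
}

Each project's share of that daily pool is its fraction of the day's combined view and download counts, and the project amount is then divided among accepted team members in proportion to `payouts_split`.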