Compare commits: 2a08e6204e ... feature/br (709 commits)
| SHA1 | Author | Date |
|---|---|---|
| fe9984e2d8 | |||
| 369ff757b0 | |||
| 615f1fe468 | |||
| 86ddb50cb8 | |||
| 31c32143d0 | |||
| b63c387806 | |||
| dbbfd42e08 | |||
| 47e12b4452 | |||
| e1a1b4dc7d | |||
| ca01181f37 | |||
| 0aff77eda6 | |||
| 8a95aa1209 | |||
| a4a3c35f4d | |||
| edece36ecc | |||
| 247fc98760 | |||
| a1781b3800 | |||
| 450c73c060 | |||
| 3a1924279f | |||
| 094e7e6fe2 | |||
| ae48a18536 | |||
| 354bda0efa | |||
| 856f4ffc85 | |||
| 20eba36c65 | |||
| 8cd0c8ea4c | |||
| 2939cd8adc | |||
| 41d8253094 | |||
| 5263d1657e | |||
| 1de3d163c1 | |||
| d3ed335fde | |||
| f84be7b11b | |||
| b011c0fd48 | |||
| fe661a5008 | |||
| e97823f99c | |||
| a9dd593ac8 | |||
| 1585e55342 | |||
| 52b608da99 | |||
| 5a4f75537d | |||
| ce4a401c1a | |||
| 7814e3d64d | |||
| 9191d83f84 | |||
| 5d87035885 | |||
| 25b61c6b29 | |||
| 9671ef2508 | |||
| 759520f024 | |||
| a84d8a0c7e | |||
| 20173f7d1c | |||
| 4a835bbfba | |||
| 37a9a284ef | |||
| dce05342df | |||
| 56db33d7f1 | |||
| 40b4703b1a | |||
| 747d550d80 | |||
| 84de0c5538 | |||
| 33ddfc6c31 | |||
| 26b862d601 | |||
| 9fe5fea238 | |||
| 0347b6f5ff | |||
| ffb75e53f7 | |||
| 582ba75643 | |||
| 2db1da3194 | |||
| fd6b0ce5fd | |||
| 670a477f9a | |||
| 46945197d9 | |||
| 4ebc64d13a | |||
| bc9560e56e | |||
| 38f5aab9e0 | |||
| 95f49a7ca5 | |||
| cd8f5977af | |||
| a218d1309b | |||
| 113d4807b2 | |||
| 9bc3cdbd0b | |||
| 79e6402df3 | |||
| ec2e9444e3 | |||
| a86a2fee85 | |||
| aec67b9db8 | |||
| 0bbe1d428a | |||
| a05f0afa8b | |||
| 5e2842774a | |||
| e17690f27b | |||
| cb990b61a3 | |||
| cc837288bb | |||
| 4a26e4f75b | |||
| eee2c1a13d | |||
| 209d5c1a5e | |||
| 4f4affaca4 | |||
| d59710309d | |||
| 88525276c2 | |||
| 1f4b3d3eee | |||
| 76a4c5fb53 | |||
| 8f6b96cb0f | |||
| 76a707c7fd | |||
| ae584abb5b | |||
| eff8278cc3 | |||
| 8432a00691 | |||
| 1ed185a701 | |||
| 562ce3296e | |||
| ddc2d69e54 | |||
| f6863b32e8 | |||
| 9bf9f3d384 | |||
| 998d01c751 | |||
| 57d04ddf1c | |||
| 0ba2d2ecee | |||
| 839cc84c26 | |||
| c80c282118 | |||
| 5acae03c55 | |||
| 49be05d4db | |||
| ae7580252b | |||
| 7c85f51436 | |||
| 83ec475cc8 | |||
| c1c095a73c | |||
| c4e84364c6 | |||
| 8287b82554 | |||
| 56fe8bebbe | |||
| 4fffd1025f | |||
| 576e1ea152 | |||
| 5e1021a18e | |||
| dcbdd01f53 | |||
| 608bf8f33a | |||
| 48994d8bfd | |||
| 4ffcbf36d3 | |||
| e539e0334f | |||
| 1898b3ef3f | |||
| 1100a1b66f | |||
| 04a0a6ddf4 | |||
| bfbb307d6b | |||
| 1c573f9a12 | |||
| 6b1533402a | |||
| fdf5f795da | |||
| daf5336410 | |||
| 0923dc61d6 | |||
| 4275131645 | |||
| c64d5971ee | |||
| 3968bc3910 | |||
| 99b0748129 | |||
| fcaac9cc41 | |||
| b5c6535ee8 | |||
| cf6150b155 | |||
| 5d1bfeaa9a | |||
| c4ffc08bae | |||
| f73f3608c0 | |||
| 5944d9542e | |||
| 2aefcd2708 | |||
| 3af4c1ac7a | |||
| aff0e8b1df | |||
| e4a9bfa08b | |||
| a41a5ad39a | |||
| 434d59a7ba | |||
| 407de622ec | |||
| be03e7b931 | |||
| d02ae5bd3f | |||
| 87506b0478 | |||
| 3a819007c1 | |||
| 961ba9c616 | |||
| 7b58c7537e | |||
| a1486b3bba | |||
| f3c06e1969 | |||
| 354d9c20a3 | |||
| fbcee18db1 | |||
| c5d21c3554 | |||
| 4410311b80 | |||
| 8f5ee384ff | |||
| bffc48e5d9 | |||
| 9cead2ab0e | |||
| 444c853f54 | |||
| 7751b0d0f8 | |||
| fe93439194 | |||
| 6762b18135 | |||
| 9439d71249 | |||
| b8f86e5d5e | |||
| 597fd6ad55 | |||
| a71a6fcc41 | |||
| 9dde136c9c | |||
| 779cb9a87c | |||
| 79f7c914d7 | |||
| a9d181db8f | |||
| 283ad80632 | |||
| e9f9582110 | |||
| 3a5449df79 | |||
| cf88c165ee | |||
| 2fec47d363 | |||
| 6966d900fa | |||
| 773615e201 | |||
| 080ba76684 | |||
| 25f929026f | |||
| 47a8493824 | |||
| 821a4c0df1 | |||
| 1a371b191a | |||
| 471781f942 | |||
| b4eff32427 | |||
| 2d0ca1c7bf | |||
| 88812da592 | |||
| bffa4aa1ef | |||
| 4adf3d5a1e | |||
| 4b111951b7 | |||
| b91e4ddfd1 | |||
| cd90e7a2d0 | |||
| af2d8caebe | |||
| f32b6a6a27 | |||
| 8116160b4d | |||
| 34511a8edf | |||
| 08fb1d3510 | |||
| 6d61e038e7 | |||
| bcb602d3cf | |||
| f4a8a653d0 | |||
| 2c981bc972 | |||
| b322219173 | |||
| 52567557e8 | |||
| 3f1ef8dfd8 | |||
| 1b43f742d3 | |||
| 5f3f00366f | |||
| a61bb6da20 | |||
| 11611fd577 | |||
| 09a6d085fd | |||
| d4548f71c7 | |||
| 9b67f1aa21 | |||
| 2e91f9399a | |||
| 0eb95e238b | |||
| 65bd2ef9cf | |||
| 9a6c995589 | |||
| 8965395377 | |||
| 38d39dd618 | |||
| 0c009495a3 | |||
| 54c59d26b9 | |||
| 92e49c8ad9 | |||
| 493e6cf92c | |||
| 1dcc0cf8c5 | |||
| d3daf2800e | |||
| d0b5c2d3f9 | |||
| 0930e80b9b | |||
| 044d28cfe9 | |||
| 4b4234314d | |||
| baa8d56799 | |||
| d40728aec9 | |||
| c78347b6f9 | |||
| 021765340a | |||
| 567507c412 | |||
| 8bf0566d72 | |||
| 0874794140 | |||
| 154477e1ad | |||
| f495953f6a | |||
| fba0f34020 | |||
| 4752ef19b2 | |||
| 3e50902f07 | |||
| a0f29eb5b8 | |||
| fcbbe4fcac | |||
| 4ef5d172a0 | |||
| 31bd421e22 | |||
| dd60d1a1c4 | |||
| 1892ea666a | |||
| 3a5297015f | |||
| 8ad00f0fa5 | |||
| 3247a3674f | |||
| d88f905609 | |||
| a01a3f1d7a | |||
| 75e7079087 | |||
| 7f58bf48fe | |||
| f7c41532a5 | |||
| a105372b15 | |||
| 54e26fb863 | |||
| 600d52414e | |||
| 5a9a02d3a4 | |||
| bcac627345 | |||
| 5dd47b3cd4 | |||
| c9328041ce | |||
| ddd18f8d70 | |||
| 1ccc5fb9e7 | |||
| fc930a539b | |||
| e7b4e59b65 | |||
| e2b6ae5e81 | |||
| 827300366d | |||
| 8c05589168 | |||
| f29eb5f35a | |||
| 62370b94b3 | |||
| 1114c7766d | |||
| 0c83b9c401 | |||
| f437119711 | |||
| eaa23134de | |||
| 00fe5d91a7 | |||
| 071022c63b | |||
| 3ace2093b2 | |||
| 42e06bd2e6 | |||
| 6dd6fd764a | |||
| 21888c6d00 | |||
| 2bc0ac1cff | |||
| bf3fd4b39a | |||
| 7eed615991 | |||
| 6e56eba0c2 | |||
| 5f2f4262a5 | |||
| 882ef0288a | |||
| 9416165699 | |||
| c72588800f | |||
| 1c6678d55d | |||
| 91e4e751d8 | |||
| bc4432c057 | |||
| 38185273af | |||
| 5fb7ba074c | |||
| d83be5247b | |||
| 0f87ac6a00 | |||
| f61a2d07fe | |||
| d48577b191 | |||
| 4aec829c74 | |||
| d8496bf094 | |||
| 55a0474602 | |||
| 751f3de4b1 | |||
| 5b211c349e | |||
| a578e8160e | |||
| 9ac533ee73 | |||
| d9644f901e | |||
| 0a26f2986f | |||
| 7cf3f91dac | |||
| 33bedbcd67 | |||
| 8de91a8232 | |||
| 23514ca5a4 | |||
| 79ab0d6a4c | |||
| 1476d4262d | |||
| 724f901bbd | |||
| a4e48eb3f4 | |||
| c6f160c8cf | |||
| 62f2876ade | |||
| 93b7c6bf4d | |||
| 635ff4285e | |||
| 0cf963240e | |||
| 160f02e67f | |||
| 045d2ddadf | |||
| 63ebf3af2d | |||
| bf33cdc95c | |||
| 76aeddbde2 | |||
| 3b1c4475c6 | |||
| 5966512a8f | |||
| 8b65c87750 | |||
| 6f6b619c11 | |||
| 3188a67661 | |||
| 4e7fb953ba | |||
| 173c865a69 | |||
| d5ba8d1cde | |||
| 998db09c09 | |||
| 78d575863d | |||
| 503e859b5c | |||
| 5dc61dc397 | |||
| 7c86d95f5e | |||
| 5c40ab3d00 | |||
| 31867d3c6c | |||
| e2b54b37dd | |||
| 6a6df10825 | |||
| 15ffc9eb3e | |||
| 5095b0b4c0 | |||
| 4c126fd859 | |||
| 8f3fded5fe | |||
| 96ea356c62 | |||
| 4c8f7d5958 | |||
| c5ee827230 | |||
| 208de158bc | |||
| d0c1f33227 | |||
| 71db929fa4 | |||
| 56f1506450 | |||
| fae5f6f735 | |||
| 983f25d6d3 | |||
| 1c9f6f30d9 | |||
| 4a7b305ad4 | |||
| b5deb8889a | |||
| 218bbd54da | |||
| 531e4bf32c | |||
| 65bbbdb2b0 | |||
| 13acff1708 | |||
| ff9c0a0b82 | |||
| 69a848d6a7 | |||
| 6a13e3bb0f | |||
| e4266b0bab | |||
| 5bd9c9750d | |||
| e46de27ba9 | |||
| 7bb70c41df | |||
| a771cd0617 | |||
| 21d9b75a09 | |||
| 71015c2ca3 | |||
| ea99219283 | |||
| 0533863230 | |||
| a5ec1838b1 | |||
| 7fb4b5bd18 | |||
| 87aa39959a | |||
| 8b377123e1 | |||
| fb687d50dd | |||
| 64e0e0ce79 | |||
| 5a6a318b60 | |||
| 1ce12c0980 | |||
| 9c374d789e | |||
| f9a91e3a66 | |||
| c551ca2e70 | |||
| 6a4785cdac | |||
| ec7b25df3c | |||
| c601fcc2a4 | |||
| 1524f141b8 | |||
| 50f2c9d161 | |||
| 7712832b76 | |||
| a973da5bb4 | |||
| 3fde67a87d | |||
| 19c9ef462e | |||
| 56c010975c | |||
| 572f63f12b | |||
| a01ca30f5b | |||
| 6517fc9550 | |||
| dcd9f9e0bf | |||
| 26d83aba7a | |||
| 087f6a7157 | |||
| 7e89a3471f | |||
| 1f9b320f04 | |||
| eba97e967b | |||
| 2f65a538fa | |||
| 57d25b637d | |||
| 9c7a62c384 | |||
| 41482bfd4b | |||
| d369cfe333 | |||
| 2082e2a6e5 | |||
| 7a8f0a1c21 | |||
| 3febfff1db | |||
| ad320ae83e | |||
| 5609184d3b | |||
| 1e8d2aba0a | |||
| ebec80f198 | |||
| 2a21decc94 | |||
| 520b39cb0b | |||
| 1b46184781 | |||
| c1675cdf32 | |||
| c5a5e449d4 | |||
| 69a5dfcc45 | |||
| 7c48a6a1dc | |||
| 1af63dee81 | |||
| d4982017f6 | |||
| 60a56fd098 | |||
| 1d4fa4b977 | |||
| 8b1e42de1c | |||
| 6bab1b0189 | |||
| 26fcd4ba50 | |||
| c731639aa4 | |||
| b358a6f4a9 | |||
| df25eaf905 | |||
| 821c81dd9c | |||
| 3981fa3181 | |||
| a577b5510d | |||
| 1612778baa | |||
| 4cbe78f81f | |||
| 31d38c016e | |||
| 08eebd6071 | |||
| c276a0eeb0 | |||
| 9f91490441 | |||
| e0a44ae199 | |||
| ab388af35f | |||
| 95977f0853 | |||
| b823862cec | |||
| 522718f3a1 | |||
| dfd476411f | |||
| 626d76f406 | |||
| c576fc0241 | |||
| 385c8ce04b | |||
| 34c16c8cdf | |||
| 2f4c8acaa2 | |||
| 960a00101c | |||
| c46dc759d7 | |||
| 16728f1d49 | |||
| 4c625db853 | |||
| fce78df549 | |||
| a4411cfa34 | |||
| a43a44302b | |||
| 451f041206 | |||
| 6595d12108 | |||
| 983ad12dd3 | |||
| 3ff52f944c | |||
| 77282ade62 | |||
| 1223c633d4 | |||
| d55a3050fc | |||
| 62a2280a80 | |||
| c57fca0aee | |||
| 612f91a708 | |||
| a25a434ea2 | |||
| ac9f1d8a40 | |||
| e32baa8d8f | |||
| 9580282c79 | |||
| c24f3bbb4a | |||
| 04e20b29ee | |||
| b2fdc9a453 | |||
| 8708efece2 | |||
| 51f56c0f1f | |||
| e58fbe263f | |||
| ea28f5346c | |||
| 4743581395 | |||
| 3f12bcfd39 | |||
| 10ad7089f4 | |||
| 8d371e6519 | |||
| 76ee4a387c | |||
| 7a4c02d11d | |||
| ae861aa8b4 | |||
| ddc5254e5f | |||
| 543b2e6b4d | |||
| 626e899ca3 | |||
| f5ebd0ada9 | |||
| afe710d955 | |||
| 1946d5eda2 | |||
| 3476d6e6d1 | |||
| 85f63a0e17 | |||
| 1cc779e17b | |||
| b04fc1ba65 | |||
| 5afa046f18 | |||
| fbf21ae3f9 | |||
| 12439b6ef2 | |||
| b72ef072e4 | |||
| ee6a636e68 | |||
| e942fdbffa | |||
| 13e609bcf7 | |||
| d5d5a69ab4 | |||
| 53545605d0 | |||
| d93fa4cb4b | |||
| 35ef4357fb | |||
| d3fe4c4aff | |||
| 1e0d0f465a | |||
| 6e80942beb | |||
| 67636e6d17 | |||
| 713fb4d62b | |||
| 67c299939c | |||
| 1042eb6e58 | |||
| db2ae09ead | |||
| 708bdaa7f6 | |||
| 9c02333e2b | |||
| bfc177a811 | |||
| d53256bcd7 | |||
| 231de135ca | |||
| 5f36b7c6e2 | |||
| 23e46df8a9 | |||
| 6b8b61fa37 | |||
| 25964b6797 | |||
| c0b3e90943 | |||
| 9c4ddea33d | |||
| f41359b8c9 | |||
| 44b797c1de | |||
| 4933553d50 | |||
| 93c9fcc248 | |||
| 2365485a68 | |||
| 27bea7d06f | |||
| c29838b6ee | |||
| c8baa6abf9 | |||
| 9358a86df1 | |||
| 7e8757ec72 | |||
| c1fe419ff9 | |||
| ebf1309b48 | |||
| d83b459408 | |||
| db8472ae06 | |||
| 9d265fa3f9 | |||
| 5169cff892 | |||
| 8f2bd30d54 | |||
| b8266c41fc | |||
| 1a601b93eb | |||
| 1b838676e3 | |||
| 8cc9d0d4d3 | |||
| 8e77a57bc1 | |||
| e74c9688c8 | |||
| 60d6743fcd | |||
| f42d81b9fc | |||
| 774240ca73 | |||
| fb2c31a81b | |||
| eba991f4f5 | |||
| 0fdb056460 | |||
| 17da2c8359 | |||
| 0abb4d605d | |||
| 465d9093bd | |||
| 67e4cacb28 | |||
| a3d1d1b067 | |||
| 2e5919f3e6 | |||
| 9b2db3cc1d | |||
| 9307fc97fb | |||
| b3a998fec2 | |||
| 5ba7f5e3c9 | |||
| 9212fbe6b5 | |||
| 8dcd985c67 | |||
| c9dcd6a9c9 | |||
| afef19fae3 | |||
| 2e2dc80718 | |||
| abc5381adb | |||
| 75dd4120ec | |||
| b0637da11d | |||
| 968935869e | |||
| 74e000c96b | |||
| 8dd885b6a8 | |||
| 042cf595f7 | |||
| 3712ecf8ae | |||
| d20647c825 | |||
| 98010ed1bc | |||
| 76d4fc675f | |||
| e4cc182db4 | |||
| 9ca7a42d56 | |||
| 570c8a97e3 | |||
| 3bde3ea5e9 | |||
| d54e2a2f3f | |||
| a1ee49ba54 | |||
| e638ad81e2 | |||
| bce43096b1 | |||
| 19d10b6219 | |||
| a9ab2e8bb2 | |||
| 9a94e5ac56 | |||
| d93311541e | |||
| 01d2d90df1 | |||
| 7b72963b24 | |||
| c90783f461 | |||
| 9d9adfbdfa | |||
| f43624ca3d | |||
| 3f6f7f1aa0 | |||
| da89b20e5c | |||
| c4817c1e52 | |||
| c444895945 | |||
| 9791ba4b49 | |||
| 6ed144327c | |||
| 472801199c | |||
| a3514e6874 | |||
| 95b2b97dd4 | |||
| df934a8fd2 | |||
| d89fa283dc | |||
| 1592065a8c | |||
| 4c1fbf64a2 | |||
| 3742528e3a | |||
| 232892d397 | |||
| e0b3562e80 | |||
| 71c2e8ea13 | |||
| b73cce5431 | |||
| 0d618116e1 | |||
| b7dbaa6e73 | |||
| 5fe51e03bb | |||
| 306caf9520 | |||
| e6f42b388a | |||
| fd7bd385fc | |||
| 7d97f40826 | |||
| bc01dfb125 | |||
| 5dfb890b84 | |||
| 2beb584e87 | |||
| 1f3607b4d3 | |||
| 0051492bd3 | |||
| 0f5df77d28 | |||
| c80229b7b9 | |||
| 8bc7a471cd | |||
| 91a2854537 | |||
| 3d8e484187 | |||
| be6d2454b1 | |||
| 4e97a22642 | |||
| a749ade30b | |||
| 3d90d9c81d | |||
| 102eae1c98 | |||
| 75cd8b9f71 | |||
| a18ad12ff7 | |||
| f7fdd6fd76 | |||
| 80685fd1cc | |||
| 69409f92e1 | |||
| cfcb858bba | |||
| 8618e1eff7 | |||
| e0a5c654d8 | |||
| e61db5d6e5 | |||
| fac25ab4f4 | |||
| ceeeb23c26 | |||
| ce90fedacb | |||
| 0179c69b82 | |||
| dac79b53ca | |||
| b372fd81d5 | |||
| 205019ce39 | |||
| 9c4d88c8fd | |||
| dd2becaab2 | |||
| 658c8c7702 | |||
| 809b980145 | |||
| 1185880f8e | |||
| 499ecb501d | |||
| b3e7d24d9d | |||
| 78b12d4f33 | |||
| 5caa2d1f8c | |||
| c46f264c4b | |||
| f6b21174bf | |||
| d15b36a0f1 | |||
| 232cdbfad8 | |||
| 55e28162fe | |||
| 49eec2de46 | |||
| 52f5a4e813 | |||
| a5d99fa517 | |||
| c3a482614e | |||
| 67576d0a5b | |||
| f395832d32 | |||
| ff834987d4 | |||
| e23e379102 | |||
| f5a3e273a6 | |||
| f9bc1d67ae | |||
| b63b6e7ee7 | |||
| 34e18317a2 | |||
| a2b47dab66 | |||
| 0e9f0b0682 | |||
| 2679b2c873 | |||
| 0da4e110c1 | |||
| 21316187e0 | |||
| 7f22966b41 | |||
| 34d04e4240 | |||
| d7dd79336b | |||
| eaca3d613d | |||
| 756a80151a | |||
| 4395e3a72d | |||
| 441bcc1e90 | |||
| 17c29c7f4f | |||
| b142949805 | |||
| 05136699ee | |||
| c2cc428abe | |||
| 1c49780cd4 | |||
| 54457cb9c5 | |||
| 2c524279f6 | |||
| 44bd146bdf | |||
| 9e3306fc3d | |||
| 3389c5ce20 | |||
| b71210a644 | |||
| c8a951594c | |||
| da8ee9b9c3 | |||
.cursorignore (new file, 1 line)
@@ -0,0 +1 @@

```text
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
```
Workflow file (name missing in source)
@@ -25,19 +25,11 @@ jobs:

```yaml
      git_remote_url: 'ssh://dokku@v2.discours.io:22/discoursio-api'
      ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}

    - name: Check UPLOADER is running
      id: check_container
      uses: appleboy/ssh-action@master
      with:
        host: staging.discours.io
        username: dokku
        key: ${{ secrets.SSH_PRIVATE_KEY }}
        script: ps:report uploader | grep 'Processes:' | awk '{print $2}' | grep '^1$'

    - name: Push to dokku for dev branch
      if: github.ref == 'refs/heads/dev' && steps.check_container.outcome == 'success'
      if: github.ref == 'refs/heads/dev'
      uses: dokku/github-action@master
      with:
        branch: 'dev'
        git_remote_url: 'ssh://dokku@staging.discours.io:22/core'
        force: true
        git_remote_url: 'ssh://dokku@v2.discours.io:22/core'
        ssh_private_key: ${{ secrets.SSH_PRIVATE_KEY }}
```
.github/workflows/deploy.yml (4 changed lines)
@@ -17,11 +17,11 @@ jobs:

```yaml
    - uses: webfactory/ssh-agent@v0.8.0
      with:
        ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }}
        ssh-private-key: ${{ github.action.secrets.SSH_PRIVATE_KEY }}

    - name: Push to dokku
      env:
        HOST_KEY: ${{ secrets.HOST_KEY }}
        HOST_KEY: ${{ github.action.secrets.HOST_KEY }}
      run: |
        echo $HOST_KEY > ~/.ssh/known_hosts
        git remote add dokku dokku@v2.discours.io:discoursio-api
```
.gitignore (9 changed lines)
@@ -153,3 +153,12 @@ poetry.lock

```text
.zed

dokku_config

*.db
*.sqlite3
views.json
*.pem
*.key
*.crt
*cache.json
.cursor
```
Pre-commit configuration (file name missing in source)
@@ -12,7 +12,7 @@ repos:

```yaml
      - id: check-merge-conflict

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.3.5
    rev: v0.4.7
    hooks:
      - id: ruff
        args: [--fix]
```
CHANGELOG.md (new file, 311 lines; the extract below ends mid-file)
@@ -0,0 +1,311 @@

#### [0.4.16] - 2025-03-22
- Added hierarchical comments pagination:
  - Created new GraphQL query `load_comments_branch` for efficient loading of hierarchical comments
  - Ability to load root comments with their first N replies
  - Added pagination for both root and child comments
  - Using existing `commented` field in `Stat` type to display number of replies
  - Added special `first_replies` field to store first replies to a comment
  - Optimized SQL queries for efficient loading of comment hierarchies
  - Implemented flexible comment sorting system (by time, rating)
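As an illustration of the new query, a minimal client call over HTTP; the argument names (`shout`, `limit`, `sort`) are assumptions, while the query name and the `stat { commented }` / `first_replies` fields come from the entry above:

```python
import requests

# Hypothetical arguments; only `load_comments_branch`, `first_replies`
# and `stat { commented }` are taken from the changelog entry above.
query = """
query LoadCommentsBranch($shout: Int!, $limit: Int, $sort: String) {
  load_comments_branch(shout: $shout, limit: $limit, sort: $sort) {
    id
    body
    stat { commented }          # number of replies, per the entry above
    first_replies { id body }   # first N replies preloaded with the root
  }
}
"""

resp = requests.post(
    "http://localhost:8000",  # GraphQL Playground endpoint from the README below
    json={"query": query, "variables": {"shout": 1, "limit": 10, "sort": "rating"}},
)
print(resp.json())
```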
#### [0.4.15] - 2025-03-22
- Upgraded caching system, described in `docs/caching.md`
- Module `cache/memorycache.py` removed
- Enhanced caching system with backward compatibility:
  - Unified cache key generation with support for existing naming patterns
  - Improved Redis operation function with better error handling
  - Updated precache module to use consistent Redis interface
  - Integrated revalidator with the invalidation system for better performance
  - Added comprehensive documentation for the caching system
  - Enhanced cached_query to support template-based cache keys
  - Standardized error handling across all cache operations
- Optimized cache invalidation system:
  - Added targeted invalidation for individual entities (authors, topics)
  - Improved revalidation manager with individual object processing
  - Implemented batched processing for high-volume invalidations
  - Reduced Redis operations by using precise key invalidation instead of prefix-based wipes
  - Added special handling for slug changes in topics
- Unified caching system for all models:
  - Implemented abstract functions `cache_data`, `get_cached_data` and `invalidate_cache_by_prefix`
  - Added `cached_query` function for unified approach to query caching
  - Updated resolvers `author.py` and `topic.py` to use the new caching API
  - Improved logging for cache operations to simplify debugging
  - Optimized Redis memory usage through key format unification
- Improved caching and sorting in Topic and Author modules:
  - Added support for dictionary sorting parameters in `by` for both modules
  - Optimized cache key generation for stable behavior with various parameters
  - Enhanced sorting logic with direction support and arbitrary fields
  - Added `by` parameter support in the API for getting topics by community
- Performance optimizations for author-related queries:
  - Added SQLAlchemy-managed indexes to `Author`, `AuthorFollower`, `AuthorRating` and `AuthorBookmark` models
  - Implemented persistent Redis caching for author queries without TTL (invalidated only on changes)
  - Optimized author retrieval with separate endpoints:
    - `get_authors_all` - returns all non-deleted authors without statistics
    - `get_authors_paginated` - returns authors with statistics and pagination support
    - `load_authors_by` - optimized to use caching and efficient sorting
  - Improved SQL queries with optimized JOIN conditions and efficient filtering
  - Added pre-aggregation of statistics (shouts count, followers count) in single efficient queries
  - Implemented robust cache invalidation on author updates
  - Created necessary indexes for author lookups by user ID, slug, and timestamps
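A minimal sketch of what a `cached_query`-style helper with template-based keys and precise invalidation could look like; the real implementation lives in the repo's cache module and is documented in `docs/caching.md`, and the direct `redis.asyncio` client here is an assumption (the repo wraps its own Redis adapter):

```python
import json

import redis.asyncio as redis  # assumed client; the repo uses its own adapter

r = redis.Redis()

async def cached_query(key_template: str, fetch, **kwargs):
    """Template-based cache keys with no TTL, invalidated only on changes."""
    key = key_template.format(**kwargs)      # e.g. "author:id:{author_id}"
    cached = await r.get(key)
    if cached is not None:
        return json.loads(cached)
    result = await fetch(**kwargs)           # fall through to the database
    await r.set(key, json.dumps(result))     # persistent: no TTL
    return result

async def invalidate(key_template: str, **kwargs):
    # Precise single-key invalidation instead of a prefix-based wipe
    await r.delete(key_template.format(**kwargs))
```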
#### [0.4.14] - 2025-03-21
- Significant performance improvements for topic queries:
  - Added database indexes to optimize JOIN operations
  - Implemented persistent Redis caching for topic queries (no TTL, invalidated only on changes)
  - Optimized topic retrieval with separate endpoints for different use cases:
    - `get_topics_all` - returns all topics without statistics for lightweight listing
    - `get_topics_paginated` - returns topics with statistics and pagination support
    - `get_topics_by_community` - adds pagination and optimized filtering by community
  - Added SQLAlchemy-managed indexes directly in ORM models for automatic schema maintenance
  - Created `sync_indexes()` function for automatic index synchronization during app startup
  - Reduced database load by pre-aggregating statistics in optimized SQL queries
  - Added robust cache invalidation on topic create/update/delete operations
  - Improved query optimization with proper JOIN conditions and specific partial indexes
#### [0.4.13] - 2025-03-20
- Fixed Topic objects serialization error in `cache/memorycache.py`
- Improved CustomJSONEncoder to support SQLAlchemy models with `dict()` method
- Enhanced error handling in `cache_on_arguments` decorator
- Modified `load_reactions_by` to include deleted reactions when `include_deleted=true` for proper comment tree building
- Fixed featured/unfeatured logic in reaction processing:
  - Dislike reactions now properly take precedence over likes
  - Featured status now requires more than 4 likes from users with featured articles
  - Removed unnecessary filters for deleted reactions since rating reactions are physically deleted
  - Author's featured status now based on having non-deleted articles with `featured_at`
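A sketch of the `CustomJSONEncoder` idea described above, assuming only that models expose a `dict()` method as the entry states:

```python
import json

class CustomJSONEncoder(json.JSONEncoder):
    """Sketch: serialize SQLAlchemy models that expose a dict() method."""

    def default(self, obj):
        # Models in this codebase (e.g. Topic) expose a dict() method
        if hasattr(obj, "dict") and callable(obj.dict):
            return obj.dict()
        return super().default(obj)

# json.dumps(topic, cls=CustomJSONEncoder) would then work for Topic objects
```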
#### [0.4.12] - 2025-03-19
- `delete_reaction` detects comments and uses `deleted_at` update
- `check_to_unfeature` etc. update
- dogpile dep in `services/memorycache.py` optimized

#### [0.4.11] - 2025-02-12
- `create_draft` resolver `draft_id` requirement fixed
- `create_draft` resolver defaults body and title fields to empty string

#### [0.4.9] - 2025-02-09
- `Shout.draft` field added
- `Draft` entity added
- `create_draft`, `update_draft`, `delete_draft` mutations and resolvers added
- `create_shout`, `update_shout`, `delete_shout` mutations removed from GraphQL API
- `load_drafts` resolver implemented
- `publish_` and `unpublish_` mutations and resolvers added
- `create_`, `update_`, `delete_` mutations and resolvers added for `Draft` entity
- tests with pytest for original auth, shouts, drafts
- `Dockerfile` and `pyproject.toml` removed for simplicity in favor of `Procfile` and `requirements.txt`

#### [0.4.8] - 2025-02-03
- `Reaction.deleted_at` filter on `update_reaction` resolver added
- `triggers` module updated with `after_shout_handler`, `after_reaction_handler` for cache revalidation
- `after_shout_handler`, `after_reaction_handler` now also handle `deleted_at` field
- `get_cached_topic_followers` fixed
- `get_my_rates_comments` fixed

#### [0.4.7]
- `get_my_rates_shouts` resolver added with:
  - `shout_id` and `my_rate` fields in response
  - filters by `Reaction.deleted_at.is_(None)`
  - filters by `Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value])`
  - filters by `Reaction.reply_to.is_(None)`
  - uses `local_session()` context manager
  - returns empty list on errors
- SQLAlchemy syntax updated:
  - `select()` statement fixed for newer versions
  - `Reaction` model direct selection instead of labeled columns
  - proper row access with `row[0].shout` and `row[0].kind`
- GraphQL resolver fixes:
  - added root parameter `_` to match schema
  - proper async/await handling with `@login_required`
  - error logging added via `logger.error()`

#### [0.4.6]
- login_accepted decorator added
- `docs` added
- optimized and unified `load_shouts_*` resolvers with `LoadShoutsOptions`
- `load_shouts_bookmarked` resolver fixed
- resolvers updates:
  - new resolvers group `feed`
  - `load_shouts_authored_by` resolver added
  - `load_shouts_with_topic` resolver added
  - `load_shouts_followed` removed
  - `load_shouts_random_topic` removed
  - `get_topics_random` removed
- model updates:
  - `ShoutsOrderBy` enum added
  - `Shout.main_topic` from `ShoutTopic.main` as `Topic` type output
  - `Shout.created_by` as `Author` type output

#### [0.4.5]
- `bookmark_shout` mutation resolver added
- `load_shouts_bookmarked` resolver added
- `get_communities_by_author` resolver added
- `get_communities_all` resolver fixed
- `Community` stats in orm
- `Community` CUDL resolvers added
- `Reaction` filter by `Reaction.kind`s
- `ReactionSort` enum added
- `CommunityFollowerRole` enum added
- `InviteStatus` enum added
- `Topic.parents` ids added
- `get_shout` resolver accepts slug or shout_id

#### [0.4.4]
- `followers_stat` removed for shout
- sqlite3 support added
- `rating_stat` and `commented_stat` fixes

#### [0.4.3]
- cache reimplemented
- load shouts queries unified
- `followers_stat` removed from shout

#### [0.4.2]
- reactions load resolvers separated for ratings (no stats) and comments
- reactions stats improved
- `load_comment_ratings` separate resolver

#### [0.4.1]
- follow/unfollow logic updated and unified with cache

#### [0.4.0]
- chore: version migrator synced
- feat: precache_data on start
- fix: store id list for following cache data
- fix: shouts stat filter out deleted

#### [0.3.5]
- cache isolated to services
- topics followers and authors cached
- redis stores lists of ids

#### [0.3.4]
- `load_authors_by` from cache

#### [0.3.3]
- feat: sentry integration enabled with glitchtip
- fix: reindex on update shout
- packages upgrade, isort
- separated stats queries for author and topic
- fix: feed featured filter
- fts search removed

#### [0.3.2]
- redis cache for what author follows
- redis cache for followers
- graphql add query: get topic followers

#### [0.3.1]
- enabling sentry
- long query log report added
- editor fixes
- authors links cannot be updated by `update_shout` anymore

#### [0.3.0]
- `Shout.featured_at` timestamp of the frontpage featuring event
- added proposal accepting logics
- schema modularized
- Shout.visibility removed

#### [0.2.22]
- added precommit hook
- fmt
- granian asgi

#### [0.2.21]
- fix: rating logic
- fix: `load_top_random_shouts`
- resolvers: `add_stat_*` refactored
- services: use google analytics
- services: minor fixes search

#### [0.2.20]
- services: ackee removed
- services: following manager fixed
- services: import views.json

#### [0.2.19]
- fix: adding `author` role
- fix: stripping `user_id` in auth connector

#### [0.2.18]
- schema: added `Shout.seo` string field
- resolvers: added `/new-author` webhook resolver
- resolvers: added reader.load_shouts_top_random
- resolvers: added reader.load_shouts_unrated
- resolvers: community follower id property name is `.author`
- resolvers: `get_authors_all` and `load_authors_by`
- services: auth connector upgraded

#### [0.2.17]
- schema: enum types workaround, `ReactionKind`, `InviteStatus`, `ShoutVisibility`
- schema: `Shout.created_by`, `Shout.updated_by`
- schema: `Shout.authors` can be empty
- resolvers: optimized `reacted_shouts_updates` query

#### [0.2.16]
- resolvers: collab inviting logics
- resolvers: queries and mutations revision and renaming
- resolvers: `delete_topic(slug)` implemented
- resolvers: added `get_shout_followers`
- resolvers: `load_shouts_by` filters implemented
- orm: invite entity
- schema: `Reaction.range` -> `Reaction.quote`
- filters: `time_ago` -> `after`
- httpx -> aiohttp

#### [0.2.15]
- schema: `Shout.created_by` removed
- schema: `Shout.mainTopic` removed
- services: cached elasticsearch connector
- services: auth is using `user_id` from authorizer
- resolvers: `notify_*` usage fixes
- resolvers: `getAuthor` now accepts slug, `user_id` or `author_id`
- resolvers: login_required usage fixes

#### [0.2.14]
- schema: some fixes from migrator
- schema: `.days` -> `.time_ago`
- schema: `excludeLayout` + `layout` in filters -> `layouts`
- services: db access simpler, no contextmanager
- services: removed Base.create() method
- services: rediscache updated
- resolvers: get_reacted_shouts_updates as followedReactions query

#### [0.2.13]
- services: db context manager
- services: `ViewedStorage` fixes
- services: views are not stored in core db anymore
- schema: snake case in model fields names
- schema: no DateTime scalar
- resolvers: `get_my_feed` comments filter reactions body.is_not('')
- resolvers: `get_my_feed` query fix
- resolvers: `LoadReactionsBy.days` -> `LoadReactionsBy.time_ago`
- resolvers: `LoadShoutsBy.days` -> `LoadShoutsBy.time_ago`

#### [0.2.12]
- `Author.userpic` -> `Author.pic`
- `CommunityFollower.role` is string now
- `Author.user` is string now

#### [0.2.11]
- redis interface updated
- `viewed` interface updated
- `presence` interface updated
- notify on create, update, delete for reaction and shout
- notify on follow / unfollow author
- use pyproject
- devmode fixed

#### [0.2.10]
- community resolvers connected

#### [0.2.9]
- starlette is back, aiohttp removed
- aioredis replaced with aredis

#### [0.2.8]
- refactored

#### [0.2.7]
- `loadFollowedReactions` now with `login_required`
CHANGELOG.txt (deleted file, 151 lines)
@@ -1,151 +0,0 @@

```text
[0.3.4]
- load_authors_by from cache

[0.3.3]
- feat: sentry integration enabled with glitchtip
- fix: reindex on update shout
- packages upgrade, isort
- separated stats queries for author and topic
- fix: feed featured filter
- fts search removed

[0.3.2]
- redis cache for what author follows
- redis cache for followers
- graphql add query: get topic followers

[0.3.1]
- enabling sentry
- long query log report added
- editor fixes
- authors links cannot be updated by update_shout anymore

[0.3.0]
- Shout.featured_at timestamp of the frontpage featuring event
- added proposal accepting logics
- schema modularized
- Shout.visibility removed

[0.2.22]
- added precommit hook
- fmt
- granian asgi

[0.2.21]
- fix: rating logic
- fix: load_top_random_shouts
- resolvers: add_stat_* refactored
- services: use google analytics
- services: minor fixes search

[0.2.20]
- services: ackee removed
- services: following manager fixed
- services: import views.json

[0.2.19]
- fix: adding 'author' role
- fix: stripping user_id in auth connector

[0.2.18]
- schema: added Shout.seo string field
- resolvers: added /new-author webhook resolver
- resolvers: added reader.load_shouts_top_random
- resolvers: added reader.load_shouts_unrated
- resolvers: community follower id property name is .author
- resolvers: get_authors_all and load_authors_by
- services: auth connector upgraded

[0.2.17]
- schema: enum types workaround, ReactionKind, InviteStatus, ShoutVisibility
- schema: Shout.created_by, Shout.updated_by
- schema: Shout.authors can be empty
- resolvers: optimized reacted shouts updates query

[0.2.16]
- resolvers: collab inviting logics
- resolvers: queries and mutations revision and renaming
- resolvers: delete_topic(slug) implemented
- resolvers: added get_shout_followers
- resolvers: load_shouts_by filters implemented
- orm: invite entity
- schema: Reaction.range -> Reaction.quote
- filters: time_ago -> after
- httpx -> aiohttp

[0.2.15]
- schema: Shout.created_by removed
- schema: Shout.mainTopic removed
- services: cached elasticsearch connector
- services: auth is using user_id from authorizer
- resolvers: notify_* usage fixes
- resolvers: getAuthor now accepts slug, user_id or author_id
- resolvers: login_required usage fixes

[0.2.14]
- schema: some fixes from migrator
- schema: .days -> .time_ago
- schema: excludeLayout + layout in filters -> layouts
- services: db access simpler, no contextmanager
- services: removed Base.create() method
- services: rediscache updated
- resolvers: get_reacted_shouts_updates as followedReactions query

[0.2.13]
- services: db context manager
- services: ViewedStorage fixes
- services: views are not stored in core db anymore
- schema: snake case in model fields names
- schema: no DateTime scalar
- resolvers: get_my_feed comments filter reactions body.is_not('')
- resolvers: get_my_feed query fix
- resolvers: LoadReactionsBy.days -> LoadReactionsBy.time_ago
- resolvers: LoadShoutsBy.days -> LoadShoutsBy.time_ago

[0.2.12]
- Author.userpic -> Author.pic
- CommunityAuthor.role is string now
- Author.user is string now

[0.2.11]
- redis interface updated
- viewed interface updated
- presence interface updated
- notify on create, update, delete for reaction and shout
- notify on follow / unfollow author
- use pyproject
- devmode fixed

[0.2.10]
- community resolvers connected

[0.2.9]
- starlette is back, aiohttp removed
- aioredis replaced with aredis

[0.2.8]
- refactored

[0.2.7]
- loadFollowedReactions now with login_required
- notifier service api draft
- added shout visibility kind in schema
- community isolated from author in orm

[0.2.6]
- redis connection pool
- auth context fixes
- communities orm, resolvers, schema

[0.2.5]
- restructured
- all users have their profiles as authors in core
- gittask, inbox and auth logics removed
- settings moved to base and now smaller
- new outside auth schema
- removed gittask, auth, inbox, migration
```
Dockerfile (25 changed lines)
@@ -1,25 +1,18 @@

```dockerfile
FROM python:alpine
FROM python:slim

# Update package lists and install necessary dependencies
RUN apk update && \
    apk add --no-cache build-base icu-data-full curl python3-dev musl-dev && \
    curl -sSL https://install.python-poetry.org | python
RUN apt-get update && apt-get install -y \
    postgresql-client \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Set working directory
WORKDIR /app

# Copy only the pyproject.toml file initially
COPY pyproject.toml /app/
COPY requirements.txt .

# Install poetry and dependencies
RUN pip install poetry && \
    poetry config virtualenvs.create false && \
    poetry install --no-root --only main
RUN pip install -r requirements.txt

# Copy the rest of the files
COPY . /app
COPY . .

# Expose the port
EXPOSE 8000

CMD ["python", "server.py"]
CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "8000"]
```
README.md (114 changed lines; old and new lines interleaved, Russian originals translated)
@@ -1,56 +1,102 @@

````markdown
## Tech stack
# GraphQL API Backend

Backend service providing GraphQL API for content management system with reactions, ratings and comments.

- sqlalchemy
- redis
- ariadne
- starlette
- granian
## Core Features

## Local development
### Shouts (Posts)
- CRUD operations via GraphQL mutations
- Rich filtering and sorting options
- Support for multiple authors and topics
- Rating system with likes/dislikes
- Comments and nested replies
- Bookmarks and following

Prepare the dependencies
### Reactions System
- `ReactionKind` types: LIKE, DISLIKE, COMMENT
- Rating calculation for shouts and comments
- User-specific reaction tracking
- Reaction stats and aggregations
- Nested comments support

osx:
```
brew install redis nginx postgres
brew services start redis
```
### Authors & Topics
- Author profiles with stats
- Topic categorization and hierarchy
- Following system for authors/topics
- Activity tracking and stats
- Community features

debian/ubuntu:
```
apt install redis nginx
```
## Tech Stack

Then start postgres, redis and our API server:
- **[Python](https://www.python.org/)** 3.12+
- **GraphQL** with [Ariadne](https://ariadnegraphql.org/)
- **[SQLAlchemy](https://docs.sqlalchemy.org/en/20/orm/)**
- **[PostgreSQL](https://www.postgresql.org/)/[SQLite](https://www.sqlite.org/)** support
- **[Starlette](https://www.starlette.io/)** for ASGI server
- **[Redis](https://redis.io/)** for caching

## Development

### Prepare environment:

```shell
mkdir .venv
python3.12 -m venv .venv
poetry env use .venv/bin/python3.12
poetry update
poetry run server.py
python3.12 -m venv venv
source venv/bin/activate
```
## Connected services

Separate logic is used for inter-server communication; the `services/*` folder contains adapters for the database, `redis`, caching, and clients for GraphQL requests.
### Run server

### auth.py
First, certificates are required to run the server.

Set the `WEBHOOK_SECRET` environment variable to accept requests at `/new-author` from the [authorization server](https://dev.discours.io/devstack/authorizer). The event is expected when a new user is created. For authorized queries and mutations, the frontend adds an authorization token to the `Authorization` request header.
```shell
mkcert -install
mkcert localhost
```

### viewed.py
Then, run the server:

Set the `GOOGLE_KEYFILE_PATH` and `GOOGLE_PROPERTY_ID` environment variables to fetch data from [Google Analytics](https://developers.google.com/analytics?hl=ru).
```shell
python server.py dev
```

### search.py
### Useful Commands

Returns cached results of user search queries from ElasticSearch with a `score`, merged with database queries; requested through the GraphQL API `load_shouts_search`. Requires `ELASTIC_HOST`, `ELASTIC_PORT`, `ELASTIC_USER` and `ELASTIC_PASSWORD` to be set.
```shell
# Linting and import sorting
ruff check . --fix --select I

### notify.py
# Code formatting
ruff format . --line-length=120

Sends notifications over Redis PubSub channels, following the data structure owned by the [notification service](https://dev.discours.io/discours.io/notifier)
# Run tests
pytest

### unread.py
# Type checking
mypy .
```

The unread message counter is obtained via a Redis query against the data of the [messaging service](https://dev.discours.io/discours.io/inbox).
### Code Style

We use:
- Ruff for linting and import sorting
- Line length: 120 characters
- Python type hints
- Docstrings for public methods

### GraphQL Development

Test queries in GraphQL Playground at `http://localhost:8000`:

```graphql
# Example query
query GetShout($slug: String) {
  get_shout(slug: $slug) {
    id
    title
    main_author {
      name
    }
  }
}
```
````
alembic/env.py (new file, 76 lines)
@@ -0,0 +1,76 @@

```python
from logging.config import fileConfig

from sqlalchemy import engine_from_config, pool

from alembic import context
from services.db import Base
from settings import DB_URL

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# override DB_URL
config.set_section_option(config.config_ini_section, "DB_URL", DB_URL)

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

target_metadata = [Base.metadata]

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
```
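Migrations driven by this `env.py` are typically generated and applied through Alembic's command API; a hedged example, assuming a standard `alembic.ini` sits in the project root:

```python
from alembic import command
from alembic.config import Config

# Assumes the conventional alembic.ini next to the alembic/ directory
cfg = Config("alembic.ini")
command.revision(cfg, message="init", autogenerate=True)  # generate a migration
command.upgrade(cfg, "head")                              # apply all migrations
```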
auth/authenticate.py (new file, 96 lines)
@@ -0,0 +1,96 @@

```python
from functools import wraps
from typing import Optional, Tuple

from graphql.type import GraphQLResolveInfo
from sqlalchemy.orm import exc, joinedload
from starlette.authentication import AuthenticationBackend
from starlette.requests import HTTPConnection

from auth.credentials import AuthCredentials, AuthUser
from auth.exceptions import OperationNotAllowed
from auth.tokenstorage import SessionToken
from auth.usermodel import Role, User
from services.db import local_session
from settings import SESSION_TOKEN_HEADER


class JWTAuthenticate(AuthenticationBackend):
    async def authenticate(self, request: HTTPConnection) -> Optional[Tuple[AuthCredentials, AuthUser]]:
        if SESSION_TOKEN_HEADER not in request.headers:
            return AuthCredentials(scopes={}), AuthUser(user_id=None, username="")

        token = request.headers.get(SESSION_TOKEN_HEADER)
        if not token:
            print("[auth.authenticate] no token in header %s" % SESSION_TOKEN_HEADER)
            return AuthCredentials(scopes={}, error_message=str("no token")), AuthUser(user_id=None, username="")

        if len(token.split(".")) > 1:
            payload = await SessionToken.verify(token)

            with local_session() as session:
                try:
                    user = (
                        session.query(User)
                        .options(
                            joinedload(User.roles).options(joinedload(Role.permissions)),
                            joinedload(User.ratings),
                        )
                        .filter(User.id == payload.user_id)
                        .one()
                    )

                    scopes = {}  # TODO: integrate await user.get_permission()

                    return (
                        AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
                        AuthUser(user_id=user.id, username=""),
                    )
                except exc.NoResultFound:
                    pass

        return AuthCredentials(scopes={}, error_message=str("Invalid token")), AuthUser(user_id=None, username="")


def login_required(func):
    @wraps(func)
    async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
        auth: AuthCredentials = info.context["request"].auth
        if not auth or not auth.logged_in:
            return {"error": "Please login first"}
        return await func(parent, info, *args, **kwargs)

    return wrap


def permission_required(resource, operation, func):
    @wraps(func)
    async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
        print("[auth.authenticate] permission_required for %r with info %r" % (func, info))  # debug only
        auth: AuthCredentials = info.context["request"].auth
        if not auth.logged_in:
            raise OperationNotAllowed(auth.error_message or "Please login")

        # TODO: add actual permission check logic here

        return await func(parent, info, *args, **kwargs)

    return wrap


def login_accepted(func):
    @wraps(func)
    async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
        auth: AuthCredentials = info.context["request"].auth

        # If authorization is present, add the author's data to the context
        if auth and auth.logged_in:
            info.context["author"] = auth.author
            info.context["user_id"] = auth.author.get("id")
        else:
            # Clear author data from the context when authorization is absent
            info.context["author"] = None
            info.context["user_id"] = None

        return await func(parent, info, *args, **kwargs)

    return wrap
```
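A hypothetical resolver showing how `login_required` is meant to be applied; the resolver name and return shape are illustrative, while `info.context["request"].auth` is populated by `JWTAuthenticate` above:

```python
from auth.authenticate import login_required

@login_required
async def my_profile(_, info):
    # Reaches this body only for a logged-in request; otherwise the
    # decorator returns {"error": "Please login first"}
    auth = info.context["request"].auth
    return {"user_id": auth.user_id}
```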
auth/credentials.py (new file, 43 lines)
@@ -0,0 +1,43 @@

```python
from typing import List, Optional, Text

from pydantic import BaseModel

# from base.exceptions import Unauthorized


class Permission(BaseModel):
    name: Text


class AuthCredentials(BaseModel):
    user_id: Optional[int] = None
    scopes: Optional[dict] = {}
    logged_in: bool = False
    error_message: str = ""

    @property
    def is_admin(self):
        # TODO: check admin logix
        return True

    async def permissions(self) -> List[Permission]:
        if self.user_id is None:
            # raise Unauthorized("Please login first")
            return {"error": "Please login first"}
        else:
            # TODO: implement permissions logix
            print(self.user_id)
            return NotImplemented


class AuthUser(BaseModel):
    user_id: Optional[int]
    username: Optional[str]

    @property
    def is_authenticated(self) -> bool:
        return self.user_id is not None

    # @property
    # def display_id(self) -> int:
    #     return self.user_id
```
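A small usage sketch of these pydantic models:

```python
from auth.credentials import AuthCredentials, AuthUser

creds = AuthCredentials(user_id=42, scopes={}, logged_in=True)
user = AuthUser(user_id=42, username="")

assert user.is_authenticated       # user_id is set
assert creds.is_admin              # currently always True (see TODO above)
```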
auth/email.py (new file, 30 lines)
@@ -0,0 +1,30 @@

```python
import requests

from settings import MAILGUN_API_KEY, MAILGUN_DOMAIN

api_url = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or "discours.io")
noreply = "discours.io <noreply@%s>" % (MAILGUN_DOMAIN or "discours.io")
lang_subject = {"ru": "Подтверждение почты", "en": "Confirm email"}


async def send_auth_email(user, token, lang="ru", template="email_confirmation"):
    try:
        to = "%s <%s>" % (user.name, user.email)
        if lang not in ["ru", "en"]:
            lang = "ru"
        subject = lang_subject.get(lang, lang_subject["en"])
        template = template + "_" + lang
        payload = {
            "from": noreply,
            "to": to,
            "subject": subject,
            "template": template,
            "h:X-Mailgun-Variables": '{ "token": "%s" }' % token,
        }
        print("[auth.email] payload: %r" % payload)
        # debug
        # print('http://localhost:3000/?modal=auth&mode=confirm-email&token=%s' % token)
        response = requests.post(api_url, auth=("api", MAILGUN_API_KEY), data=payload)
        response.raise_for_status()
    except Exception as e:
        print(e)
```
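A hedged usage sketch; the stand-in user object is illustrative, since `send_auth_email` only reads `.name` and `.email`, and Mailgun credentials are assumed to be configured in `settings`:

```python
import asyncio
from types import SimpleNamespace

from auth.email import send_auth_email

# Stand-in user object; only .name and .email are read by send_auth_email
user = SimpleNamespace(name="Reader", email="reader@example.com")
asyncio.run(send_auth_email(user, token="one-time-token", lang="en"))
```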
auth/exceptions.py (new file, 38 lines)
@@ -0,0 +1,38 @@

```python
from graphql.error import GraphQLError

# TODO: remove traceback from logs for defined exceptions


class BaseHttpException(GraphQLError):
    code = 500
    message = "500 Server error"


class ExpiredToken(BaseHttpException):
    code = 401
    message = "401 Expired Token"


class InvalidToken(BaseHttpException):
    code = 401
    message = "401 Invalid Token"


class Unauthorized(BaseHttpException):
    code = 401
    message = "401 Unauthorized"


class ObjectNotExist(BaseHttpException):
    code = 404
    message = "404 Object Does Not Exist"


class OperationNotAllowed(BaseHttpException):
    code = 403
    message = "403 Operation Is Not Allowed"


class InvalidPassword(BaseHttpException):
    code = 403
    message = "403 Invalid Password"
```
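These exceptions carry `code`/`message` class attributes; a small sketch of mapping them onto HTTP-style responses:

```python
from auth.exceptions import BaseHttpException, Unauthorized

def handle(exc: Exception) -> tuple[int, str]:
    # Map the domain exceptions above onto (status code, message) pairs
    if isinstance(exc, BaseHttpException):
        return exc.code, exc.message
    return 500, "500 Server error"

print(handle(Unauthorized("no session")))  # (401, "401 Unauthorized")
```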
auth/identity.py (new file, 97 lines)
@@ -0,0 +1,97 @@

```python
from binascii import hexlify
from hashlib import sha256

from passlib.hash import bcrypt

from auth.exceptions import ExpiredToken, InvalidToken
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
from orm.user import User

# from base.exceptions import InvalidPassword, InvalidToken
from services.db import local_session


class Password:
    @staticmethod
    def _to_bytes(data: str) -> bytes:
        return bytes(data.encode())

    @classmethod
    def _get_sha256(cls, password: str) -> bytes:
        bytes_password = cls._to_bytes(password)
        return hexlify(sha256(bytes_password).digest())

    @staticmethod
    def encode(password: str) -> str:
        password_sha256 = Password._get_sha256(password)
        return bcrypt.using(rounds=10).hash(password_sha256)

    @staticmethod
    def verify(password: str, hashed: str) -> bool:
        """
        Verify that password hash is equal to specified hash. Hash format:

        $2a$10$Ro0CUfOqk6cXEKf3dyaM7OhSCvnwM9s4wIX9JeLapehKK5YdLxKcm
        \__/\/ \____________________/\_____________________________/  # noqa: W605
         |  |          Salt                       Hash
         |  Cost
         Version

        More info: https://passlib.readthedocs.io/en/stable/lib/passlib.hash.bcrypt.html

        :param password: clear text password
        :param hashed: hash of the password
        :return: True if clear text password matches specified hash
        """
        hashed_bytes = Password._to_bytes(hashed)
        password_sha256 = Password._get_sha256(password)

        return bcrypt.verify(password_sha256, hashed_bytes)


class Identity:
    @staticmethod
    def password(orm_user: User, password: str) -> User:
        user = User(**orm_user.dict())
        if not user.password:
            # raise InvalidPassword("User password is empty")
            return {"error": "User password is empty"}
        if not Password.verify(password, user.password):
            # raise InvalidPassword("Wrong user password")
            return {"error": "Wrong user password"}
        return user

    @staticmethod
    def oauth(inp) -> User:
        with local_session() as session:
            user = session.query(User).filter(User.email == inp["email"]).first()
            if not user:
                user = User.create(**inp, emailConfirmed=True)
                session.commit()

            return user

    @staticmethod
    async def onetime(token: str) -> User:
        try:
            print("[auth.identity] using one time token")
            payload = JWTCodec.decode(token)
            if not await TokenStorage.exist(f"{payload.user_id}-{payload.username}-{token}"):
                # raise InvalidToken("Login token has expired, please login again")
                return {"error": "Token has expired"}
        except ExpiredToken:
            # raise InvalidToken("Login token has expired, please try again")
            return {"error": "Token has expired"}
        except InvalidToken:
            # raise InvalidToken("token format error") from e
            return {"error": "Token format error"}
        with local_session() as session:
            user = session.query(User).filter_by(id=payload.user_id).first()
            if not user:
                # raise Exception("user not exist")
                return {"error": "User does not exist"}
            if not user.emailConfirmed:
                user.emailConfirmed = True
                session.commit()
            return user
```
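A round-trip sketch of the `Password` helper; note that inputs are pre-hashed with SHA-256 (hexlified) before bcrypt, which keeps arbitrarily long passwords within bcrypt's 72-byte input limit:

```python
from auth.identity import Password

hashed = Password.encode("correct horse battery staple")
assert Password.verify("correct horse battery staple", hashed)
assert not Password.verify("wrong password", hashed)
```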
auth/jwtcodec.py (new file, 60 lines)
@@ -0,0 +1,60 @@

```python
from datetime import datetime, timezone

import jwt
from pydantic import BaseModel

from auth.exceptions import ExpiredToken, InvalidToken
from settings import JWT_ALGORITHM, JWT_SECRET_KEY


class TokenPayload(BaseModel):
    user_id: str
    username: str
    exp: datetime
    iat: datetime
    iss: str


class JWTCodec:
    @staticmethod
    def encode(user, exp: datetime) -> str:
        payload = {
            "user_id": user.id,
            "username": user.email or user.phone,
            "exp": exp,
            "iat": datetime.now(tz=timezone.utc),
            "iss": "discours",
        }
        try:
            return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
        except Exception as e:
            print("[auth.jwtcodec] JWT encode error %r" % e)

    @staticmethod
    def decode(token: str, verify_exp: bool = True):
        r = None
        payload = None
        try:
            payload = jwt.decode(
                token,
                key=JWT_SECRET_KEY,
                options={
                    "verify_exp": verify_exp,
                    # "verify_signature": False
                },
                algorithms=[JWT_ALGORITHM],
                issuer="discours",
            )
            r = TokenPayload(**payload)
            # print('[auth.jwtcodec] debug token %r' % r)
            return r
        except jwt.InvalidIssuedAtError:
            print("[auth.jwtcodec] invalid issued at: %r" % payload)
            raise ExpiredToken("check token issued time")
        except jwt.ExpiredSignatureError:
            print("[auth.jwtcodec] expired signature %r" % payload)
            raise ExpiredToken("check token lifetime")
        except jwt.InvalidTokenError:
            raise InvalidToken("token is not valid")
        except jwt.InvalidSignatureError:
            raise InvalidToken("token is not valid")
```
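A round-trip sketch for `JWTCodec`; the stand-in user object is illustrative, since `encode()` only reads `.id`, `.email` and `.phone`, and `JWT_SECRET_KEY`/`JWT_ALGORITHM` are assumed to be set in `settings`:

```python
from datetime import datetime, timedelta, timezone
from types import SimpleNamespace

from auth.jwtcodec import JWTCodec

# Stand-in user; user_id is a string to match the TokenPayload model
user = SimpleNamespace(id="1", email="reader@example.com", phone=None)

token = JWTCodec.encode(user, exp=datetime.now(tz=timezone.utc) + timedelta(days=30))
payload = JWTCodec.decode(token)
print(payload.user_id, payload.username)  # "1" reader@example.com
```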
98	auth/oauth.py	Normal file
@@ -0,0 +1,98 @@
from authlib.integrations.starlette_client import OAuth
from starlette.responses import RedirectResponse

from auth.identity import Identity
from auth.tokenstorage import TokenStorage
from settings import FRONTEND_URL, OAUTH_CLIENTS

oauth = OAuth()

oauth.register(
    name="facebook",
    client_id=OAUTH_CLIENTS["FACEBOOK"]["id"],
    client_secret=OAUTH_CLIENTS["FACEBOOK"]["key"],
    access_token_url="https://graph.facebook.com/v11.0/oauth/access_token",
    access_token_params=None,
    authorize_url="https://www.facebook.com/v11.0/dialog/oauth",
    authorize_params=None,
    api_base_url="https://graph.facebook.com/",
    client_kwargs={"scope": "public_profile email"},
)

oauth.register(
    name="github",
    client_id=OAUTH_CLIENTS["GITHUB"]["id"],
    client_secret=OAUTH_CLIENTS["GITHUB"]["key"],
    access_token_url="https://github.com/login/oauth/access_token",
    access_token_params=None,
    authorize_url="https://github.com/login/oauth/authorize",
    authorize_params=None,
    api_base_url="https://api.github.com/",
    client_kwargs={"scope": "user:email"},
)

oauth.register(
    name="google",
    client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
    client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
    server_metadata_url="https://accounts.google.com/.well-known/openid-configuration",
    client_kwargs={"scope": "openid email profile"},
    authorize_state="test",
)


async def google_profile(client, request, token):
    userinfo = token["userinfo"]

    profile = {"name": userinfo["name"], "email": userinfo["email"], "id": userinfo["sub"]}

    if userinfo["picture"]:
        userpic = userinfo["picture"].replace("=s96", "=s600")
        profile["userpic"] = userpic

    return profile


async def facebook_profile(client, request, token):
    profile = await client.get("me?fields=name,id,email", token=token)
    return profile.json()


async def github_profile(client, request, token):
    profile = await client.get("user", token=token)
    return profile.json()


profile_callbacks = {
    "google": google_profile,
    "facebook": facebook_profile,
    "github": github_profile,
}


async def oauth_login(request):
    provider = request.path_params["provider"]
    request.session["provider"] = provider
    client = oauth.create_client(provider)
    redirect_uri = "https://v2.discours.io/oauth-authorize"
    return await client.authorize_redirect(request, redirect_uri)


async def oauth_authorize(request):
    provider = request.session["provider"]
    client = oauth.create_client(provider)
    token = await client.authorize_access_token(request)
    get_profile = profile_callbacks[provider]
    profile = await get_profile(client, request, token)
    user_oauth_info = "%s:%s" % (provider, profile["id"])
    user_input = {
        "oauth": user_oauth_info,
        "email": profile["email"],
        "username": profile["name"],
        # facebook/github profiles have no "userpic" key, so .get avoids a KeyError
        "userpic": profile.get("userpic"),
    }
    user = Identity.oauth(user_input)
    session_token = await TokenStorage.create_session(user)
    response = RedirectResponse(url=FRONTEND_URL + "/confirm")
    response.set_cookie("token", session_token)
    return response
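These handlers are plain Starlette endpoints. Wiring them up might look like the following sketch; the route paths and the SessionMiddleware secret are assumptions, not taken from this diff:

# Sketch only: paths and secret_key are illustrative.
from starlette.applications import Starlette
from starlette.middleware.sessions import SessionMiddleware
from starlette.routing import Route

from auth.oauth import oauth_authorize, oauth_login

app = Starlette(routes=[
    Route("/oauth/{provider}", oauth_login),
    Route("/oauth-authorize", oauth_authorize),
])
# request.session is required by both oauth_login and oauth_authorize
app.add_middleware(SessionMiddleware, secret_key="change-me")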
215	auth/resolvers.py	Normal file
@@ -0,0 +1,215 @@
# -*- coding: utf-8 -*-

import re
from datetime import datetime, timezone
from urllib.parse import quote_plus

from graphql.type import GraphQLResolveInfo

from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
from auth.exceptions import InvalidPassword, InvalidToken, ObjectNotExist, Unauthorized
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
from orm import Role, User
from services.db import local_session
from services.schema import mutation, query
from settings import SESSION_TOKEN_HEADER


@mutation.field("getSession")
@login_required
async def get_current_user(_, info):
    auth: AuthCredentials = info.context["request"].auth
    token = info.context["request"].headers.get(SESSION_TOKEN_HEADER)

    with local_session() as session:
        user = session.query(User).where(User.id == auth.user_id).one()
        user.lastSeen = datetime.now(tz=timezone.utc)
        session.commit()

    return {"token": token, "user": user}


@mutation.field("confirmEmail")
async def confirm_email(_, info, token):
    """confirm owning email address"""
    try:
        print("[resolvers.auth] confirm email by token")
        payload = JWTCodec.decode(token)
        user_id = payload.user_id
        await TokenStorage.get(f"{user_id}-{payload.username}-{token}")
        with local_session() as session:
            user = session.query(User).where(User.id == user_id).first()
            session_token = await TokenStorage.create_session(user)
            user.emailConfirmed = True
            user.lastSeen = datetime.now(tz=timezone.utc)
            session.add(user)
            session.commit()
            return {"token": session_token, "user": user}
    except InvalidToken as e:
        raise InvalidToken(e.message)
    except Exception as e:
        print(e)  # FIXME: debug only
        return {"error": "email is not confirmed"}


def create_user(user_dict):
    user = User(**user_dict)
    with local_session() as session:
        user.roles.append(session.query(Role).first())
        session.add(user)
        session.commit()
    return user


def replace_translit(src):
    ruchars = "абвгдеёжзийклмнопрстуфхцчшщъыьэюя."
    enchars = [
        "a", "b", "v", "g", "d", "e", "yo", "zh", "z", "i", "y", "k", "l",
        "m", "n", "o", "p", "r", "s", "t", "u", "f", "h", "c", "ch", "sh",
        "sch", "", "y", "'", "e", "yu", "ya", "-",
    ]
    # str.maketrans(str, list) raises TypeError; the dict form also supports
    # the multi-character replacements ("yo", "zh", "sch", ...)
    return src.translate(str.maketrans(dict(zip(ruchars, enchars))))


def generate_unique_slug(src):
    print("[resolvers.auth] generating slug from: " + src)
    slug = replace_translit(src.lower())
    slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
    if slug != src:
        print("[resolvers.auth] translited name: " + slug)
    c = 1
    with local_session() as session:
        # append an incrementing numeric suffix until the slug is free
        user = session.query(User).where(User.slug == slug).first()
        while user:
            slug = slug + "-" + str(c)
            c += 1
            user = session.query(User).where(User.slug == slug).first()
        print("[resolvers.auth] " + slug)
        return quote_plus(slug.replace("'", "")).replace("+", "-")


@mutation.field("registerUser")
async def register_by_email(_, _info, email: str, password: str = "", name: str = ""):
    """creates new user account"""
    email = email.lower()
    with local_session() as session:
        user = session.query(User).filter(User.email == email).first()
        if user:
            raise Unauthorized("User already exist")
        else:
            slug = generate_unique_slug(name)
            user = session.query(User).where(User.slug == slug).first()
            if user:
                slug = generate_unique_slug(email.split("@")[0])
            user_dict = {
                "email": email,
                "username": email,  # will be used to store phone number or some messenger network id
                "name": name,
                "slug": slug,
            }
            if password:
                user_dict["password"] = Password.encode(password)
            user = create_user(user_dict)
            user = await auth_send_link(_, _info, email)
            return {"user": user}


@mutation.field("sendLink")
async def auth_send_link(_, _info, email, lang="ru", template="email_confirmation"):
    """send link with confirm code to email"""
    email = email.lower()
    with local_session() as session:
        user = session.query(User).filter(User.email == email).first()
        if not user:
            raise ObjectNotExist("User not found")
        else:
            token = await TokenStorage.create_onetime(user)
            await send_auth_email(user, token, lang, template)
            return user


@query.field("signIn")
async def login(_, info, email: str, password: str = "", lang: str = "ru"):
    email = email.lower()
    with local_session() as session:
        orm_user = session.query(User).filter(User.email == email).first()
        if orm_user is None:
            print(f"[auth] {email}: email not found")
            # return {"error": "email not found"}
            raise ObjectNotExist("User not found")  # contains webserver status

        if not password:
            print(f"[auth] send confirm link to {email}")
            token = await TokenStorage.create_onetime(orm_user)
            await send_auth_email(orm_user, token, lang)
            # FIXME: not an error, warning
            return {"error": "no password, email link was sent"}

        else:
            # sign in using password
            if not orm_user.emailConfirmed:
                # not an error, warns users
                return {"error": "please, confirm email"}
            else:
                try:
                    user = Identity.password(orm_user, password)
                    session_token = await TokenStorage.create_session(user)
                    print(f"[auth] user {email} authorized")
                    return {"token": session_token, "user": user}
                except InvalidPassword:
                    print(f"[auth] {email}: invalid password")
                    raise InvalidPassword("invalid password")  # contains webserver status
                    # return {"error": "invalid password"}


@query.field("signOut")
@login_required
async def sign_out(_, info: GraphQLResolveInfo):
    token = info.context["request"].headers.get(SESSION_TOKEN_HEADER, "")
    status = await TokenStorage.revoke(token)
    return status


@query.field("isEmailUsed")
async def is_email_used(_, _info, email):
    email = email.lower()
    with local_session() as session:
        user = session.query(User).filter(User.email == email).first()
        return user is not None
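A hedged client-side sketch of the signIn query; the /graphql endpoint URL is an assumption, and the selected fields (token, error) are inferred from the resolver's return dicts rather than from a schema file:

# Illustrative call only; endpoint and field names are assumptions.
import httpx

SIGN_IN = """
query SignIn($email: String!, $password: String) {
  signIn(email: $email, password: $password) { token error }
}
"""

resp = httpx.post(
    "https://v2.discours.io/graphql",
    json={"query": SIGN_IN, "variables": {"email": "reader@example.com", "password": "secret"}},
)
print(resp.json())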
73	auth/tokenstorage.py	Normal file
@@ -0,0 +1,73 @@
from datetime import datetime, timedelta, timezone

from auth.jwtcodec import JWTCodec
from auth.validations import AuthInput
from services.redis import redis
from settings import ONETIME_TOKEN_LIFE_SPAN, SESSION_TOKEN_LIFE_SPAN


async def save(token_key, life_span, auto_delete=True):
    await redis.execute("SET", token_key, "True")
    if auto_delete:
        expire_at = (datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)).timestamp()
        await redis.execute("EXPIREAT", token_key, int(expire_at))


class SessionToken:
    @classmethod
    async def verify(cls, token: str):
        """
        Rules for a token to be valid.
        - token format is legal
        - token exists in redis database
        - token is not expired
        """
        try:
            return JWTCodec.decode(token)
        except Exception as e:
            raise e

    @classmethod
    async def get(cls, payload, token):
        return await TokenStorage.get(f"{payload.user_id}-{payload.username}-{token}")


class TokenStorage:
    @staticmethod
    async def get(token_key):
        print("[tokenstorage.get] " + token_key)
        # 2041-user@domain.zn-eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoyMDQxLCJ1c2VybmFtZSI6ImFudG9uLnJld2luK3Rlc3QtbG9hZGNoYXRAZ21haWwuY29tIiwiZXhwIjoxNjcxNzgwNjE2LCJpYXQiOjE2NjkxODg2MTYsImlzcyI6ImRpc2NvdXJzIn0.Nml4oV6iMjMmc6xwM7lTKEZJKBXvJFEIZ-Up1C1rITQ
        return await redis.execute("GET", token_key)

    @staticmethod
    async def create_onetime(user: AuthInput) -> str:
        life_span = ONETIME_TOKEN_LIFE_SPAN
        exp = datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)
        one_time_token = JWTCodec.encode(user, exp)
        await save(f"{user.id}-{user.username}-{one_time_token}", life_span)
        return one_time_token

    @staticmethod
    async def create_session(user: AuthInput) -> str:
        life_span = SESSION_TOKEN_LIFE_SPAN
        exp = datetime.now(tz=timezone.utc) + timedelta(seconds=life_span)
        session_token = JWTCodec.encode(user, exp)
        await save(f"{user.id}-{user.username}-{session_token}", life_span)
        return session_token

    @staticmethod
    async def revoke(token: str) -> bool:
        payload = None
        try:
            print("[auth.tokenstorage] revoke token")
            payload = JWTCodec.decode(token)
        except:  # noqa
            pass
        else:
            await redis.execute("DEL", f"{payload.user_id}-{payload.username}-{token}")
        return True

    @staticmethod
    async def revoke_all(user: AuthInput):
        tokens = await redis.execute("KEYS", f"{user.id}-*")
        await redis.execute("DEL", *tokens)
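A hedged sketch of the session-token lifecycle these helpers implement; it assumes a live Redis connection and a user object with id/username attributes:

# Sketch only; run inside an event loop with Redis reachable.
from auth.tokenstorage import TokenStorage


async def demo(user):
    token = await TokenStorage.create_session(user)  # JWT stored under "{id}-{username}-{token}"
    assert await TokenStorage.get(f"{user.id}-{user.username}-{token}")  # key holds "True"
    await TokenStorage.revoke(token)  # deletes the Redis key; always returns True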
119	auth/usermodel.py	Normal file
@@ -0,0 +1,119 @@
import time

from sqlalchemy import (
    JSON,
    Boolean,
    Column,
    DateTime,
    ForeignKey,
    Integer,
    String,
    func,
)
from sqlalchemy.orm import relationship

from services.db import Base


class Permission(Base):
    __tablename__ = "permission"

    id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
    resource = Column(String, nullable=False)
    operation = Column(String, nullable=False)


class Role(Base):
    __tablename__ = "role"

    id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
    name = Column(String, nullable=False)
    permissions = relationship(Permission)


class AuthorizerUser(Base):
    __tablename__ = "authorizer_users"

    id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
    key = Column(String)
    email = Column(String, unique=True)
    email_verified_at = Column(Integer)
    family_name = Column(String)
    gender = Column(String)
    given_name = Column(String)
    is_multi_factor_auth_enabled = Column(Boolean)
    middle_name = Column(String)
    nickname = Column(String)
    password = Column(String)
    phone_number = Column(String, unique=True)
    phone_number_verified_at = Column(Integer)
    # preferred_username = Column(String, nullable=False)
    picture = Column(String)
    revoked_timestamp = Column(Integer)
    roles = Column(String, default="author,reader")
    signup_methods = Column(String, default="magic_link_login")
    created_at = Column(Integer, default=lambda: int(time.time()))
    updated_at = Column(Integer, default=lambda: int(time.time()))


class UserRating(Base):
    __tablename__ = "user_rating"

    id = None
    rater: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    user: Column = Column(ForeignKey("user.id"), primary_key=True, index=True)
    value: Column = Column(Integer)

    @staticmethod
    def init_table():
        pass


class UserRole(Base):
    __tablename__ = "user_role"

    id = None
    user = Column(ForeignKey("user.id"), primary_key=True, index=True)
    role = Column(ForeignKey("role.id"), primary_key=True, index=True)


class User(Base):
    __tablename__ = "user"
    default_user = None

    email = Column(String, unique=True, nullable=False, comment="Email")
    username = Column(String, nullable=False, comment="Login")
    password = Column(String, nullable=True, comment="Password")
    bio = Column(String, nullable=True, comment="Bio")  # status description
    about = Column(String, nullable=True, comment="About")  # long and formatted
    userpic = Column(String, nullable=True, comment="Userpic")
    name = Column(String, nullable=True, comment="Display name")
    slug = Column(String, unique=True, comment="User's slug")
    links = Column(JSON, nullable=True, comment="Links")
    oauth = Column(String, nullable=True)
    oid = Column(String, nullable=True)

    muted = Column(Boolean, default=False)
    confirmed = Column(Boolean, default=False)

    created_at = Column(DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Created at")
    updated_at = Column(DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Updated at")
    last_seen = Column(DateTime(timezone=True), nullable=False, server_default=func.now(), comment="Was online at")
    deleted_at = Column(DateTime(timezone=True), nullable=True, comment="Deleted at")

    ratings = relationship(UserRating, foreign_keys=UserRating.user)
    roles = relationship(lambda: Role, secondary=UserRole.__tablename__)

    def get_permission(self):
        scope = {}
        for role in self.roles:
            for p in role.permissions:
                if p.resource not in scope:
                    scope[p.resource] = set()
                scope[p.resource].add(p.operation)
        print(scope)
        return scope


# if __name__ == "__main__":
#     print(User.get_permission(user_id=1))
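For intuition, get_permission folds role permissions into a resource-to-operations map. A hedged illustration with plain stand-in objects, no database involved:

# Stand-ins only; real Role/Permission rows come from SQLAlchemy.
from types import SimpleNamespace

read_shout = SimpleNamespace(resource="shout", operation="read")
edit_shout = SimpleNamespace(resource="shout", operation="edit")
reader = SimpleNamespace(roles=[SimpleNamespace(permissions=[read_shout, edit_shout])])

# mirrors User.get_permission: yields {"shout": {"read", "edit"}}
scope = {}
for role in reader.roles:
    for p in role.permissions:
        scope.setdefault(p.resource, set()).add(p.operation)
print(scope)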
116	auth/validations.py	Normal file
@@ -0,0 +1,116 @@
import re
from datetime import datetime
from typing import Dict, List, Optional, Union

from pydantic import BaseModel, Field, field_validator

# Simplified email pattern (a pragmatic subset, not full RFC 5322)
EMAIL_PATTERN = r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$"


class AuthInput(BaseModel):
    """Base model for authentication input validation"""

    user_id: str = Field(description="Unique user identifier")
    username: str = Field(min_length=2, max_length=50)
    token: str = Field(min_length=32)

    @field_validator("user_id")
    @classmethod
    def validate_user_id(cls, v: str) -> str:
        if not v.strip():
            raise ValueError("user_id cannot be empty")
        return v


class UserRegistrationInput(BaseModel):
    """Validation model for user registration"""

    email: str = Field(max_length=254)  # Max email length per RFC 5321
    password: str = Field(min_length=8, max_length=100)
    name: str = Field(min_length=2, max_length=50)

    @field_validator("email")
    @classmethod
    def validate_email(cls, v: str) -> str:
        """Validate email format"""
        if not re.match(EMAIL_PATTERN, v):
            raise ValueError("Invalid email format")
        return v.lower()

    @field_validator("password")
    @classmethod
    def validate_password_strength(cls, v: str) -> str:
        """Validate password meets security requirements"""
        if not any(c.isupper() for c in v):
            raise ValueError("Password must contain at least one uppercase letter")
        if not any(c.islower() for c in v):
            raise ValueError("Password must contain at least one lowercase letter")
        if not any(c.isdigit() for c in v):
            raise ValueError("Password must contain at least one number")
        if not any(c in "!@#$%^&*()_+-=[]{}|;:,.<>?" for c in v):
            raise ValueError("Password must contain at least one special character")
        return v


class UserLoginInput(BaseModel):
    """Validation model for user login"""

    email: str = Field(max_length=254)
    password: str = Field(min_length=8, max_length=100)

    @field_validator("email")
    @classmethod
    def validate_email(cls, v: str) -> str:
        if not re.match(EMAIL_PATTERN, v):
            raise ValueError("Invalid email format")
        return v.lower()


class TokenPayload(BaseModel):
    """Validation model for JWT token payload"""

    user_id: str
    username: str
    exp: datetime
    iat: datetime
    scopes: Optional[List[str]] = []


class OAuthInput(BaseModel):
    """Validation model for OAuth input"""

    provider: str = Field(pattern="^(google|github|facebook)$")
    code: str
    redirect_uri: Optional[str] = None

    @field_validator("provider")
    @classmethod
    def validate_provider(cls, v: str) -> str:
        valid_providers = ["google", "github", "facebook"]
        if v.lower() not in valid_providers:
            raise ValueError(f"Provider must be one of: {', '.join(valid_providers)}")
        return v.lower()


class AuthResponse(BaseModel):
    """Validation model for authentication responses"""

    success: bool
    token: Optional[str] = None
    error: Optional[str] = None
    user: Optional[Dict[str, Union[str, int, bool]]] = None

    @field_validator("error")
    @classmethod
    def validate_error_if_not_success(cls, v: Optional[str], info) -> Optional[str]:
        if not info.data.get("success") and not v:
            raise ValueError("Error message required when success is False")
        return v

    @field_validator("token")
    @classmethod
    def validate_token_if_success(cls, v: Optional[str], info) -> Optional[str]:
        if info.data.get("success") and not v:
            raise ValueError("Token required when success is True")
        return v
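A small sketch of how these pydantic v2 models behave at the call site; ValidationError aggregates every failed constraint:

# Sketch: three constraints fail here (email format, password rules, name length).
from pydantic import ValidationError

from auth.validations import UserRegistrationInput

try:
    UserRegistrationInput(email="not-an-email", password="weak", name="A")
except ValidationError as e:
    print(e.error_count(), "validation errors")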
627	cache/cache.py	vendored	Normal file
@@ -0,0 +1,627 @@
"""
Caching system for the Discours platform
----------------------------------------

This module provides a comprehensive caching solution with these key components:

1. KEY NAMING CONVENTIONS:
   - Entity-based keys: "entity:property:value" (e.g., "author:id:123")
   - Collection keys: "entity:collection:params" (e.g., "authors:stats:limit=10:offset=0")
   - Special case keys: Maintained for backwards compatibility (e.g., "topic_shouts_123")

2. CORE FUNCTIONS:
   - cached_query(): High-level function for retrieving cached data or executing queries

3. ENTITY-SPECIFIC FUNCTIONS:
   - cache_author(), cache_topic(): Cache entity data
   - get_cached_author(), get_cached_topic(): Retrieve entity data from cache
   - invalidate_cache_by_prefix(): Invalidate all keys with a specific prefix

4. CACHE INVALIDATION STRATEGY:
   - Direct invalidation via invalidate_* functions for immediate changes
   - Delayed invalidation via revalidation_manager for background processing
   - Event-based triggers for automatic cache updates (see triggers.py)

To maintain consistency with the existing codebase, this module preserves
the original key naming patterns while providing a more structured approach
for new cache operations.
"""

import asyncio
import json
from typing import Any, Dict, List, Optional, Union

import orjson
from sqlalchemy import and_, join, select

from orm.author import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from services.redis import redis
from utils.encoders import CustomJSONEncoder
from utils.logger import root_logger as logger

DEFAULT_FOLLOWS = {
    "topics": [],
    "authors": [],
    "shouts": [],
    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}

CACHE_TTL = 300  # 5 minutes

# Key templates for common entity types
# These are used throughout the codebase and should be maintained for compatibility
CACHE_KEYS = {
    "TOPIC_ID": "topic:id:{}",
    "TOPIC_SLUG": "topic:slug:{}",
    "TOPIC_AUTHORS": "topic:authors:{}",
    "TOPIC_FOLLOWERS": "topic:followers:{}",
    "TOPIC_SHOUTS": "topic_shouts_{}",
    "AUTHOR_ID": "author:id:{}",
    "AUTHOR_USER": "author:user:{}",
    "SHOUTS": "shouts:{}",
}


# Cache topic data
async def cache_topic(topic: dict):
    payload = json.dumps(topic, cls=CustomJSONEncoder)
    await asyncio.gather(
        redis.execute("SET", f"topic:id:{topic['id']}", payload),
        redis.execute("SET", f"topic:slug:{topic['slug']}", payload),
    )


# Cache author data
async def cache_author(author: dict):
    payload = json.dumps(author, cls=CustomJSONEncoder)
    await asyncio.gather(
        redis.execute("SET", f"author:user:{author['user'].strip()}", str(author["id"])),
        redis.execute("SET", f"author:id:{author['id']}", payload),
    )


# Cache follows data
async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_insert=True):
    key = f"author:follows-{entity_type}s:{follower_id}"
    follows_str = await redis.execute("GET", key)
    follows = orjson.loads(follows_str) if follows_str else DEFAULT_FOLLOWS[entity_type]
    if is_insert:
        if entity_id not in follows:
            follows.append(entity_id)
    else:
        follows = [eid for eid in follows if eid != entity_id]
    await redis.execute("SET", key, json.dumps(follows, cls=CustomJSONEncoder))
    await update_follower_stat(follower_id, entity_type, len(follows))


# Update follower statistics
async def update_follower_stat(follower_id, entity_type, count):
    follower_key = f"author:id:{follower_id}"
    follower_str = await redis.execute("GET", follower_key)
    follower = orjson.loads(follower_str) if follower_str else None
    if follower:
        follower["stat"] = {f"{entity_type}s": count}
        await cache_author(follower)


# Get author from cache
async def get_cached_author(author_id: int, get_with_stat):
    author_key = f"author:id:{author_id}"
    result = await redis.execute("GET", author_key)
    if result:
        return orjson.loads(result)
    # Load from database if not found in cache
    q = select(Author).where(Author.id == author_id)
    authors = get_with_stat(q)
    if authors:
        author = authors[0]
        await cache_author(author.dict())
        return author.dict()
    return None


# Function to get cached topic
async def get_cached_topic(topic_id: int):
    """
    Fetch topic data from cache or database by id.

    Args:
        topic_id (int): The identifier for the topic.

    Returns:
        dict: Topic data or None if not found.
    """
    topic_key = f"topic:id:{topic_id}"
    cached_topic = await redis.execute("GET", topic_key)
    if cached_topic:
        return orjson.loads(cached_topic)

    # If not in cache, fetch from the database
    with local_session() as session:
        topic = session.execute(select(Topic).where(Topic.id == topic_id)).scalar_one_or_none()
        if topic:
            topic_dict = topic.dict()
            await redis.execute("SET", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder))
            return topic_dict

    return None


# Get topic by slug from cache
async def get_cached_topic_by_slug(slug: str, get_with_stat):
    topic_key = f"topic:slug:{slug}"
    result = await redis.execute("GET", topic_key)
    if result:
        return orjson.loads(result)
    # Load from database if not found in cache
    topic_query = select(Topic).where(Topic.slug == slug)
    topics = get_with_stat(topic_query)
    if topics:
        topic_dict = topics[0].dict()
        await cache_topic(topic_dict)
        return topic_dict
    return None


# Get list of authors by ID from cache
async def get_cached_authors_by_ids(author_ids: List[int]) -> List[dict]:
    # Fetch all author data concurrently
    keys = [f"author:id:{author_id}" for author_id in author_ids]
    results = await asyncio.gather(*(redis.execute("GET", key) for key in keys))
    authors = [orjson.loads(result) if result else None for result in results]
    # Load missing authors from database and cache
    missing_indices = [index for index, author in enumerate(authors) if author is None]
    if missing_indices:
        missing_ids = [author_ids[index] for index in missing_indices]
        with local_session() as session:
            query = select(Author).where(Author.id.in_(missing_ids))
            missing_authors = session.execute(query).scalars().all()
            await asyncio.gather(*(cache_author(author.dict()) for author in missing_authors))
            for index, author in zip(missing_indices, missing_authors):
                authors[index] = author.dict()
    return authors


async def get_cached_topic_followers(topic_id: int):
    """
    Fetch a topic's followers by ID, using the Redis cache.

    Args:
        topic_id: topic ID

    Returns:
        List[dict]: list of followers with their data
    """
    try:
        cache_key = CACHE_KEYS["TOPIC_FOLLOWERS"].format(topic_id)
        cached = await redis.execute("GET", cache_key)

        if cached:
            followers_ids = orjson.loads(cached)
            logger.debug(f"Found {len(followers_ids)} cached followers for topic #{topic_id}")
            return await get_cached_authors_by_ids(followers_ids)

        with local_session() as session:
            followers_ids = [
                f[0]
                for f in session.query(Author.id)
                .join(TopicFollower, TopicFollower.follower == Author.id)
                .filter(TopicFollower.topic == topic_id)
                .all()
            ]

        await redis.execute("SETEX", cache_key, CACHE_TTL, orjson.dumps(followers_ids))
        followers = await get_cached_authors_by_ids(followers_ids)
        logger.debug(f"Cached {len(followers)} followers for topic #{topic_id}")
        return followers

    except Exception as e:
        logger.error(f"Error getting followers for topic #{topic_id}: {str(e)}")
        return []


# Get cached author followers
async def get_cached_author_followers(author_id: int):
    # Check cache for data
    cached = await redis.execute("GET", f"author:followers:{author_id}")
    if cached:
        followers_ids = orjson.loads(cached)
        followers = await get_cached_authors_by_ids(followers_ids)
        logger.debug(f"Cached followers for author #{author_id}: {len(followers)}")
        return followers

    # Query database if cache is empty
    with local_session() as session:
        followers_ids = [
            f[0]
            for f in session.query(Author.id)
            .join(AuthorFollower, AuthorFollower.follower == Author.id)
            .filter(AuthorFollower.author == author_id, Author.id != author_id)
            .all()
        ]
        await redis.execute("SET", f"author:followers:{author_id}", orjson.dumps(followers_ids))
        followers = await get_cached_authors_by_ids(followers_ids)
        return followers


# Get cached follower authors
async def get_cached_follower_authors(author_id: int):
    # Attempt to retrieve authors from cache
    cached = await redis.execute("GET", f"author:follows-authors:{author_id}")
    if cached:
        authors_ids = orjson.loads(cached)
    else:
        # Query authors from database
        with local_session() as session:
            authors_ids = [
                a[0]
                for a in session.execute(
                    select(Author.id)
                    .select_from(join(Author, AuthorFollower, Author.id == AuthorFollower.author))
                    .where(AuthorFollower.follower == author_id)
                ).all()
            ]
            await redis.execute("SET", f"author:follows-authors:{author_id}", orjson.dumps(authors_ids))

    authors = await get_cached_authors_by_ids(authors_ids)
    return authors


# Get cached follower topics
async def get_cached_follower_topics(author_id: int):
    # Attempt to retrieve topics from cache
    cached = await redis.execute("GET", f"author:follows-topics:{author_id}")
    if cached:
        topics_ids = orjson.loads(cached)
    else:
        # Load topics from database and cache them
        with local_session() as session:
            topics_ids = [
                t[0]
                for t in session.query(Topic.id)
                .join(TopicFollower, TopicFollower.topic == Topic.id)
                .where(TopicFollower.follower == author_id)
                .all()
            ]
            await redis.execute("SET", f"author:follows-topics:{author_id}", orjson.dumps(topics_ids))

    topics = []
    for topic_id in topics_ids:
        topic_str = await redis.execute("GET", f"topic:id:{topic_id}")
        if topic_str:
            topic = orjson.loads(topic_str)
            if topic and topic not in topics:
                topics.append(topic)

    logger.debug(f"Cached topics for author#{author_id}: {len(topics)}")
    return topics


# Get author by user ID from cache
async def get_cached_author_by_user_id(user_id: str, get_with_stat):
    """
    Retrieve author information by user_id, checking the cache first, then the database.

    Args:
        user_id (str): The user identifier for which to retrieve the author.

    Returns:
        dict: Dictionary with author data or None if not found.
    """
    # Attempt to find author ID by user_id in Redis cache
    author_id = await redis.execute("GET", f"author:user:{user_id.strip()}")
    if author_id:
        # If ID is found, get full author data by ID
        author_data = await redis.execute("GET", f"author:id:{author_id}")
        if author_data:
            return orjson.loads(author_data)

    # If data is not found in cache, query the database
    author_query = select(Author).where(Author.user == user_id)
    authors = get_with_stat(author_query)
    if authors:
        # Cache the retrieved author data
        author = authors[0]
        author_dict = author.dict()
        await asyncio.gather(
            redis.execute("SET", f"author:user:{user_id.strip()}", str(author.id)),
            redis.execute("SET", f"author:id:{author.id}", orjson.dumps(author_dict)),
        )
        return author_dict

    # Return None if author is not found
    return None


# Get cached topic authors
async def get_cached_topic_authors(topic_id: int):
    """
    Retrieve a list of authors for a given topic, using cache or database.

    Args:
        topic_id (int): The identifier of the topic for which to retrieve authors.

    Returns:
        List[dict]: A list of dictionaries containing author data.
    """
    # Attempt to get a list of author IDs from cache
    rkey = f"topic:authors:{topic_id}"
    cached_authors_ids = await redis.execute("GET", rkey)
    if cached_authors_ids:
        authors_ids = orjson.loads(cached_authors_ids)
    else:
        # If cache is empty, get data from the database
        with local_session() as session:
            query = (
                select(ShoutAuthor.author)
                .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
                .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
                .where(and_(ShoutTopic.topic == topic_id, Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
            )
            authors_ids = [author_id for (author_id,) in session.execute(query).all()]
            # Cache the retrieved author IDs
            await redis.execute("SET", rkey, orjson.dumps(authors_ids))

    # Retrieve full author details from cached IDs
    if authors_ids:
        authors = await get_cached_authors_by_ids(authors_ids)
        logger.debug(f"Topic#{topic_id} authors fetched and cached: {len(authors)} authors found.")
        return authors

    return []


async def invalidate_shouts_cache(cache_keys: List[str]):
    """
    Invalidates cached shout selections for the given keys.
    """
    for key in cache_keys:
        try:
            # build the full cache key
            cache_key = f"shouts:{key}"

            # delete the main cache entry
            await redis.execute("DEL", cache_key)
            logger.debug(f"Invalidated cache key: {cache_key}")

            # record the key as invalidated, with a TTL
            await redis.execute("SETEX", f"{cache_key}:invalidated", CACHE_TTL, "1")

            # for topic caches, invalidate related keys as well
            if key.startswith("topic_"):
                topic_id = key.split("_")[1]
                related_keys = [
                    f"topic:id:{topic_id}",
                    f"topic:authors:{topic_id}",
                    f"topic:followers:{topic_id}",
                    f"topic:stats:{topic_id}",
                ]
                for related_key in related_keys:
                    await redis.execute("DEL", related_key)
                    logger.debug(f"Invalidated related key: {related_key}")

        except Exception as e:
            logger.error(f"Error invalidating cache key {key}: {e}")


async def cache_topic_shouts(topic_id: int, shouts: List[dict]):
    """Caches the list of shouts for a topic"""
    key = f"topic_shouts_{topic_id}"
    payload = json.dumps(shouts, cls=CustomJSONEncoder)
    await redis.execute("SETEX", key, CACHE_TTL, payload)


async def get_cached_topic_shouts(topic_id: int) -> List[dict]:
    """Returns the cached list of shouts for a topic"""
    key = f"topic_shouts_{topic_id}"
    cached = await redis.execute("GET", key)
    if cached:
        return orjson.loads(cached)
    return None


async def cache_related_entities(shout: Shout):
    """
    Caches all entities related to a shout (authors and topics)
    """
    tasks = []
    for author in shout.authors:
        tasks.append(cache_by_id(Author, author.id, cache_author))
    for topic in shout.topics:
        tasks.append(cache_by_id(Topic, topic.id, cache_topic))
    await asyncio.gather(*tasks)


async def invalidate_shout_related_cache(shout: Shout, author_id: int):
    """
    Invalidates all cache entries related to a shout and its relations

    Args:
        shout: the shout object
        author_id: author ID
    """
    cache_keys = {
        "feed",  # main feed
        f"author_{author_id}",  # author's shouts
        "random_top",  # random top shouts
        "unrated",  # unrated shouts
        "recent",  # recent shouts
        "coauthored",  # coauthored shouts
    }

    # add author keys
    cache_keys.update(f"author_{a.id}" for a in shout.authors)
    cache_keys.update(f"authored_{a.id}" for a in shout.authors)

    # add topic keys
    cache_keys.update(f"topic_{t.id}" for t in shout.topics)
    cache_keys.update(f"topic_shouts_{t.id}" for t in shout.topics)

    await invalidate_shouts_cache(list(cache_keys))


# Function removed - direct Redis calls used throughout the module instead


async def get_cached_entity(entity_type: str, entity_id: int, get_method, cache_method):
    """
    Generic helper to fetch a cached entity

    Args:
        entity_type: 'author' or 'topic'
        entity_id: entity ID
        get_method: method to fetch from the database
        cache_method: caching method
    """
    key = f"{entity_type}:id:{entity_id}"
    cached = await redis.execute("GET", key)
    if cached:
        return orjson.loads(cached)

    entity = await get_method(entity_id)
    if entity:
        await cache_method(entity)
        return entity
    return None


async def cache_by_id(entity, entity_id: int, cache_method):
    """
    Caches an entity by ID using the given caching method

    Args:
        entity: entity class (Author/Topic)
        entity_id: entity ID
        cache_method: caching function
    """
    from resolvers.stat import get_with_stat

    caching_query = select(entity).filter(entity.id == entity_id)
    result = get_with_stat(caching_query)
    if not result or not result[0]:
        logger.warning(f"{entity.__name__} with id {entity_id} not found")
        return
    x = result[0]
    d = x.dict()
    await cache_method(d)
    return d


# Generic helper to save data to the cache
async def cache_data(key: str, data: Any, ttl: Optional[int] = None) -> None:
    """
    Saves data to the cache under the given key.

    Args:
        key: cache key
        data: data to store
        ttl: cache lifetime in seconds (None for no expiry)
    """
    try:
        payload = json.dumps(data, cls=CustomJSONEncoder)
        if ttl:
            await redis.execute("SETEX", key, ttl, payload)
        else:
            await redis.execute("SET", key, payload)
        logger.debug(f"Data saved to cache under key {key}")
    except Exception as e:
        logger.error(f"Error saving data to cache: {e}")


# Generic helper to read data from the cache
async def get_cached_data(key: str) -> Optional[Any]:
    """
    Fetches data from the cache by key.

    Args:
        key: cache key

    Returns:
        Any: cached data, or None if missing
    """
    try:
        cached_data = await redis.execute("GET", key)
        if cached_data:
            logger.debug(f"Data retrieved from cache for key {key}")
            return orjson.loads(cached_data)
        return None
    except Exception as e:
        logger.error(f"Error reading data from cache: {e}")
        return None


# Generic helper to invalidate cache keys by prefix
async def invalidate_cache_by_prefix(prefix: str) -> None:
    """
    Invalidates all cache keys with the given prefix.

    Args:
        prefix: cache key prefix to invalidate
    """
    try:
        keys = await redis.execute("KEYS", f"{prefix}:*")
        if keys:
            await redis.execute("DEL", *keys)
            logger.debug(f"Deleted {len(keys)} cache keys with prefix {prefix}")
    except Exception as e:
        logger.error(f"Error invalidating cache: {e}")


# Generic helper to fetch and cache data
async def cached_query(
    cache_key: str,
    query_func: callable,
    ttl: Optional[int] = None,
    force_refresh: bool = False,
    use_key_format: bool = True,
    **query_params,
) -> Any:
    """
    Gets data from cache or executes query and saves result to cache.
    Supports existing key formats for compatibility.

    Args:
        cache_key: Cache key or key template from CACHE_KEYS
        query_func: Function to execute the query
        ttl: Cache TTL in seconds (None - indefinite)
        force_refresh: Force cache refresh
        use_key_format: Whether to check if cache_key matches a key template in CACHE_KEYS
        **query_params: Parameters to pass to the query function

    Returns:
        Any: Data from cache or query result
    """
    # Check if cache_key matches a pattern in CACHE_KEYS
    actual_key = cache_key
    if use_key_format and "{}" in cache_key:
        # Look for a template match in CACHE_KEYS
        for key_name, key_format in CACHE_KEYS.items():
            if cache_key == key_format:
                # We have a match, now look for the id or value to format with
                for param_name, param_value in query_params.items():
                    if param_name in ["id", "slug", "user", "topic_id", "author_id"]:
                        actual_key = cache_key.format(param_value)
                        break

    # If not forcing refresh, try to get data from cache
    if not force_refresh:
        cached_result = await get_cached_data(actual_key)
        if cached_result is not None:
            return cached_result

    # If data not in cache or refresh required, execute query
    try:
        result = await query_func(**query_params)
        if result is not None:
            # Save result to cache
            await cache_data(actual_key, result, ttl)
        return result
    except Exception as e:
        logger.error(f"Error executing query for caching: {e}")
        # In case of error, return data from cache if not forcing refresh
        if not force_refresh:
            return await get_cached_data(actual_key)
        raise
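A hedged usage sketch of cached_query; the key and the fetch function below are illustrative, not taken from this module:

# Illustration only: fetch_top_shouts is a hypothetical query function.
from cache.cache import CACHE_TTL, cached_query


async def fetch_top_shouts(limit: int):
    # stands in for a real database query
    return [{"id": i} for i in range(limit)]


async def demo():
    # first call runs fetch_top_shouts(limit=10) and caches it; repeats hit Redis
    return await cached_query("shouts:top", fetch_top_shouts, ttl=CACHE_TTL, limit=10)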
133	cache/precache.py	vendored	Normal file
@@ -0,0 +1,133 @@
import asyncio
import json

from sqlalchemy import and_, join, select

from cache.cache import cache_author, cache_topic
from orm.author import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
from services.db import local_session
from services.redis import redis
from utils.encoders import CustomJSONEncoder
from utils.logger import root_logger as logger


# Precache an author's followers
async def precache_authors_followers(author_id, session):
    authors_followers = set()
    followers_query = select(AuthorFollower.follower).where(AuthorFollower.author == author_id)
    result = session.execute(followers_query)
    authors_followers.update(row[0] for row in result if row[0])

    followers_payload = json.dumps(list(authors_followers), cls=CustomJSONEncoder)
    await redis.execute("SET", f"author:followers:{author_id}", followers_payload)


# Precache an author's follows
async def precache_authors_follows(author_id, session):
    follows_topics_query = select(TopicFollower.topic).where(TopicFollower.follower == author_id)
    follows_authors_query = select(AuthorFollower.author).where(AuthorFollower.follower == author_id)
    follows_shouts_query = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == author_id)

    follows_topics = {row[0] for row in session.execute(follows_topics_query) if row[0]}
    follows_authors = {row[0] for row in session.execute(follows_authors_query) if row[0]}
    follows_shouts = {row[0] for row in session.execute(follows_shouts_query) if row[0]}

    topics_payload = json.dumps(list(follows_topics), cls=CustomJSONEncoder)
    authors_payload = json.dumps(list(follows_authors), cls=CustomJSONEncoder)
    shouts_payload = json.dumps(list(follows_shouts), cls=CustomJSONEncoder)

    await asyncio.gather(
        redis.execute("SET", f"author:follows-topics:{author_id}", topics_payload),
        redis.execute("SET", f"author:follows-authors:{author_id}", authors_payload),
        redis.execute("SET", f"author:follows-shouts:{author_id}", shouts_payload),
    )


# Precache topic authors
async def precache_topics_authors(topic_id: int, session):
    topic_authors_query = (
        select(ShoutAuthor.author)
        .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
        .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
        .filter(
            and_(
                ShoutTopic.topic == topic_id,
                Shout.published_at.is_not(None),
                Shout.deleted_at.is_(None),
            )
        )
    )
    topic_authors = {row[0] for row in session.execute(topic_authors_query) if row[0]}

    authors_payload = json.dumps(list(topic_authors), cls=CustomJSONEncoder)
    await redis.execute("SET", f"topic:authors:{topic_id}", authors_payload)


# Precache topic followers
async def precache_topics_followers(topic_id: int, session):
    followers_query = select(TopicFollower.follower).where(TopicFollower.topic == topic_id)
    topic_followers = {row[0] for row in session.execute(followers_query) if row[0]}

    followers_payload = json.dumps(list(topic_followers), cls=CustomJSONEncoder)
    await redis.execute("SET", f"topic:followers:{topic_id}", followers_payload)


async def precache_data():
    logger.info("precaching...")
    try:
        key = "authorizer_env"
        # cache reset
        value = await redis.execute("HGETALL", key)
        await redis.execute("FLUSHDB")
        logger.info("redis: FLUSHDB")

        # convert the dict into a flat argument list for HSET
        if value:
            # if the value is a dict, flatten it for HSET
            if isinstance(value, dict):
                flattened = []
                for field, val in value.items():
                    flattened.extend([field, val])
                await redis.execute("HSET", key, *flattened)
            else:
                # assume the value already is a flat list
                await redis.execute("HSET", key, *value)
            logger.info(f"redis hash '{key}' was restored")

        with local_session() as session:
            # topics
            q = select(Topic).where(Topic.community == 1)
            topics = get_with_stat(q)
            for topic in topics:
                topic_dict = topic.dict() if hasattr(topic, "dict") else topic
                await cache_topic(topic_dict)
                await asyncio.gather(
                    precache_topics_followers(topic_dict["id"], session),
                    precache_topics_authors(topic_dict["id"], session),
                )
            logger.info(f"{len(topics)} topics and their followings precached")

            # authors
            authors = get_with_stat(select(Author).where(Author.user.is_not(None)))
            logger.info(f"{len(authors)} authors found in database")
            for author in authors:
                if isinstance(author, Author):
                    profile = author.dict()
                    author_id = profile.get("id")
                    user_id = profile.get("user", "").strip()
                    if author_id and user_id:
                        await cache_author(profile)
                        await asyncio.gather(
                            precache_authors_followers(author_id, session), precache_authors_follows(author_id, session)
                        )
                else:
                    logger.error(f"fail caching {author}")
            logger.info(f"{len(authors)} authors and their followings precached")
    except Exception as exc:
        import traceback

        traceback.print_exc()
        logger.error(f"Error in precache_data: {exc}")
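Precaching is presumably invoked once at service startup; a minimal sketch, assuming the Redis connection and database are already reachable when it runs:

# Sketch only; in the real service this would live in the startup code path.
import asyncio

from cache.precache import precache_data

asyncio.run(precache_data())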
157	cache/revalidator.py	vendored	Normal file
@@ -0,0 +1,157 @@
import asyncio

from cache.cache import (
    cache_author,
    cache_topic,
    get_cached_author,
    get_cached_topic,
    invalidate_cache_by_prefix,
)
from resolvers.stat import get_with_stat
from services.redis import redis
from utils.logger import root_logger as logger

CACHE_REVALIDATION_INTERVAL = 300  # 5 minutes


class CacheRevalidationManager:
    def __init__(self, interval=CACHE_REVALIDATION_INTERVAL):
        """Initialize the manager with the given check interval (in seconds)."""
        self.interval = interval
        self.items_to_revalidate = {"authors": set(), "topics": set(), "shouts": set(), "reactions": set()}
        self.lock = asyncio.Lock()
        self.running = True
        self.MAX_BATCH_SIZE = 10  # maximum number of items to process one by one

    async def start(self):
        """Start the background worker for cache revalidation."""
        self.task = asyncio.create_task(self.revalidate_cache())

    async def revalidate_cache(self):
        """Cyclically check and revalidate the cache every self.interval seconds."""
        try:
            while self.running:
                await asyncio.sleep(self.interval)
                await self.process_revalidation()
        except asyncio.CancelledError:
            logger.info("Revalidation worker was stopped.")
        except Exception as e:
            logger.error(f"An error occurred in the revalidation worker: {e}")

    async def process_revalidation(self):
        """Refresh the cache for every entity marked for revalidation."""
        async with self.lock:
            # revalidate author cache
            if self.items_to_revalidate["authors"]:
                logger.debug(f"Revalidating {len(self.items_to_revalidate['authors'])} authors")
                for author_id in self.items_to_revalidate["authors"]:
                    if author_id == "all":
                        await invalidate_cache_by_prefix("authors")
                        break
                    author = await get_cached_author(author_id, get_with_stat)
                    if author:
                        await cache_author(author)
                self.items_to_revalidate["authors"].clear()

            # revalidate topic cache
            if self.items_to_revalidate["topics"]:
                logger.debug(f"Revalidating {len(self.items_to_revalidate['topics'])} topics")
                for topic_id in self.items_to_revalidate["topics"]:
                    if topic_id == "all":
                        await invalidate_cache_by_prefix("topics")
                        break
                    topic = await get_cached_topic(topic_id)
                    if topic:
                        await cache_topic(topic)
                self.items_to_revalidate["topics"].clear()

            # revalidate shouts (publications)
            if self.items_to_revalidate["shouts"]:
                shouts_count = len(self.items_to_revalidate["shouts"])
                logger.debug(f"Revalidating {shouts_count} shouts")

                # check for the special 'all' flag
                if "all" in self.items_to_revalidate["shouts"]:
                    await invalidate_cache_by_prefix("shouts")
                else:
                    # with many items, first drop the collection keys that span many entities
                    if shouts_count > self.MAX_BATCH_SIZE:
                        collection_keys = await redis.execute("KEYS", "shouts:*")
                        if collection_keys:
                            await redis.execute("DEL", *collection_keys)
                            logger.debug(f"Deleted {len(collection_keys)} shout collection keys")

                    # targeted invalidation for each specific shout_id
                    for shout_id in self.items_to_revalidate["shouts"]:
                        if shout_id != "all":
                            key = f"shout:id:{shout_id}"
                            await redis.execute("DEL", key)
                            logger.debug(f"Deleted cache key {key}")

                self.items_to_revalidate["shouts"].clear()

            # reactions are invalidated the same way, key by key
            if self.items_to_revalidate["reactions"]:
                reactions_count = len(self.items_to_revalidate["reactions"])
                logger.debug(f"Revalidating {reactions_count} reactions")

                if "all" in self.items_to_revalidate["reactions"]:
                    await invalidate_cache_by_prefix("reactions")
                else:
                    # with many items, drop the reaction collection keys first
                    if reactions_count > self.MAX_BATCH_SIZE:
                        collection_keys = await redis.execute("KEYS", "reactions:*")
                        if collection_keys:
                            await redis.execute("DEL", *collection_keys)
                            logger.debug(f"Deleted {len(collection_keys)} reaction collection keys")

                    # targeted invalidation for each reaction
                    for reaction_id in self.items_to_revalidate["reactions"]:
                        if reaction_id != "all":
                            key = f"reaction:id:{reaction_id}"
                            await redis.execute("DEL", key)
                            logger.debug(f"Deleted cache key {key}")

                self.items_to_revalidate["reactions"].clear()

    def mark_for_revalidation(self, entity_id, entity_type):
        """Mark an entity for revalidation."""
        if entity_id and entity_type:
            self.items_to_revalidate[entity_type].add(entity_id)

    def invalidate_all(self, entity_type):
        """Mark all items of the given type for invalidation."""
        logger.debug(f"Marking all {entity_type} for invalidation")
        # special flag for full invalidation
        self.items_to_revalidate[entity_type].add("all")

    async def stop(self):
        """Stop the background worker."""
        self.running = False
        if hasattr(self, "task"):
            self.task.cancel()
            try:
                await self.task
            except asyncio.CancelledError:
                pass


revalidation_manager = CacheRevalidationManager()
cache/triggers.py (new file, 131 lines)
@@ -0,0 +1,131 @@
from sqlalchemy import event

from cache.revalidator import revalidation_manager
from orm.author import Author, AuthorFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from services.db import local_session
from utils.logger import root_logger as logger


def mark_for_revalidation(entity, *args):
    """Mark an entity for revalidation."""
    entity_type = (
        "authors"
        if isinstance(entity, Author)
        else "topics"
        if isinstance(entity, Topic)
        else "reactions"
        if isinstance(entity, Reaction)
        else "shouts"
        if isinstance(entity, Shout)
        else None
    )
    if entity_type:
        revalidation_manager.mark_for_revalidation(entity.id, entity_type)


def after_follower_handler(mapper, connection, target, is_delete=False):
    """Handle insert, update or delete of a follower relation."""
    entity_type = None
    if isinstance(target, AuthorFollower):
        entity_type = "authors"
    elif isinstance(target, TopicFollower):
        entity_type = "topics"
    elif isinstance(target, ShoutReactionsFollower):
        entity_type = "shouts"

    if entity_type:
        # Pick the followed entity's id by relation type
        # (ShoutReactionsFollower stores it in .shout, not .topic)
        followed_id = (
            target.author
            if entity_type == "authors"
            else target.topic
            if entity_type == "topics"
            else target.shout
        )
        revalidation_manager.mark_for_revalidation(followed_id, entity_type)
        if not is_delete:
            revalidation_manager.mark_for_revalidation(target.follower, "authors")


def after_shout_handler(mapper, connection, target):
    """Handle publication status changes."""
    if not isinstance(target, Shout):
        return

    # Check the publication status change
    # was_published = target.published_at is not None and target.deleted_at is None

    # Always refresh author and topic counters on any change to the post
    for author in target.authors:
        revalidation_manager.mark_for_revalidation(author.id, "authors")

    for topic in target.topics:
        revalidation_manager.mark_for_revalidation(topic.id, "topics")

    # Refresh the post itself
    revalidation_manager.mark_for_revalidation(target.id, "shouts")


def after_reaction_handler(mapper, connection, target):
    """Handle reactions, comments included."""
    if not isinstance(target, Reaction):
        return

    # Check whether this reaction is a comment
    is_comment = target.kind == ReactionKind.COMMENT.value

    # Resolve the related post
    shout_id = target.shout if isinstance(target.shout, int) else target.shout.id
    if not shout_id:
        return

    # Refresh counters for the comment's author
    if target.created_by:
        revalidation_manager.mark_for_revalidation(target.created_by, "authors")

    # Refresh counters for the post
    revalidation_manager.mark_for_revalidation(shout_id, "shouts")

    if is_comment:
        # For comments, also refresh the post's authors and topics
        with local_session() as session:
            shout = (
                session.query(Shout)
                .filter(Shout.id == shout_id, Shout.published_at.is_not(None), Shout.deleted_at.is_(None))
                .first()
            )

            if shout:
                for author in shout.authors:
                    revalidation_manager.mark_for_revalidation(author.id, "authors")

                for topic in shout.topics:
                    revalidation_manager.mark_for_revalidation(topic.id, "topics")


def events_register():
    """Register event handlers for all entities."""
    event.listen(ShoutAuthor, "after_insert", mark_for_revalidation)
    event.listen(ShoutAuthor, "after_update", mark_for_revalidation)
    event.listen(ShoutAuthor, "after_delete", mark_for_revalidation)

    event.listen(AuthorFollower, "after_insert", after_follower_handler)
    event.listen(AuthorFollower, "after_update", after_follower_handler)
    event.listen(AuthorFollower, "after_delete", lambda *args: after_follower_handler(*args, is_delete=True))

    event.listen(TopicFollower, "after_insert", after_follower_handler)
    event.listen(TopicFollower, "after_update", after_follower_handler)
    event.listen(TopicFollower, "after_delete", lambda *args: after_follower_handler(*args, is_delete=True))

    event.listen(ShoutReactionsFollower, "after_insert", after_follower_handler)
    event.listen(ShoutReactionsFollower, "after_update", after_follower_handler)
    event.listen(ShoutReactionsFollower, "after_delete", lambda *args: after_follower_handler(*args, is_delete=True))

    event.listen(Reaction, "after_update", mark_for_revalidation)
    event.listen(Author, "after_update", mark_for_revalidation)
    event.listen(Topic, "after_update", mark_for_revalidation)
    event.listen(Shout, "after_update", after_shout_handler)
    event.listen(Shout, "after_delete", after_shout_handler)

    event.listen(Reaction, "after_insert", after_reaction_handler)
    event.listen(Reaction, "after_update", after_reaction_handler)
    event.listen(Reaction, "after_delete", after_reaction_handler)

    logger.info("Event handlers registered successfully.")
docs/caching.md (new file, 279 lines)
@@ -0,0 +1,279 @@
# Discours Caching System

## Overview

The Discours caching system is a comprehensive solution for improving platform performance. It uses Redis to store frequently requested data and to reduce load on the primary database.

Caching is implemented as a multi-layer system composed of several modules:

- `cache.py` - core module with the caching functions
- `revalidator.py` - asynchronous cache revalidation manager
- `triggers.py` - SQLAlchemy event triggers for automatic revalidation
- `precache.py` - pre-caching of data at application startup

## Key Components

### 1. Cache Key Formats

The system supports several key formats for compatibility and ease of use:

- **Entity keys**: `entity:property:value` (e.g. `author:id:123`)
- **Collection keys**: `entity:collection:params` (e.g. `authors:stats:limit=10:offset=0`)
- **Special keys**: for backward compatibility (e.g. `topic_shouts_123`)

All standard key formats are kept in the `CACHE_KEYS` dictionary:

```python
CACHE_KEYS = {
    "TOPIC_ID": "topic:id:{}",
    "TOPIC_SLUG": "topic:slug:{}",
    "AUTHOR_ID": "author:id:{}",
    # and others...
}
```

### 2. Core Caching Functions

#### Key structure

Instead of generating keys through helper functions, the system follows strict key-building conventions:

1. **Keys for individual entities** follow the pattern:
   ```
   entity:property:value
   ```
   For example:
   - `topic:id:123` - topic with ID 123
   - `author:slug:john-doe` - author with slug "john-doe"
   - `shout:id:456` - publication with ID 456

2. **Keys for collections** follow the pattern:
   ```
   entity:collection[:filter1=value1:filter2=value2:...]
   ```
   For example:
   - `topics:all:basic` - basic list of all topics
   - `authors:stats:limit=10:offset=0:sort=name` - sorted, paginated list of authors
   - `shouts:feed:limit=20:community=1` - publication feed filtered by community

3. **Special key formats** for backward compatibility:
   ```
   entity_action_id
   ```
   For example:
   - `topic_shouts_123` - publications for the topic with ID 123

Across all modules, developers must build keys explicitly according to these conventions, which keeps caching uniform and predictable.

#### Reading and writing cached data

```python
async def cache_data(key, data, ttl=None)
async def get_cached_data(key)
```

These functions provide a universal interface for storing data in and retrieving it from the cache. They use Redis directly via `redis.execute()` calls.

#### High-level query caching

```python
async def cached_query(cache_key, query_func, ttl=None, force_refresh=False, **query_params)
```

`cached_query` combines reading from the cache with running the query when the cache is empty. It is the main function to use in resolvers for caching query results.

### 3. Entity Caching

Dedicated functions exist for the main entity types:

```python
async def cache_topic(topic: dict)
async def cache_author(author: dict)
async def get_cached_topic(topic_id: int)
async def get_cached_author(author_id: int, get_with_stat)
```

These functions simplify working with frequently used data types and ensure a uniform caching approach.

### 4. Working with Relations

The following functions handle relations between entities:

```python
async def cache_follows(follower_id, entity_type, entity_id, is_insert=True)
async def get_cached_topic_followers(topic_id)
async def get_cached_author_followers(author_id)
async def get_cached_follower_topics(author_id)
```

They make it possible to efficiently cache and retrieve information about subscriptions and the relations between authors, topics, and publications.
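A brief usage sketch of these relation helpers (illustrative only: the `"topic"` entity-type value and the returned list shape are assumptions, not confirmed by this document):

```python
# Hypothetical follow flow: after persisting a TopicFollower row,
# record the relation in the cache, then read the cached follower list back.
async def on_topic_follow(follower_id: int, topic_id: int):
    await cache_follows(follower_id, "topic", topic_id, is_insert=True)  # assumed entity_type value
    followers = await get_cached_topic_followers(topic_id)  # assumed to return a list
    logger.debug(f"topic {topic_id} now has {len(followers)} cached followers")
```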

## Cache Invalidation System

### 1. Direct Invalidation

The system supports two types of cache invalidation:

#### 1.1. Invalidation by prefix

```python
async def invalidate_cache_by_prefix(prefix)
```

Invalidates all cache keys that start with the given prefix. Used in resolvers to invalidate a whole group of caches on bulk changes.
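A minimal sketch of what prefix invalidation amounts to, assuming the direct `redis.execute()` access described in the implementation notes below; the real function may differ (for example, by using SCAN instead of KEYS):

```python
async def invalidate_cache_by_prefix(prefix: str):
    # Collect every key under the prefix and delete them in one call.
    # KEYS is O(N) over the keyspace; SCAN would be gentler under load.
    keys = await redis.execute("KEYS", f"{prefix}:*")
    if keys:
        await redis.execute("DEL", *keys)
        logger.debug(f"Removed {len(keys)} cache keys with prefix {prefix}")
```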

#### 1.2. Targeted invalidation

```python
async def invalidate_authors_cache(author_id=None)
async def invalidate_topics_cache(topic_id=None)
```

These functions invalidate the cache for a single entity only, which reduces load on Redis and avoids needlessly discarding cached data. If no entity ID is given, they fall back to prefix invalidation.

Examples of targeted invalidation:

```python
# Invalidate the cache only for the author with ID 123
await invalidate_authors_cache(123)

# Invalidate the cache only for the topic with ID 456
await invalidate_topics_cache(456)
```

### 2. Deferred Invalidation

The `revalidator.py` module implements deferred cache invalidation via the `CacheRevalidationManager` class:

```python
class CacheRevalidationManager:
    # ...
    async def process_revalidation(self):
        # ...
    def mark_for_revalidation(self, entity_id, entity_type):
        # ...
```

The revalidation manager runs as an asynchronous background process that periodically (every 5 minutes by default) checks for entities pending revalidation.

Implementation details (a usage sketch follows this list):
- Authors and topics are revalidated one record at a time
- Shouts and reactions are processed in batches, with a threshold of 10 items
- Once the threshold is reached, the system switches to invalidating collections instead of per-item processing
- The special `all` flag triggers full invalidation of every record of a given type
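A short usage sketch of the manager's public surface, as defined in `revalidator.py` above (the call sites are illustrative):

```python
# Queue a single topic for deferred revalidation; the background
# loop will refresh its cache entry on the next 5-minute pass.
revalidation_manager.mark_for_revalidation(456, "topics")

# Mark a whole entity type: this sets the special "all" flag, and the
# next pass switches to prefix invalidation instead of per-item work.
revalidation_manager.invalidate_all("shouts")
```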

### 3. Automatic Invalidation via Triggers

The `triggers.py` module registers SQLAlchemy event handlers that automatically mark entities for revalidation when data changes in the database:

```python
def events_register():
    event.listen(Author, "after_update", mark_for_revalidation)
    event.listen(Topic, "after_update", mark_for_revalidation)
    # and others...
```

The triggers:
- React to insert, update, and delete events
- Mark affected entities for deferred revalidation
- Account for relations between entities (e.g. changing a topic refreshes its related shouts)

## Pre-caching

The `precache.py` module pre-caches frequently used data at application startup:

```python
async def precache_data():
    # ...
```

This function runs when the application starts and fills the cache with data users will request often.

## Usage Examples

### Simple caching of a query result

```python
async def get_topics_with_stats(limit=10, offset=0, by="title"):
    # Build the cache key according to the conventions
    cache_key = f"topics:stats:limit={limit}:offset={offset}:sort={by}"

    cached_data = await get_cached_data(cache_key)
    if cached_data:
        return cached_data

    # Query the database
    result = ...  # data-fetching logic

    await cache_data(cache_key, result, ttl=300)
    return result
```

### Using the generic cached_query function

```python
async def get_topics_with_stats(limit=10, offset=0, by="title"):
    async def fetch_data(limit, offset, by):
        # Data-fetching logic
        return result

    # Build the cache key according to the conventions
    cache_key = f"topics:stats:limit={limit}:offset={offset}:sort={by}"

    return await cached_query(
        cache_key,
        fetch_data,
        ttl=300,
        limit=limit,
        offset=offset,
        by=by
    )
```

### Targeted cache invalidation on data changes

```python
async def update_topic(topic_id, new_data):
    # Update the database
    # ...

    # Invalidate the cache only for the modified topic
    await invalidate_topics_cache(topic_id)

    return updated_topic
```

## Debugging and Monitoring

The caching system logs its operations:

```python
logger.debug(f"Fetched data from cache for key {key}")
logger.debug(f"Removed {len(keys)} cache keys with prefix {prefix}")
logger.error(f"Cache invalidation error: {e}")
```

This makes it possible to track cache behavior and spot problems early.

## Usage Guidelines

1. **Follow the key-building conventions** - this is critical for cache consistency and predictability.
2. **Do not invent your own key formats** - use the existing patterns to keep things uniform.
3. **Do not forget invalidation** - always invalidate the cache when data changes.
4. **Prefer targeted invalidation** over prefix invalidation to reduce load on Redis.
5. **Set sensible TTLs** - use different TTL values depending on how often the data changes.
6. **Do not cache large volumes of data** - cache only what actually improves performance.

## Implementation Notes

- **Serialization**: `orjson` is used for efficient serialization and deserialization.
- **Date/time formatting**: `CustomJSONEncoder` handles dates correctly.
- **Asynchrony**: all caching operations are asynchronous to minimize impact on API performance.
- **Direct Redis access**: all operations go through direct `redis.execute()` calls with error handling.
- **Batch processing**: bulk operations use a threshold beyond which optimized strategies apply.

## Known Limitations

1. **Data consistency** - the system does not guarantee absolute consistency between the cache and the database.
2. **Memory** - the amount of cached data must be monitored to avoid Redis memory issues.
3. **Redis performance** - under heavy cache traffic Redis can become a bottleneck.
docs/comments-pagination.md (new file, 165 lines)
@@ -0,0 +1,165 @@
# Comment Pagination

## Overview

A branch-based comment pagination system has been implemented that efficiently loads and displays nested discussion threads. Its main advantages:

1. Only the needed comments are loaded, not the whole tree
2. Lower load on both server and client
3. Efficient navigation through large discussions
4. Preloading of the first N replies for better UX

## API for Hierarchical Comment Loading

### GraphQL query `load_comments_branch`

```graphql
query LoadCommentsBranch(
  $shout: Int!,
  $parentId: Int,
  $limit: Int,
  $offset: Int,
  $sort: ReactionSort,
  $childrenLimit: Int,
  $childrenOffset: Int
) {
  load_comments_branch(
    shout: $shout,
    parent_id: $parentId,
    limit: $limit,
    offset: $offset,
    sort: $sort,
    children_limit: $childrenLimit,
    children_offset: $childrenOffset
  ) {
    id
    body
    created_at
    created_by {
      id
      name
      slug
      pic
    }
    kind
    reply_to
    stat {
      rating
      commented
    }
    first_replies {
      id
      body
      created_at
      created_by {
        id
        name
        slug
        pic
      }
      kind
      reply_to
      stat {
        rating
        commented
      }
    }
  }
}
```

### Query parameters

| Parameter | Type | Default | Description |
|-----------|------|---------|-------------|
| shout | Int! | - | ID of the article the comments belong to |
| parent_id | Int | null | ID of the parent comment. If null, root comments are loaded |
| limit | Int | 10 | Maximum number of comments to load |
| offset | Int | 0 | Pagination offset |
| sort | ReactionSort | newest | Sort order: newest, oldest, like |
| children_limit | Int | 3 | Maximum number of child comments per parent |
| children_offset | Int | 0 | Pagination offset for child comments |

### Response fields

Each comment contains the following main fields:

- `id`: comment ID
- `body`: comment text
- `created_at`: creation time
- `created_by`: author information
- `kind`: reaction type (COMMENT)
- `reply_to`: parent comment ID (null for root comments)
- `first_replies`: the first N child comments
- `stat`: comment statistics, including:
  - `commented`: number of replies to the comment
  - `rating`: comment rating

## Usage Examples

### Loading root comments with their first replies

```javascript
const { data } = await client.query({
  query: LOAD_COMMENTS_BRANCH,
  variables: {
    shout: 222,
    limit: 10,
    offset: 0,
    sort: "newest",
    childrenLimit: 3
  }
});
```

### Loading replies to a specific comment

```javascript
const { data } = await client.query({
  query: LOAD_COMMENTS_BRANCH,
  variables: {
    shout: 222,
    parentId: 123, // ID of the comment whose replies we are loading
    limit: 10,
    offset: 0,
    sort: "oldest" // sort replies oldest first
  }
});
```

### Paginating child comments

To load additional replies to a comment:

```javascript
const { data } = await client.query({
  query: LOAD_COMMENTS_BRANCH,
  variables: {
    shout: 222,
    parentId: 123,
    limit: 10,
    offset: 0,
    childrenLimit: 5,
    childrenOffset: 3 // skip the first 3 (already loaded) comments
  }
});
```

## Client Implementation Guidelines

1. For complex discussion threads it is recommended to:

   - Initially load only root comments with their first N replies
   - When more replies exist (`stat.commented > first_replies.length`),
     show a "Show all replies" button (see the sketch after this list)
   - On click, load the remaining replies with a query that sets `parentId`

2. Sorting:
   - Default to `newest` to surface fresh discussions
   - Provide a sort toggle for the whole comment tree
   - Reload the data with the new `sort` parameter when it changes

3. Performance:
   - Cache query results on the client
   - Use optimistic updates when adding/editing comments
   - Load comments in chunks when needed (lazy loading)
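A minimal client-side sketch of the "Show all replies" flow from item 1 above (the helper names are illustrative; the query is the `LOAD_COMMENTS_BRANCH` used in the earlier examples):

```javascript
// Is there anything left beyond the preloaded first_replies?
function hasMoreReplies(comment) {
  return comment.stat.commented > comment.first_replies.length;
}

// Fetch the remaining replies of one comment branch on demand.
async function loadAllReplies(client, shoutId, comment) {
  const { data } = await client.query({
    query: LOAD_COMMENTS_BRANCH,
    variables: {
      shout: shoutId,
      parentId: comment.id,
      offset: comment.first_replies.length, // skip already-loaded replies
      limit: comment.stat.commented - comment.first_replies.length
    }
  });
  return data.load_comments_branch;
}
```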
docs/features.md (new file, 48 lines)
@@ -0,0 +1,48 @@
## Publication Views

- Google Analytics integration for tracking publication views
- Counting unique users and total view counts
- Automatic statistics refresh when publication data is requested

## Multi-domain Authorization

- Authorization support for multiple domains
- Automatic detection of the authorization server
- Correct CORS handling for all supported domains

## Caching System

- Redis serves as the primary caching mechanism
- The cache_on_arguments decorator supports both sync and async functions
- Automatic JSON serialization/deserialization using CustomJSONEncoder
- Fallback pickle serialization for complex objects
- Unique cache keys generated from the function signature and passed arguments
- Configurable cache lifetime (TTL)
- Manual cache invalidation for specific functions and arguments (see the sketch below)
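A minimal usage sketch of the decorator described above, assuming a dogpile-style interface; the `ttl` parameter name and the `invalidate` helper are assumptions, not confirmed API:

```python
# Hypothetical usage: the cache key is derived from the function
# signature plus the passed arguments, as described above.
@cache_on_arguments(ttl=300)  # assumed parameter name
async def get_shout_views(shout_id: int) -> int:
    ...  # expensive lookup (e.g. analytics or the database)

# Manual invalidation for one specific argument set (assumed helper):
# get_shout_views.invalidate(42)
```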

## Webhooks

- Automatic webhook registration for the user.login event
- Prevention of duplicate webhooks
- Automatic cleanup of stale webhooks
- Webhook authorization via WEBHOOK_SECRET
- Error handling for webhook operations
- Dynamic endpoint selection based on the environment

## CORS Configuration

- Supported methods: GET, POST, OPTIONS
- Credentials support enabled
- Allowed headers: Authorization, Content-Type, X-Requested-With, DNT, Cache-Control
- Preflight responses cached for 20 days (1728000 seconds)

## Branch-based Comment Pagination

- Efficient comment loading that respects the comments' hierarchical structure
- A dedicated `load_comments_branch` query for optimized loading of a comment branch
- Loading an article's root comments together with their first replies
- Flexible pagination for both root and child comments
- The `stat.commented` field reports the number of replies to a comment
- A dedicated `first_replies` field holds the first replies to a comment
- Multiple sort orders supported (newest, oldest, popular)
- Optimized SQL queries to minimize database load
docs/follower.md (new file, 94 lines)
@@ -0,0 +1,94 @@
# Following System

## Overview
System supports following different entity types:
- Authors
- Topics
- Communities
- Shouts (Posts)

## GraphQL API

### Mutations

#### follow
Follow an entity (author/topic/community/shout).

**Parameters:**
- `what: String!` - Entity type (`AUTHOR`, `TOPIC`, `COMMUNITY`, `SHOUT`)
- `slug: String` - Entity slug
- `entity_id: Int` - Optional entity ID

**Returns:**
```typescript
{
  authors?: Author[]        // For AUTHOR type
  topics?: Topic[]          // For TOPIC type
  communities?: Community[] // For COMMUNITY type
  shouts?: Shout[]          // For SHOUT type
  error?: String            // Error message if any
}
```
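An illustrative call (the selection set is an assumption based on the return shape above):

```graphql
mutation {
  follow(what: "TOPIC", slug: "design") {
    error
    topics {
      id
      slug
    }
  }
}
```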

#### unfollow
Unfollow an entity.

**Parameters:** Same as `follow`

**Returns:** Same as `follow`

### Queries

#### get_shout_followers
Get list of users who reacted to a shout.

**Parameters:**
- `slug: String` - Shout slug
- `shout_id: Int` - Optional shout ID

**Returns:**
```typescript
Author[] // List of authors who reacted
```

## Caching System

### Supported Entity Types
- Authors: `cache_author`, `get_cached_follower_authors`
- Topics: `cache_topic`, `get_cached_follower_topics`
- Communities: No cache
- Shouts: No cache

### Cache Flow
1. On follow/unfollow:
   - Update entity in cache
   - Update follower's following list
2. Cache is updated before notifications

## Notifications

- Sent when author is followed/unfollowed
- Contains:
  - Follower info
  - Author ID
  - Action type ("follow"/"unfollow")

## Error Handling

- Unauthorized access check
- Entity existence validation
- Duplicate follow prevention
- Full error logging
- Transaction safety with `local_session()`

## Database Schema

### Follower Tables
- `AuthorFollower`
- `TopicFollower`
- `CommunityFollower`
- `ShoutReactionsFollower`

Each table contains:
- `follower` - ID of following user
- `{entity_type}` - ID of followed entity
docs/load_shouts.md (new file, 80 lines)
@@ -0,0 +1,80 @@
# Publication Loading System

## Implementation Highlights

### Base query
- Automatically loads the primary author
- Adds the publication's main topic
- Supports a flexible filtering system
- Optimizes queries based on the requested fields

### Statistics
- Like/dislike counts
- Number of comments
- Date of the last reaction
- Statistics are loaded only when the `stat` field is requested

### Performance optimizations
- Lazy loading of related data
- Results cached for 5 minutes
- Batch loading of authors and topics
- Subqueries for complex selections

## Feed Types

### Random top posts (load_shouts_random_top)
**Advantages:**
- Diverse content
- Fast selection from a cached pool of top posts
- Configurable pool size

**Limitations:**
- Refreshed every 5 minutes
- Maximum pool size: 100 posts
- Only likes/dislikes count, comments excluded (see the sketch below)
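A sketch of the selection logic these constraints describe (illustrative only; the helper name and signature are not from the repository):

```python
import random

# Pick `limit` posts at random from a cached pool of top posts
# (rating = likes minus dislikes, comments excluded). The pool is
# refreshed every 5 minutes and capped at 100 posts.
def pick_random_top(top_pool: list[dict], limit: int = 10, pool_size: int = 100) -> list[dict]:
    pool = top_pool[: min(pool_size, 100)]
    return random.sample(pool, k=min(limit, len(pool)))
```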

### Unrated posts (load_shouts_unrated)
**Advantages:**
- Helps surface new content
- Even distribution of ratings
- Random output order

**Limitations:**
- Only posts with fewer than 3 reactions
- Comments are not counted
- No sorting by rating

### Bookmarks (load_shouts_bookmarked)
**Advantages:**
- Personalized selection
- Fast access to saved posts
- Supports all filters

**Limitations:**
- Requires authorization
- Limit on the number of bookmarks
- Caching disabled

## Key Points

### Pagination
- Default page size: 10
- Maximum page size: 100
- Cursor pagination supported

### Caching
- TTL: 5 minutes
- Invalidated when a post changes
- Separate cache per sort type

### Sorting
- By rating (likes minus dislikes)
- By number of comments
- By date of the last reaction
- By publication date (default)

### Security
- Access rights checks
- Deleted content filtered out
- SQL injection protection
- Input validation
docs/rating.md (new file, 82 lines)
@@ -0,0 +1,82 @@
# Rating System

## GraphQL Resolvers

### Queries

#### get_my_rates_shouts
Get user's reactions (LIKE/DISLIKE) for specified posts.

**Parameters:**
- `shouts: [Int!]!` - array of shout IDs

**Returns:**
```typescript
[{
  shout_id: Int
  my_rate: ReactionKind // LIKE or DISLIKE
}]
```

#### get_my_rates_comments
Get user's reactions (LIKE/DISLIKE) for specified comments.

**Parameters:**
- `comments: [Int!]!` - array of comment IDs

**Returns:**
```typescript
[{
  comment_id: Int
  my_rate: ReactionKind // LIKE or DISLIKE
}]
```

### Mutations

#### rate_author
Rate another author (karma system).

**Parameters:**
- `rated_slug: String!` - author's slug
- `value: Int!` - rating value (positive/negative); see the example below
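An illustrative call (parameter names come from the list above; the `error` selection is an assumption):

```graphql
mutation {
  rate_author(rated_slug: "john-doe", value: 1) {
    error
  }
}
```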

## Rating Calculation

### Author Rating Components

#### Shouts Rating
- Calculated from LIKE/DISLIKE reactions on author's posts
- Each LIKE: +1
- Each DISLIKE: -1
- Excludes deleted reactions
- Excludes comment reactions (see the sketch below)
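A minimal SQLAlchemy sketch of this calculation under the rules above. It is illustrative, not the repo's exact helper: the `ShoutAuthor` column names are assumptions, and treating `reply_to IS NULL` as "not a reaction on a comment" is one plausible reading of the last rule:

```python
from sqlalchemy import case, func

from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor


def count_author_shouts_rating(session, author_id: int) -> int:
    """Sum +1 per LIKE and -1 per DISLIKE on the author's posts,
    skipping deleted reactions and reactions left on comments."""
    return (
        session.query(
            func.coalesce(func.sum(case((Reaction.kind == ReactionKind.LIKE.value, 1), else_=-1)), 0)
        )
        .join(Shout, Shout.id == Reaction.shout)
        .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)  # assumed column names
        .filter(
            ShoutAuthor.author == author_id,
            Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
            Reaction.deleted_at.is_(None),
            Reaction.reply_to.is_(None),  # excludes reactions on comments
        )
        .scalar()
    )
```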

#### Comments Rating
- Calculated from LIKE/DISLIKE reactions on author's comments
- Each LIKE: +1
- Each DISLIKE: -1
- Only counts reactions to COMMENT type reactions
- Excludes deleted reactions

#### Legacy Karma
- Based on direct author ratings via `rate_author` mutation
- Stored in `AuthorRating` table
- Each positive rating: +1
- Each negative rating: -1

### Helper Functions

- `count_author_comments_rating()` - Calculate comment rating
- `count_author_shouts_rating()` - Calculate posts rating
- `get_author_rating_old()` - Get legacy karma rating
- `get_author_rating_shouts()` - Get posts rating (optimized)
- `get_author_rating_comments()` - Get comments rating (optimized)
- `add_author_rating_columns()` - Add rating columns to author query

## Notes

- All ratings exclude deleted content
- Reactions are unique per user/content
- Rating calculations are optimized with SQLAlchemy
- System supports both direct author rating and content-based rating
main.py (78 lines)
@@ -1,17 +1,25 @@
import asyncio
import os
import sys
from importlib import import_module
from os.path import exists

from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from starlette.routing import Route

from services.rediscache import redis
from services.schema import resolvers
from services.sentry import start_sentry
from cache.precache import precache_data
from cache.revalidator import revalidation_manager
from services.exception import ExceptionHandlerMiddleware
from services.redis import redis
from services.schema import create_all_tables, resolvers
from services.search import search_service
from services.viewed import ViewedStorage
from services.webhook import WebhookEndpoint
from services.webhook import WebhookEndpoint, create_webhook_endpoint
from settings import DEV_SERVER_PID_FILE_NAME, MODE

import_module("resolvers")
@@ -27,19 +35,61 @@ async def start():
    print(f"[main] process started in {MODE} mode")


# main starlette app object with ariadne mounted in root
async def lifespan(_app):
    try:
        create_all_tables()
        await asyncio.gather(
            redis.connect(),
            precache_data(),
            ViewedStorage.init(),
            create_webhook_endpoint(),
            search_service.info(),
            start(),
            revalidation_manager.start(),
        )
        yield
    finally:
        tasks = [redis.disconnect(), ViewedStorage.stop(), revalidation_manager.stop()]
        await asyncio.gather(*tasks, return_exceptions=True)


# Create the GraphQL instance
graphql_app = GraphQL(schema, debug=True)


# Wrap the GraphQL handler for better error handling
async def graphql_handler(request: Request):
    if request.method not in ["GET", "POST"]:
        return JSONResponse({"error": "Method Not Allowed"}, status_code=405)

    try:
        result = await graphql_app.handle_request(request)
        if isinstance(result, Response):
            return result
        return JSONResponse(result)
    except asyncio.CancelledError:
        return JSONResponse({"error": "Request cancelled"}, status_code=499)
    except Exception as e:
        print(f"GraphQL error: {str(e)}")
        return JSONResponse({"error": str(e)}, status_code=500)


# Update the route in Starlette
app = Starlette(
    routes=[
        Route("/", GraphQL(schema, debug=True)),
        Route("/", graphql_handler, methods=["GET", "POST"]),
        Route("/new-author", WebhookEndpoint),
    ],
    on_startup=[
        redis.connect,
        ViewedStorage.init,
        # search_service.info,
        start_sentry,
        start,
    ],
    on_shutdown=[redis.disconnect],
    lifespan=lifespan,
    debug=True,
)

app.add_middleware(ExceptionHandlerMiddleware)
if "dev" in sys.argv:
    app.add_middleware(
        CORSMiddleware,
        allow_origins=["https://localhost:3000"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
@@ -1,18 +1,14 @@
log_format custom '$remote_addr - $remote_user [$time_local] "$request" '
                  'origin=$http_origin allow_origin=$allow_origin status=$status '
                  '"$http_referer" "$http_user_agent"';

{{ $proxy_settings := "proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; proxy_set_header Connection $http_connection; proxy_set_header Host $http_host; proxy_set_header X-Request-Start $msec;" }}
{{ $gzip_settings := "gzip on; gzip_min_length 1100; gzip_buffers 4 32k; gzip_types text/css text/javascript text/xml text/plain text/x-component application/javascript application/x-javascript application/json application/xml application/rss+xml font/truetype application/x-font-ttf font/opentype application/vnd.ms-fontobject image/svg+xml; gzip_vary on; gzip_comp_level 6;" }}

{{ $cors_headers_options := "if ($request_method = 'OPTIONS') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS'; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization'; add_header 'Access-Control-Allow-Credentials' 'true'; add_header 'Access-Control-Max-Age' 1728000; add_header 'Content-Type' 'text/plain; charset=utf-8'; add_header 'Content-Length' 0; return 204; }" }}
{{ $cors_headers_post := "if ($request_method = 'POST') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always; add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always; add_header 'Access-Control-Allow-Credentials' 'true' always; }" }}
{{ $cors_headers_get := "if ($request_method = 'GET') { add_header 'Access-Control-Allow-Origin' '$allow_origin' always; add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always; add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always; add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always; add_header 'Access-Control-Allow-Credentials' 'true' always; }" }}

map $http_origin $allow_origin {
    ~^https?:\/\/((.*\.)?localhost(:\d+)?|discoursio-webapp(-(.*))?\.vercel\.app|(.*\.)?discours\.io)$ $http_origin;
    default "";
}

proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=my_cache:10m max_size=1g
                 inactive=60m use_temp_path=off;
limit_conn_zone $binary_remote_addr zone=addr:10m;
limit_req_zone $binary_remote_addr zone=req_zone:10m rate=20r/s;

{{ range $port_map := .PROXY_PORT_MAP | split " " }}
{{ $port_map_list := $port_map | split ":" }}
@@ -25,7 +21,7 @@ server {
    listen [::]:{{ $listen_port }};
    listen {{ $listen_port }};
    server_name {{ $.NOSSL_SERVER_NAME }};
    access_log /var/log/nginx/{{ $.APP }}-access.log;
    access_log /var/log/nginx/{{ $.APP }}-access.log custom;
    error_log /var/log/nginx/{{ $.APP }}-error.log;
    client_max_body_size 100M;

@@ -33,7 +29,7 @@ server {
    listen [::]:{{ $listen_port }} ssl http2;
    listen {{ $listen_port }} ssl http2;
    server_name {{ $.NOSSL_SERVER_NAME }};
    access_log /var/log/nginx/{{ $.APP }}-access.log;
    access_log /var/log/nginx/{{ $.APP }}-access.log custom;
    error_log /var/log/nginx/{{ $.APP }}-error.log;
    ssl_certificate {{ $.APP_SSL_PATH }}/server.crt;
    ssl_certificate_key {{ $.APP_SSL_PATH }}/server.key;
@@ -43,43 +39,63 @@ server {
    keepalive_timeout 70;
    keepalive_requests 500;
    proxy_read_timeout 3600;
    limit_conn addr 1000;
    limit_conn addr 10000;
    client_max_body_size 100M;
{{ end }}

    location / {
        proxy_pass http://{{ $.APP }}-{{ $upstream_port }};
        {{ $proxy_settings }}
        {{ $gzip_settings }}
        {{ $cors_headers_options }}
        {{ $cors_headers_post }}
        {{ $cors_headers_get }}

        # Handle CORS for OPTIONS method
        if ($request_method = 'OPTIONS') {
            add_header 'Access-Control-Allow-Origin' $allow_origin always;
            add_header 'Access-Control-Allow-Methods' 'POST, GET, OPTIONS';
            add_header 'Access-Control-Allow-Headers' 'Content-Type, Authorization' always;
            add_header 'Access-Control-Allow-Credentials' 'true' always;
            add_header 'Access-Control-Max-Age' 1728000;
            add_header 'Content-Type' 'text/plain; charset=utf-8';
            add_header 'Content-Length' 0;
            return 204;
        }

        # Handle CORS for POST method
        if ($request_method = 'POST') {
            add_header 'Access-Control-Allow-Origin' $allow_origin always;
            add_header 'Access-Control-Allow-Methods' 'POST, GET, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'Content-Type, Authorization' always;
            add_header 'Access-Control-Allow-Credentials' 'true' always;
        }

        # Handle CORS for GET method
        if ($request_method = 'GET') {
            add_header 'Access-Control-Allow-Origin' $allow_origin always;
            add_header 'Access-Control-Allow-Methods' 'POST, GET, OPTIONS' always;
            add_header 'Access-Control-Allow-Headers' 'Content-Type, Authorization' always;
            add_header 'Access-Control-Allow-Credentials' 'true' always;
        }

        proxy_cache my_cache;
        proxy_cache_revalidate on;
        proxy_cache_min_uses 2;
        proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
        proxy_cache_background_update on;
        proxy_cache_lock on;
    }

    # Custom location block for /upload
    location /upload {
        proxy_pass http://uploader-8080/;
        {{ $proxy_settings }}
        {{ $gzip_settings }}
        {{ $cors_headers_options }}
        {{ $cors_headers_post }}
        {{ $cors_headers_get }}
        # Connections and request limits increase (bad for DDoS)
        limit_req zone=req_zone burst=10 nodelay;
    }

    location ~* \.(jpg|jpeg|png|gif|ico|css|js)$ {
        expires 30d; # This means that the client can cache these resources for 30 days.
        add_header Cache-Control "public, no-transform";
        proxy_pass http://{{ $.APP }}-{{ $upstream_port }};
        expires 30d;
        add_header Cache-Control "public, no-transform";
    }

    location ~* \.(mp3)$ {
    location ~* \.(mp3|wav|ogg|flac|aac|aif|webm)$ {
        proxy_pass http://{{ $.APP }}-{{ $upstream_port }};
        if ($request_method = 'GET') {
            add_header 'Access-Control-Allow-Origin' $allow_origin always;
            add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
@@ -1,12 +1,22 @@
import time

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String
# from sqlalchemy_utils import TSVectorType
from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String

from services.db import Base

# from sqlalchemy_utils import TSVectorType


class AuthorRating(Base):
    """
    A rating of one author by another.

    Attributes:
        rater (int): ID of the rating author
        author (int): ID of the rated author
        plus (bool): Positive/negative rating
    """

    __tablename__ = "author_rating"

    id = None  # type: ignore
@@ -14,8 +24,26 @@ class AuthorRating(Base):
    author = Column(ForeignKey("author.id"), primary_key=True)
    plus = Column(Boolean)

    # Index definitions
    __table_args__ = (
        # Index for quickly finding all ratings of a given author
        Index("idx_author_rating_author", "author"),
        # Index for quickly finding all ratings left by a given author
        Index("idx_author_rating_rater", "rater"),
    )


class AuthorFollower(Base):
    """
    A subscription of one author to another.

    Attributes:
        follower (int): follower ID
        author (int): ID of the followed author
        created_at (int): subscription creation time
        auto (bool): whether the subscription was created automatically
    """

    __tablename__ = "author_follower"

    id = None  # type: ignore
@@ -24,8 +52,57 @@ class AuthorFollower(Base):
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    auto = Column(Boolean, nullable=False, default=False)

    # Index definitions
    __table_args__ = (
        # Index for quickly finding all followers of an author
        Index("idx_author_follower_author", "author"),
        # Index for quickly finding all authors a given author follows
        Index("idx_author_follower_follower", "follower"),
    )


class AuthorBookmark(Base):
    """
    An author's bookmark on a publication.

    Attributes:
        author (int): author ID
        shout (int): publication ID
    """

    __tablename__ = "author_bookmark"

    id = None  # type: ignore
    author = Column(ForeignKey("author.id"), primary_key=True)
    shout = Column(ForeignKey("shout.id"), primary_key=True)

    # Index definitions
    __table_args__ = (
        # Index for quickly finding all of an author's bookmarks
        Index("idx_author_bookmark_author", "author"),
        # Index for quickly finding all authors who bookmarked a publication
        Index("idx_author_bookmark_shout", "shout"),
    )


class Author(Base):
    """
    The author model.

    Attributes:
        user (str): user identifier in the authorization system
        name (str): display name
        slug (str): unique string identifier
        bio (str): short biography/status
        about (str): full description
        pic (str): profile image URL
        links (dict): links to social networks and websites
        created_at (int): profile creation time
        last_seen (int): time of last visit
        updated_at (int): time of last update
        deleted_at (int): deletion time (if the profile was deleted)
    """

    __tablename__ = "author"

    user = Column(String)  # unbounded link with authorizer's User type
@@ -44,3 +121,17 @@ class Author(Base):
    # search_vector = Column(
    #     TSVectorType("name", "slug", "bio", "about", regconfig="pg_catalog.russian")
    # )

    # Index definitions
    __table_args__ = (
        # Index for fast lookup by slug
        Index("idx_author_slug", "slug"),
        # Index for fast lookup by user identifier
        Index("idx_author_user", "user"),
        # Index for filtering out deleted authors
        Index("idx_author_deleted_at", "deleted_at", postgresql_where=deleted_at.is_(None)),
        # Index for sorting by creation time (newest authors)
        Index("idx_author_created_at", "created_at"),
        # Index for sorting by last-seen time
        Index("idx_author_last_seen", "last_seen"),
    )
@@ -1,20 +1,38 @@
import enum
import time

from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy import Column, ForeignKey, Integer, String, Text, distinct, func
from sqlalchemy.ext.hybrid import hybrid_property

from orm.author import Author
from services.db import Base


class CommunityAuthor(Base):
class CommunityRole(enum.Enum):
    READER = "reader"  # can read and comment
    AUTHOR = "author"  # + can vote and invite collaborators
    ARTIST = "artist"  # + can be credited as featured artist
    EXPERT = "expert"  # + can add proof or disproof to shouts, can manage topics
    EDITOR = "editor"  # + can manage topics, comments and community settings

    @classmethod
    def as_string_array(cls, roles):
        return [role.value for role in roles]


class CommunityFollower(Base):
    __tablename__ = "community_author"

    id = None  # type: ignore
    author = Column(ForeignKey("author.id"), primary_key=True)
    community = Column(ForeignKey("community.id"), primary_key=True)
    joined_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    role = Column(String, nullable=False)
    roles = Column(Text, nullable=True, comment="Roles (comma-separated)")

    def set_roles(self, roles):
        # store as a comma-separated string, not a Python list
        self.roles = ",".join(CommunityRole.as_string_array(roles))

    def get_roles(self):
        # split the stored string back into enum members
        return [CommunityRole(role) for role in self.roles.split(",")] if self.roles else []


class Community(Base):
@@ -25,5 +43,64 @@ class Community(Base):
    desc = Column(String, nullable=False, default="")
    pic = Column(String, nullable=False, default="")
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    created_by = Column(ForeignKey("author.id"), nullable=False)

    authors = relationship(Author, secondary="community_author")
    @hybrid_property
    def stat(self):
        return CommunityStats(self)

    @property
    def role_list(self):
        return self.roles.split(",") if self.roles else []

    @role_list.setter
    def role_list(self, value):
        self.roles = ",".join(value) if value else None


class CommunityStats:
    def __init__(self, community):
        self.community = community

    @property
    def shouts(self):
        from orm.shout import Shout

        return self.community.session.query(func.count(Shout.id)).filter(Shout.community == self.community.id).scalar()

    @property
    def followers(self):
        return (
            self.community.session.query(func.count(CommunityFollower.author))
            .filter(CommunityFollower.community == self.community.id)
            .scalar()
        )

    @property
    def authors(self):
        from orm.shout import Shout

        # an author counts if they have a shout in this community with featured_at set
        return (
            self.community.session.query(func.count(distinct(Author.id)))
            .join(Shout)
            .filter(Shout.community == self.community.id, Shout.featured_at.is_not(None), Author.id.in_(Shout.authors))
            .scalar()
        )


class CommunityAuthor(Base):
    __tablename__ = "community_author"

    id = Column(Integer, primary_key=True)
    community_id = Column(Integer, ForeignKey("community.id"))
    author_id = Column(Integer, ForeignKey("author.id"))
    roles = Column(Text, nullable=True, comment="Roles (comma-separated)")

    @property
    def role_list(self):
        return self.roles.split(",") if self.roles else []

    @role_list.setter
    def role_list(self, value):
        self.roles = ",".join(value) if value else None
orm/draft.py (new file, 55 lines)
@@ -0,0 +1,55 @@
import time

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship

from orm.author import Author
from orm.topic import Topic
from services.db import Base


class DraftTopic(Base):
    __tablename__ = "draft_topic"

    id = None  # type: ignore
    shout = Column(ForeignKey("draft.id"), primary_key=True, index=True)
    topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
    main = Column(Boolean, nullable=True)


class DraftAuthor(Base):
    __tablename__ = "draft_author"

    id = None  # type: ignore
    shout = Column(ForeignKey("draft.id"), primary_key=True, index=True)
    author = Column(ForeignKey("author.id"), primary_key=True, index=True)
    caption = Column(String, nullable=True, default="")


class Draft(Base):
    __tablename__ = "draft"

    # required
    created_at: int = Column(Integer, nullable=False, default=lambda: int(time.time()))
    created_by: int = Column(ForeignKey("author.id"), nullable=False)

    # optional
    layout: str = Column(String, nullable=True, default="article")
    slug: str = Column(String, unique=True)
    title: str = Column(String, nullable=True)
    subtitle: str | None = Column(String, nullable=True)
    lead: str | None = Column(String, nullable=True)
    description: str | None = Column(String, nullable=True)
    body: str = Column(String, nullable=False, comment="Body")
    media: dict | None = Column(JSON, nullable=True)
    cover: str | None = Column(String, nullable=True, comment="Cover image url")
    cover_caption: str | None = Column(String, nullable=True, comment="Cover image alt caption")
    lang: str = Column(String, nullable=False, default="ru", comment="Language")
    seo: str | None = Column(String, nullable=True)  # JSON

    # auto
    updated_at: int | None = Column(Integer, nullable=True, index=True)
    deleted_at: int | None = Column(Integer, nullable=True, index=True)
    updated_by: int | None = Column(ForeignKey("author.id"), nullable=True)
    deleted_by: int | None = Column(ForeignKey("author.id"), nullable=True)

    authors = relationship(Author, secondary="draft_author")
    topics = relationship(Topic, secondary="draft_topic")
@@ -1,4 +1,4 @@
from enum import Enum as Enumeration
import enum

from sqlalchemy import Column, ForeignKey, String
from sqlalchemy.orm import relationship
@@ -6,11 +6,15 @@ from sqlalchemy.orm import relationship
from services.db import Base


class InviteStatus(Enumeration):
class InviteStatus(enum.Enum):
    PENDING = "PENDING"
    ACCEPTED = "ACCEPTED"
    REJECTED = "REJECTED"

    @classmethod
    def from_string(cls, value):
        return cls(value)


class Invite(Base):
    __tablename__ = "invite"
@@ -20,6 +24,12 @@ class Invite(Base):
    shout_id = Column(ForeignKey("shout.id"), primary_key=True)
    status = Column(String, default=InviteStatus.PENDING.value)

    inviter = relationship("author", foreign_keys=[inviter_id])
    author = relationship("author", foreign_keys=[author_id])
    shout = relationship("shout")
    inviter = relationship("Author", foreign_keys=[inviter_id])
    author = relationship("Author", foreign_keys=[author_id])
    shout = relationship("Shout")

    def set_status(self, status: InviteStatus):
        self.status = status.value

    def get_status(self) -> InviteStatus:
        return InviteStatus.from_string(self.status)
@@ -1,22 +1,25 @@
import enum
import time
from enum import Enum as Enumeration

from sqlalchemy import JSON, Column, ForeignKey, Integer, String
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.orm import relationship

from orm.author import Author
from services.db import Base, engine
from services.logger import root_logger as logger
from services.db import Base


class NotificationEntity(Enumeration):
class NotificationEntity(enum.Enum):
    REACTION = "reaction"
    SHOUT = "shout"
    FOLLOWER = "follower"
    COMMUNITY = "community"

    @classmethod
    def from_string(cls, value):
        return cls(value)


class NotificationAction(Enumeration):
class NotificationAction(enum.Enum):
    CREATE = "create"
    UPDATE = "update"
    DELETE = "delete"
@@ -24,28 +27,37 @@ class NotificationAction(Enumeration):
    FOLLOW = "follow"
    UNFOLLOW = "unfollow"

    @classmethod
    def from_string(cls, value):
        return cls(value)


class NotificationSeen(Base):
    __tablename__ = "notification_seen"

    viewer = Column(ForeignKey("author.id"))
    notification = Column(ForeignKey("notification.id"))
    viewer = Column(ForeignKey("author.id"), primary_key=True)
    notification = Column(ForeignKey("notification.id"), primary_key=True)


class Notification(Base):
    __tablename__ = "notification"

    id = Column(Integer, primary_key=True, autoincrement=True)
    created_at = Column(Integer, server_default=str(int(time.time())))
    entity = Column(String, nullable=False)
    action = Column(String, nullable=False)
    payload = Column(JSON, nullable=True)

    seen = relationship(lambda: Author, secondary="notification_seen")
    seen = relationship(Author, secondary="notification_seen")

    def set_entity(self, entity: NotificationEntity):
        self.entity = entity.value

try:
    Notification.__table__.create(engine)
    logger.info("Table `notification` was created.")
except ProgrammingError:
    # Handle the exception here, for example by printing a message
    logger.info("Table `notification` already exists.")
    def get_entity(self) -> NotificationEntity:
        return NotificationEntity.from_string(self.entity)

    def set_action(self, action: NotificationAction):
        self.action = action.value

    def get_action(self) -> NotificationAction:
        return NotificationAction.from_string(self.action)
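Notifications use the same string-backed enum pattern for both entity and action; a sketch of tagging a row (hypothetical payload, not persisted):

n = Notification(payload={"shout": 1})  # hypothetical payload
n.set_entity(NotificationEntity.SHOUT)
n.set_action(NotificationAction.CREATE)
assert n.get_entity() is NotificationEntity.SHOUT
assert n.get_action() is NotificationAction.CREATE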
@@ -32,14 +32,14 @@ class Reaction(Base):
    __tablename__ = "reaction"

    body = Column(String, default="", comment="Reaction Body")
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    updated_at = Column(Integer, nullable=True, comment="Updated at")
    deleted_at = Column(Integer, nullable=True, comment="Deleted at")
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()), index=True)
    updated_at = Column(Integer, nullable=True, comment="Updated at", index=True)
    deleted_at = Column(Integer, nullable=True, comment="Deleted at", index=True)
    deleted_by = Column(ForeignKey("author.id"), nullable=True)
    reply_to = Column(ForeignKey("reaction.id"), nullable=True)
    quote = Column(String, nullable=True, comment="Original quoted text")
    shout = Column(ForeignKey("shout.id"), nullable=False)
    shout = Column(ForeignKey("shout.id"), nullable=False, index=True)
    created_by = Column(ForeignKey("author.id"), nullable=False)
    kind = Column(String, nullable=False)
    kind = Column(String, nullable=False, index=True)

    oid = Column(String)
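The new single-column indexes target the hot filters on reactions; a sketch of the kind of query they serve (the "COMMENT" kind value is an assumption, and Reaction/local_session are the objects shown in this PR):

from sqlalchemy import select

from orm.reaction import Reaction
from services.db import local_session

with local_session() as session:
    q = (
        select(Reaction)
        .where(Reaction.shout == 1)           # served by the new index on shout
        .where(Reaction.kind == "COMMENT")    # served by the new index on kind
        .where(Reaction.deleted_at.is_(None))
        .order_by(Reaction.created_at.desc())  # served by the new index on created_at
    )
    comments = session.execute(q).scalars().all()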
120 orm/shout.py
@@ -1,83 +1,127 @@
import time

from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String
from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String
from sqlalchemy.orm import relationship

from orm.author import Author
from orm.community import Community
from orm.reaction import Reaction
from orm.topic import Topic
from services.db import Base


class ShoutTopic(Base):
    """
    Link between a publication and a topic.

    Attributes:
        shout (int): publication ID
        topic (int): topic ID
        main (bool): flag marking the main topic
    """

    __tablename__ = "shout_topic"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.id"), primary_key=True)
    topic = Column(ForeignKey("topic.id"), primary_key=True)
    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
    main = Column(Boolean, nullable=True)

    # Define additional indexes
    __table_args__ = (
        # Optimized composite index for queries that look up publications by topic
        Index("idx_shout_topic_topic_shout", "topic", "shout"),
    )


class ShoutReactionsFollower(Base):
    __tablename__ = "shout_reactions_followers"

    id = None  # type: ignore
    follower = Column(ForeignKey("author.id"), primary_key=True)
    shout = Column(ForeignKey("shout.id"), primary_key=True)
    follower = Column(ForeignKey("author.id"), primary_key=True, index=True)
    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    auto = Column(Boolean, nullable=False, default=False)
    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    deleted_at = Column(Integer, nullable=True)


class ShoutAuthor(Base):
    """
    Link between a publication and an author.

    Attributes:
        shout (int): publication ID
        author (int): author ID
        caption (str): author's caption
    """

    __tablename__ = "shout_author"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.id"), primary_key=True)
    author = Column(ForeignKey("author.id"), primary_key=True)
    shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
    author = Column(ForeignKey("author.id"), primary_key=True, index=True)
    caption = Column(String, nullable=True, default="")


class ShoutCommunity(Base):
    __tablename__ = "shout_community"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.id"), primary_key=True)
    community = Column(ForeignKey("community.id"), primary_key=True)
    # Define additional indexes
    __table_args__ = (
        # Optimized index for queries that look up publications by author
        Index("idx_shout_author_author_shout", "author", "shout"),
    )


class Shout(Base):
    """
    A publication in the system.
    """

    __tablename__ = "shout"

    created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
    updated_at = Column(Integer, nullable=True)
    published_at = Column(Integer, nullable=True)
    featured_at = Column(Integer, nullable=True)
    deleted_at = Column(Integer, nullable=True)
    created_at: int = Column(Integer, nullable=False, default=lambda: int(time.time()))
    updated_at: int | None = Column(Integer, nullable=True, index=True)
    published_at: int | None = Column(Integer, nullable=True, index=True)
    featured_at: int | None = Column(Integer, nullable=True, index=True)
    deleted_at: int | None = Column(Integer, nullable=True, index=True)

    created_by = Column(ForeignKey("author.id"), nullable=False)
    updated_by = Column(ForeignKey("author.id"), nullable=True)
    deleted_by = Column(ForeignKey("author.id"), nullable=True)
    created_by: int = Column(ForeignKey("author.id"), nullable=False)
    updated_by: int | None = Column(ForeignKey("author.id"), nullable=True)
    deleted_by: int | None = Column(ForeignKey("author.id"), nullable=True)
    community: int = Column(ForeignKey("community.id"), nullable=False)

    body = Column(String, nullable=False, comment="Body")
    slug = Column(String, unique=True)
    cover = Column(String, nullable=True, comment="Cover image url")
    cover_caption = Column(String, nullable=True, comment="Cover image alt caption")
    lead = Column(String, nullable=True)
    description = Column(String, nullable=True)
    title = Column(String, nullable=False)
    subtitle = Column(String, nullable=True)
    layout = Column(String, nullable=False, default="article")
    media = Column(JSON, nullable=True)
    body: str = Column(String, nullable=False, comment="Body")
    slug: str = Column(String, unique=True)
    cover: str | None = Column(String, nullable=True, comment="Cover image url")
    cover_caption: str | None = Column(String, nullable=True, comment="Cover image alt caption")
    lead: str | None = Column(String, nullable=True)
    description: str | None = Column(String, nullable=True)
    title: str = Column(String, nullable=False)
    subtitle: str | None = Column(String, nullable=True)
    layout: str = Column(String, nullable=False, default="article")
    media: dict | None = Column(JSON, nullable=True)

    authors = relationship(Author, secondary="shout_author")
    topics = relationship(Topic, secondary="shout_topic")
    communities = relationship(Community, secondary="shout_community")
    reactions = relationship(Reaction)

    lang = Column(String, nullable=False, default="ru", comment="Language")
    version_of = Column(ForeignKey("shout.id"), nullable=True)
    oid = Column(String, nullable=True)
    lang: str = Column(String, nullable=False, default="ru", comment="Language")
    version_of: int | None = Column(ForeignKey("shout.id"), nullable=True)
    oid: str | None = Column(String, nullable=True)

    seo = Column(String, nullable=True)  # JSON
    seo: str | None = Column(String, nullable=True)  # JSON

    draft: int | None = Column(ForeignKey("draft.id"), nullable=True)

    # Define indexes
    __table_args__ = (
        # Index for fast lookup of non-deleted publications
        Index("idx_shout_deleted_at", "deleted_at", postgresql_where=deleted_at.is_(None)),
        # Index for fast filtering by community
        Index("idx_shout_community", "community"),
        # Index for fast lookup by slug
        Index("idx_shout_slug", "slug"),
        # Composite index for filtering published, non-deleted publications
        Index(
            "idx_shout_published_deleted",
            "published_at",
            "deleted_at",
            postgresql_where=published_at.is_not(None) & deleted_at.is_(None),
        ),
    )
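The postgresql_where arguments make these partial indexes, so they only cover the rows the hot paths actually scan. To inspect the Postgres-specific DDL they compile to, a minimal sketch (assuming a PostgreSQL dialect; exact output may differ by SQLAlchemy version):

from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import CreateIndex

from orm.shout import Shout

for ix in Shout.__table__.indexes:
    # e.g. CREATE INDEX idx_shout_deleted_at ON shout (deleted_at) WHERE deleted_at IS NULL
    print(CreateIndex(ix).compile(dialect=postgresql.dialect()))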
42 orm/topic.py
@@ -1,11 +1,21 @@
import time

from sqlalchemy import Boolean, Column, ForeignKey, Integer, String
from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String

from services.db import Base


class TopicFollower(Base):
    """
    Link between a topic and its follower.

    Attributes:
        follower (int): follower ID
        topic (int): topic ID
        created_at (int): time the link was created
        auto (bool): automatic subscription
    """

    __tablename__ = "topic_followers"

    id = None  # type: ignore
@@ -14,8 +24,29 @@ class TopicFollower(Base):
    created_at = Column(Integer, nullable=False, default=int(time.time()))
    auto = Column(Boolean, nullable=False, default=False)

    # Define indexes
    __table_args__ = (
        # Index for fast lookup of all followers of a topic
        Index("idx_topic_followers_topic", "topic"),
        # Index for fast lookup of all topics an author follows
        Index("idx_topic_followers_follower", "follower"),
    )


class Topic(Base):
    """
    Model of a publication topic.

    Attributes:
        slug (str): unique string identifier of the topic
        title (str): topic title
        body (str): topic description
        pic (str): topic image URL
        community (int): community ID
        oid (str): legacy ID
        parent_ids (list): IDs of parent topics
    """

    __tablename__ = "topic"

    slug = Column(String, unique=True)
@@ -24,3 +55,12 @@ class Topic(Base):
    pic = Column(String, nullable=True, comment="Picture")
    community = Column(ForeignKey("community.id"), default=1)
    oid = Column(String, nullable=True, comment="Old ID")
    parent_ids = Column(JSON, nullable=True, comment="Parent Topic IDs")

    # Define indexes
    __table_args__ = (
        # Index for fast lookup by slug
        Index("idx_topic_slug", "slug"),
        # Index for fast lookup by community
        Index("idx_topic_community", "community"),
    )
30 orm/user.py
@@ -1,30 +0,0 @@
import time

from sqlalchemy import Boolean, Column, Integer, String

from services.db import Base


class User(Base):
    __tablename__ = "authorizer_users"

    id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
    key = Column(String)
    email = Column(String, unique=True)
    email_verified_at = Column(Integer)
    family_name = Column(String)
    gender = Column(String)
    given_name = Column(String)
    is_multi_factor_auth_enabled = Column(Boolean)
    middle_name = Column(String)
    nickname = Column(String)
    password = Column(String)
    phone_number = Column(String, unique=True)
    phone_number_verified_at = Column(Integer)
    # preferred_username = Column(String, nullable=False)
    picture = Column(String)
    revoked_timestamp = Column(Integer)
    roles = Column(String, default="author, reader")
    signup_methods = Column(String, default="magic_link_login")
    created_at = Column(Integer, default=lambda: int(time.time()))
    updated_at = Column(Integer, default=lambda: int(time.time()))
@@ -1,42 +0,0 @@
[tool.poetry]
name = "core"
version = "0.3.4"
description = "core module for discours.io"
authors = ["discoursio devteam"]
license = "MIT"
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.12"
SQLAlchemy = "^2.0.29"
psycopg2-binary = "^2.9.9"
redis = {extras = ["hiredis"], version = "^5.0.1"}
sentry-sdk = {version = "^1.44.1", extras = ["starlette", "ariadne", "sqlalchemy"]}
starlette = "^0.37.2"
gql = "^3.5.0"
ariadne = "^0.23.0"
pre-commit = "^3.7.0"
granian = "^1.2.1"
google-analytics-data = "^0.18.7"
opensearch-py = "^2.5.0"
httpx = "^0.27.0"
dogpile-cache = "^1.3.1"
colorlog = "^6.8.2"

[tool.poetry.group.dev.dependencies]
ruff = "^0.3.5"
isort = "^5.13.2"

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.pyright]
venvPath = "."
venv = ".venv"

[tool.isort]
multi_line_output = 3
include_trailing_comma = true
force_grid_wrap = 0
line_length = 120
6 requirements.dev.txt Normal file
@@ -0,0 +1,6 @@
fakeredis
pytest
pytest-asyncio
pytest-cov
mypy
ruff
17 requirements.txt Normal file
@@ -0,0 +1,17 @@
# own auth
bcrypt
authlib
passlib
opensearch-py
google-analytics-data
colorlog
psycopg2-binary
httpx
redis[hiredis]
sentry-sdk[starlette,sqlalchemy]
starlette
gql
ariadne
granian
orjson
pydantic
@@ -1,3 +1,4 @@
from cache.triggers import events_register
from resolvers.author import (  # search_authors,
    get_author,
    get_author_followers,
@@ -10,43 +11,53 @@ from resolvers.author import (  # search_authors,
    update_author,
)
from resolvers.community import get_communities_all, get_community
from resolvers.editor import create_shout, delete_shout, update_shout
from resolvers.follower import (
    follow,
    get_shout_followers,
    get_topic_followers,
    unfollow,
from resolvers.draft import (
    create_draft,
    delete_draft,
    load_drafts,
    publish_draft,
    unpublish_draft,
    update_draft,
)
from resolvers.feed import (
    load_shouts_coauthored,
    load_shouts_discussed,
    load_shouts_feed,
    load_shouts_followed_by,
)
from resolvers.follower import follow, get_shout_followers, unfollow
from resolvers.notifier import (
    load_notifications,
    notification_mark_seen,
    notifications_seen_after,
    notifications_seen_thread,
)
from resolvers.rating import rate_author
from resolvers.rating import get_my_rates_comments, get_my_rates_shouts, rate_author
from resolvers.reaction import (
    create_reaction,
    delete_reaction,
    load_comment_ratings,
    load_comments_branch,
    load_reactions_by,
    load_shouts_followed,
    load_shout_comments,
    load_shout_ratings,
    update_reaction,
)
from resolvers.reader import (
    get_shout,
    load_shouts_by,
    load_shouts_feed,
    load_shouts_random_top,
    load_shouts_random_topic,
    load_shouts_search,
    load_shouts_unrated,
)
from resolvers.topic import (
    get_topic,
    get_topic_authors,
    get_topic_followers,
    get_topics_all,
    get_topics_by_author,
    get_topics_by_community,
)
from services.triggers import events_register

events_register()

@@ -54,12 +65,12 @@ __all__ = [
    # author
    "get_author",
    "get_author_id",
    "get_author_followers",
    "get_author_follows",
    "get_author_follows_topics",
    "get_author_follows_authors",
    "get_authors_all",
    "load_authors_by",
    "rate_author",
    "update_author",
    ## "search_authors",
    # community
@@ -70,33 +81,50 @@ __all__ = [
    "get_topics_all",
    "get_topics_by_community",
    "get_topics_by_author",
    "get_topic_followers",
    "get_topic_authors",
    # reader
    "get_shout",
    "load_shouts_by",
    "load_shouts_feed",
    "load_shouts_search",
    "load_shouts_followed",
    "load_shouts_unrated",
    "load_shouts_random_top",
    "load_shouts_random_topic",
    "load_shouts_search",
    "load_shouts_unrated",
    # feed
    "load_shouts_feed",
    "load_shouts_coauthored",
    "load_shouts_discussed",
    "load_shouts_with_topic",
    "load_shouts_followed_by",
    "load_shouts_authored_by",
    # follower
    "follow",
    "unfollow",
    "get_topic_followers",
    "get_shout_followers",
    "get_author_followers",
    # editor
    "create_shout",
    "update_shout",
    "delete_shout",
    # reaction
    "create_reaction",
    "update_reaction",
    "delete_reaction",
    "load_reactions_by",
    "load_shout_comments",
    "load_shout_ratings",
    "load_comment_ratings",
    "load_comments_branch",
    # notifier
    "load_notifications",
    "notifications_seen_thread",
    "notifications_seen_after",
    "notification_mark_seen",
    # rating
    "rate_author",
    "get_my_rates_comments",
    "get_my_rates_shouts",
    # draft
    "load_drafts",
    "create_draft",
    "update_draft",
    "delete_draft",
    "publish_draft",
    "publish_shout",
    "unpublish_shout",
    "unpublish_draft",
]
@@ -1,20 +1,195 @@
import json
import asyncio
import time
from typing import Optional

from sqlalchemy import and_, desc, or_, select, text
from sqlalchemy.orm import aliased
from sqlalchemy import select, text

from orm.author import Author, AuthorFollower
from orm.shout import ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.stat import author_follows_authors, author_follows_topics, get_with_stat
from cache.cache import (
    cache_author,
    cached_query,
    get_cached_author,
    get_cached_author_by_user_id,
    get_cached_author_followers,
    get_cached_follower_authors,
    get_cached_follower_topics,
    invalidate_cache_by_prefix,
)
from orm.author import Author
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.cache import cache_author, cache_follower
from services.db import local_session
from services.encoders import CustomJSONEncoder
from services.logger import root_logger as logger
from services.rediscache import redis
from services.redis import redis
from services.schema import mutation, query
from utils.logger import root_logger as logger

DEFAULT_COMMUNITIES = [1]


# Helper for fetching all authors without stats
async def get_all_authors():
    """
    Fetches all authors without stats.
    Used when the full author list is needed without any extra information.

    Returns:
        list: all authors, without stats
    """
    cache_key = "authors:all:basic"

    # Fetch all authors from the DB
    async def fetch_all_authors():
        logger.debug("Fetching the full author list from the DB and caching the result")

        with local_session() as session:
            # Query the basic author information
            authors_query = select(Author).where(Author.deleted_at.is_(None))
            authors = session.execute(authors_query).scalars().all()

            # Convert the authors to dicts
            return [author.dict() for author in authors]

    # Use the generic query-caching helper
    return await cached_query(cache_key, fetch_all_authors)
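The cached_query helper hides a Redis key behind a fetch function; a minimal sketch of the calling pattern (assuming cached_query(key, fetch_fn) returns the cached value when present and otherwise awaits fetch_fn and stores its result; the topics key and fetcher are hypothetical, mirroring get_all_authors above):

from orm.topic import Topic

async def get_all_topics_cached():
    async def fetch():
        with local_session() as session:
            return [t.dict() for t in session.query(Topic).all()]

    return await cached_query("topics:all:basic", fetch)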

# Helper for fetching authors with stats, paginated
async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None):
    """
    Fetches authors with stats, paginated.

    Args:
        limit: maximum number of authors to return
        offset: pagination offset
        by: optional sort parameter (new/active)

    Returns:
        list: authors with their stats
    """
    # Build the cache key with the generic helper
    cache_key = f"authors:stats:limit={limit}:offset={offset}"

    # Fetch the authors from the DB
    async def fetch_authors_with_stats():
        logger.debug(f"Running the authors-with-stats query: limit={limit}, offset={offset}, by={by}")

        with local_session() as session:
            # Base query for authors
            base_query = select(Author).where(Author.deleted_at.is_(None))

            # Apply sorting
            if by:
                if isinstance(by, dict):
                    # Handle a dict of sort parameters
                    from sqlalchemy import asc, desc

                    for field, direction in by.items():
                        column = getattr(Author, field, None)
                        if column:
                            if direction.lower() == "desc":
                                base_query = base_query.order_by(desc(column))
                            else:
                                base_query = base_query.order_by(column)
                elif by == "new":
                    base_query = base_query.order_by(desc(Author.created_at))
                elif by == "active":
                    base_query = base_query.order_by(desc(Author.last_seen))
                else:
                    # Default: sort by creation time
                    base_query = base_query.order_by(desc(Author.created_at))
            else:
                base_query = base_query.order_by(desc(Author.created_at))

            # Apply limit and offset
            base_query = base_query.limit(limit).offset(offset)

            # Fetch the authors
            authors = session.execute(base_query).scalars().all()
            author_ids = [author.id for author in authors]

            if not author_ids:
                return []

            # Optimized query for per-author publication stats
            shouts_stats_query = f"""
                SELECT sa.author, COUNT(DISTINCT s.id) as shouts_count
                FROM shout_author sa
                JOIN shout s ON sa.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
                WHERE sa.author IN ({",".join(map(str, author_ids))})
                GROUP BY sa.author
            """
            shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query))}

            # Query for per-author follower stats
            followers_stats_query = f"""
                SELECT author, COUNT(DISTINCT follower) as followers_count
                FROM author_follower
                WHERE author IN ({",".join(map(str, author_ids))})
                GROUP BY author
            """
            followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}

            # Build the result with the stats attached
            result = []
            for author in authors:
                author_dict = author.dict()
                author_dict["stat"] = {
                    "shouts": shouts_stats.get(author.id, 0),
                    "followers": followers_stats.get(author.id, 0),
                }
                result.append(author_dict)

                # Cache each author individually for reuse by other functions
                await cache_author(author_dict)

            return result

    # Use the generic query-caching helper
    return await cached_query(cache_key, fetch_authors_with_stats)
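Callers pass paging plus an optional sort; a sketch of typical usage (values hypothetical):

# 20 most recently registered authors, with shout/follower counts attached
newest = await get_authors_with_stats(limit=20, offset=0, by="new")
for a in newest:
    print(a["slug"], a["stat"]["shouts"], a["stat"]["followers"])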

# Invalidates author caches
async def invalidate_authors_cache(author_id=None):
    """
    Invalidates author caches when data changes.

    Args:
        author_id: optional author ID for targeted invalidation.
                   If omitted, all author caches are invalidated.
    """
    if author_id:
        # Targeted invalidation for a single author
        logger.debug(f"Invalidating cache for author #{author_id}")
        specific_keys = [
            f"author:id:{author_id}",
            f"author:followers:{author_id}",
            f"author:follows-authors:{author_id}",
            f"author:follows-topics:{author_id}",
            f"author:follows-shouts:{author_id}",
        ]

        # Fetch the author's user_id, if any
        with local_session() as session:
            author = session.query(Author).filter(Author.id == author_id).first()
            if author and author.user:
                specific_keys.append(f"author:user:{author.user.strip()}")

        # Delete the specific keys
        for key in specific_keys:
            try:
                await redis.execute("DEL", key)
                logger.debug(f"Deleted cache key {key}")
            except Exception as e:
                logger.error(f"Failed to delete key {key}: {e}")

        # Also find and delete collection keys that contain this author's data
        collection_keys = await redis.execute("KEYS", "authors:stats:*")
        if collection_keys:
            await redis.execute("DEL", *collection_keys)
            logger.debug(f"Deleted {len(collection_keys)} author collection keys")
    else:
        # Blanket invalidation of all author caches
        logger.debug("Full author cache invalidation")
        await invalidate_cache_by_prefix("authors")


@mutation.field("update_author")
@@ -30,8 +205,14 @@ async def update_author(_, info, profile):
            Author.update(author, profile)
            session.add(author)
            session.commit()
            [author] = get_with_stat(select(Author).where(Author.user == user_id))
            await cache_author(author.dict())
            author_query = select(Author).where(Author.user == user_id)
            result = get_with_stat(author_query)
            if result:
                author_with_stat = result[0]
                if isinstance(author_with_stat, Author):
                    author_dict = author_with_stat.dict()
                    # await cache_author(author_dict)
                    asyncio.create_task(cache_author(author_dict))
            return {"error": None, "author": author}
    except Exception as exc:
        import traceback
@@ -41,45 +222,51 @@ async def update_author(_, info, profile):


@query.field("get_authors_all")
def get_authors_all(_, _info):
    with local_session() as session:
        authors = session.query(Author).all()
        return authors
async def get_authors_all(_, _info):
    """
    Fetches the list of all authors without stats.

    Returns:
        list: all authors
    """
    return await get_all_authors()


@query.field("get_authors_paginated")
async def get_authors_paginated(_, _info, limit=50, offset=0, by=None):
    """
    Fetches a paginated list of authors with stats.

    Args:
        limit: maximum number of authors to return
        offset: pagination offset
        by: sort parameter (new/active)

    Returns:
        list: authors with their stats
    """
    return await get_authors_with_stats(limit, offset, by)


@query.field("get_author")
async def get_author(_, _info, slug="", author_id=0):
    author_query = ""
    author = None
    author_dict = None
    try:
        # lookup for cached author
        author_query = select(Author).filter(
            or_(Author.slug == slug, Author.id == author_id)
        )
        lookup_result = local_session().execute(author_query).first()
        if lookup_result:
            [found_author] = lookup_result
            logger.debug(found_author)
            if found_author:
                logger.debug(f"found author id: {found_author.id}")
                author_id = found_author.id if found_author.id else author_id
                if author_id:
                    cached_result = await redis.execute("GET", f"author:{author_id}")
                    if isinstance(cached_result, str):
                        author_dict = json.loads(cached_result)
        author_id = get_author_id_from(slug=slug, user="", author_id=author_id)
        if not author_id:
            raise ValueError("cant find")
        author_dict = await get_cached_author(int(author_id), get_with_stat)

        # update stat from db
        if not author_dict or not author_dict.get("stat"):
            # update stat from db
            author_query = select(Author).filter(Author.id == author_id)
            result = get_with_stat(author_query)
            if not result:
                raise ValueError("Author not found")
            [author] = result
            # use found author
            if isinstance(author, Author):
                logger.debug(f"update @{author.slug} with id {author.id}")
                author_dict = author.dict()
                await cache_author(author_dict)
            if result:
                author_with_stat = result[0]
                if isinstance(author_with_stat, Author):
                    author_dict = author_with_stat.dict()
                    # await cache_author(author_dict)
                    asyncio.create_task(cache_author(author_dict))
    except ValueError:
        pass
    except Exception as exc:
@@ -89,246 +276,125 @@ async def get_author(_, _info, slug="", author_id=0):
    return author_dict


async def get_author_by_user_id(user_id: str):
@query.field("get_author_id")
async def get_author_id(_, _info, user: str):
    user_id = user.strip()
    logger.info(f"getting author id for {user_id}")
    redis_key = f"user:{user_id}"
    author = None
    try:
        res = await redis.execute("GET", redis_key)
        if isinstance(res, str):
            author = json.loads(res)
            author_id = author.get("id")
            author_slug = author.get("slug")
            if author_id:
                logger.debug(f"got author @{author_slug} #{author_id} cached")
                return author
        author = await get_cached_author_by_user_id(user_id, get_with_stat)
        if author:
            return author

        author_query = select(Author).filter(Author.user == user_id)
        result = get_with_stat(author_query)
        if result:
            [author] = result
            await cache_author(author.dict())
            author_with_stat = result[0]
            if isinstance(author_with_stat, Author):
                author_dict = author_with_stat.dict()
                # await cache_author(author_dict)
                asyncio.create_task(cache_author(author_dict))
                return author_with_stat
    except Exception as exc:
        import traceback

        traceback.print_exc()
        logger.error(exc)
    return author


@query.field("get_author_id")
async def get_author_id(_, _info, user: str):
    return await get_author_by_user_id(user)
        logger.error(f"Error getting author: {exc}")
        return None


@query.field("load_authors_by")
async def load_authors_by(_, _info, by, limit, offset):
    logger.debug(f"loading authors by {by}")
    q = select(Author)
    if by.get("slug"):
        q = q.filter(Author.slug.ilike(f"%{by['slug']}%"))
    elif by.get("name"):
        q = q.filter(Author.name.ilike(f"%{by['name']}%"))
    elif by.get("topic"):
        q = (
            q.join(ShoutAuthor)
            .join(ShoutTopic)
            .join(Topic)
            .where(Topic.slug == str(by["topic"]))
        )
    """
    Loads authors by the given criterion, paginated.

    if by.get("last_seen"):  # in unix time
        before = int(time.time()) - by["last_seen"]
        q = q.filter(Author.last_seen > before)
    elif by.get("created_at"):  # in unix time
        before = int(time.time()) - by["created_at"]
        q = q.filter(Author.created_at > before)
    Args:
        by: author sort criterion (new/active)
        limit: maximum number of authors to return
        offset: pagination offset

    order = by.get("order")
    if order in ["shouts", "followers"]:
        q = q.order_by(desc(text(f"{order}_stat")))
    Returns:
        list: authors matching the criterion
    """
    # Use the optimized helper to fetch the authors
    return await get_authors_with_stats(limit, offset, by)

    q = q.limit(limit).offset(offset)

    authors_nostat = local_session().execute(q)
    authors = []
    if authors_nostat:
        for [a] in authors_nostat:
            if isinstance(a, Author):
                author_id = a.id
                if bool(author_id):
                    cached_result = await redis.execute("GET", f"author:{author_id}")
                    if isinstance(cached_result, str):
                        author_dict = json.loads(cached_result)
                        authors.append(author_dict)

    return authors
def get_author_id_from(slug="", user=None, author_id=None):
    try:
        if author_id:
            return author_id
        with local_session() as session:
            author = None
            if slug:
                author = session.query(Author).filter(Author.slug == slug).first()
                if author:
                    author_id = author.id
                    return author_id
            if user:
                author = session.query(Author).filter(Author.user == user).first()
                if author:
                    author_id = author.id
    except Exception as exc:
        logger.error(exc)
    return author_id


@query.field("get_author_follows")
async def get_author_follows(_, _info, slug="", user=None, author_id=0):
    try:
        author_query = select(Author)
        if user:
            author_query = author_query.filter(Author.user == user)
        elif slug:
            author_query = author_query.filter(Author.slug == slug)
        elif author_id:
            author_query = author_query.filter(Author.id == author_id)
        else:
            raise ValueError("One of slug, user, or author_id must be provided")
        [result] = local_session().execute(author_query)
        if len(result) > 0:
            # logger.debug(result)
            [author] = result
            # logger.debug(author)
            if author and isinstance(author, Author):
                # logger.debug(author.dict())
                author_id = author.id if not author_id else author_id
                topics = []
                authors = []
                if bool(author_id):
                    rkey = f"author:{author_id}:follows-authors"
                    logger.debug(f"getting {author_id} follows authors")
                    cached = await redis.execute("GET", rkey)
                    if not cached:
                        authors = author_follows_authors(author_id)  # type: ignore
                        prepared = [author.dict() for author in authors]
                        await redis.execute(
                            "SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
                        )
                    elif isinstance(cached, str):
                        authors = json.loads(cached)
    logger.debug(f"getting follows for @{slug}")
    author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
    if not author_id:
        return {}

                    rkey = f"author:{author_id}:follows-topics"
                    cached = await redis.execute("GET", rkey)
                    if cached and isinstance(cached, str):
                        topics = json.loads(cached)
                    if not cached:
                        topics = author_follows_topics(author_id)  # type: ignore
                        prepared = [topic.dict() for topic in topics]
                        await redis.execute(
                            "SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
                        )
                return {
                    "topics": topics,
                    "authors": authors,
                    "communities": [
                        {"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}
                    ],
                }
    except Exception:
        import traceback
    followed_authors = await get_cached_follower_authors(author_id)
    followed_topics = await get_cached_follower_topics(author_id)

        traceback.print_exc()
        return {"error": "Author not found"}
    # TODO: Get followed communities too
    return {
        "authors": followed_authors,
        "topics": followed_topics,
        "communities": DEFAULT_COMMUNITIES,
        "shouts": [],
    }


@query.field("get_author_follows_topics")
async def get_author_follows_topics(_, _info, slug="", user=None, author_id=None):
    with local_session() as session:
        if user or slug:
            author_id_result = (
                session.query(Author.id)
                .filter(or_(Author.user == user, Author.slug == slug))
                .first()
            )
            author_id = author_id_result[0] if author_id_result else None
        if not author_id:
            raise ValueError("Author not found")
        logger.debug(f"getting {author_id} follows topics")
        rkey = f"author:{author_id}:follows-topics"
        cached = await redis.execute("GET", rkey)
        topics = []
        if isinstance(cached, str):
            topics = json.loads(cached)
        if not cached:
            topics = author_follows_topics(author_id)
            prepared = [topic.dict() for topic in topics]
            await redis.execute(
                "SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
            )
        return topics
    logger.debug(f"getting followed topics for @{slug}")
    author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
    if not author_id:
        return []
    followed_topics = await get_cached_follower_topics(author_id)
    return followed_topics


@query.field("get_author_follows_authors")
async def get_author_follows_authors(_, _info, slug="", user=None, author_id=None):
    with local_session() as session:
        if user or slug:
            author_id_result = (
                session.query(Author.id)
                .filter(or_(Author.user == user, Author.slug == slug))
                .first()
            )
            author_id = author_id_result[0] if author_id_result else None
        if author_id:
            logger.debug(f"getting {author_id} follows authors")
            rkey = f"author:{author_id}:follows-authors"
            cached = await redis.execute("GET", rkey)
            authors = []
            if isinstance(cached, str):
                authors = json.loads(cached)
            if not authors:
                authors = author_follows_authors(author_id)
                prepared = [author.dict() for author in authors]
                await redis.execute(
                    "SET", rkey, json.dumps(prepared, cls=CustomJSONEncoder)
                )
            return authors
        else:
            raise ValueError("Author not found")
    logger.debug(f"getting followed authors for @{slug}")
    author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
    if not author_id:
        return []
    followed_authors = await get_cached_follower_authors(author_id)
    return followed_authors


def create_author(user_id: str, slug: str, name: str = ""):
    author = Author()
    author.user = user_id  # link to the user_id from the auth system
    author.slug = slug  # identifier from the auth system
    author.created_at = author.updated_at = int(time.time())
    author.name = name or slug  # if no name was given

    with local_session() as session:
        try:
            author = None
            if user_id:
                author = session.query(Author).filter(Author.user == user_id).first()
            elif slug:
                author = session.query(Author).filter(Author.slug == slug).first()
            if not author:
                new_author = Author(user=user_id, slug=slug, name=name)
                session.add(new_author)
                session.commit()
                logger.info(f"author created by webhook {new_author.dict()}")
        except Exception as exc:
            logger.debug(exc)
        session.add(author)
        session.commit()
    return author


@query.field("get_author_followers")
async def get_author_followers(_, _info, slug: str):
    logger.debug(f"getting followers for @{slug}")
    try:
        author_alias = aliased(Author)
        author_query = select(author_alias).filter(author_alias.slug == slug)
        result = local_session().execute(author_query).first()
        if result:
            [author] = result
            author_id = author.id
            cached = await redis.execute("GET", f"author:{author_id}:followers")
            if not cached:
                author_follower_alias = aliased(AuthorFollower, name="af")
                q = select(Author).join(
                    author_follower_alias,
                    and_(
                        author_follower_alias.author == author_id,
                        author_follower_alias.follower == Author.id,
                    ),
                )
                results = get_with_stat(q)
                if isinstance(results, list):
                    for follower in results:
                        await cache_follower(follower.dict(), author.dict())
                    logger.debug(f"@{slug} cache updated with {len(results)} followers")
                return results
            else:
                logger.debug(f"@{slug} got followers cached")
                if isinstance(cached, str):
                    return json.loads(cached)
    except Exception as exc:
        import traceback

        logger.error(exc)
        logger.error(traceback.format_exc())
async def get_author_followers(_, _info, slug: str = "", user: str = "", author_id: int = 0):
    logger.debug(f"getting followers for author @{slug} or ID:{author_id}")
    author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
    if not author_id:
        return []
    followers = await get_cached_author_followers(author_id)
    return followers
83 resolvers/bookmark.py Normal file
@@ -0,0 +1,83 @@
from operator import and_

from graphql import GraphQLError
from sqlalchemy import delete, insert

from orm.author import AuthorBookmark
from orm.shout import Shout
from resolvers.feed import apply_options
from resolvers.reader import get_shouts_with_links, query_with_stat
from services.auth import login_required
from services.common_result import CommonResult
from services.db import local_session
from services.schema import mutation, query


@query.field("load_shouts_bookmarked")
@login_required
def load_shouts_bookmarked(_, info, options):
    """
    Load bookmarked shouts for the authenticated user.

    Args:
        limit (int): Maximum number of shouts to return.
        offset (int): Number of shouts to skip.

    Returns:
        list: List of bookmarked shouts.
    """
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    if not author_id:
        raise GraphQLError("User not authenticated")

    q = query_with_stat(info)
    q = q.join(AuthorBookmark)
    q = q.filter(
        and_(
            Shout.id == AuthorBookmark.shout,
            AuthorBookmark.author == author_id,
        )
    )
    q, limit, offset = apply_options(q, options, author_id)
    return get_shouts_with_links(info, q, limit, offset)


@mutation.field("toggle_bookmark_shout")
def toggle_bookmark_shout(_, info, slug: str) -> CommonResult:
    """
    Toggle bookmark status for a specific shout.

    Args:
        slug (str): Unique identifier of the shout.

    Returns:
        CommonResult: Result of the operation with bookmark status.
    """
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    if not author_id:
        raise GraphQLError("User not authenticated")

    with local_session() as db:
        shout = db.query(Shout).filter(Shout.slug == slug).first()
        if not shout:
            raise GraphQLError("Shout not found")

        existing_bookmark = (
            db.query(AuthorBookmark)
            .filter(AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id)
            .first()
        )

        if existing_bookmark:
            db.execute(
                delete(AuthorBookmark).where(AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id)
            )
            result = False
        else:
            db.execute(insert(AuthorBookmark).values(author=author_id, shout=shout.id))
            result = True

        db.commit()
        return result
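A sketch of the toggle flow from a test's point of view (the context object is hypothetical, the slug must exist in the database, and the second call undoes the first):

ctx = type("Info", (), {"context": {"author": {"id": 1}}})()  # hypothetical context
assert toggle_bookmark_shout(None, ctx, "some-slug") is True   # bookmark added
assert toggle_bookmark_shout(None, ctx, "some-slug") is False  # bookmark removed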
@@ -18,18 +18,12 @@ async def accept_invite(_, info, invite_id: int):
    with local_session() as session:
        # Check if the invite exists
        invite = session.query(Invite).filter(Invite.id == invite_id).first()
        if (
            invite
            and invite.author_id is author_id
            and invite.status is InviteStatus.PENDING.value
        ):
        if invite and invite.author_id is author_id and invite.status is InviteStatus.PENDING.value:
            # Add the user to the shout authors
            shout = session.query(Shout).filter(Shout.id == invite.shout_id).first()
            if shout:
                if author_id not in shout.authors:
                    author = (
                        session.query(Author).filter(Author.id == author_id).first()
                    )
                    author = session.query(Author).filter(Author.id == author_id).first()
                    if author:
                        shout.authors.append(author)
                        session.add(shout)
@@ -57,11 +51,7 @@ async def reject_invite(_, info, invite_id: int):
        author_id = int(author_id)
        # Check if the invite exists
        invite = session.query(Invite).filter(Invite.id == invite_id).first()
        if (
            invite
            and invite.author_id is author_id
            and invite.status is InviteStatus.PENDING.value
        ):
        if invite and invite.author_id is author_id and invite.status is InviteStatus.PENDING.value:
            # Delete the invite
            session.delete(invite)
            session.commit()
@@ -124,9 +114,7 @@ async def remove_author(_, info, slug: str = "", author_id: int = 0):
        shout = session.query(Shout).filter(Shout.slug == slug).first()
        # NOTE: owner should be first in a list
        if shout and author.id is shout.created_by:
            shout.authors = [
                author for author in shout.authors if author.id != author_id
            ]
            shout.authors = [author for author in shout.authors if author.id != author_id]
            session.commit()
            return {}
    return {"error": "Access denied"}
@@ -1,73 +1,97 @@
from sqlalchemy import and_, distinct, func, select

from orm.author import Author
from orm.community import Community, CommunityAuthor
from orm.shout import ShoutCommunity
from orm.community import Community, CommunityFollower
from services.db import local_session
from services.logger import root_logger as logger
from services.schema import query


def get_communities_from_query(q):
    ccc = []
    with local_session() as session:
        for [c, shouts_stat, followers_stat] in session.execute(q):
            c.stat = {
                "shouts": session.execute(
                    select(func.count(distinct(ShoutCommunity.shout))).filter(
                        ShoutCommunity.community == c.id
                    )
                ),
                # "authors": session.execute(select(func.count(distinct(ShoutCommunity.shout))).filter(ShoutCommunity.community == c.id)),
                # "followers": session.execute(select(func.count(distinct(ShoutCommunity.shout))).filter(ShoutCommunity.community == c.id)),
                # "commented": commented_stat,
            }
            ccc.append(c)

    return ccc


# for mutation.field("follow")
def community_follow(follower_id, slug):
    try:
        with local_session() as session:
            community = session.query(Community).where(Community.slug == slug).first()
            if isinstance(community, Community):
                cf = CommunityAuthor(author=follower_id, community=community.id)
                session.add(cf)
                session.commit()
                return True
    except Exception as ex:
        logger.debug(ex)
    return False


# for mutation.field("unfollow")
def community_unfollow(follower_id, slug):
    with local_session() as session:
        flw = (
            session.query(CommunityAuthor)
            .join(Community, Community.id == CommunityAuthor.community)
            .filter(and_(CommunityAuthor.author == follower_id, Community.slug == slug))
            .first()
        )
        if flw:
            session.delete(flw)
            session.commit()
            return True
    return False
from services.schema import mutation, query


@query.field("get_communities_all")
async def get_communities_all(_, _info):
    q = select(Author)

    return get_communities_from_query(q)
    return local_session().query(Community).all()


@query.field("get_community")
async def get_community(_, _info, slug: str):
    q = select(Community).where(Community.slug == slug)
    q = local_session().query(Community).where(Community.slug == slug)
    return q.first()

    communities = get_communities_from_query(q)
    return communities[0]


@query.field("get_communities_by_author")
async def get_communities_by_author(_, _info, slug="", user="", author_id=0):
    with local_session() as session:
        q = session.query(Community).join(CommunityFollower)
        if slug:
            author_id = session.query(Author).where(Author.slug == slug).first().id
            q = q.where(CommunityFollower.author == author_id)
        if user:
            author_id = session.query(Author).where(Author.user == user).first().id
            q = q.where(CommunityFollower.author == author_id)
        if author_id:
            q = q.where(CommunityFollower.author == author_id)
        return q.all()
    return []


@mutation.field("join_community")
async def join_community(_, info, slug: str):
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    with local_session() as session:
        community = session.query(Community).where(Community.slug == slug).first()
        if not community:
            return {"ok": False, "error": "Community not found"}
        session.add(CommunityFollower(community=community.id, author=author_id))
        session.commit()
        return {"ok": True}


@mutation.field("leave_community")
async def leave_community(_, info, slug: str):
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    with local_session() as session:
        session.query(CommunityFollower).where(
            CommunityFollower.author == author_id, CommunityFollower.community == slug
        ).delete()
        session.commit()
        return {"ok": True}


@mutation.field("create_community")
async def create_community(_, info, community_data):
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    with local_session() as session:
        session.add(Community(author=author_id, **community_data))
        session.commit()
        return {"ok": True}


@mutation.field("update_community")
async def update_community(_, info, community_data):
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    slug = community_data.get("slug")
    if slug:
        with local_session() as session:
            try:
                session.query(Community).where(Community.created_by == author_id, Community.slug == slug).update(
                    community_data
                )
                session.commit()
            except Exception as e:
                return {"ok": False, "error": str(e)}
        return {"ok": True}
    return {"ok": False, "error": "Please, set community slug in input"}


@mutation.field("delete_community")
async def delete_community(_, info, slug: str):
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    with local_session() as session:
        try:
            session.query(Community).where(Community.slug == slug, Community.created_by == author_id).delete()
            session.commit()
            return {"ok": True}
        except Exception as e:
            return {"ok": False, "error": str(e)}
352 resolvers/draft.py Normal file
@@ -0,0 +1,352 @@
import time
from operator import or_

from sqlalchemy.sql import and_

from cache.cache import (
    cache_author,
    cache_by_id,
    cache_topic,
    invalidate_shout_related_cache,
    invalidate_shouts_cache,
)
from orm.author import Author
from orm.draft import Draft
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.auth import login_required
from services.db import local_session
from services.notify import notify_shout
from services.schema import mutation, query
from services.search import search_service
from utils.logger import root_logger as logger


def create_shout_from_draft(session, draft, author_id):
    # Create a new publication
    shout = Shout(
        body=draft.body,
        slug=draft.slug,
        cover=draft.cover,
        cover_caption=draft.cover_caption,
        lead=draft.lead,
        description=draft.description,
        title=draft.title,
        subtitle=draft.subtitle,
        layout=draft.layout,
        media=draft.media,
        lang=draft.lang,
        seo=draft.seo,
        created_by=author_id,
        community=draft.community,
        draft=draft.id,
        deleted_at=None,
    )
    return shout
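A sketch of how the publish flow uses this helper (the draft id is hypothetical; the session comes from local_session as elsewhere in this PR):

with local_session() as session:
    draft = session.query(Draft).filter(Draft.id == 123).first()  # hypothetical id
    if draft:
        shout = create_shout_from_draft(session, draft, author_id=draft.created_by)
        session.add(shout)
        session.commit()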
|
||||
|
||||
@query.field("load_drafts")
|
||||
@login_required
|
||||
async def load_drafts(_, info):
|
||||
user_id = info.context.get("user_id")
|
||||
author_dict = info.context.get("author", {})
|
||||
author_id = author_dict.get("id")
|
||||
|
||||
if not user_id or not author_id:
|
||||
return {"error": "User ID and author ID are required"}
|
||||
|
||||
with local_session() as session:
|
||||
drafts = (
|
||||
session.query(Draft)
|
||||
.filter(or_(Draft.authors.any(Author.id == author_id), Draft.created_by == author_id))
|
||||
.all()
|
||||
)
|
||||
return {"drafts": drafts}
|
||||
|
||||
|
||||
@mutation.field("create_draft")
|
||||
@login_required
|
||||
async def create_draft(_, info, draft_input):
|
||||
"""Create a new draft.
|
||||
|
||||
Args:
|
||||
info: GraphQL context
|
||||
draft_input (dict): Draft data including optional fields:
|
||||
- title (str, required) - заголовок черновика
|
||||
- body (str, required) - текст черновика
|
||||
- slug (str)
|
||||
- etc.
|
||||
|
||||
Returns:
|
||||
dict: Contains either:
|
||||
- draft: The created draft object
|
||||
- error: Error message if creation failed
|
||||
|
||||
Example:
|
||||
>>> async def test_create():
|
||||
... context = {'user_id': '123', 'author': {'id': 1}}
|
||||
... info = type('Info', (), {'context': context})()
|
||||
... result = await create_draft(None, info, {'title': 'Test'})
|
||||
... assert result.get('error') is None
|
||||
... assert result['draft'].title == 'Test'
|
||||
... return result
|
||||
"""
|
||||
user_id = info.context.get("user_id")
|
||||
author_dict = info.context.get("author", {})
|
||||
author_id = author_dict.get("id")
|
||||
|
||||
if not user_id or not author_id:
|
||||
return {"error": "Author ID is required"}
|
||||
|
||||
# Проверяем обязательные поля
|
||||
if "body" not in draft_input or not draft_input["body"]:
|
||||
draft_input["body"] = "" # Пустая строка вместо NULL
|
||||
|
||||
if "title" not in draft_input or not draft_input["title"]:
|
||||
draft_input["title"] = "" # Пустая строка вместо NULL
|
||||
|
||||
try:
|
||||
with local_session() as session:
|
||||
# Remove id from input if present since it's auto-generated
|
||||
if "id" in draft_input:
|
||||
del draft_input["id"]
|
||||
|
||||
# Добавляем текущее время создания
|
||||
draft_input["created_at"] = int(time.time())
|
||||
|
||||
draft = Draft(created_by=author_id, **draft_input)
|
||||
session.add(draft)
|
||||
session.commit()
|
||||
return {"draft": draft}
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create draft: {e}", exc_info=True)
|
||||
return {"error": f"Failed to create draft: {str(e)}"}
|
||||
|
||||
|
||||
@mutation.field("update_draft")
|
||||
@login_required
|
||||
async def update_draft(_, info, draft_id: int, draft_input):
|
||||
"""Обновляет черновик публикации.
|
||||
|
||||
Args:
|
||||
draft_id: ID черновика для обновления
|
||||
draft_input: Данные для обновления черновика
|
||||
|
||||
Returns:
|
||||
dict: Обновленный черновик или сообщение об ошибке
|
||||
"""
|
||||
user_id = info.context.get("user_id")
|
||||
author_dict = info.context.get("author", {})
|
||||
author_id = author_dict.get("id")
|
||||
|
||||
if not user_id or not author_id:
|
||||
return {"error": "Author ID are required"}
|
||||
|
||||
with local_session() as session:
|
||||
draft = session.query(Draft).filter(Draft.id == draft_id).first()
|
||||
if not draft:
|
||||
return {"error": "Draft not found"}
|
||||
|
||||
Draft.update(draft, draft_input)
|
||||
draft.updated_at = int(time.time())
|
||||
session.commit()
|
||||
return {"draft": draft}
|
||||
|
||||
|
||||
@mutation.field("delete_draft")
|
||||
@login_required
|
||||
async def delete_draft(_, info, draft_id: int):
|
||||
author_dict = info.context.get("author", {})
|
||||
author_id = author_dict.get("id")
|
||||
|
||||
with local_session() as session:
|
||||
draft = session.query(Draft).filter(Draft.id == draft_id).first()
|
||||
if not draft:
|
||||
return {"error": "Draft not found"}
|
||||
if author_id != draft.created_by and draft.authors.filter(Author.id == author_id).count() == 0:
|
||||
return {"error": "You are not allowed to delete this draft"}
|
||||
session.delete(draft)
|
||||
session.commit()
|
||||
return {"draft": draft}
|
||||
|
||||
|
||||
@mutation.field("publish_draft")
|
||||
@login_required
|
||||
async def publish_draft(_, info, draft_id: int):
|
||||
user_id = info.context.get("user_id")
|
||||
author_dict = info.context.get("author", {})
|
||||
author_id = author_dict.get("id")
|
||||
if not user_id or not author_id:
|
||||
return {"error": "User ID and author ID are required"}
|
||||
|
||||
with local_session() as session:
|
||||
draft = session.query(Draft).filter(Draft.id == draft_id).first()
|
||||
if not draft:
|
||||
return {"error": "Draft not found"}
|
||||
shout = create_shout_from_draft(session, draft, author_id)
|
||||
session.add(shout)
|
||||
session.commit()
|
||||
return {"shout": shout, "draft": draft}
|
||||
|
||||
|
||||
@mutation.field("unpublish_draft")
|
||||
@login_required
|
||||
async def unpublish_draft(_, info, draft_id: int):
|
||||
user_id = info.context.get("user_id")
|
||||
author_dict = info.context.get("author", {})
|
||||
author_id = author_dict.get("id")
|
||||
if not user_id or not author_id:
|
||||
return {"error": "User ID and author ID are required"}
|
||||
|
||||
with local_session() as session:
|
||||
draft = session.query(Draft).filter(Draft.id == draft_id).first()
|
||||
if not draft:
|
||||
return {"error": "Draft not found"}
|
||||
shout = session.query(Shout).filter(Shout.draft == draft.id).first()
|
||||
if shout:
|
||||
shout.published_at = None
|
||||
session.commit()
|
||||
return {"shout": shout, "draft": draft}
|
||||
return {"error": "Failed to unpublish draft"}
|
||||
|
||||
|
||||
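
# Lifecycle sketch (illustrative; assumes the resolvers above are registered
# unchanged): create a draft, publish it into a shout, then unpublish it. The
# context stub mirrors what `login_required` expects to find.
async def _example_draft_lifecycle():
    class FakeInfo:
        context = {"user_id": "123", "author": {"id": 1}}

    info = FakeInfo()
    created = await create_draft(None, info, {"title": "Test", "body": "..."})
    draft = created["draft"]

    published = await publish_draft(None, info, draft.id)
    assert published["shout"].draft == draft.id

    unpublished = await unpublish_draft(None, info, draft.id)
    assert unpublished["shout"].published_at is None
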
@mutation.field("publish_shout")
|
||||
@login_required
|
||||
async def publish_shout(_, info, shout_id: int):
|
||||
"""Publish draft as a shout or update existing shout.
|
||||
|
||||
Args:
|
||||
shout_id: ID существующей публикации или 0 для новой
|
||||
draft: Объект черновика (опционально)
|
||||
"""
|
||||
user_id = info.context.get("user_id")
|
||||
author_dict = info.context.get("author", {})
|
||||
author_id = author_dict.get("id")
|
||||
now = int(time.time())
|
||||
if not user_id or not author_id:
|
||||
return {"error": "User ID and author ID are required"}
|
||||
|
||||
try:
|
||||
with local_session() as session:
|
||||
shout = session.query(Shout).filter(Shout.id == shout_id).first()
|
||||
if not shout:
|
||||
return {"error": "Shout not found"}
|
||||
was_published = shout.published_at is not None
|
||||
draft = session.query(Draft).where(Draft.id == shout.draft).first()
|
||||
if not draft:
|
||||
return {"error": "Draft not found"}
|
||||
# Находим черновик если не передан
|
||||
|
||||
if not shout:
|
||||
shout = create_shout_from_draft(session, draft, author_id)
|
||||
else:
|
||||
# Обновляем существующую публикацию
|
||||
shout.draft = draft.id
|
||||
shout.created_by = author_id
|
||||
shout.title = draft.title
|
||||
shout.subtitle = draft.subtitle
|
||||
shout.body = draft.body
|
||||
shout.cover = draft.cover
|
||||
shout.cover_caption = draft.cover_caption
|
||||
shout.lead = draft.lead
|
||||
shout.description = draft.description
|
||||
shout.layout = draft.layout
|
||||
shout.media = draft.media
|
||||
shout.lang = draft.lang
|
||||
shout.seo = draft.seo
|
||||
|
||||
draft.updated_at = now
|
||||
shout.updated_at = now
|
||||
|
||||
# Устанавливаем published_at только если была ранее снята с публикации
|
||||
if not was_published:
|
||||
shout.published_at = now
|
||||
|
||||
# Обрабатываем связи с авторами
|
||||
if (
|
||||
not session.query(ShoutAuthor)
|
||||
.filter(and_(ShoutAuthor.shout == shout.id, ShoutAuthor.author == author_id))
|
||||
.first()
|
||||
):
|
||||
sa = ShoutAuthor(shout=shout.id, author=author_id)
|
||||
session.add(sa)
|
||||
|
||||
# Обрабатываем темы
|
||||
if draft.topics:
|
||||
for topic in draft.topics:
|
||||
st = ShoutTopic(
|
||||
topic=topic.id, shout=shout.id, main=topic.main if hasattr(topic, "main") else False
|
||||
)
|
||||
session.add(st)
|
||||
|
||||
session.add(shout)
|
||||
session.add(draft)
|
||||
session.flush()
|
||||
|
||||
# Инвалидируем кэш только если это новая публикация или была снята с публикации
|
||||
if not was_published:
|
||||
cache_keys = ["feed", f"author_{author_id}", "random_top", "unrated"]
|
||||
|
||||
# Добавляем ключи для тем
|
||||
for topic in shout.topics:
|
||||
cache_keys.append(f"topic_{topic.id}")
|
||||
cache_keys.append(f"topic_shouts_{topic.id}")
|
||||
await cache_by_id(Topic, topic.id, cache_topic)
|
||||
|
||||
# Инвалидируем кэш
|
||||
await invalidate_shouts_cache(cache_keys)
|
||||
await invalidate_shout_related_cache(shout, author_id)
|
||||
|
||||
# Обновляем кэш авторов
|
||||
for author in shout.authors:
|
||||
await cache_by_id(Author, author.id, cache_author)
|
||||
|
||||
# Отправляем уведомление о публикации
|
||||
await notify_shout(shout.dict(), "published")
|
||||
|
||||
# Обновляем поисковый индекс
|
||||
search_service.index(shout)
|
||||
else:
|
||||
# Для уже опубликованных материалов просто отправляем уведомление об обновлении
|
||||
await notify_shout(shout.dict(), "update")
|
||||
|
||||
session.commit()
|
||||
return {"shout": shout}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to publish shout: {e}", exc_info=True)
|
||||
if "session" in locals():
|
||||
session.rollback()
|
||||
return {"error": f"Failed to publish shout: {str(e)}"}
|
||||
|
||||
|
||||
@mutation.field("unpublish_shout")
|
||||
@login_required
|
||||
async def unpublish_shout(_, info, shout_id: int):
|
||||
"""Unpublish a shout.
|
||||
|
||||
Args:
|
||||
shout_id: The ID of the shout to unpublish
|
||||
|
||||
Returns:
|
||||
dict: The unpublished shout or an error message
|
||||
"""
|
||||
author_dict = info.context.get("author", {})
|
||||
author_id = author_dict.get("id")
|
||||
if not author_id:
|
||||
return {"error": "Author ID is required"}
|
||||
|
||||
shout = None
|
||||
with local_session() as session:
|
||||
try:
|
||||
shout = session.query(Shout).filter(Shout.id == shout_id).first()
|
||||
shout.published_at = None
|
||||
session.commit()
|
||||
invalidate_shout_related_cache(shout)
|
||||
invalidate_shouts_cache()
|
||||
|
||||
except Exception:
|
||||
session.rollback()
|
||||
return {"error": "Failed to unpublish shout"}
|
||||
|
||||
return {"shout": shout}
|
||||
@@ -1,46 +1,89 @@
import time

import orjson
from sqlalchemy import and_, desc, select
from sqlalchemy.orm import joinedload
from sqlalchemy.sql.functions import coalesce

from cache.cache import (
    cache_author,
    cache_topic,
    invalidate_shout_related_cache,
    invalidate_shouts_cache,
)
from orm.author import Author
from orm.rating import is_negative, is_positive
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.follower import follow, unfollow
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session
from services.diff import apply_diff, get_diff
from services.notify import notify_shout
from services.schema import query
from services.search import search_service
from utils.logger import root_logger as logger


async def cache_by_id(entity, entity_id: int, cache_method):
    """Cache an entity by its ID using the provided cache method.

    Args:
        entity: The SQLAlchemy model class to query
        entity_id (int): The ID of the entity to cache
        cache_method: The caching function to use

    Returns:
        dict: The cached entity data if successful, None if entity not found

    Example:
        >>> async def test_cache():
        ...     author = await cache_by_id(Author, 1, cache_author)
        ...     assert author['id'] == 1
        ...     assert 'name' in author
        ...     return author
    """
    caching_query = select(entity).filter(entity.id == entity_id)
    result = get_with_stat(caching_query)
    if not result or not result[0]:
        logger.warning(f"{entity.__name__} with id {entity_id} not found")
        return

    x = result[0]
    d = x.dict()  # convert object to dictionary
    await cache_method(d)
    return d
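
# Call-site sketch: this refactor replaces the string-keyed variant
# (`await cache_by_id("AUTHOR", 1)`) with a generic signature taking the model
# class and the matching cache coroutine, so new entity types need no changes
# here. The ids below are illustrative.
async def _example_cache_by_id():
    author_data = await cache_by_id(Author, 1, cache_author)
    topic_data = await cache_by_id(Topic, 42, cache_topic)
    return author_data, topic_data
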
@query.field("get_my_shout")
|
||||
@login_required
|
||||
async def get_my_shout(_, info, shout_id: int):
|
||||
logger.debug(info)
|
||||
"""Get a shout by ID if the requesting user has permission to view it.
|
||||
|
||||
DEPRECATED: use `load_drafts` instead
|
||||
|
||||
Args:
|
||||
info: GraphQL resolver info containing context
|
||||
shout_id (int): ID of the shout to retrieve
|
||||
|
||||
Returns:
|
||||
dict: Contains either:
|
||||
- error (str): Error message if retrieval failed
|
||||
- shout (Shout): The requested shout if found and accessible
|
||||
|
||||
Permissions:
|
||||
User must be:
|
||||
- The shout creator
|
||||
- Listed as an author
|
||||
- Have editor role
|
||||
|
||||
Example:
|
||||
>>> async def test_get_my_shout():
|
||||
... context = {'user_id': '123', 'author': {'id': 1}, 'roles': []}
|
||||
... info = type('Info', (), {'context': context})()
|
||||
... result = await get_my_shout(None, info, 1)
|
||||
... assert result['error'] is None
|
||||
... assert result['shout'].id == 1
|
||||
... return result
|
||||
"""
|
||||
user_id = info.context.get("user_id", "")
|
||||
author_dict = info.context.get("author", {})
|
||||
author_id = author_dict.get("id")
|
||||
@@ -59,17 +102,26 @@ async def get_my_shout(_, info, shout_id: int):
        if not shout:
            return {"error": "no shout found", "shout": None}

        # Convert the media JSON field into a list of media item objects
        if hasattr(shout, "media") and shout.media:
            if isinstance(shout.media, str):
                try:
                    shout.media = orjson.loads(shout.media)
                except Exception as e:
                    logger.error(f"Error parsing shout media: {e}")
                    shout.media = []
            if not isinstance(shout.media, list):
                shout.media = [shout.media] if shout.media else []
        else:
            shout.media = []

        logger.debug(f"got {len(shout.authors)} shout authors, created by {shout.created_by}")
        is_editor = "editor" in roles
        logger.debug(f"viewer is{'' if is_editor else ' not'} editor")
        is_creator = author_id == shout.created_by
        logger.debug(f"viewer is{'' if is_creator else ' not'} creator")
        is_author = bool(list(filter(lambda x: x.id == int(author_id), [x for x in shout.authors])))
        logger.debug(f"viewer is{'' if is_author else ' not'} author")
        can_edit = is_editor or is_author or is_creator

        if not can_edit:
@@ -85,9 +137,7 @@ async def get_shouts_drafts(_, info):
    # user_id = info.context.get("user_id")
    author_dict = info.context.get("author")
    if not author_dict:
        return {"error": "author profile was not found"}
    author_id = author_dict.get("id")
    shouts = []
    with local_session() as session:
@@ -95,255 +145,464 @@ async def get_shouts_drafts(_, info):
        q = (
            select(Shout)
            .options(joinedload(Shout.authors), joinedload(Shout.topics))
            .filter(and_(Shout.deleted_at.is_(None), Shout.created_by == int(author_id)))
            .filter(Shout.published_at.is_(None))
            .order_by(desc(coalesce(Shout.updated_at, Shout.created_at)))
            .group_by(Shout.id)
        )
        shouts = [shout for [shout] in session.execute(q).unique()]
    return {"shouts": shouts}


# @mutation.field("create_shout")
# @login_required
async def create_shout(_, info, inp):
    logger.info(f"Starting create_shout with input: {inp}")
    user_id = info.context.get("user_id")
    author_dict = info.context.get("author")
    logger.debug(f"Context user_id: {user_id}, author: {author_dict}")

    if not author_dict:
        logger.error("Author profile not found in context")
        return {"error": "author profile was not found"}

    author_id = author_dict.get("id")
    if user_id and author_id:
        try:
            with local_session() as session:
                author_id = int(author_id)
                current_time = int(time.time())
                slug = inp.get("slug") or f"draft-{current_time}"

                logger.info(f"Creating shout with input: {inp}")
                # Create the publication without topics first
                new_shout = Shout(
                    slug=slug,
                    body=inp.get("body", ""),
                    layout=inp.get("layout", "article"),
                    title=inp.get("title", ""),
                    created_by=author_id,
                    created_at=current_time,
                    community=1,
                )

                # Make sure the slug is unique
                logger.debug(f"Checking for existing slug: {slug}")
                same_slug_shout = session.query(Shout).filter(Shout.slug == new_shout.slug).first()
                c = 1
                while same_slug_shout is not None:
                    logger.debug(f"Found duplicate slug, trying iteration {c}")
                    new_shout.slug = f"{slug}-{c}"
                    same_slug_shout = session.query(Shout).filter(Shout.slug == new_shout.slug).first()
                    c += 1

                try:
                    logger.info("Creating new shout object")
                    session.add(new_shout)
                    session.commit()
                    logger.info(f"Created shout with ID: {new_shout.id}")
                except Exception as e:
                    logger.error(f"Error creating shout object: {e}", exc_info=True)
                    return {"error": f"Database error: {str(e)}"}

                # Link the author
                try:
                    logger.debug(f"Linking author {author_id} to shout {new_shout.id}")
                    sa = ShoutAuthor(shout=new_shout.id, author=author_id)
                    session.add(sa)
                except Exception as e:
                    logger.error(f"Error linking author: {e}", exc_info=True)
                    return {"error": f"Error linking author: {str(e)}"}

                # Link the topics
                input_topics = inp.get("topics", [])
                if input_topics:
                    try:
                        logger.debug(f"Linking topics: {[t.slug for t in input_topics]}")
                        main_topic = inp.get("main_topic")
                        for topic in input_topics:
                            st = ShoutTopic(
                                topic=topic.id,
                                shout=new_shout.id,
                                main=(topic.slug == main_topic) if main_topic else False,
                            )
                            session.add(st)
                            logger.debug(f"Added topic {topic.slug} {'(main)' if st.main else ''}")
                    except Exception as e:
                        logger.error(f"Error linking topics: {e}", exc_info=True)
                        return {"error": f"Error linking topics: {str(e)}"}

                try:
                    session.commit()
                    logger.info("Final commit successful")
                except Exception as e:
                    logger.error(f"Error in final commit: {e}", exc_info=True)
                    return {"error": f"Error in final commit: {str(e)}"}

                # Fetch the created publication
                shout = session.query(Shout).filter(Shout.id == new_shout.id).first()

                # Subscribe the author to it
                try:
                    logger.debug("Following created shout")
                    await follow(None, info, "shout", shout.slug)
                except Exception as e:
                    logger.warning(f"Error following shout: {e}", exc_info=True)

                logger.info(f"Successfully created shout {shout.id}")
                return {"shout": shout}

        except Exception as e:
            logger.error(f"Unexpected error in create_shout: {e}", exc_info=True)
            return {"error": f"Unexpected error: {str(e)}"}

    error_msg = "cant create shout" if user_id else "unauthorized"
    logger.error(f"Create shout failed: {error_msg}")
    return {"error": error_msg}


def patch_main_topic(session, main_topic_slug, shout):
    """Update the main topic for a shout."""
    logger.info(f"Starting patch_main_topic for shout#{shout.id} with slug '{main_topic_slug}'")
    logger.debug(f"Current shout topics: {[(t.topic.slug, t.main) for t in shout.topics]}")

    with session.begin():
        # Get the current main topic
        old_main = (
            session.query(ShoutTopic).filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True))).first()
        )
        if old_main:
            logger.info(f"Found current main topic: {old_main.topic.slug}")
        else:
            logger.info("No current main topic found")

        # Find the new main topic
        main_topic = session.query(Topic).filter(Topic.slug == main_topic_slug).first()
        if not main_topic:
            logger.error(f"Main topic with slug '{main_topic_slug}' not found")
            return

        logger.info(f"Found new main topic: {main_topic.slug} (id={main_topic.id})")

        # Find the relation to the new main topic
        new_main = (
            session.query(ShoutTopic)
            .filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.topic == main_topic.id))
            .first()
        )
        logger.debug(f"Found new main topic relation: {new_main is not None}")

        if old_main and new_main and old_main is not new_main:
            logger.info(f"Updating main topic flags: {old_main.topic.slug} -> {new_main.topic.slug}")
            old_main.main = False
            session.add(old_main)

            new_main.main = True
            session.add(new_main)

            session.flush()
            logger.info(f"Main topic updated for shout#{shout.id}")
        else:
            logger.warning(f"No changes needed for main topic (old={old_main is not None}, new={new_main is not None})")
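
# Call sketch for patch_main_topic (the slug is illustrative): update_shout
# invokes it with an open session and a loaded shout, e.g.
#
#     with local_session() as session:
#         shout = session.query(Shout).filter(Shout.id == 1).first()
#         patch_main_topic(session, "culture", shout)
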
def patch_topics(session, shout, topics_input):
    """Update the topics associated with a shout.

    Args:
        session: SQLAlchemy session
        shout (Shout): The shout to update
        topics_input (list): List of topic dicts with fields:
            - id (int): Topic ID (<0 for new topics)
            - slug (str): Topic slug
            - title (str): Topic title (for new topics)

    Side Effects:
        - Creates new topics if needed
        - Updates shout-topic associations
        - Refreshes shout object with new topics

    Example:
        >>> def test_patch_topics():
        ...     topics = [
        ...         {'id': -1, 'slug': 'new-topic', 'title': 'New Topic'},
        ...         {'id': 1, 'slug': 'existing-topic'}
        ...     ]
        ...     with local_session() as session:
        ...         shout = session.query(Shout).first()
        ...         patch_topics(session, shout, topics)
        ...         assert len(shout.topics) == 2
        ...         assert any(t.slug == 'new-topic' for t in shout.topics)
        ...         return shout.topics
    """
    logger.info(f"Starting patch_topics for shout#{shout.id}")
    logger.info(f"Received topics_input: {topics_input}")

    # Create new topics if any were passed
    new_topics_to_link = [Topic(**new_topic) for new_topic in topics_input if new_topic["id"] < 0]
    if new_topics_to_link:
        logger.info(f"Creating new topics: {[t.dict() for t in new_topics_to_link]}")
        session.add_all(new_topics_to_link)
        session.flush()

    # Get the current associations
    current_links = session.query(ShoutTopic).filter(ShoutTopic.shout == shout.id).all()
    logger.info(f"Current topic links: {[{t.topic: t.main} for t in current_links]}")

    # Remove the old associations
    if current_links:
        logger.info(f"Removing old topic links for shout#{shout.id}")
        for link in current_links:
            session.delete(link)
        session.flush()

    # Create the new associations
    for topic_input in topics_input:
        topic_id = topic_input["id"]
        if topic_id < 0:
            topic = next(t for t in new_topics_to_link if t.slug == topic_input["slug"])
            topic_id = topic.id

        logger.info(f"Creating new topic link: shout#{shout.id} -> topic#{topic_id}")
        new_link = ShoutTopic(shout=shout.id, topic=topic_id, main=False)
        session.add(new_link)

    session.flush()
    # Refresh the associations on the shout object
    session.refresh(shout)

    logger.info(f"Successfully updated topics for shout#{shout.id}")
    logger.info(f"Final shout topics: {[t.dict() for t in shout.topics]}")


@mutation.field("update_shout")
|
||||
@login_required
|
||||
# @mutation.field("update_shout")
|
||||
# @login_required
|
||||
async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
|
||||
logger.info(f"Starting update_shout with id={shout_id}, publish={publish}")
|
||||
logger.debug(f"Full shout_input: {shout_input}")
|
||||
|
||||
user_id = info.context.get("user_id")
|
||||
roles = info.context.get("roles", [])
|
||||
author_dict = info.context["author"]
|
||||
author_dict = info.context.get("author")
|
||||
if not author_dict:
|
||||
logger.error("Author profile not found")
|
||||
return {"error": "author profile was not found"}
|
||||
|
||||
author_id = author_dict.get("id")
|
||||
shout_input = shout_input or {}
|
||||
current_time = int(time.time())
|
||||
shout_id = shout_id or shout_input.get("id", shout_id)
|
||||
slug = shout_input.get("slug")
|
||||
|
||||
if not user_id:
|
||||
logger.error("Unauthorized update attempt")
|
||||
return {"error": "unauthorized"}
|
||||
|
||||
try:
|
||||
with local_session() as session:
|
||||
if author_id:
|
||||
logger.info(f"author for shout#{shout_id} detected author #{author_id}")
|
||||
shout_by_id = session.query(Shout).filter(Shout.id == shout_id).first()
|
||||
logger.info(f"Processing update for shout#{shout_id} by author #{author_id}")
|
||||
shout_by_id = (
|
||||
session.query(Shout)
|
||||
.options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
|
||||
.filter(Shout.id == shout_id)
|
||||
.first()
|
||||
)
|
||||
|
||||
if not shout_by_id:
|
||||
logger.error(f"shout#{shout_id} not found")
|
||||
return {"error": "shout not found"}
|
||||
|
||||
logger.info(f"Found shout#{shout_id}")
|
||||
|
||||
# Логируем текущие топики
|
||||
current_topics = (
|
||||
[{"id": t.id, "slug": t.slug, "title": t.title} for t in shout_by_id.topics]
|
||||
if shout_by_id.topics
|
||||
else []
|
||||
)
|
||||
logger.info(f"Current topics for shout#{shout_id}: {current_topics}")
|
||||
|
||||
if slug != shout_by_id.slug:
|
||||
same_slug_shout = (
|
||||
session.query(Shout).filter(Shout.slug == slug).first()
|
||||
)
|
||||
same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
|
||||
c = 1
|
||||
while same_slug_shout is not None:
|
||||
c += 1
|
||||
slug = f"{slug}-{c}"
|
||||
same_slug_shout = (
|
||||
session.query(Shout).filter(Shout.slug == slug).first()
|
||||
)
|
||||
same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
|
||||
shout_input["slug"] = slug
|
||||
logger.info(f"shout#{shout_id} slug patched")
|
||||
|
||||
if filter(lambda x: x.id == author_id, [x for x in shout_by_id.authors]) or "editor" in roles:
|
||||
logger.info(f"Author #{author_id} has permission to edit shout#{shout_id}")
|
||||
|
||||
if (
|
||||
filter(
|
||||
lambda x: x.id == author_id, [x for x in shout_by_id.authors]
|
||||
)
|
||||
or "editor" in roles
|
||||
):
|
||||
# topics patch
|
||||
topics_input = shout_input.get("topics")
|
||||
if topics_input:
|
||||
patch_topics(session, shout_by_id, topics_input)
|
||||
logger.info(f"Received topics_input for shout#{shout_id}: {topics_input}")
|
||||
try:
|
||||
patch_topics(session, shout_by_id, topics_input)
|
||||
logger.info(f"Successfully patched topics for shout#{shout_id}")
|
||||
|
||||
# Обновляем связи в сессии после patch_topics
|
||||
session.refresh(shout_by_id)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error patching topics: {e}", exc_info=True)
|
||||
return {"error": f"Failed to update topics: {str(e)}"}
|
||||
|
||||
del shout_input["topics"]
|
||||
for tpc in topics_input:
|
||||
await cache_by_id("TOPIC", tpc["id"])
|
||||
await cache_by_id(Topic, tpc["id"], cache_topic)
|
||||
else:
|
||||
logger.warning(f"No topics_input received for shout#{shout_id}")
|
||||
|
||||
# main topic
|
||||
main_topic = shout_input.get("main_topic")
|
||||
if main_topic:
|
||||
logger.info(f"Updating main topic for shout#{shout_id} to {main_topic}")
|
||||
patch_main_topic(session, main_topic, shout_by_id)
|
||||
|
||||
shout_input["updated_at"] = current_time
|
||||
shout_input["published_at"] = current_time if publish else None
|
||||
if publish:
|
||||
logger.info(f"Publishing shout#{shout_id}")
|
||||
shout_input["published_at"] = current_time
|
||||
# Проверяем наличие связи с автором
|
||||
logger.info(f"Checking author link for shout#{shout_id} and author#{author_id}")
|
||||
author_link = (
|
||||
session.query(ShoutAuthor)
|
||||
.filter(and_(ShoutAuthor.shout == shout_id, ShoutAuthor.author == author_id))
|
||||
.first()
|
||||
)
|
||||
|
||||
if not author_link:
|
||||
logger.info(f"Adding missing author link for shout#{shout_id}")
|
||||
sa = ShoutAuthor(shout=shout_id, author=author_id)
|
||||
session.add(sa)
|
||||
session.flush()
|
||||
logger.info("Author link added successfully")
|
||||
else:
|
||||
logger.info("Author link already exists")
|
||||
|
||||
# Логируем финальное состояние перед сохранением
|
||||
logger.info(f"Final shout_input for update: {shout_input}")
|
||||
Shout.update(shout_by_id, shout_input)
|
||||
session.add(shout_by_id)
|
||||
session.commit()
|
||||
|
||||
shout_dict = shout_by_id.dict()
|
||||
try:
|
||||
session.commit()
|
||||
# Обновляем объект после коммита чтобы получить все связи
|
||||
session.refresh(shout_by_id)
|
||||
logger.info(f"Successfully committed updates for shout#{shout_id}")
|
||||
except Exception as e:
|
||||
logger.error(f"Commit failed: {e}", exc_info=True)
|
||||
return {"error": f"Failed to save changes: {str(e)}"}
|
||||
|
||||
# После обновления проверяем топики
|
||||
updated_topics = (
|
||||
[{"id": t.id, "slug": t.slug, "title": t.title} for t in shout_by_id.topics]
|
||||
if shout_by_id.topics
|
||||
else []
|
||||
)
|
||||
logger.info(f"Updated topics for shout#{shout_id}: {updated_topics}")
|
||||
|
||||
# Инвалидация кэша после обновления
|
||||
try:
|
||||
logger.info("Invalidating cache after shout update")
|
||||
|
||||
cache_keys = [
|
||||
"feed", # лента
|
||||
f"author_{author_id}", # публикации автора
|
||||
"random_top", # случайные топовые
|
||||
"unrated", # неоцененные
|
||||
]
|
||||
|
||||
# Добавляем ключи для тем публикации
|
||||
for topic in shout_by_id.topics:
|
||||
cache_keys.append(f"topic_{topic.id}")
|
||||
cache_keys.append(f"topic_shouts_{topic.id}")
|
||||
|
||||
await invalidate_shouts_cache(cache_keys)
|
||||
await invalidate_shout_related_cache(shout_by_id, author_id)
|
||||
|
||||
# Обновляем кэш тем и авторов
|
||||
for topic in shout_by_id.topics:
|
||||
await cache_by_id(Topic, topic.id, cache_topic)
|
||||
for author in shout_by_id.authors:
|
||||
await cache_author(author.dict())
|
||||
|
||||
logger.info("Cache invalidated successfully")
|
||||
except Exception as cache_error:
|
||||
logger.warning(f"Cache invalidation error: {cache_error}", exc_info=True)
|
||||
|
||||
if not publish:
|
||||
await notify_shout(shout_dict, "update")
|
||||
await notify_shout(shout_by_id.dict(), "update")
|
||||
else:
|
||||
await notify_shout(shout_dict, "published")
|
||||
await notify_shout(shout_by_id.dict(), "published")
|
||||
# search service indexing
|
||||
search_service.index(shout_by_id)
|
||||
for a in shout_by_id.authors:
|
||||
await cache_by_id("AUTHOR", a.id)
|
||||
await cache_by_id(Author, a.id, cache_author)
|
||||
logger.info(f"shout#{shout_id} updated")
|
||||
# Получаем полные данные шаута со связями
|
||||
shout_with_relations = (
|
||||
session.query(Shout)
|
||||
.options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
|
||||
.filter(Shout.id == shout_id)
|
||||
.first()
|
||||
)
|
||||
|
||||
# Создаем словарь с базовыми полями
|
||||
shout_dict = shout_with_relations.dict()
|
||||
|
||||
# Явно добавляем связанные данные
|
||||
shout_dict["topics"] = (
|
||||
[
|
||||
{"id": topic.id, "slug": topic.slug, "title": topic.title}
|
||||
for topic in shout_with_relations.topics
|
||||
]
|
||||
if shout_with_relations.topics
|
||||
else []
|
||||
)
|
||||
|
||||
# Add main_topic to the shout dictionary
|
||||
shout_dict["main_topic"] = get_main_topic(shout_with_relations.topics)
|
||||
|
||||
shout_dict["authors"] = (
|
||||
[
|
||||
{"id": author.id, "name": author.name, "slug": author.slug}
|
||||
for author in shout_with_relations.authors
|
||||
]
|
||||
if shout_with_relations.authors
|
||||
else []
|
||||
)
|
||||
|
||||
logger.info(f"Final shout data with relations: {shout_dict}")
|
||||
logger.debug(
|
||||
f"Loaded topics details: {[(t.topic.slug if t.topic else 'no-topic', t.main) for t in shout_with_relations.topics]}"
|
||||
)
|
||||
return {"shout": shout_dict, "error": None}
|
||||
else:
|
||||
logger.warning(f"Access denied: author #{author_id} cannot edit shout#{shout_id}")
|
||||
return {"error": "access denied", "shout": None}
|
||||
|
||||
except Exception as exc:
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
logger.error(exc)
|
||||
logger.error(f" cannot update with data: {shout_input}")
|
||||
logger.error(f"Unexpected error in update_shout: {exc}", exc_info=True)
|
||||
logger.error(f"Failed input data: {shout_input}")
|
||||
return {"error": "cant update shout"}
|
||||
|
||||
return {"error": "cant update shout"}
|
||||
|
||||
|
||||
@mutation.field("delete_shout")
|
||||
@login_required
|
||||
# @mutation.field("delete_shout")
|
||||
# @login_required
|
||||
async def delete_shout(_, info, shout_id: int):
|
||||
user_id = info.context.get("user_id")
|
||||
roles = info.context.get("roles", [])
|
||||
author_dict = info.context["author"]
|
||||
author_dict = info.context.get("author")
|
||||
if not author_dict:
|
||||
return {"error": "author profile was not found"}
|
||||
author_id = author_dict.get("id")
|
||||
if user_id and author_id:
|
||||
author_id = int(author_id)
|
||||
@@ -359,12 +618,14 @@ async def delete_shout(_, info, shout_id: int):
|
||||
session.add(shout)
|
||||
session.commit()
|
||||
|
||||
for author_id in shout.authors:
|
||||
reactions_unfollow(author_id, shout_id)
|
||||
await cache_by_id("AUTHOR", author_id)
|
||||
for author in shout.authors:
|
||||
await cache_by_id(Author, author.id, cache_author)
|
||||
info.context["author"] = author.dict()
|
||||
info.context["user_id"] = author.user
|
||||
unfollow(None, info, "shout", shout.slug)
|
||||
|
||||
for topic_id in shout.topics:
|
||||
await cache_by_id("TOPIC", topic_id)
|
||||
for topic in shout.topics:
|
||||
await cache_by_id(Topic, topic.id, cache_topic)
|
||||
|
||||
await notify_shout(shout_dict, "delete")
|
||||
return {"error": None}
|
||||
@@ -372,48 +633,43 @@ async def delete_shout(_, info, shout_id: int):
|
||||
return {"error": "access denied"}
|
||||
|
||||
|
||||
def get_main_topic(topics):
    """Get the main topic from a list of ShoutTopic objects."""
    logger.info(f"Starting get_main_topic with {len(topics) if topics else 0} topics")
    logger.debug(
        f"Topics data: {[(t.topic.slug if t.topic else 'no-topic', t.main) for t in topics] if topics else []}"
    )

    if not topics:
        logger.warning("No topics provided to get_main_topic")
        return {"id": 0, "title": "no topic", "slug": "notopic", "is_main": True}

    # Find the first main topic in the original order
    main_topic_rel = next((st for st in topics if st.main), None)
    logger.debug(
        f"Found main topic relation: {main_topic_rel.topic.slug if main_topic_rel and main_topic_rel.topic else None}"
    )

    if main_topic_rel and main_topic_rel.topic:
        result = {
            "slug": main_topic_rel.topic.slug,
            "title": main_topic_rel.topic.title,
            "id": main_topic_rel.topic.id,
            "is_main": True,
        }
        logger.info(f"Returning main topic: {result}")
        return result

    # If no main topic was found but topics exist, return the first one
    if topics and topics[0].topic:
        logger.info(f"No main topic found, using first topic: {topics[0].topic.slug}")
        result = {
            "slug": topics[0].topic.slug,
            "title": topics[0].topic.title,
            "id": topics[0].topic.id,
            "is_main": True,
        }
        return result

    logger.warning("No valid topics found, returning default")
    return {"slug": "notopic", "title": "no topic", "id": 0, "is_main": True}
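
# Behaviour sketch for get_main_topic (the stand-in classes below are
# hypothetical stubs for Topic and ShoutTopic rows): the first relation
# flagged `main` wins; otherwise the first topic is promoted; an empty list
# falls back to the "notopic" placeholder.
def _example_get_main_topic():
    class _Topic:
        def __init__(self, id, slug, title):
            self.id, self.slug, self.title = id, slug, title

    class _Rel:
        def __init__(self, topic, main):
            self.topic, self.main = topic, main

    rels = [_Rel(_Topic(1, "culture", "Culture"), False), _Rel(_Topic(2, "music", "Music"), True)]
    assert get_main_topic(rels)["slug"] == "music"
    assert get_main_topic([])["slug"] == "notopic"
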
198
resolvers/feed.py
Normal file
@@ -0,0 +1,198 @@
from typing import List

from sqlalchemy import and_, select

from orm.author import Author, AuthorFollower
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.reader import (
    apply_options,
    get_shouts_with_links,
    has_field,
    query_with_stat,
)
from services.auth import login_required
from services.db import local_session
from services.schema import query
from utils.logger import root_logger as logger


@query.field("load_shouts_coauthored")
@login_required
async def load_shouts_coauthored(_, info, options):
    """
    Load publications the user co-authored.

    :param info: GraphQL context info.
    :param options: Filtering and sorting options.
    :return: List of co-authored publications.
    """
    author_id = info.context.get("author", {}).get("id")
    if not author_id:
        return []
    q = query_with_stat(info)
    q = q.filter(Shout.authors.any(id=author_id))
    q, limit, offset = apply_options(q, options)
    return get_shouts_with_links(info, q, limit, offset=offset)


@query.field("load_shouts_discussed")
@login_required
async def load_shouts_discussed(_, info, options):
    """
    Load publications the user has commented on.

    :param info: GraphQL context info.
    :param options: Filtering and sorting options.
    :return: List of publications discussed by the user.
    """
    author_id = info.context.get("author", {}).get("id")
    if not author_id:
        return []
    q = query_with_stat(info)
    options["filters"]["commented"] = True
    q, limit, offset = apply_options(q, options, author_id)
    return get_shouts_with_links(info, q, limit, offset=offset)


def shouts_by_follower(info, follower_id: int, options):
    """
    Load publications the author follows:

    - by authors
    - by topics
    - by reactions

    :param info: GraphQL context info.
    :param follower_id: Author ID.
    :param options: Filtering and sorting options.
    :return: List of publications.
    """
    q = query_with_stat(info)
    reader_followed_authors = select(AuthorFollower.author).where(AuthorFollower.follower == follower_id)
    reader_followed_topics = select(TopicFollower.topic).where(TopicFollower.follower == follower_id)
    reader_followed_shouts = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == follower_id)
    followed_subquery = (
        select(Shout.id)
        .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
        .join(ShoutTopic, ShoutTopic.shout == Shout.id)
        .where(
            ShoutAuthor.author.in_(reader_followed_authors)
            | ShoutTopic.topic.in_(reader_followed_topics)
            | Shout.id.in_(reader_followed_shouts)
        )
        .scalar_subquery()
    )
    q = q.filter(Shout.id.in_(followed_subquery))
    q, limit, offset = apply_options(q, options)
    shouts = get_shouts_with_links(info, q, limit, offset=offset)
    return shouts
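
# Caller sketch: the resolvers below all funnel into shouts_by_follower or a
# similar filtered query. A typical `options` payload, assuming the shape that
# apply_options accepts (the exact keys live in resolvers/reader.py and are
# partly a guess here), might look like:
#
#     options = {
#         "filters": {"published": True},
#         "limit": 20,
#         "offset": 0,
#     }
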
@query.field("load_shouts_followed_by")
|
||||
async def load_shouts_followed_by(_, info, slug: str, options) -> List[Shout]:
|
||||
"""
|
||||
Загружает публикации, на которые подписан автор по slug.
|
||||
|
||||
:param info: Информация о контексте GraphQL.
|
||||
:param slug: Slug автора.
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список публикаций.
|
||||
"""
|
||||
with local_session() as session:
|
||||
author = session.query(Author).filter(Author.slug == slug).first()
|
||||
if author:
|
||||
follower_id = author.dict()["id"]
|
||||
shouts = shouts_by_follower(info, follower_id, options)
|
||||
return shouts
|
||||
return []
|
||||
|
||||
|
||||
@query.field("load_shouts_feed")
|
||||
@login_required
|
||||
async def load_shouts_feed(_, info, options) -> List[Shout]:
|
||||
"""
|
||||
Загружает публикации, на которые подписан авторизованный пользователь.
|
||||
|
||||
:param info: Информация о контексте GraphQL.
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список публикаций.
|
||||
"""
|
||||
author_id = info.context.get("author", {}).get("id")
|
||||
return shouts_by_follower(info, author_id, options) if author_id else []
|
||||
|
||||
|
||||
@query.field("load_shouts_authored_by")
|
||||
async def load_shouts_authored_by(_, info, slug: str, options) -> List[Shout]:
|
||||
"""
|
||||
Загружает публикации, написанные автором по slug.
|
||||
|
||||
:param info: Информация о контексте GraphQL.
|
||||
:param slug: Slug автора.
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список публикаций.
|
||||
"""
|
||||
with local_session() as session:
|
||||
author = session.query(Author).filter(Author.slug == slug).first()
|
||||
if author:
|
||||
try:
|
||||
author_id: int = author.dict()["id"]
|
||||
q = (
|
||||
query_with_stat(info)
|
||||
if has_field(info, "stat")
|
||||
else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
|
||||
)
|
||||
q = q.filter(Shout.authors.any(id=author_id))
|
||||
q, limit, offset = apply_options(q, options, author_id)
|
||||
shouts = get_shouts_with_links(info, q, limit, offset=offset)
|
||||
return shouts
|
||||
except Exception as error:
|
||||
logger.debug(error)
|
||||
return []
|
||||
|
||||
|
||||
@query.field("load_shouts_with_topic")
|
||||
async def load_shouts_with_topic(_, info, slug: str, options) -> List[Shout]:
|
||||
"""
|
||||
Загружает публикации, связанные с темой по slug.
|
||||
|
||||
:param info: Информация о контексте GraphQL.
|
||||
:param slug: Slug темы.
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список публикаций.
|
||||
"""
|
||||
with local_session() as session:
|
||||
topic = session.query(Topic).filter(Topic.slug == slug).first()
|
||||
if topic:
|
||||
try:
|
||||
topic_id: int = topic.dict()["id"]
|
||||
q = (
|
||||
query_with_stat(info)
|
||||
if has_field(info, "stat")
|
||||
else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
|
||||
)
|
||||
q = q.filter(Shout.topics.any(id=topic_id))
|
||||
q, limit, offset = apply_options(q, options)
|
||||
shouts = get_shouts_with_links(info, q, limit, offset=offset)
|
||||
return shouts
|
||||
except Exception as error:
|
||||
logger.debug(error)
|
||||
return []
|
||||
|
||||
|
||||
def apply_filters(q, filters):
|
||||
"""
|
||||
Применяет фильтры к запросу
|
||||
"""
|
||||
logger.info(f"Applying filters: {filters}")
|
||||
|
||||
if filters.get("published"):
|
||||
q = q.filter(Shout.published_at.is_not(None))
|
||||
logger.info("Added published filter")
|
||||
|
||||
if filters.get("topic"):
|
||||
topic_slug = filters["topic"]
|
||||
q = q.join(ShoutTopic).join(Topic).filter(Topic.slug == topic_slug)
|
||||
logger.info(f"Added topic filter: {topic_slug}")
|
||||
|
||||
return q
|
||||
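
# apply_filters sketch: the helper is a pure query transformation, so it can
# be exercised on a bare select; the topic slug is illustrative.
def _example_apply_filters():
    q = select(Shout)
    q = apply_filters(q, {"published": True, "topic": "culture"})
    return q  # a SELECT with published_at IS NOT NULL and a topic join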
@@ -1,316 +1,222 @@
import json
import time
from typing import List

from graphql import GraphQLError
from sqlalchemy import select
from sqlalchemy.sql import and_

from cache.cache import (
    cache_author,
    cache_topic,
    get_cached_follower_authors,
    get_cached_follower_topics,
)
from orm.author import Author, AuthorFollower
from orm.community import Community, CommunityFollower
from orm.reaction import Reaction
from orm.shout import Shout, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from resolvers.stat import author_follows_authors, author_follows_topics, get_with_stat
from services.auth import login_required
from services.cache import DEFAULT_FOLLOWS
from services.db import local_session
from services.notify import notify_follower
from services.rediscache import redis
from services.schema import mutation, query
from utils.logger import root_logger as logger


async def cache_by_slug(what: str, slug: str):
    is_author = what == "AUTHOR"
    alias = Author if is_author else Topic
    q = select(alias).filter(alias.slug == slug)
    [x] = get_with_stat(q)
    if not x:
        return

    d = x.dict()  # convert object to dictionary
    if is_author:
        await cache_author(d)
    else:
        await cache_topic(d)
    return d


@mutation.field("follow")
|
||||
@login_required
|
||||
async def follow(_, info, what, slug):
|
||||
error = None
|
||||
async def follow(_, info, what, slug="", entity_id=0):
|
||||
logger.debug("Начало выполнения функции 'follow'")
|
||||
user_id = info.context.get("user_id")
|
||||
follower_dict = info.context["author"]
|
||||
follower_dict = info.context.get("author")
|
||||
logger.debug(f"follower: {follower_dict}")
|
||||
|
||||
if not user_id or not follower_dict:
|
||||
return GraphQLError("unauthorized")
|
||||
|
||||
follower_id = follower_dict.get("id")
|
||||
if not user_id or not follower_id:
|
||||
return {"error": "unauthorized"}
|
||||
logger.debug(f"follower_id: {follower_id}")
|
||||
|
||||
entity = what.lower()
|
||||
follows = []
|
||||
follows_str = await redis.execute("GET", f"author:{follower_id}:follows-{entity}s")
|
||||
if isinstance(follows_str, str):
|
||||
follows = json.loads(follows_str) or []
|
||||
entity_classes = {
|
||||
"AUTHOR": (Author, AuthorFollower, get_cached_follower_authors, cache_author),
|
||||
"TOPIC": (Topic, TopicFollower, get_cached_follower_topics, cache_topic),
|
||||
"COMMUNITY": (Community, CommunityFollower, None, None), # Нет методов кэша для сообщества
|
||||
"SHOUT": (Shout, ShoutReactionsFollower, None, None), # Нет методов кэша для shout
|
||||
}
|
||||
|
||||
if what == "AUTHOR":
|
||||
follower_id = int(follower_id)
|
||||
error = author_follow(follower_id, slug)
|
||||
if not error:
|
||||
[author] = get_with_stat(select(Author).filter(Author.slug == slug))
|
||||
if author:
|
||||
author_dict = author.dict()
|
||||
author_id = int(author_dict.get('id', 0))
|
||||
follows_ids = set(int(a.get('id')) for a in follows)
|
||||
if author_id not in follows_ids:
|
||||
await cache_author(author_dict)
|
||||
await cache_author(follower_dict)
|
||||
await notify_follower(follower_dict, author_id, "follow")
|
||||
follows.append(author_dict)
|
||||
if what not in entity_classes:
|
||||
logger.error(f"Неверный тип для следования: {what}")
|
||||
return {"error": "invalid follow type"}
|
||||
|
||||
elif what == "TOPIC":
|
||||
error = topic_follow(follower_id, slug)
|
||||
_topic_dict = await cache_by_slug(what, slug)
|
||||
entity_class, follower_class, get_cached_follows_method, cache_method = entity_classes[what]
|
||||
entity_type = what.lower()
|
||||
entity_dict = None
|
||||
|
||||
elif what == "COMMUNITY":
|
||||
# FIXME: when more communities
|
||||
follows = local_session().execute(select(Community))
|
||||
try:
|
||||
logger.debug("Попытка получить сущность из базы данных")
|
||||
with local_session() as session:
|
||||
entity_query = select(entity_class).filter(entity_class.slug == slug)
|
||||
entities = get_with_stat(entity_query)
|
||||
[entity] = entities
|
||||
if not entity:
|
||||
logger.warning(f"{what.lower()} не найден по slug: {slug}")
|
||||
return {"error": f"{what.lower()} not found"}
|
||||
if not entity_id and entity:
|
||||
entity_id = entity.id
|
||||
entity_dict = entity.dict()
|
||||
logger.debug(f"entity_id: {entity_id}, entity_dict: {entity_dict}")
|
||||
|
||||
elif what == "SHOUT":
|
||||
error = reactions_follow(follower_id, slug)
|
||||
if entity_id:
|
||||
logger.debug("Проверка существующей подписки")
|
||||
with local_session() as session:
|
||||
existing_sub = (
|
||||
session.query(follower_class)
|
||||
.filter(follower_class.follower == follower_id, getattr(follower_class, entity_type) == entity_id)
|
||||
.first()
|
||||
)
|
||||
if existing_sub:
|
||||
logger.info(f"Пользователь {follower_id} уже подписан на {what.lower()} с ID {entity_id}")
|
||||
else:
|
||||
logger.debug("Добавление новой записи в базу данных")
|
||||
sub = follower_class(follower=follower_id, **{entity_type: entity_id})
|
||||
logger.debug(f"Создан объект подписки: {sub}")
|
||||
session.add(sub)
|
||||
session.commit()
|
||||
logger.info(f"Пользователь {follower_id} подписался на {what.lower()} с ID {entity_id}")
|
||||
|
||||
if error:
|
||||
return {"error": error}
|
||||
follows = None
|
||||
if cache_method:
|
||||
logger.debug("Обновление кэша")
|
||||
await cache_method(entity_dict)
|
||||
if get_cached_follows_method:
|
||||
logger.debug("Получение подписок из кэша")
|
||||
existing_follows = await get_cached_follows_method(follower_id)
|
||||
follows = [*existing_follows, entity_dict] if not existing_sub else existing_follows
|
||||
logger.debug("Обновлен список подписок")
|
||||
|
||||
return {f"{entity}s": follows}
|
||||
if what == "AUTHOR" and not existing_sub:
|
||||
logger.debug("Отправка уведомления автору о подписке")
|
||||
await notify_follower(follower=follower_dict, author_id=entity_id, action="follow")
|
||||
|
||||
except Exception as exc:
|
||||
logger.exception("Произошла ошибка в функции 'follow'")
|
||||
return {"error": str(exc)}
|
||||
|
||||
return {f"{what.lower()}s": follows}
|
||||
|
||||
|
||||
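
# The entity_classes table above is a dispatch map: each follow target couples
# its ORM model, its follower (join) model, and optional cache helpers, which
# keeps follow/unfollow generic over entity types. Extending it is one entry
# per entity; the "BOOKMARK" line below is purely hypothetical.
#
#     entity_classes["BOOKMARK"] = (Bookmark, BookmarkFollower, None, None)
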
@mutation.field("unfollow")
|
||||
@login_required
|
||||
async def unfollow(_, info, what, slug):
|
||||
async def unfollow(_, info, what, slug="", entity_id=0):
|
||||
logger.debug("Начало выполнения функции 'unfollow'")
|
||||
user_id = info.context.get("user_id")
|
||||
follower_dict = info.context.get("author")
|
||||
logger.debug(f"follower: {follower_dict}")
|
||||
|
||||
if not user_id or not follower_dict:
|
||||
logger.warning("Неавторизованный доступ при попытке отписаться")
|
||||
return {"error": "unauthorized"}
|
||||
|
||||
follower_id = follower_dict.get("id")
|
||||
logger.debug(f"follower_id: {follower_id}")
|
||||
|
||||
entity_classes = {
|
||||
"AUTHOR": (Author, AuthorFollower, get_cached_follower_authors, cache_author),
|
||||
"TOPIC": (Topic, TopicFollower, get_cached_follower_topics, cache_topic),
|
||||
"COMMUNITY": (Community, CommunityFollower, None, None), # Нет методов кэша для сообщества
|
||||
"SHOUT": (Shout, ShoutReactionsFollower, None, None), # Нет методов кэша для shout
|
||||
}
|
||||
|
||||
if what not in entity_classes:
|
||||
logger.error(f"Неверный тип для отписки: {what}")
|
||||
return {"error": "invalid unfollow type"}
|
||||
|
||||
entity_class, follower_class, get_cached_follows_method, cache_method = entity_classes[what]
|
||||
entity_type = what.lower()
|
||||
follows = []
|
||||
error = None
|
||||
user_id = info.context.get("user_id")
|
||||
follower_dict = info.context["author"]
|
||||
follower_id = follower_dict.get("id")
|
||||
if not user_id:
|
||||
return {"error": "unauthorized"}
|
||||
|
||||
if not follower_id:
|
||||
return {"error": "cant find follower account"}
|
||||
|
||||
entity = what.lower()
|
||||
follows = []
|
||||
follows_str = await redis.execute("GET", f"author:{follower_id}:follows-{entity}s")
|
||||
if isinstance(follows_str, str):
|
||||
follows = json.loads(follows_str) or []
|
||||
|
||||
if what == "AUTHOR":
|
||||
error = author_unfollow(follower_id, slug)
|
||||
# NOTE: after triggers should update cached stats
|
||||
if not error:
|
||||
logger.info(f"@{follower_dict.get('slug')} unfollowed @{slug}")
|
||||
[author] = get_with_stat(select(Author).filter(Author.slug == slug))
|
||||
if author:
|
||||
author_dict = author.dict()
|
||||
author_id = author.id
|
||||
await cache_author(author_dict)
|
||||
for idx, item in enumerate(follows):
|
||||
if item["id"] == author_id:
|
||||
await cache_author(follower_dict)
|
||||
await notify_follower(follower_dict, author_id, "unfollow")
|
||||
follows.pop(idx)
|
||||
break
|
||||
|
||||
elif what == "TOPIC":
|
||||
error = topic_unfollow(follower_id, slug)
|
||||
_topic_dict = await cache_by_slug(what, slug)
|
||||
|
||||
elif what == "COMMUNITY":
|
||||
follows = local_session().execute(select(Community))
|
||||
|
||||
elif what == "SHOUT":
|
||||
error = reactions_unfollow(follower_id, slug)
|
||||
|
||||
return {"error": error, f"{entity}s": follows}
|
||||
|
||||
|
||||
async def get_follows_by_user_id(user_id: str):
    if not user_id:
        return {"error": "unauthorized"}
    author = await redis.execute("GET", f"user:{user_id}")
    if isinstance(author, str):
        author = json.loads(author)
    if not author:
        with local_session() as session:
            author = session.query(Author).filter(Author.user == user_id).first()
            if not author:
                return {"error": "cant find author"}
            author = author.dict()
    last_seen = author.get("last_seen", 0) if isinstance(author, dict) else 0
    follows = DEFAULT_FOLLOWS
    day_old = int(time.time()) - last_seen > 24 * 60 * 60
    if day_old:
        # was: json.loads(str(author)), which fails on a dict's single-quoted repr
        author_id = author.get("id") if isinstance(author, dict) else None
        if author_id:
            topics = author_follows_topics(author_id)
            authors = author_follows_authors(author_id)
            follows = {
                "topics": topics,
                "authors": authors,
                "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
            }
    else:
        logger.debug(f"getting follows for {user_id} from redis")
        res = await redis.execute("GET", f"user:{user_id}:follows")
        if isinstance(res, str):
            follows = json.loads(res)
    return follows
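get_follows_by_user_id is a read-through cache with a 24-hour staleness window: rebuild from the database when stale, otherwise serve the cached blob. A standalone sketch of the same pattern with a plain dict standing in for Redis (key names mirror the code above but are illustrative):

import json
import time

CACHE: dict[str, str] = {}  # stands in for Redis


def get_follows(user_id: str, last_seen: int, rebuild) -> dict:
    # Rebuild from the database if the cached view is older than a day,
    # otherwise serve the cached JSON blob.
    if int(time.time()) - last_seen > 24 * 60 * 60:
        follows = rebuild(user_id)
        CACHE[f"user:{user_id}:follows"] = json.dumps(follows)
        return follows
    raw = CACHE.get(f"user:{user_id}:follows")
    return json.loads(raw) if raw else {"topics": [], "authors": []}


print(get_follows("u1", 0, lambda _uid: {"topics": ["culture"], "authors": []}))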

def topic_follow(follower_id, slug):
    try:
        logger.debug("Fetching the topic from the database")
        with local_session() as session:
            topic = session.query(Topic).where(Topic.slug == slug).one()
            following = TopicFollower(topic=topic.id, follower=follower_id)
            # the original built the record but never persisted it
            session.add(following)
            session.commit()
        return None
    except Exception as error:
        logger.warning(error)
        return "cant follow"
def topic_unfollow(follower_id, slug):
    try:
        with local_session() as session:
            sub = (
                session.query(TopicFollower)
                .join(Topic)
                .filter(and_(TopicFollower.follower == follower_id, Topic.slug == slug))
                .first()
            )
            if sub:
                session.delete(sub)
                session.commit()
        return None
    except Exception as error:
        logger.warning(error)
        return "cant unfollow"
def reactions_follow(author_id, shout_id, auto=False):
    try:
        with local_session() as session:
            shout = session.query(Shout).where(Shout.id == shout_id).one()

            following = (
                session.query(ShoutReactionsFollower)
                .where(
                    and_(
                        ShoutReactionsFollower.follower == author_id,
                        ShoutReactionsFollower.shout == shout.id,
                    )
                )
                .first()
            )

            if not following:
                following = ShoutReactionsFollower(follower=author_id, shout=shout.id, auto=auto)
                session.add(following)
                session.commit()
        return None
    except Exception as error:
        logger.warning(error)
        return "cant follow"
def reactions_unfollow(author_id, shout_id: int):
    try:
        with local_session() as session:
            shout = session.query(Shout).where(Shout.id == shout_id).one()

            following = (
                session.query(ShoutReactionsFollower)
                .where(
                    and_(
                        ShoutReactionsFollower.follower == author_id,
                        ShoutReactionsFollower.shout == shout.id,
                    )
                )
                .first()
            )

            if following:
                session.delete(following)
                session.commit()
        return None
    except Exception as error:
        logger.warning(error)
        return "cant unfollow"
# for mutation.field("follow")
|
||||
def author_follow(follower_id, slug):
|
||||
try:
|
||||
with local_session() as session:
|
||||
author = session.query(Author).where(Author.slug == slug).one()
|
||||
af = AuthorFollower(follower=follower_id, author=author.id)
|
||||
session.add(af)
|
||||
session.commit()
|
||||
return None
|
||||
except Exception as error:
|
||||
logger.warn(error)
|
||||
return "cant follow"
|
||||
|
||||
|
||||
# for mutation.field("unfollow")
|
||||
def author_unfollow(follower_id, slug):
|
||||
try:
|
||||
with local_session() as session:
|
||||
flw = (
|
||||
session.query(AuthorFollower)
|
||||
.join(Author, Author.id == AuthorFollower.author)
|
||||
.filter(
|
||||
and_(AuthorFollower.follower == follower_id, Author.slug == slug)
|
||||
)
|
||||
.first()
|
||||
)
|
||||
if flw:
|
||||
session.delete(flw)
|
||||
session.commit()
|
||||
return None
|
||||
except Exception as error:
|
||||
logger.warn(error)
|
||||
return "cant unfollow"
|
||||
|
||||
|
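These helpers all follow one error convention: None on success, a short error string on failure, which the resolvers fold into the GraphQL payload. A minimal illustration of consuming that contract (the lambdas stand in for the real helpers):

# Hedged illustration of the helpers' error convention: None on success,
# an error string on failure. author_unfollow here is any callable with
# that contract, not the real database helper.
def unfollow_and_report(author_unfollow, follower_id: int, slug: str) -> dict:
    error = author_unfollow(follower_id, slug)
    return {"error": error} if error else {"ok": True}


assert unfollow_and_report(lambda *_: None, 1, "x") == {"ok": True}
assert unfollow_and_report(lambda *_: "cant unfollow", 1, "x") == {"error": "cant unfollow"}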

@query.field("get_topic_followers")
async def get_topic_followers(_, _info, slug: str, topic_id: int) -> List[Author]:
    q = select(Author)
    q = (
        q.join(TopicFollower, TopicFollower.follower == Author.id)
        .join(Topic, Topic.id == TopicFollower.topic)
        .filter(or_(Topic.slug == slug, Topic.id == topic_id))
    )
    return get_with_stat(q)
@query.field("get_shout_followers")
|
||||
def get_shout_followers(
|
||||
_, _info, slug: str = "", shout_id: int | None = None
|
||||
) -> List[Author]:
|
||||
def get_shout_followers(_, _info, slug: str = "", shout_id: int | None = None) -> List[Author]:
|
||||
logger.debug("Начало выполнения функции 'get_shout_followers'")
|
||||
followers = []
|
||||
with local_session() as session:
|
||||
shout = None
|
||||
if slug:
|
||||
shout = session.query(Shout).filter(Shout.slug == slug).first()
|
||||
elif shout_id:
|
||||
shout = session.query(Shout).filter(Shout.id == shout_id).first()
|
||||
if shout:
|
||||
reactions = session.query(Reaction).filter(Reaction.shout == shout.id).all()
|
||||
for r in reactions:
|
||||
followers.append(r.created_by)
|
||||
try:
|
||||
with local_session() as session:
|
||||
shout = None
|
||||
if slug:
|
||||
shout = session.query(Shout).filter(Shout.slug == slug).first()
|
||||
logger.debug(f"Найден shout по slug: {slug} -> {shout}")
|
||||
elif shout_id:
|
||||
shout = session.query(Shout).filter(Shout.id == shout_id).first()
|
||||
logger.debug(f"Найден shout по ID: {shout_id} -> {shout}")
|
||||
|
||||
if shout:
|
||||
reactions = session.query(Reaction).filter(Reaction.shout == shout.id).all()
|
||||
logger.debug(f"Полученные реакции для shout ID {shout.id}: {reactions}")
|
||||
for r in reactions:
|
||||
followers.append(r.created_by)
|
||||
logger.debug(f"Добавлен follower: {r.created_by}")
|
||||
|
||||
except Exception as _exc:
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
logger.exception("Произошла ошибка в функции 'get_shout_followers'")
|
||||
return []
|
||||
|
||||
# logger.debug(f"Функция 'get_shout_followers' завершена с {len(followers)} подписчиками")
|
||||
return followers
|
||||
|
||||
@@ -1,7 +1,7 @@
-import json
 import time
 from typing import List, Tuple

+import orjson
 from sqlalchemy import and_, select
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.orm import aliased
@@ -17,13 +17,11 @@ from orm.notification import (
 from orm.shout import Shout
 from services.auth import login_required
 from services.db import local_session
-from services.logger import root_logger as logger
 from services.schema import mutation, query
+from utils.logger import root_logger as logger


-def query_notifications(
-    author_id: int, after: int = 0
-) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
+def query_notifications(author_id: int, after: int = 0) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
     notification_seen_alias = aliased(NotificationSeen)
     q = select(Notification, notification_seen_alias.viewer.label("seen")).outerjoin(
         NotificationSeen,
@@ -68,9 +66,7 @@ def query_notifications(
     return total, unread, notifications


-def group_notification(
-    thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"
-):
+def group_notification(thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"):
     reactions = reactions or []
     authors = authors or []
     return {
@@ -84,9 +80,7 @@ def group_notification(
     }


-def get_notifications_grouped(
-    author_id: int, after: int = 0, limit: int = 10, offset: int = 0
-):
+def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, offset: int = 0):
     """
     Retrieves notifications for a given author.

@@ -121,7 +115,7 @@ def get_notifications_grouped(
         if (groups_amount + offset) >= limit:
             break

-        payload = json.loads(str(notification.payload))
+        payload = orjson.loads(str(notification.payload))

         if str(notification.entity) == NotificationEntity.SHOUT.value:
             shout = payload
@@ -152,9 +146,7 @@ def get_notifications_grouped(
                 author_id = reaction.get("created_by", 0)
                 if shout_id and author_id:
                     with local_session() as session:
-                        author = (
-                            session.query(Author).filter(Author.id == author_id).first()
-                        )
+                        author = session.query(Author).filter(Author.id == author_id).first()
                         shout = session.query(Shout).filter(Shout.id == shout_id).first()
                         if shout and author:
                             author = author.dict()
@@ -167,9 +159,7 @@ def get_notifications_grouped(
                     if existing_group:
                         existing_group["seen"] = False
                         existing_group["authors"].append(author_id)
-                        existing_group["reactions"] = (
-                            existing_group["reactions"] or []
-                        )
+                        existing_group["reactions"] = existing_group["reactions"] or []
                         existing_group["reactions"].append(reaction)
                         groups_by_thread[thread_id] = existing_group
                     else:
@@ -187,7 +177,7 @@ def get_notifications_grouped(

         elif str(notification.entity) == "follower":
             thread_id = "followers"
-            follower = json.loads(payload)
+            follower = orjson.loads(payload)
             group = groups_by_thread.get(thread_id)
             if group:
                 if str(notification.action) == "follow":
@@ -222,9 +212,7 @@ async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
     try:
         if author_id:
             groups, unread, total = get_notifications_grouped(author_id, after, limit)
-            notifications = sorted(
-                groups.values(), key=lambda group: group.updated_at, reverse=True
-            )
+            notifications = sorted(groups.values(), key=lambda group: group.updated_at, reverse=True)
     except Exception as e:
         error = e
         logger.error(e)
@@ -262,11 +250,7 @@ async def notifications_seen_after(_, info, after: int):
     author_id = info.context.get("author", {}).get("id")
     if author_id:
         with local_session() as session:
-            nnn = (
-                session.query(Notification)
-                .filter(and_(Notification.created_at > after))
-                .all()
-            )
+            nnn = session.query(Notification).filter(and_(Notification.created_at > after)).all()
             for n in nnn:
                 try:
                     ns = NotificationSeen(notification=n.id, viewer=author_id)
@@ -309,11 +293,11 @@ async def notifications_seen_thread(_, info, thread: str, after: int):
     )
     exclude = set()
     for nr in removed_reaction_notifications:
-        reaction = json.loads(str(nr.payload))
+        reaction = orjson.loads(str(nr.payload))
         reaction_id = reaction.get("id")
         exclude.add(reaction_id)
     for n in new_reaction_notifications:
-        reaction = json.loads(str(n.payload))
+        reaction = orjson.loads(str(n.payload))
         reaction_id = reaction.get("id")
         if (
             reaction_id not in exclude
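The hunks above swap json.loads for orjson.loads; that part is drop-in, but orjson.dumps returns bytes where the stdlib returns str, which matters anywhere the result is concatenated or stored as text. A quick check (assuming the orjson package is installed):

import json

import orjson

payload = {"id": 1, "kind": "LIKE"}

# loads() is interchangeable for this use case...
assert orjson.loads(orjson.dumps(payload)) == json.loads(json.dumps(payload))

# ...but dumps() differs: orjson returns bytes, stdlib json returns str.
assert isinstance(orjson.dumps(payload), bytes)
assert isinstance(json.dumps(payload), str)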
resolvers/proposals.py (new file, 49 lines)
@@ -0,0 +1,49 @@
from sqlalchemy import and_

from orm.rating import is_negative, is_positive
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout
from services.db import local_session
from utils.diff import apply_diff, get_diff


def handle_proposing(kind: ReactionKind, reply_to: int, shout_id: int):
    with local_session() as session:
        if is_positive(kind):
            replied_reaction = (
                session.query(Reaction).filter(Reaction.id == reply_to, Reaction.shout == shout_id).first()
            )

            # was `is ReactionKind.PROPOSE.value`: identity comparison with a value
            if replied_reaction and replied_reaction.kind == ReactionKind.PROPOSE.value and replied_reaction.quote:
                # patch all the proposals' quotes
                proposals = (
                    session.query(Reaction)
                    .filter(
                        and_(
                            Reaction.shout == shout_id,
                            Reaction.kind == ReactionKind.PROPOSE.value,
                        )
                    )
                    .all()
                )

                # patch shout's body
                shout = session.query(Shout).filter(Shout.id == shout_id).first()
                body = replied_reaction.quote
                Shout.update(shout, {"body": body})  # was `{body}`, a set literal
                session.add(shout)
                session.commit()

                # the accepted reaction carries a quote -> rebase every other
                # proposal's quote onto the updated body of this shout
                for proposal in proposals:
                    if proposal.quote:
                        proposal_diff = get_diff(shout.body, proposal.quote)
                        proposal_dict = proposal.dict()
                        proposal_dict["quote"] = apply_diff(replied_reaction.quote, proposal_diff)
                        Reaction.update(proposal, proposal_dict)
                        session.add(proposal)

        if is_negative(kind):
            # TODO: rejection logic
            pass
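handle_proposing stores each proposal as a quote and rebases it via get_diff/apply_diff when a proposal is accepted. Those helpers live in utils.diff and their exact signatures are not shown in this diff; a rough stand-in using stdlib difflib conveys the idea:

import difflib


def get_diff(old: str, new: str) -> list[str]:
    # Stand-in for utils.diff.get_diff: a unified diff between two texts.
    return list(difflib.unified_diff(old.splitlines(), new.splitlines(), lineterm=""))


old_body = "line one\nline two"
proposal_quote = "line one\nline TWO"

# The proposal is kept as a diff against the body it was made on,
# so it can later be re-applied when the body changes.
diff = get_diff(old_body, proposal_quote)
assert any(line.startswith("+line TWO") for line in diff)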
resolvers/pyrightconfig.json (new file, 25 lines)
@@ -0,0 +1,25 @@
{
    "include": [
        "."
    ],
    "exclude": [
        "**/node_modules",
        "**/__pycache__",
        "**/.*"
    ],
    "defineConstant": {
        "DEBUG": true
    },
    "venvPath": ".",
    "venv": ".venv",
    "pythonVersion": "3.11",
    "typeCheckingMode": "strict",
    "reportMissingImports": true,
    "reportMissingTypeStubs": false,
    "reportUnknownMemberType": false,
    "reportUnknownParameterType": false,
    "reportUnknownVariableType": false,
    "reportUnknownArgumentType": false,
    "reportPrivateUsage": false,
    "reportUntypedFunctionDecorator": false
}
@@ -6,7 +6,91 @@ from orm.reaction import Reaction, ReactionKind
 from orm.shout import Shout
 from services.auth import login_required
 from services.db import local_session
-from services.schema import mutation
+from services.schema import mutation, query
+from utils.logger import root_logger as logger
+
+
+@query.field("get_my_rates_comments")
+@login_required
+async def get_my_rates_comments(_, info, comments: list[int]) -> list[dict]:
+    """
+    Fetch the current user's reactions to the given comments.
+
+    Args:
+        info: GraphQL request context
+        comments: list of comment IDs
+
+    Returns:
+        list[dict]: one dict per rated comment, each with:
+            - comment_id: the comment's ID
+            - my_rate: the reaction kind (LIKE/DISLIKE)
+    """
+    author_dict = info.context.get("author") if info.context else None
+    author_id = author_dict.get("id") if author_dict else None
+    if not author_id:
+        return []  # return an empty list rather than an error dict
+
+    # Subquery for the current user's reactions
+    rated_query = (
+        select(Reaction.id.label("comment_id"), Reaction.kind.label("my_rate"))
+        .where(
+            and_(
+                Reaction.reply_to.in_(comments),
+                Reaction.created_by == author_id,
+                Reaction.deleted_at.is_(None),
+                Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
+            )
+        )
+        .order_by(Reaction.shout, Reaction.created_at.desc())
+        .distinct(Reaction.shout)
+    )
+    with local_session() as session:
+        comments_result = session.execute(rated_query).all()
+        return [{"comment_id": row.comment_id, "my_rate": row.my_rate} for row in comments_result]
+
+
+@query.field("get_my_rates_shouts")
+@login_required
+async def get_my_rates_shouts(_, info, shouts):
+    """
+    Fetch the current user's reactions to the given publications.
+    """
+    author_dict = info.context.get("author") if info.context else None
+    author_id = author_dict.get("id") if author_dict else None
+
+    if not author_id:
+        return []
+
+    with local_session() as session:
+        try:
+            stmt = (
+                select(Reaction)
+                .where(
+                    and_(
+                        Reaction.shout.in_(shouts),
+                        Reaction.reply_to.is_(None),
+                        Reaction.created_by == author_id,
+                        Reaction.deleted_at.is_(None),
+                        Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
+                    )
+                )
+                .order_by(Reaction.shout, Reaction.created_at.desc())
+                .distinct(Reaction.shout)
+            )
+
+            result = session.execute(stmt).all()
+
+            return [
+                {
+                    "shout_id": row[0].shout,  # shout_id from the Reaction object
+                    "my_rate": row[0].kind,  # kind (my_rate) from the Reaction object
+                }
+                for row in result
+            ]
+        except Exception as e:
+            logger.error(f"Error in get_my_rates_shouts: {e}")
+            return []


 @mutation.field("rate_author")
@@ -35,9 +119,7 @@ async def rate_author(_, info, rated_slug, value):
             return {}
         else:
             try:
-                rating = AuthorRating(
-                    rater=rater_id, author=rated_author.id, plus=value > 0
-                )
+                rating = AuthorRating(rater=rater_id, author=rated_author.id, plus=value > 0)
                 session.add(rating)
                 session.commit()
             except Exception as err:
@@ -105,9 +187,7 @@ def count_author_shouts_rating(session, author_id) -> int:

 def get_author_rating_old(session, author: Author):
     likes_count = (
-        session.query(AuthorRating)
-        .filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True)))
-        .count()
+        session.query(AuthorRating).filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True))).count()
     )
     dislikes_count = (
         session.query(AuthorRating)
@@ -167,9 +247,7 @@ def get_author_rating_comments(session, author: Author) -> int:
             and_(
                 replied_comment.kind == ReactionKind.COMMENT.value,
                 replied_comment.created_by == author.id,
-                Reaction.kind.in_(
-                    [ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
-                ),
+                Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
                 Reaction.reply_to == replied_comment.id,
                 Reaction.deleted_at.is_(None),
             ),
@@ -184,9 +262,7 @@ def add_author_rating_columns(q, group_list):

     # old karma
     q = q.outerjoin(AuthorRating, AuthorRating.author == Author.id)
-    q = q.add_columns(
-        func.sum(case((AuthorRating.plus == true(), 1), else_=-1)).label("rating")
-    )
+    q = q.add_columns(func.sum(case((AuthorRating.plus == true(), 1), else_=-1)).label("rating"))

     # by shouts rating
     shout_reaction = aliased(Reaction)
@@ -200,7 +276,8 @@ def add_author_rating_columns(q, group_list):
                         (shout_reaction.kind == ReactionKind.DISLIKE.value, -1),
                         else_=0,
                     )
+                )
             ),
             0,
         ).label("shouts_rating"),
     )
     .select_from(shout_reaction)
@@ -234,7 +311,8 @@ def add_author_rating_columns(q, group_list):
                         (reaction_2.kind == ReactionKind.DISLIKE.value, -1),
                         else_=0,
                     )
+                )
             ),
             0,
         ).label("comments_rating"),
     )
     .select_from(reaction_2)
@@ -243,9 +321,7 @@ def add_author_rating_columns(q, group_list):
             and_(
                 replied_comment.kind == ReactionKind.COMMENT.value,
                 replied_comment.created_by == Author.id,
-                reaction_2.kind.in_(
-                    [ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
-                ),
+                reaction_2.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
                 reaction_2.reply_to == replied_comment.id,
                 reaction_2.deleted_at.is_(None),
             ),
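Both rate queries pair .order_by(Reaction.shout, Reaction.created_at.desc()) with .distinct(Reaction.shout), which SQLAlchemy renders as PostgreSQL's DISTINCT ON: one row per shout, with the ordering deciding which row survives (the newest reaction). The same selection in plain Python, for intuition:

from itertools import groupby

reactions = [
    {"shout": 1, "created_at": 10, "kind": "LIKE"},
    {"shout": 1, "created_at": 20, "kind": "DISLIKE"},
    {"shout": 2, "created_at": 5, "kind": "LIKE"},
]

# ORDER BY shout, created_at DESC ... DISTINCT ON (shout):
# keep only the newest reaction per shout.
reactions.sort(key=lambda r: (r["shout"], -r["created_at"]))
latest = [next(group) for _, group in groupby(reactions, key=lambda r: r["shout"])]
assert [r["kind"] for r in latest] == ["DISLIKE", "LIKE"]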
File diff suppressed because it is too large
@@ -1,71 +1,306 @@
import orjson
from graphql import GraphQLResolveInfo
from sqlalchemy import and_, nulls_last, text
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import asc, case, desc, func, select

from orm.author import Author
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.auth import login_required
from services.db import json_array_builder, json_builder, local_session
from services.schema import query
from services.search import search_text
from services.viewed import ViewedStorage
from utils.logger import root_logger as logger


def apply_options(q, options, reactions_created_by=0):
    """
    Apply filtering and sorting options to the query,
    optionally narrowing to shouts the given author reacted to or commented on.

    :param q: the base query.
    :param options: filtering and sorting options.
    :param reactions_created_by: author identifier.
    :return: the query with options applied, plus limit and offset.
    """
    filters = options.get("filters")
    if isinstance(filters, dict):
        q = apply_filters(q, filters)
        if reactions_created_by:
            q = q.join(Reaction, Reaction.shout == Shout.id)
            q = q.filter(Reaction.created_by == reactions_created_by)
            if "commented" in filters:
                q = q.filter(Reaction.body.is_not(None))
    q = apply_sorting(q, options)
    limit = options.get("limit", 10)
    offset = options.get("offset", 0)
    return q, limit, offset


def has_field(info, fieldname: str) -> bool:
    """
    Check whether the field :fieldname: was requested in the GraphQL query.

    :param info: GraphQL context info
    :param fieldname: name of the field to look for
    :return: True if the field was requested, False otherwise
    """
    field_node = info.field_nodes[0]
    for selection in field_node.selection_set.selections:
        if hasattr(selection, "name") and selection.name.value == fieldname:
            return True
    return False
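has_field only walks the top-level selection set of the first field node, so nested selections do not count. A minimal stub showing the attribute shape it expects (the real object is graphql-core's GraphQLResolveInfo):

from types import SimpleNamespace


def make_info(*fields):
    # Stub mirroring the bits of GraphQLResolveInfo that has_field touches:
    # info.field_nodes[0].selection_set.selections[i].name.value
    selections = [SimpleNamespace(name=SimpleNamespace(value=f)) for f in fields]
    node = SimpleNamespace(selection_set=SimpleNamespace(selections=selections))
    return SimpleNamespace(field_nodes=[node])


info = make_info("id", "stat", "main_topic")
# has_field(info, "stat") -> True; has_field(info, "authors") -> False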

def query_with_stat(info):
    """
    :param info: GraphQL context info, used to get the authorized user's id
    :return: the query extended with statistics subqueries.

    Adds the statistics subqueries to the base shouts query.
    """
    q = select(Shout).filter(
        and_(
            Shout.published_at.is_not(None),  # must be published
            Shout.deleted_at.is_(None),  # must not be deleted
        )
    )

    # Main author
    main_author = aliased(Author)
    q = q.join(main_author, main_author.id == Shout.created_by)
    q = q.add_columns(
        json_builder(
            "id",
            main_author.id,
            "name",
            main_author.name,
            "slug",
            main_author.slug,
            "pic",
            main_author.pic,
            "created_at",
            main_author.created_at,
        ).label("main_author")
    )

    if has_field(info, "main_topic"):
        main_topic_join = aliased(ShoutTopic)
        main_topic = aliased(Topic)
        q = q.join(main_topic_join, and_(main_topic_join.shout == Shout.id, main_topic_join.main.is_(True)))
        q = q.join(main_topic, main_topic.id == main_topic_join.topic)
        q = q.add_columns(
            json_builder(
                "id", main_topic.id, "title", main_topic.title, "slug", main_topic.slug, "is_main", main_topic_join.main
            ).label("main_topic")
        )

    if has_field(info, "authors"):
        authors_subquery = (
            select(
                ShoutAuthor.shout,
                json_array_builder(
                    json_builder(
                        "id",
                        Author.id,
                        "name",
                        Author.name,
                        "slug",
                        Author.slug,
                        "pic",
                        Author.pic,
                        "caption",
                        ShoutAuthor.caption,
                        "created_at",
                        Author.created_at,
                    )
                ).label("authors"),
            )
            .outerjoin(Author, ShoutAuthor.author == Author.id)
            .where(ShoutAuthor.shout == Shout.id)
            .group_by(ShoutAuthor.shout)
            .subquery()
        )
        q = q.outerjoin(authors_subquery, authors_subquery.c.shout == Shout.id)
        q = q.add_columns(authors_subquery.c.authors)

    if has_field(info, "topics"):
        topics_subquery = (
            select(
                ShoutTopic.shout,
                json_array_builder(
                    json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main)
                ).label("topics"),
            )
            .outerjoin(Topic, ShoutTopic.topic == Topic.id)
            .where(ShoutTopic.shout == Shout.id)
            .group_by(ShoutTopic.shout)
            .subquery()
        )
        q = q.outerjoin(topics_subquery, topics_subquery.c.shout == Shout.id)
        q = q.add_columns(topics_subquery.c.topics)

    if has_field(info, "stat"):
        stats_subquery = (
            select(
                Reaction.shout,
                func.count(func.distinct(Reaction.id))
                .filter(Reaction.kind == ReactionKind.COMMENT.value)
                .label("comments_count"),
                func.sum(
                    case(
                        (Reaction.kind == ReactionKind.LIKE.value, 1),
                        (Reaction.kind == ReactionKind.DISLIKE.value, -1),
                        else_=0,
                    )
                )
                .filter(Reaction.reply_to.is_(None))
                .label("rating"),
                func.max(Reaction.created_at)
                .filter(Reaction.kind == ReactionKind.COMMENT.value)
                .label("last_commented_at"),
            )
            .where(Reaction.deleted_at.is_(None))
            .group_by(Reaction.shout)
            .subquery()
        )
        q = q.outerjoin(stats_subquery, stats_subquery.c.shout == Shout.id)
        q = q.add_columns(
            json_builder(
                "comments_count",
                func.coalesce(stats_subquery.c.comments_count, 0),
                "rating",
                func.coalesce(stats_subquery.c.rating, 0),
                "last_commented_at",
                func.coalesce(stats_subquery.c.last_commented_at, 0),
            ).label("stat")
        )

    return q
def get_shouts_with_links(info, q, limit=20, offset=0):
    """
    Load shouts with pagination applied.
    """
    shouts = []
    try:
        # logger.info(f"Starting get_shouts_with_links with limit={limit}, offset={offset}")
        q = q.limit(limit).offset(offset)

        with local_session() as session:
            shouts_result = session.execute(q).all()
            # logger.info(f"Got {len(shouts_result) if shouts_result else 0} shouts from query")

            if not shouts_result:
                logger.warning("No shouts found in query result")
                return []

            for idx, row in enumerate(shouts_result):
                try:
                    shout = None
                    if hasattr(row, "Shout"):
                        shout = row.Shout
                        # logger.debug(f"Processing shout#{shout.id} at index {idx}")
                    if shout:
                        shout_id = int(f"{shout.id}")
                        shout_dict = shout.dict()

                        if has_field(info, "created_by") and shout_dict.get("created_by"):
                            main_author_id = shout_dict.get("created_by")
                            a = session.query(Author).filter(Author.id == main_author_id).first()
                            shout_dict["created_by"] = {
                                "id": main_author_id,
                                "name": a.name,
                                "slug": a.slug,
                                "pic": a.pic,
                            }

                        if has_field(info, "stat"):
                            stat = {}
                            if isinstance(row.stat, str):
                                stat = orjson.loads(row.stat)
                            elif isinstance(row.stat, dict):
                                stat = row.stat
                            viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0
                            shout_dict["stat"] = {**stat, "viewed": viewed, "commented": stat.get("comments_count", 0)}

                        # resolve main_topic and topics
                        topics = None
                        if has_field(info, "topics") and hasattr(row, "topics"):
                            topics = orjson.loads(row.topics) if isinstance(row.topics, str) else row.topics
                            # logger.debug(f"Shout#{shout_id} topics: {topics}")
                            shout_dict["topics"] = topics

                        if has_field(info, "main_topic"):
                            main_topic = None
                            if hasattr(row, "main_topic"):
                                # logger.debug(f"Raw main_topic for shout#{shout_id}: {row.main_topic}")
                                main_topic = (
                                    orjson.loads(row.main_topic) if isinstance(row.main_topic, str) else row.main_topic
                                )
                                # logger.debug(f"Parsed main_topic for shout#{shout_id}: {main_topic}")

                            if not main_topic and topics and len(topics) > 0:
                                # logger.info(f"No main_topic found for shout#{shout_id}, using first topic from list")
                                main_topic = {
                                    "id": topics[0]["id"],
                                    "title": topics[0]["title"],
                                    "slug": topics[0]["slug"],
                                    "is_main": True,
                                }
                            elif not main_topic:
                                logger.warning(f"No main_topic and no topics found for shout#{shout_id}")
                                main_topic = {"id": 0, "title": "no topic", "slug": "notopic", "is_main": True}
                            shout_dict["main_topic"] = main_topic
                            # logger.debug(f"Final main_topic for shout#{shout_id}: {main_topic}")

                        if has_field(info, "authors") and hasattr(row, "authors"):
                            shout_dict["authors"] = (
                                orjson.loads(row.authors) if isinstance(row.authors, str) else row.authors
                            )

                        if has_field(info, "media") and shout.media:
                            # normalize the media field to a list of dicts
                            media_data = shout.media
                            if isinstance(media_data, str):
                                try:
                                    media_data = orjson.loads(media_data)
                                except orjson.JSONDecodeError:
                                    media_data = []
                            shout_dict["media"] = [media_data] if isinstance(media_data, dict) else media_data

                        shouts.append(shout_dict)

                except Exception as row_error:
                    logger.error(f"Error processing row {idx}: {row_error}", exc_info=True)
                    continue

    except Exception as e:
        logger.error(f"Fatal error in get_shouts_with_links: {e}", exc_info=True)
        raise
    finally:
        logger.info(f"Returning {len(shouts)} shouts from get_shouts_with_links")
        # note: returning from finally suppresses the re-raise above
        return shouts
def apply_filters(q, filters):
    """
    Apply the common filters to the query.

    :param q: the base query.
    :param filters: filter dict.
    :return: the query with filters applied.
    """
    if isinstance(filters, dict):
        if "featured" in filters:
            featured_filter = filters.get("featured")
            if featured_filter:
                q = q.filter(Shout.featured_at.is_not(None))
            else:
                q = q.filter(Shout.featured_at.is_(None))
        by_layouts = filters.get("layouts")
        if by_layouts and isinstance(by_layouts, list):
            q = q.filter(Shout.layout.in_(by_layouts))
@@ -84,337 +319,175 @@ def apply_filters(q, filters, author_id=None):


@query.field("get_shout")
async def get_shout(_, info: GraphQLResolveInfo, slug="", shout_id=0):
    """
    Fetch a single publication by slug or id.

    :param _: root query object (unused)
    :param info: GraphQL context info
    :param slug: unique slug of the publication
    :param shout_id: publication ID
    :return: publication data with statistics included
    """
    try:
        # Base query with the statistics subqueries
        q = query_with_stat(info)

        # Filter by slug or id
        if slug:
            q = q.where(Shout.slug == slug)
        elif shout_id:
            q = q.where(Shout.id == shout_id)
        else:
            return None

        # Reuse the pagination-aware fetcher with limit=1
        shouts = get_shouts_with_links(info, q, limit=1)

        # Return the first (and only) publication, if found
        return shouts[0] if shouts else None

    except Exception as exc:
        logger.error(f"Error in get_shout: {exc}", exc_info=True)
        return None
def apply_sorting(q, options):
    """
    Apply sorting while keeping a stable row order.
    """
    order_str = options.get("order_by")
    if order_str in ["rating", "comments_count", "last_commented_at"]:
        query_order_by = desc(text(order_str)) if options.get("order_by_desc", True) else asc(text(order_str))
        q = q.distinct(text(order_str), Shout.id).order_by(  # DISTINCT ON must include the sort field
            nulls_last(query_order_by), Shout.id
        )
    else:
        q = q.distinct(Shout.published_at, Shout.id).order_by(Shout.published_at.desc(), Shout.id)

    return q
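apply_sorting leans on PostgreSQL semantics in two places: DISTINCT ON must lead with the sort expression (hence the matching .distinct(...) columns), and nulls_last keeps rows without a stat from floating to the top of a DESC sort. The nulls-last rule in plain Python, on stand-in rows:

rows = [{"id": 1, "rating": None}, {"id": 2, "rating": 5}, {"id": 3, "rating": 1}]

# DESC by rating with NULLS LAST: real values first (highest to lowest),
# rows without a rating trail at the end instead of sorting first.
rows.sort(key=lambda r: (r["rating"] is None, -(r["rating"] or 0)))
assert [r["id"] for r in rows] == [2, 3, 1]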
@query.field("load_shouts_by")
|
||||
async def load_shouts_by(_, _info, options):
|
||||
async def load_shouts_by(_, info: GraphQLResolveInfo, options):
|
||||
"""
|
||||
:param options: {
|
||||
filters: {
|
||||
layouts: ['audio', 'video', ..],
|
||||
reacted: True,
|
||||
featured: True, // filter featured-only
|
||||
author: 'discours',
|
||||
topic: 'culture',
|
||||
after: 1234567 // unixtime
|
||||
}
|
||||
offset: 0
|
||||
limit: 50
|
||||
order_by: 'created_at' | 'commented' | 'likes_stat'
|
||||
order_by_desc: true
|
||||
Загрузка публикаций с фильтрацией, сортировкой и пагинацией.
|
||||
|
||||
}
|
||||
:return: Shout[]
|
||||
:param _: Корневой объект запроса (не используется)
|
||||
:param info: Информация о контексте GraphQL
|
||||
:param options: Опции фильтрации и сортировки
|
||||
:return: Список публикаций, удовлетворяющих критериям
|
||||
"""
|
||||
# Базовый запрос со статистикой
|
||||
q = query_with_stat(info)
|
||||
|
||||
# base
|
||||
q = query_shouts()
|
||||
# Применяем остальные опции фильтрации
|
||||
q, limit, offset = apply_options(q, options)
|
||||
|
||||
# stats
|
||||
aliased_reaction = aliased(Reaction)
|
||||
q = add_reaction_stat_columns(q, aliased_reaction)
|
||||
|
||||
# filters
|
||||
filters = options.get("filters", {})
|
||||
q = apply_filters(q, filters)
|
||||
|
||||
# group
|
||||
q = q.group_by(Shout.id)
|
||||
|
||||
# order
|
||||
order_by = Shout.featured_at if filters.get("featured") else Shout.published_at
|
||||
order_str = options.get("order_by")
|
||||
if order_str in ["likes", "followers", "comments", "last_comment"]:
|
||||
q = q.order_by(desc(text(f"{order_str}_stat")))
|
||||
query_order_by = (
|
||||
desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
|
||||
)
|
||||
q = q.order_by(nulls_last(query_order_by))
|
||||
|
||||
# limit offset
|
||||
offset = options.get("offset", 0)
|
||||
limit = options.get("limit", 10)
|
||||
q = q.limit(limit).offset(offset)
|
||||
|
||||
shouts = []
|
||||
with local_session() as session:
|
||||
for [
|
||||
shout,
|
||||
reacted_stat,
|
||||
commented_stat,
|
||||
likes_stat,
|
||||
dislikes_stat,
|
||||
last_comment,
|
||||
] in session.execute(q).unique():
|
||||
main_topic = (
|
||||
session.query(Topic.slug)
|
||||
.join(
|
||||
ShoutTopic,
|
||||
and_(
|
||||
ShoutTopic.topic == Topic.id,
|
||||
ShoutTopic.shout == shout.id,
|
||||
ShoutTopic.main.is_(True),
|
||||
),
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
if main_topic:
|
||||
shout.main_topic = main_topic[0]
|
||||
shout.stat = {
|
||||
"viewed": await ViewedStorage.get_shout(shout.slug),
|
||||
"reacted": reacted_stat,
|
||||
"commented": commented_stat,
|
||||
"rating": int(likes_stat) - int(dislikes_stat),
|
||||
"last_comment": last_comment,
|
||||
}
|
||||
shouts.append(shout)
|
||||
|
||||
return shouts
|
||||
|
||||
|
||||
@query.field("load_shouts_feed")
|
||||
@login_required
|
||||
async def load_shouts_feed(_, info, options):
|
||||
shouts = []
|
||||
with local_session() as session:
|
||||
q = query_shouts()
|
||||
|
||||
aliased_reaction = aliased(Reaction)
|
||||
q = add_reaction_stat_columns(q, aliased_reaction)
|
||||
|
||||
# filters
|
||||
filters = options.get("filters", {})
|
||||
if filters:
|
||||
q, reader_id = filter_my(info, session, q)
|
||||
q = apply_filters(q, filters, reader_id)
|
||||
|
||||
# sort order
|
||||
order_by = options.get(
|
||||
"order_by",
|
||||
Shout.featured_at if filters.get("featured") else Shout.published_at,
|
||||
)
|
||||
|
||||
query_order_by = (
|
||||
desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
|
||||
)
|
||||
|
||||
# pagination
|
||||
offset = options.get("offset", 0)
|
||||
limit = options.get("limit", 10)
|
||||
|
||||
q = (
|
||||
q.group_by(Shout.id)
|
||||
.order_by(nulls_last(query_order_by))
|
||||
.limit(limit)
|
||||
.offset(offset)
|
||||
)
|
||||
|
||||
logger.debug(q.compile(compile_kwargs={"literal_binds": True}))
|
||||
|
||||
for [
|
||||
shout,
|
||||
reacted_stat,
|
||||
commented_stat,
|
||||
likes_stat,
|
||||
dislikes_stat,
|
||||
last_comment,
|
||||
] in session.execute(q).unique():
|
||||
main_topic = (
|
||||
session.query(Topic.slug)
|
||||
.join(
|
||||
ShoutTopic,
|
||||
and_(
|
||||
ShoutTopic.topic == Topic.id,
|
||||
ShoutTopic.shout == shout.id,
|
||||
ShoutTopic.main.is_(True),
|
||||
),
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
if main_topic:
|
||||
shout.main_topic = main_topic[0]
|
||||
shout.stat = {
|
||||
"viewed": await ViewedStorage.get_shout(shout.slug),
|
||||
"reacted": reacted_stat,
|
||||
"commented": commented_stat,
|
||||
"rating": likes_stat - dislikes_stat,
|
||||
"last_comment": last_comment,
|
||||
}
|
||||
shouts.append(shout)
|
||||
|
||||
return shouts
|
||||
# Передача сформированного запроса в метод получения публикаций с учетом сортировки и пагинации
|
||||
return get_shouts_with_links(info, q, limit, offset)
|
||||
|
||||
|
||||
@query.field("load_shouts_search")
|
||||
async def load_shouts_search(_, _info, text, limit=50, offset=0):
|
||||
async def load_shouts_search(_, info, text, options):
|
||||
"""
|
||||
Поиск публикаций по тексту.
|
||||
|
||||
:param _: Корневой объект запроса (не используется)
|
||||
:param info: Информация о контексте GraphQL
|
||||
:param text: Строка поиска.
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список публикаций, найденных по тексту.
|
||||
"""
|
||||
limit = options.get("limit", 10)
|
||||
offset = options.get("offset", 0)
|
||||
if isinstance(text, str) and len(text) > 2:
|
||||
results = await search_text(text, limit, offset)
|
||||
logger.debug(results)
|
||||
return results
|
||||
scores = {}
|
||||
hits_ids = []
|
||||
for sr in results:
|
||||
shout_id = sr.get("id")
|
||||
if shout_id:
|
||||
shout_id = str(shout_id)
|
||||
scores[shout_id] = sr.get("score")
|
||||
hits_ids.append(shout_id)
|
||||
|
||||
q = (
|
||||
query_with_stat(info)
|
||||
if has_field(info, "stat")
|
||||
else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
|
||||
)
|
||||
q = q.filter(Shout.id.in_(hits_ids))
|
||||
q = apply_filters(q, options)
|
||||
q = apply_sorting(q, options)
|
||||
shouts = get_shouts_with_links(info, q, limit, offset)
|
||||
for shout in shouts:
|
||||
shout.score = scores[f"{shout.id}"]
|
||||
shouts.sort(key=lambda x: x.score, reverse=True)
|
||||
return shouts
|
||||
return []
|
||||
|
||||
|
||||
@query.field("load_shouts_unrated")
|
||||
@login_required
|
||||
async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
|
||||
q = query_shouts()
|
||||
q = (
|
||||
q.outerjoin(
|
||||
Reaction,
|
||||
and_(
|
||||
Reaction.shout == Shout.id,
|
||||
Reaction.replyTo.is_(None),
|
||||
Reaction.kind.in_(
|
||||
[ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
|
||||
),
|
||||
),
|
||||
)
|
||||
.outerjoin(Author, Author.user == bindparam("user_id"))
|
||||
async def load_shouts_unrated(_, info, options):
|
||||
"""
|
||||
Загрузка публикаций с менее чем 3 реакциями типа LIKE/DISLIKE
|
||||
|
||||
:param _: Корневой объект запроса (не используется)
|
||||
:param info: Информация о контексте GraphQL
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список публикаций.
|
||||
"""
|
||||
rated_shouts = (
|
||||
select(Reaction.shout)
|
||||
.where(
|
||||
and_(
|
||||
Shout.deleted_at.is_(None),
|
||||
Shout.layout.is_not(None),
|
||||
or_(Author.id.is_(None), Reaction.created_by != Author.id),
|
||||
Reaction.deleted_at.is_(None), Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value])
|
||||
)
|
||||
)
|
||||
.group_by(Reaction.shout)
|
||||
.having(func.count("*") >= 3)
|
||||
.scalar_subquery()
|
||||
)
|
||||
|
||||
# 3 or fewer votes is 0, 1, 2 or 3 votes (null, reaction id1, reaction id2, reaction id3)
|
||||
q = q.having(func.count(distinct(Reaction.id)) <= 4)
|
||||
q = select(Shout).where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
|
||||
q = q.join(Author, Author.id == Shout.created_by)
|
||||
q = q.add_columns(
|
||||
json_builder("id", Author.id, "name", Author.name, "slug", Author.slug, "pic", Author.pic).label("main_author")
|
||||
)
|
||||
q = q.join(ShoutTopic, and_(ShoutTopic.shout == Shout.id, ShoutTopic.main.is_(True)))
|
||||
q = q.join(Topic, Topic.id == ShoutTopic.topic)
|
||||
q = q.add_columns(json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug).label("main_topic"))
|
||||
q = q.where(Shout.id.not_in(rated_shouts))
|
||||
q = q.order_by(func.random())
|
||||
|
||||
aliased_reaction = aliased(Reaction)
|
||||
q = add_reaction_stat_columns(q, aliased_reaction)
|
||||
|
||||
q = q.group_by(Shout.id).order_by(func.random()).limit(limit).offset(offset)
|
||||
user_id = info.context.get("user_id") if isinstance(info.context, dict) else None
|
||||
if user_id:
|
||||
with local_session() as session:
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
if author:
|
||||
return await get_shouts_from_query(q, author.id)
|
||||
else:
|
||||
return await get_shouts_from_query(q)
|
||||
|
||||
|
||||
async def get_shouts_from_query(q, author_id=None):
|
||||
shouts = []
|
||||
with local_session() as session:
|
||||
for [
|
||||
shout,
|
||||
reacted_stat,
|
||||
commented_stat,
|
||||
likes_stat,
|
||||
dislikes_stat,
|
||||
last_comment,
|
||||
] in session.execute(q, {"author_id": author_id}).unique():
|
||||
shouts.append(shout)
|
||||
shout.stat = {
|
||||
"viewed": await ViewedStorage.get_shout(shout_slug=shout.slug),
|
||||
"reacted": reacted_stat,
|
||||
"commented": commented_stat,
|
||||
"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
"last_comment": last_comment,
|
||||
}
|
||||
|
||||
return shouts
|
||||
limit = options.get("limit", 5)
|
||||
offset = options.get("offset", 0)
|
||||
return get_shouts_with_links(info, q, limit, offset)
|
||||
|
||||
|
||||
@query.field("load_shouts_random_top")
|
||||
async def load_shouts_random_top(_, _info, options):
|
||||
"""
|
||||
:param _
|
||||
:param _info: GraphQLInfoContext
|
||||
:param options: {
|
||||
filters: {
|
||||
layouts: ['music']
|
||||
after: 13245678
|
||||
}
|
||||
random_limit: 100
|
||||
limit: 50
|
||||
offset: 0
|
||||
}
|
||||
:return: Shout[]
|
||||
async def load_shouts_random_top(_, info, options):
|
||||
"""
|
||||
Загрузка случайных публикаций, упорядоченных по топовым реакциям.
|
||||
|
||||
:param _info: Информация о контексте GraphQL.
|
||||
:param options: Опции фильтрации и сортировки.
|
||||
:return: Список случайных публикаций.
|
||||
"""
|
||||
aliased_reaction = aliased(Reaction)
|
||||
|
||||
subquery = (
|
||||
select(Shout.id)
|
||||
.outerjoin(aliased_reaction)
|
||||
.where(and_(Shout.deleted_at.is_(None), Shout.layout.is_not(None)))
|
||||
)
|
||||
subquery = select(Shout.id).outerjoin(aliased_reaction).where(Shout.deleted_at.is_(None))
|
||||
|
||||
subquery = apply_filters(subquery, options.get("filters", {}))
|
||||
filters = options.get("filters")
|
||||
if isinstance(filters, dict):
|
||||
subquery = apply_filters(subquery, filters)
|
||||
|
||||
subquery = subquery.group_by(Shout.id).order_by(
|
||||
desc(
|
||||
func.sum(
|
||||
case(
|
||||
# do not count comments' reactions
|
||||
# не учитывать реакции на комментарии
|
||||
(aliased_reaction.reply_to.is_not(None), 0),
|
||||
(aliased_reaction.kind == ReactionKind.LIKE.value, 1),
|
||||
(aliased_reaction.kind == ReactionKind.DISLIKE.value, -1),
|
||||
@@ -425,57 +498,9 @@ async def load_shouts_random_top(_, _info, options):
|
||||
)
|
||||
|
||||
random_limit = options.get("random_limit", 100)
|
||||
if random_limit:
|
||||
subquery = subquery.limit(random_limit)
|
||||
|
||||
q = (
|
||||
select(Shout)
|
||||
.options(joinedload(Shout.authors), joinedload(Shout.topics))
|
||||
.where(Shout.id.in_(subquery))
|
||||
)
|
||||
|
||||
q = add_reaction_stat_columns(q, aliased_reaction)
|
||||
|
||||
subquery = subquery.limit(random_limit)
|
||||
q = query_with_stat(info)
|
||||
q = q.filter(Shout.id.in_(subquery))
|
||||
q = q.order_by(func.random())
|
||||
limit = options.get("limit", 10)
|
||||
q = q.group_by(Shout.id).order_by(func.random()).limit(limit)
|
||||
|
||||
shouts = await get_shouts_from_query(q)
|
||||
|
||||
return shouts
|
||||
|
||||
|
||||
@query.field("load_shouts_random_topic")
|
||||
async def load_shouts_random_topic(_, info, limit: int = 10):
|
||||
[topic] = get_topics_random(None, None, 1)
|
||||
if topic:
|
||||
shouts = fetch_shouts_by_topic(topic, limit)
|
||||
if shouts:
|
||||
return {"topic": topic, "shouts": shouts}
|
||||
return {
|
||||
"error": "failed to get random topic after few retries",
|
||||
"shouts": [],
|
||||
"topic": {},
|
||||
}
|
||||
|
||||
|
||||
def fetch_shouts_by_topic(topic, limit):
|
||||
q = (
|
||||
select(Shout)
|
||||
.options(joinedload(Shout.authors), joinedload(Shout.topics))
|
||||
.filter(
|
||||
and_(
|
||||
Shout.deleted_at.is_(None),
|
||||
Shout.featured_at.is_not(None),
|
||||
Shout.topics.any(slug=topic.slug),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
aliased_reaction = aliased(Reaction)
|
||||
q = add_reaction_stat_columns(q, aliased_reaction)
|
||||
|
||||
q = q.group_by(Shout.id).order_by(desc(Shout.created_at)).limit(limit)
|
||||
|
||||
shouts = get_shouts_from_query(q)
|
||||
|
||||
return shouts
|
||||
return get_shouts_with_links(info, q, limit)
|
||||
|
||||
@@ -1,48 +1,98 @@
import asyncio

from sqlalchemy import and_, distinct, func, join, select
from sqlalchemy.orm import aliased

from cache.cache import cache_author
from orm.author import Author, AuthorFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from utils.logger import root_logger as logger


def add_topic_stat_columns(q):
    """
    Add the statistics columns to a topics query.

    :param q: SQL query for topics.
    :return: the query with statistics columns attached.
    """
    # Aliases prevent name collisions
    aliased_shout = aliased(ShoutTopic)

    # Build a fresh query object for topics
    new_q = select(Topic)

    # Apply the joins and add the stats columns
    new_q = (
        new_q.join(
            aliased_shout,
            aliased_shout.topic == Topic.id,
        )
        .join(
            Shout,
            and_(
                aliased_shout.shout == Shout.id,
                Shout.deleted_at.is_(None),
            ),
        )
        .add_columns(
            func.count(distinct(aliased_shout.shout)).label("shouts_stat")
        )  # unique publications per topic
    )

    aliased_follower = aliased(TopicFollower)

    # Add the topic's follower count
    new_q = new_q.outerjoin(aliased_follower, aliased_follower.topic == Topic.id).add_columns(
        func.count(distinct(aliased_follower.follower)).label("followers_stat")
    )

    # Group by topic id
    new_q = new_q.group_by(Topic.id)

    return new_q


def add_author_stat_columns(q):
    """
    Add the statistics columns to an authors query.

    :param q: SQL query for authors.
    :return: the query with statistics columns attached.
    """
    # Subquery counting publications
    shouts_subq = (
        select(func.count(distinct(Shout.id)))
        .select_from(ShoutAuthor)
        .join(Shout, and_(Shout.id == ShoutAuthor.shout, Shout.deleted_at.is_(None)))
        .where(ShoutAuthor.author == Author.id)
        .scalar_subquery()
    )

    # Subquery counting followers
    followers_subq = (
        select(func.count(distinct(AuthorFollower.follower)))
        .where(AuthorFollower.author == Author.id)
        .scalar_subquery()
    )

    # Main query
    q = (
        q.select_from(Author)
        .add_columns(shouts_subq.label("shouts_stat"), followers_subq.label("followers_stat"))
        .group_by(Author.id)
    )

    return q


def get_topic_shouts_stat(topic_id: int) -> int:
    """
    Count the published shouts for a topic.
    """
    q = (
        select(func.count(distinct(ShoutTopic.shout)))
        .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
@@ -54,13 +104,20 @@ def get_topic_shouts_stat(topic_id: int):
            )
        )
    )

    with local_session() as session:
        result = session.execute(q).first()
    return result[0] if result else 0


def get_topic_authors_stat(topic_id: int) -> int:
    """
    Count the unique authors for the given topic.

    :param topic_id: topic identifier.
    :return: number of unique authors linked to the topic.
    """
    count_query = (
        select(func.count(distinct(ShoutAuthor.author)))
        .select_from(join(ShoutTopic, Shout, ShoutTopic.shout == Shout.id))
        .join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
@@ -72,24 +129,39 @@ def get_topic_authors_stat(topic_id: int):
            )
        )
    )

    # Run the query and take the scalar result
    with local_session() as session:
        result = session.execute(count_query).first()
    return result[0] if result else 0


def get_topic_followers_stat(topic_id: int) -> int:
    """
    Count the followers of the given topic.

    :param topic_id: topic identifier.
    :return: number of unique topic followers.
    """
    aliased_followers = aliased(TopicFollower)
    q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.topic == topic_id)
    with local_session() as session:
        result = session.execute(q).first()
    return result[0] if result else 0
|
||||
|
||||
def get_topic_comments_stat(topic_id: int):
|
||||
def get_topic_comments_stat(topic_id: int) -> int:
|
||||
"""
|
||||
Получает количество комментариев для всех публикаций в указанной теме.
|
||||
|
||||
:param topic_id: Идентификатор темы.
|
||||
:return: Общее количество комментариев к публикациям темы.
|
||||
"""
|
||||
# Подзапрос для получения количества комментариев для каждой публикации
|
||||
sub_comments = (
|
||||
select(
|
||||
Shout.id.label("shout_id"),
|
||||
func.coalesce(func.count(Reaction.id)).label("comments_count"),
|
||||
func.coalesce(func.count(Reaction.id), 0).label("comments_count"),
|
||||
)
|
||||
.join(ShoutTopic, ShoutTopic.shout == Shout.id)
|
||||
.join(Topic, ShoutTopic.topic == Topic.id)
|
||||
@@ -104,25 +176,47 @@ def get_topic_comments_stat(topic_id: int):
|
||||
.group_by(Shout.id)
|
||||
.subquery()
|
||||
)
|
||||
q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(
|
||||
ShoutTopic.topic == topic_id
|
||||
)
|
||||
# Запрос для суммирования количества комментариев по теме
|
||||
q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(ShoutTopic.topic == topic_id)
|
||||
q = q.outerjoin(sub_comments, ShoutTopic.shout == sub_comments.c.shout_id)
|
||||
|
||||
result = local_session().execute(q).first()
|
||||
with local_session() as session:
|
||||
result = session.execute(q).first()
|
||||
return result[0] if result else 0
|
||||
|
||||
|
||||
def get_author_shouts_stat(author_id: int):
|
||||
def get_author_shouts_stat(author_id: int) -> int:
|
||||
"""
|
||||
Получает количество опубликованных постов для автора
|
||||
"""
|
||||
aliased_shout_author = aliased(ShoutAuthor)
|
||||
q = select(func.count(distinct(aliased_shout_author.shout))).filter(
|
||||
aliased_shout_author.author == author_id
|
||||
aliased_shout = aliased(Shout)
|
||||
|
||||
q = (
|
||||
select(func.count(distinct(aliased_shout.id)))
|
||||
.select_from(aliased_shout)
|
||||
.join(aliased_shout_author, aliased_shout.id == aliased_shout_author.shout)
|
||||
.filter(
|
||||
and_(
|
||||
aliased_shout_author.author == author_id,
|
||||
aliased_shout.published_at.is_not(None),
|
||||
aliased_shout.deleted_at.is_(None), # Добавляем проверку на удаление
|
||||
)
|
||||
)
|
||||
)
|
||||
result = local_session().execute(q).first()
|
||||
|
||||
with local_session() as session:
|
||||
result = session.execute(q).first()
|
||||
|
||||
return result[0] if result else 0
|
||||
|
||||
|
||||
def get_author_authors_stat(author_id: int):
|
||||
def get_author_authors_stat(author_id: int) -> int:
|
||||
"""
|
||||
Получает количество авторов, на которых подписан указанный автор.
|
||||
|
||||
:param author_id: Идентификатор автора.
|
||||
:return: Количество уникальных авторов, на которых подписан автор.
|
||||
"""
|
||||
aliased_authors = aliased(AuthorFollower)
|
||||
q = select(func.count(distinct(aliased_authors.author))).filter(
|
||||
and_(
|
||||
@@ -130,25 +224,29 @@ def get_author_authors_stat(author_id: int):
|
||||
aliased_authors.author != author_id,
|
||||
)
|
||||
)
|
||||
result = local_session().execute(q).first()
|
||||
with local_session() as session:
|
||||
result = session.execute(q).first()
|
||||
return result[0] if result else 0
|
||||
|
||||
|
||||
def get_author_followers_stat(author_id: int):
|
||||
def get_author_followers_stat(author_id: int) -> int:
|
||||
"""
|
||||
Получает количество подписчиков для указанного автора.
|
||||
|
||||
:param author_id: Идентификатор автора.
|
||||
:return: Количество уникальных подписчиков автора.
|
||||
"""
|
||||
aliased_followers = aliased(AuthorFollower)
|
||||
q = select(func.count(distinct(aliased_followers.follower))).filter(
|
||||
aliased_followers.author == author_id
|
||||
)
|
||||
result = local_session().execute(q).first()
|
||||
q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.author == author_id)
|
||||
with local_session() as session:
|
||||
result = session.execute(q).first()
|
||||
return result[0] if result else 0
|
||||
|
||||
|
||||
def get_author_comments_stat(author_id: int):
|
||||
sub_comments = (
|
||||
select(
|
||||
Author.id, func.coalesce(func.count(Reaction.id)).label("comments_count")
|
||||
)
|
||||
.select_from(Author) # явно указываем левый элемент join'а
|
||||
def get_author_comments_stat(author_id):
|
||||
q = (
|
||||
select(func.coalesce(func.count(Reaction.id), 0).label("comments_count"))
|
||||
.select_from(Author)
|
||||
.outerjoin(
|
||||
Reaction,
|
||||
and_(
|
||||
@@ -157,66 +255,98 @@ def get_author_comments_stat(author_id: int):
|
||||
Reaction.deleted_at.is_(None),
|
||||
),
|
||||
)
|
||||
.where(Author.id == author_id)
|
||||
.group_by(Author.id)
|
||||
.subquery()
|
||||
)
|
||||
q = select(sub_comments.c.comments_count).filter(sub_comments.c.id == author_id)
|
||||
|
||||
result = local_session().execute(q).first()
|
||||
return result[0] if result else 0
|
||||
with local_session() as session:
|
||||
result = session.execute(q).first()
|
||||
return result.comments_count if result else 0
|
||||
|
||||
|
||||
def get_with_stat(q):
|
||||
"""
|
||||
Выполняет запрос с добавлением статистики.
|
||||
|
||||
:param q: SQL-запрос для выполнения.
|
||||
:return: Список объектов с добавленной статистикой.
|
||||
"""
|
||||
records = []
|
||||
try:
|
||||
is_author = f"{q}".lower().startswith("select author")
|
||||
# is_topic = f"{q}".lower().startswith("select topic")
|
||||
result = []
|
||||
if is_author:
|
||||
q = add_author_stat_columns(q)
|
||||
else:
|
||||
q = add_topic_stat_columns(q)
|
||||
with local_session() as session:
|
||||
result = session.execute(q)
|
||||
# Определяем, является ли запрос запросом авторов
|
||||
author_prefixes = ("select author", "select * from author")
|
||||
is_author = f"{q}".lower().startswith(author_prefixes)
|
||||
|
||||
# Добавляем колонки статистики в запрос
|
||||
q = add_author_stat_columns(q) if is_author else add_topic_stat_columns(q)
|
||||
|
||||
# Выполняем запрос
|
||||
result = session.execute(q)
|
||||
for cols in result:
|
||||
entity = cols[0]
|
||||
stat = dict()
|
||||
stat["shouts"] = cols[1]
|
||||
stat["followers"] = cols[2]
|
||||
stat["shouts"] = cols[1] # Статистика по публикациям
|
||||
stat["followers"] = cols[2] # Статистика по подписчикам
|
||||
if is_author:
|
||||
stat["authors"] = get_author_authors_stat(entity.id)
|
||||
stat["comments"] = get_author_comments_stat(entity.id)
|
||||
stat["authors"] = get_author_authors_stat(entity.id) # Статистика по подпискам на авторов
|
||||
stat["comments"] = get_author_comments_stat(entity.id) # Статистика по комментариям
|
||||
else:
|
||||
stat["authors"] = get_topic_authors_stat(entity.id)
|
||||
stat["authors"] = get_topic_authors_stat(entity.id) # Статистика по авторам темы
|
||||
entity.stat = stat
|
||||
records.append(entity)
|
||||
except Exception as exc:
|
||||
import traceback
|
||||
|
||||
logger.debug(q)
|
||||
traceback.print_exc()
|
||||
logger.error(exc, exc_info=True)
|
||||
return records
|
||||
|
||||
|
||||
def author_follows_authors(author_id: int):
|
||||
"""
|
||||
Получает список авторов, на которых подписан указанный автор.
|
||||
|
||||
:param author_id: Идентификатор автора.
|
||||
:return: Список авторов с добавленной статистикой.
|
||||
"""
|
||||
af = aliased(AuthorFollower, name="af")
|
||||
q = (
|
||||
select(Author)
|
||||
.select_from(join(Author, af, Author.id == af.author))
|
||||
.where(af.follower == author_id)
|
||||
author_follows_authors_query = (
|
||||
select(Author).select_from(join(Author, af, Author.id == af.author)).where(af.follower == author_id)
|
||||
)
|
||||
return get_with_stat(q)
|
||||
return get_with_stat(author_follows_authors_query)
|
||||
|
||||
|
||||
def author_follows_topics(author_id: int):
|
||||
q = (
|
||||
"""
|
||||
Получает список тем, на которые подписан указанный автор.
|
||||
|
||||
:param author_id: Идентификатор автора.
|
||||
:return: Список тем с добавленной статистикой.
|
||||
"""
|
||||
author_follows_topics_query = (
|
||||
select(Topic)
|
||||
.select_from(join(Topic, TopicFollower, Topic.id == TopicFollower.topic))
|
||||
.where(TopicFollower.follower == author_id)
|
||||
)
|
||||
return get_with_stat(q)
|
||||
return get_with_stat(author_follows_topics_query)
|
||||
|
||||
|
||||
async def update_author_stat(author_id: int):
|
||||
author_with_stat = get_with_stat(select(Author).where(Author.id == author_id))
|
||||
if isinstance(author_with_stat, Author):
|
||||
author_dict = author_with_stat.dict()
|
||||
await cache_author(author_dict)
|
||||
def update_author_stat(author_id: int):
|
||||
"""
|
||||
Обновляет статистику для указанного автора и сохраняет её в кэше.
|
||||
|
||||
:param author_id: Идентификатор автора.
|
||||
"""
|
||||
author_query = select(Author).where(Author.id == author_id)
|
||||
try:
|
||||
result = get_with_stat(author_query)
|
||||
if result:
|
||||
author_with_stat = result[0]
|
||||
if isinstance(author_with_stat, Author):
|
||||
author_dict = author_with_stat.dict()
|
||||
# Асинхронное кэширование данных автора
|
||||
asyncio.create_task(cache_author(author_dict))
|
||||
except Exception as exc:
|
||||
logger.error(exc, exc_info=True)
|
||||
|
||||
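A minimal usage sketch of get_with_stat as reworked above: the caller passes a plain select() and receives entities with a stat dict attached. The slug value here is invented for illustration; everything else follows the diff.

from sqlalchemy import select

from orm.author import Author
from resolvers.stat import get_with_stat

# get_with_stat detects the "select author" prefix, adds the
# shouts_stat/followers_stat columns, executes the query and
# attaches a stat dict to every returned entity.
authors = get_with_stat(select(Author).where(Author.slug == "some-author"))
for author in authors:
    print(author.id, author.stat["shouts"], author.stat["followers"])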
@@ -1,88 +1,308 @@
from sqlalchemy import distinct, func, select
from sqlalchemy import desc, select, text

from cache.cache import (
    cache_topic,
    cached_query,
    get_cached_topic_authors,
    get_cached_topic_by_slug,
    get_cached_topic_followers,
    invalidate_cache_by_prefix,
)
from orm.author import Author
from orm.shout import ShoutTopic
from orm.topic import Topic
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session
from services.memorycache import cache_region
from services.redis import redis
from services.schema import mutation, query
from utils.logger import root_logger as logger


# Helper: fetch all topics without statistics
async def get_all_topics():
    """
    Returns all topics without statistics.
    Used when the full topic list is needed without any extra information.

    Returns:
        list: List of all topics without statistics
    """
    cache_key = "topics:all:basic"

    # Fetch all topics from the DB
    async def fetch_all_topics():
        logger.debug("Fetching the list of all topics from the DB and caching the result")

        with local_session() as session:
            # Query the basic topic information
            topics_query = select(Topic)
            topics = session.execute(topics_query).scalars().all()

            # Convert the topics to dicts
            return [topic.dict() for topic in topics]

    # Use the generic query-caching helper
    return await cached_query(cache_key, fetch_all_topics)


# Helper: fetch topics with statistics, paginated
async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None):
    """
    Returns topics with statistics, paginated.

    Args:
        limit: Maximum number of topics to return
        offset: Pagination offset
        community_id: Optional community id to filter by
        by: Optional sort parameter

    Returns:
        list: List of topics with their statistics
    """
    # Build the cache key
    cache_key = f"topics:stats:limit={limit}:offset={offset}:community_id={community_id}"

    # Fetch the topics from the DB
    async def fetch_topics_with_stats():
        logger.debug(f"Querying topics with statistics: limit={limit}, offset={offset}")

        with local_session() as session:
            # Base query for topics
            base_query = select(Topic)

            # Filter by community when given
            if community_id:
                base_query = base_query.where(Topic.community == community_id)

            # Apply sorting based on the `by` parameter
            if by:
                if isinstance(by, dict):
                    # Handle a dict of sort parameters
                    for field, direction in by.items():
                        column = getattr(Topic, field, None)
                        if column:
                            if direction.lower() == "desc":
                                base_query = base_query.order_by(desc(column))
                            else:
                                base_query = base_query.order_by(column)
                elif by == "popular":
                    # Sort by popularity (number of publications)
                    # Note: this requires an extra query or a subquery
                    base_query = base_query.order_by(
                        desc(Topic.id)
                    )  # temporary, needs to be replaced with a proper implementation
                else:
                    # Default: sort by id in reverse order
                    base_query = base_query.order_by(desc(Topic.id))
            else:
                # Default: sort by id in reverse order
                base_query = base_query.order_by(desc(Topic.id))

            # Apply limit and offset
            base_query = base_query.limit(limit).offset(offset)

            # Fetch the topics
            topics = session.execute(base_query).scalars().all()
            topic_ids = [topic.id for topic in topics]

            if not topic_ids:
                return []

            # Publication statistics for the selected topics
            shouts_stats_query = f"""
                SELECT st.topic, COUNT(DISTINCT s.id) as shouts_count
                FROM shout_topic st
                JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL
                WHERE st.topic IN ({",".join(map(str, topic_ids))})
                GROUP BY st.topic
            """
            shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query))}

            # Follower statistics for the selected topics
            followers_stats_query = f"""
                SELECT topic, COUNT(DISTINCT follower) as followers_count
                FROM topic_followers
                WHERE topic IN ({",".join(map(str, topic_ids))})
                GROUP BY topic
            """
            followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}

            # Assemble the result with the statistics attached
            result = []
            for topic in topics:
                topic_dict = topic.dict()
                topic_dict["stat"] = {
                    "shouts": shouts_stats.get(topic.id, 0),
                    "followers": followers_stats.get(topic.id, 0),
                }
                result.append(topic_dict)

                # Cache each topic separately for use elsewhere
                await cache_topic(topic_dict)

            return result

    # Use the generic query-caching helper
    return await cached_query(cache_key, fetch_topics_with_stats)


# Invalidate the topic caches
async def invalidate_topics_cache(topic_id=None):
    """
    Invalidates topic caches when data changes.

    Args:
        topic_id: Optional topic id for targeted invalidation.
                  When omitted, all topic caches are invalidated.
    """
    if topic_id:
        # Targeted invalidation of a single topic
        logger.debug(f"Invalidating cache for topic #{topic_id}")
        specific_keys = [
            f"topic:id:{topic_id}",
            f"topic:authors:{topic_id}",
            f"topic:followers:{topic_id}",
            f"topic_shouts_{topic_id}",
        ]

        # Look up the topic slug, if any
        with local_session() as session:
            topic = session.query(Topic).filter(Topic.id == topic_id).first()
            if topic and topic.slug:
                specific_keys.append(f"topic:slug:{topic.slug}")

        # Delete the specific keys
        for key in specific_keys:
            try:
                await redis.execute("DEL", key)
                logger.debug(f"Deleted cache key {key}")
            except Exception as e:
                logger.error(f"Error deleting key {key}: {e}")

        # Also find and delete collection keys that contain this topic's data
        collection_keys = await redis.execute("KEYS", "topics:stats:*")
        if collection_keys:
            await redis.execute("DEL", *collection_keys)
            logger.debug(f"Deleted {len(collection_keys)} topic collection keys")
    else:
        # Full invalidation of all topic caches
        logger.debug("Full invalidation of the topic cache")
        await invalidate_cache_by_prefix("topics")


# Query returning all topics
@query.field("get_topics_all")
def get_topics_all(_, _info):
    cache_key = "get_topics_all"
async def get_topics_all(_, _info):
    """
    Returns the list of all topics without statistics.

    @cache_region.cache_on_arguments(cache_key)
    def _get_topics_all():
        return get_with_stat(select(Topic))

    return _get_topics_all()
    Returns:
        list: List of all topics
    """
    return await get_all_topics()


# Query returning topics with pagination and statistics
@query.field("get_topics_paginated")
async def get_topics_paginated(_, _info, limit=100, offset=0, by=None):
    """
    Returns the list of topics with pagination and statistics.

    Args:
        limit: Maximum number of topics to return
        offset: Pagination offset
        by: Optional sort parameters

    Returns:
        list: List of topics with their statistics
    """
    return await get_topics_with_stats(limit, offset, None, by)


# Query returning topics by community
@query.field("get_topics_by_community")
def get_topics_by_community(_, _info, community_id: int):
    cache_key = f"get_topics_by_community_{community_id}"
async def get_topics_by_community(_, _info, community_id: int, limit=100, offset=0, by=None):
    """
    Returns the topics belonging to the given community, with pagination and statistics.

    @cache_region.cache_on_arguments(cache_key)
    def _get_topics_by_community():
        q = select(Topic).where(Topic.community == community_id)
        return get_with_stat(q)
    Args:
        community_id: Community id
        limit: Maximum number of topics to return
        offset: Pagination offset
        by: Optional sort parameters

    return _get_topics_by_community()
    Returns:
        list: List of topics with their statistics
    """
    return await get_topics_with_stats(limit, offset, community_id, by)


# Query returning topics by author
@query.field("get_topics_by_author")
async def get_topics_by_author(_, _info, author_id=0, slug="", user=""):
    q = select(Topic)
    topics_by_author_query = select(Topic)
    if author_id:
        q = q.join(Author).where(Author.id == author_id)
        topics_by_author_query = topics_by_author_query.join(Author).where(Author.id == author_id)
    elif slug:
        q = q.join(Author).where(Author.slug == slug)
        topics_by_author_query = topics_by_author_query.join(Author).where(Author.slug == slug)
    elif user:
        q = q.join(Author).where(Author.user == user)
        topics_by_author_query = topics_by_author_query.join(Author).where(Author.user == user)

    return get_with_stat(q)
    return get_with_stat(topics_by_author_query)


# Query returning a single topic by slug
@query.field("get_topic")
def get_topic(_, _info, slug: str):
    q = select(Topic).filter(Topic.slug == slug)
    result = get_with_stat(q)
    for topic in result:
async def get_topic(_, _info, slug: str):
    topic = await get_cached_topic_by_slug(slug, get_with_stat)
    if topic:
        return topic


# Mutation creating a new topic
@mutation.field("create_topic")
@login_required
async def create_topic(_, _info, inp):
async def create_topic(_, _info, topic_input):
    with local_session() as session:
        # TODO: check user permissions to create topic for exact community
        # and actor is permitted to create it
        new_topic = Topic(**inp)
        # TODO: check the user's permission to create a topic for the given community
        # and that the actor is allowed to create it
        new_topic = Topic(**topic_input)
        session.add(new_topic)
        session.commit()

        # Invalidate the cache of all topics
        await invalidate_topics_cache()

        return {"topic": new_topic}


# Mutation updating a topic
@mutation.field("update_topic")
@login_required
async def update_topic(_, _info, inp):
    slug = inp["slug"]
async def update_topic(_, _info, topic_input):
    slug = topic_input["slug"]
    with local_session() as session:
        topic = session.query(Topic).filter(Topic.slug == slug).first()
        if not topic:
            return {"error": "topic not found"}
        else:
            Topic.update(topic, inp)
            old_slug = topic.slug
            Topic.update(topic, topic_input)
            session.add(topic)
            session.commit()

            # Invalidate the cache for this specific topic only
            await invalidate_topics_cache(topic.id)

            # If the slug changed, drop the old key
            if old_slug != topic.slug:
                await redis.execute("DEL", f"topic:slug:{old_slug}")
                logger.debug(f"Deleted cache key for the old slug: {old_slug}")

            return {"topic": topic}


# Mutation deleting a topic
@mutation.field("delete_topic")
@login_required
async def delete_topic(_, info, slug: str):
@@ -99,21 +319,30 @@ async def delete_topic(_, info, slug: str):
            session.delete(t)
            session.commit()

            # Invalidate the cache of all topics and of this specific topic
            await invalidate_topics_cache()
            await redis.execute("DEL", f"topic:slug:{slug}")
            await redis.execute("DEL", f"topic:id:{t.id}")

            return {}
    return {"error": "access denied"}


@query.field("get_topics_random")
def get_topics_random(_, _info, amount=12):
    q = select(Topic)
    q = q.join(ShoutTopic)
    q = q.group_by(Topic.id)
    q = q.having(func.count(distinct(ShoutTopic.shout)) > 2)
    q = q.order_by(func.random()).limit(amount)
# Query returning a topic's followers
@query.field("get_topic_followers")
async def get_topic_followers(_, _info, slug: str):
    logger.debug(f"getting followers for @{slug}")
    topic = await get_cached_topic_by_slug(slug, get_with_stat)
    topic_id = topic.id if isinstance(topic, Topic) else topic.get("id")
    followers = await get_cached_topic_followers(topic_id)
    return followers

    topics = []
    with local_session() as session:
        for [topic] in session.execute(q):
            topics.append(topic)

    return topics
# Query returning a topic's authors
@query.field("get_topic_authors")
async def get_topic_authors(_, _info, slug: str):
    logger.debug(f"getting authors for @{slug}")
    topic = await get_cached_topic_by_slug(slug, get_with_stat)
    topic_id = topic.id if isinstance(topic, Topic) else topic.get("id")
    authors = await get_cached_topic_authors(topic_id)
    return authors
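The resolvers above delegate caching to cached_query(cache_key, fetch_fn) from cache.cache, whose implementation is not part of this diff. A sketch of the pattern it implies, assuming the redis.execute interface used elsewhere in this changeset; the TTL and serialization details are guesses:

import json

from services.redis import redis  # import path as used in the resolvers above


async def cached_query(cache_key: str, fetch_fn, ttl: int = 300):
    # Return the cached JSON payload when present...
    cached = await redis.execute("GET", cache_key)
    if isinstance(cached, str):
        return json.loads(cached)
    # ...otherwise compute, cache with an expiry, and return it.
    result = await fetch_fn()
    await redis.execute("SET", cache_key, json.dumps(result), "EX", ttl)
    return result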
@@ -13,6 +13,12 @@ enum ReactionSort {
  dislike
}

enum ShoutsOrderBy {
  last_commented_at
  rating
  comments_count
}

enum ReactionKind {

  # collabs
@@ -1,15 +1,47 @@
input ShoutInput {
  slug: String
input MediaItemInput {
  url: String
  title: String
  body: String
  source: String
  pic: String
  date: String
  genre: String
  artist: String
  lyrics: String
}

input AuthorInput {
  id: Int!
  slug: String
}

input TopicInput {
  id: Int
  slug: String!
  title: String
  body: String
  pic: String
}

input DraftInput {
  id: Int
  # no created_at, updated_at, deleted_at, updated_by, deleted_by
  layout: String
  shout_id: Int # Changed from shout: Shout
  author_ids: [Int!] # Changed from authors: [Author]
  topic_ids: [Int!] # Changed from topics: [Topic]
  main_topic_id: Int # Changed from main_topic: Topic
  media: [MediaItemInput] # Changed to use MediaItemInput
  lead: String
  description: String
  layout: String
  media: String
  topics: [TopicInput]
  community: Int
  subtitle: String
  lang: String
  seo: String
  body: String
  title: String
  slug: String
  cover: String
  cover_caption: String
}

input ProfileInput {
@@ -21,14 +53,6 @@ input ProfileInput {
  about: String
}

input TopicInput {
  id: Int
  slug: String!
  title: String
  body: String
  pic: String
}

input ReactionInput {
  id: Int
  kind: ReactionKind!
@@ -54,17 +78,16 @@ input LoadShoutsFilters {
  author: String
  layouts: [String]
  featured: Boolean
  reacted: Boolean
  reacted: Boolean # requires auth, used in load_shouts_feed
  after: Int
}

input LoadShoutsOptions {
  filters: LoadShoutsFilters
  with_author_captions: Boolean
  limit: Int!
  random_limit: Int
  offset: Int
  order_by: String
  order_by: ShoutsOrderBy
  order_by_desc: Boolean
}

@@ -72,15 +95,23 @@ input ReactionBy {
  shout: String
  shouts: [String]
  search: String
  comment: Boolean
  rating: Boolean
  kinds: [ReactionKind]
  reply_to: Int # filter
  topic: String
  created_by: Int
  author: String
  after: Int
  sort: ReactionSort
  sort: ReactionSort # sort
}

input NotificationSeenInput {
  notifications: [Int]
  thread: Int
}

input CommunityInput {
  slug: String
  name: String
  desc: String
  pic: String
}
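To make the reshaped DraftInput concrete, here is a hypothetical payload in the new shape; every value below is invented for illustration:

# A draft_input dict as the new DraftInput would accept it.
draft_input = {
    "id": 42,
    "layout": "article",
    "shout_id": 100,       # was `shout: Shout`
    "author_ids": [1, 2],  # was `authors: [Author]`
    "topic_ids": [5],      # was `topics: [Topic]`
    "main_topic_id": 5,    # was `main_topic: Topic`
    "media": [{"url": "https://example.com/a.mp3", "title": "Track", "artist": "Artist"}],
    "lead": "Short lead",
    "title": "Draft title",
    "body": "<p>Body</p>",
    "slug": "draft-title",
    "lang": "ru",
}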
@@ -3,18 +3,23 @@ type Mutation {
  rate_author(rated_slug: String!, value: Int!): CommonResult!
  update_author(profile: ProfileInput!): CommonResult!

  # editor
  create_shout(inp: ShoutInput!): CommonResult!
  update_shout(shout_id: Int!, shout_input: ShoutInput, publish: Boolean): CommonResult!
  delete_shout(shout_id: Int!): CommonResult!
  # draft
  create_draft(draft_input: DraftInput!): CommonResult!
  update_draft(draft_id: Int!, draft_input: DraftInput!): CommonResult!
  delete_draft(draft_id: Int!): CommonResult!
  # publication
  publish_shout(shout_id: Int!): CommonResult!
  publish_draft(draft_id: Int!): CommonResult!
  unpublish_draft(draft_id: Int!): CommonResult!
  unpublish_shout(shout_id: Int!): CommonResult!

  # follower
  follow(what: FollowingEntity!, slug: String!): CommonResult!
  unfollow(what: FollowingEntity!, slug: String!): CommonResult!
  follow(what: FollowingEntity!, slug: String!): AuthorFollowsResult!
  unfollow(what: FollowingEntity!, slug: String!): AuthorFollowsResult!

  # topic
  create_topic(input: TopicInput!): CommonResult!
  update_topic(input: TopicInput!): CommonResult!
  create_topic(topic_input: TopicInput!): CommonResult!
  update_topic(topic_input: TopicInput!): CommonResult!
  delete_topic(slug: String!): CommonResult!

  # reaction
@@ -29,8 +34,18 @@ type Mutation {
  accept_invite(invite_id: Int!): CommonResult!
  reject_invite(invite_id: Int!): CommonResult!

  # bookmark
  toggle_bookmark_shout(slug: String!): CommonResult!

  # notifier
  notification_mark_seen(notification_id: Int!, seen: Boolean): CommonResult!
  notifications_seen_after(after: Int!, seen: Boolean): CommonResult!
  notifications_seen_thread(thread_id: String!, seen: Boolean): CommonResult!

  # community
  join_community(slug: String!): CommonResult!
  leave_community(slug: String!): CommonResult!
  create_community(community_input: CommunityInput!): CommonResult!
  update_community(community_input: CommunityInput!): CommonResult!
  delete_community(slug: String!): CommonResult!
}
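A client-side sketch of the renamed create_topic mutation; the endpoint URL and token are placeholders, while the topic_input argument name and the selectable CommonResult fields come from this changeset:

import httpx

GRAPHQL_URL = "https://example.com/graphql"  # placeholder endpoint

mutation = """
mutation CreateTopic($topic_input: TopicInput!) {
  create_topic(topic_input: $topic_input) { error topic { id slug title } }
}
"""
variables = {"topic_input": {"slug": "new-topic", "title": "New topic"}}

response = httpx.post(
    GRAPHQL_URL,
    json={"query": mutation, "variables": variables},
    headers={"Authorization": "Bearer <token>"},  # create_topic is @login_required
)
print(response.json())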
@@ -9,36 +9,56 @@ type Query {
  # community
  get_community: Community
  get_communities_all: [Community]
  get_communities_by_author(slug: String, user: String, author_id: Int): [Community]

  # follower
  get_shout_followers(slug: String, shout_id: Int): [Author]
  get_topic_followers(slug: String, topic_id: Int): [Author]
  get_topic_followers(slug: String): [Author]
  get_topic_authors(slug: String): [Author]
  get_author_followers(slug: String, user: String, author_id: Int): [Author]
  get_author_follows(slug: String, user: String, author_id: Int): AuthorFollowsResult!
  get_author_follows(slug: String, user: String, author_id: Int): CommonResult!
  get_author_follows_topics(slug: String, user: String, author_id: Int): [Topic]
  get_author_follows_authors(slug: String, user: String, author_id: Int): [Author]
  load_shouts_followed(follower_id: Int!, limit: Int, offset: Int): [Shout] # userReactedShouts

  # reaction
  load_reactions_by(by: ReactionBy!, limit: Int, offset: Int): [Reaction]
  load_shout_comments(shout: Int!, limit: Int, offset: Int): [Reaction]
  load_shout_ratings(shout: Int!, limit: Int, offset: Int): [Reaction]
  load_comment_ratings(comment: Int!, limit: Int, offset: Int): [Reaction]

  # branched comments pagination
  load_comments_branch(shout: Int!, parent_id: Int, limit: Int, offset: Int, sort: ReactionSort, children_limit: Int, children_offset: Int): [Reaction]

  # reader
  get_shout(slug: String): Shout
  get_shout(slug: String, shout_id: Int): Shout
  load_shouts_by(options: LoadShoutsOptions): [Shout]
  load_shouts_search(text: String!, limit: Int, offset: Int): [SearchResult]
  load_shouts_search(text: String!, options: LoadShoutsOptions): [SearchResult]
  load_shouts_bookmarked(options: LoadShoutsOptions): [Shout]

  # rating
  get_my_rates_shouts(shouts: [Int!]!): [MyRateShout]
  get_my_rates_comments(comments: [Int!]!): [MyRateComment]

  # public feeds
  load_shouts_with_topic(slug: String, options: LoadShoutsOptions): [Shout] # topic feed
  load_shouts_random_top(options: LoadShoutsOptions): [Shout] # random order, fixed filter, limit offset can be used
  load_shouts_authored_by(slug: String, options: LoadShoutsOptions): [Shout] # author feed
  load_shouts_followed_by(slug: String, options: LoadShoutsOptions): [Shout] # another author feed

  # my feeds
  load_shouts_feed(options: LoadShoutsOptions): [Shout]
  load_shouts_unrated(limit: Int, offset: Int): [Shout]
  load_shouts_random_top(options: LoadShoutsOptions): [Shout]
  load_shouts_random_topic(limit: Int!): CommonResult! # { topic shouts }
  load_shouts_unrated(options: LoadShoutsOptions): [Shout]
  load_shouts_coauthored(options: LoadShoutsOptions): [Shout]
  load_shouts_discussed(options: LoadShoutsOptions): [Shout]

  # editor
  get_my_shout(shout_id: Int!): CommonResult!
  get_shouts_drafts: [Shout]
  get_shouts_drafts: CommonResult!
  load_drafts: CommonResult!

  # topic
  get_topic(slug: String!): Topic
  get_topics_all: [Topic]
  get_topics_random(amount: Int): [Topic]
  get_topics_by_author(slug: String, user: String, author_id: Int): [Topic]
  get_topics_by_community(slug: String, community_id: Int): [Topic]
@@ -1,5 +1,6 @@
type AuthorStat {
  shouts: Int
  topics: Int
  authors: Int
  followers: Int
  rating: Int
@@ -54,28 +55,44 @@ type Reaction {
  stat: Stat
  oid: String
  # old_thread: String
  first_replies: [Reaction]
}

type MediaItem {
  url: String
  title: String
  body: String
  source: String # image
  pic: String

  # audio specific properties
  date: String
  genre: String
  artist: String
  lyrics: String
}

type Shout {
  id: Int!
  title: String!
  slug: String!
  body: String!
  layout: String!

  lead: String
  description: String
  main_topic: String
  topics: [Topic]
  created_by: Author!
  updated_by: Author
  deleted_by: Author
  authors: [Author]
  communities: [Community]
  title: String!
  subtitle: String
  lang: String
  community: String
  cover: String
  cover_caption: String
  layout: String!

  community: Community!
  main_topic: Topic
  created_by: Author!
  topics: [Topic]
  authors: [Author]
  updated_by: Author
  deleted_by: Author

  created_at: Int!
  updated_at: Int
@@ -84,19 +101,51 @@ type Shout {
  deleted_at: Int

  version_of: Shout # TODO: use version_of somewhere

  media: String
  draft: Draft
  media: [MediaItem]
  stat: Stat
  score: Float
}

type Draft {
  id: Int!
  created_at: Int!
  created_by: Author!

  layout: String
  slug: String
  title: String
  subtitle: String
  lead: String
  description: String
  body: String
  media: [MediaItem]
  cover: String
  cover_caption: String
  lang: String
  seo: String

  # auto
  updated_at: Int
  deleted_at: Int
  updated_by: Author
  deleted_by: Author
  authors: [Author]
  topics: [Topic]

}

type Stat {
  viewed: Int
  reacted: Int
  rating: Int
  commented: Int
  ranking: Int
  last_comment: Int
  viewed: Int
  last_commented_at: Int
}

type CommunityStat {
  shouts: Int!
  followers: Int!
  authors: Int!
}

type Community {
@@ -107,6 +156,7 @@ type Community {
  pic: String!
  created_at: Int!
  created_by: Author!
  stat: CommunityStat
}

type Collection {
@@ -135,12 +185,15 @@ type Topic {
  pic: String
  stat: TopicStat
  oid: String
  is_main: Boolean
}

# output type

type CommonResult {
  error: String
  drafts: [Draft]
  draft: Draft
  slugs: [String]
  shout: Shout
  shouts: [Shout]
@@ -158,7 +211,7 @@ type SearchResult {
  slug: String!
  title: String!
  cover: String
  main_topic: String
  main_topic: Topic
  created_at: Int
  authors: [Author]
  topics: [Topic]
@@ -176,7 +229,6 @@ type Invite {
type AuthorFollowsResult {
  topics: [Topic]
  authors: [Author]
  # shouts: [Shout]
  communities: [Community]
  error: String
}
@@ -211,3 +263,15 @@ type NotificationsResult {
  total: Int!
  error: String
}

type MyRateShout {
  shout_id: Int!
  my_rate: ReactionKind
}

type MyRateComment {
  shout_id: Int
  comment_id: Int!
  my_rate: ReactionKind
}
23 server.py
@@ -1,23 +0,0 @@
import subprocess  # missing in the original file; needed by is_docker_container_running below

from granian.constants import Interfaces
from granian.server import Granian

from services.logger import root_logger as logger
from settings import PORT


def is_docker_container_running(name):
    cmd = ['docker', 'ps', '-f', f'name={name}']
    output = subprocess.run(cmd, capture_output=True, text=True).stdout
    return name in output


if __name__ == "__main__":
    logger.info("started")

    granian_instance = Granian(
        "main:app",
        address="0.0.0.0",  # noqa S104
        port=PORT,
        threads=4,
        websockets=False,
        interface=Interfaces.ASGI,
    )
    granian_instance.serve()
165 services/auth.py
@@ -1,75 +1,88 @@
import json
from functools import wraps

import httpx

from services.logger import root_logger as logger
from services.rediscache import redis
from cache.cache import get_cached_author_by_user_id
from resolvers.stat import get_with_stat
from services.schema import request_graphql_data
from settings import ADMIN_SECRET, AUTH_URL
from utils.logger import root_logger as logger


async def get_author_by_user(user: str):
    author = None
    redis_key = f"user:{user}"

    result = await redis.execute("GET", redis_key)
    if isinstance(result, str):
        author = json.loads(result)
    if author:
        return author
    return


async def request_data(gql, headers=None):
    if headers is None:
        headers = {"Content-Type": "application/json"}
    try:
        async with httpx.AsyncClient() as client:
            response = await client.post(AUTH_URL, json=gql, headers=headers)
            if response.status_code == 200:
                data = response.json()
                errors = data.get("errors")
                if errors:
                    logger.error(f"HTTP Errors: {errors}")
                else:
                    return data
    except Exception as e:
        # Handling and logging exceptions during authentication check
        import traceback

        logger.error(f"request_data error: {e}")
        logger.error(traceback.format_exc())
    return None
# List of allowed headers
ALLOWED_HEADERS = ["Authorization", "Content-Type"]


async def check_auth(req):
    """
    Verifies the user's authorization.

    This function validates the authorization token passed in the request headers
    and returns the user id and the user's roles.

    Parameters:
    - req: The incoming GraphQL request containing the authorization header.

    Returns:
    - user_id: str - The user identifier.
    - user_roles: list[str] - The list of the user's roles.
    """
    token = req.headers.get("Authorization")

    host = req.headers.get("host", "")
    logger.debug(f"check_auth: host={host}")
    auth_url = AUTH_URL
    if ".dscrs.site" in host or "localhost" in host:
        auth_url = "https://auth.dscrs.site/graphql"
    user_id = ""
    user_roles = []
    if token:
        # Strip the "Bearer " prefix from the token if present
        if token.startswith("Bearer "):
            token = token.split("Bearer ")[-1].strip()
        # Logging the authentication token
        logger.debug(f"{token}")
        logger.debug(f"TOKEN: {token}")
        query_name = "validate_jwt_token"
        operation = "ValidateToken"
        variables = {"params": {"token_type": "access_token", "token": token}}

        # Only the headers needed for the GraphQL request
        headers = {"Content-Type": "application/json"}

        gql = {
            "query": f"query {operation}($params: ValidateJWTTokenInput!) {{"
            "query": f"query {operation}($params: ValidateJWTTokenInput!)"
            + "{"
            + f"{query_name}(params: $params) {{ is_valid claims }} "
            + "}",
            "variables": variables,
            "operationName": operation,
        }
        data = await request_data(gql)
        data = await request_graphql_data(gql, url=auth_url, headers=headers)
        if data:
            logger.debug(data)
            user_data = data.get("data", {}).get(query_name, {}).get("claims", {})
            logger.debug(f"Auth response: {data}")
            validation_result = data.get("data", {}).get(query_name, {})
            logger.debug(f"Validation result: {validation_result}")
            is_valid = validation_result.get("is_valid", False)
            if not is_valid:
                logger.error(f"Token validation failed: {validation_result}")
                return "", []
            user_data = validation_result.get("claims", {})
            logger.debug(f"User claims: {user_data}")
            user_id = user_data.get("sub", "")
            user_roles = user_data.get("allowed_roles", [])
    return user_id, user_roles


async def add_user_role(user_id):
    """
    Adds user roles.

    This function adds the "author" and "reader" roles to the given user
    in the authorization service.

    Parameters:
    - user_id: str - The identifier of the user to grant the roles to.

    Returns:
    - user_id: str - The user identifier if the operation succeeded.
    """
    logger.info(f"add author role for user_id: {user_id}")
    query_name = "_update_user"
    operation = "UpdateUserRoles"
@@ -83,13 +96,26 @@ async def add_user_role(user_id):
        "variables": variables,
        "operationName": operation,
    }
    data = await request_data(gql, headers)
    data = await request_graphql_data(gql, headers=headers)
    if data:
        user_id = data.get("data", {}).get(query_name, {}).get("id")
        return user_id


def login_required(f):
    """
    Decorator enforcing user authorization.

    This decorator checks whether the user is authorized and adds
    the user information to the function's context.

    Parameters:
    - f: The function to decorate.

    Returns:
    - The wrapped function with the authorization check added.
    """

    @wraps(f)
    async def decorated_function(*args, **kwargs):
        info = args[1]
@@ -99,10 +125,59 @@ def login_required(f):
        logger.info(f" got {user_id} roles: {user_roles}")
        info.context["user_id"] = user_id.strip()
        info.context["roles"] = user_roles
        author = await get_author_by_user(user_id)
        author = await get_cached_author_by_user_id(user_id, get_with_stat)
        if not author:
            logger.error(f"author profile not found for user {user_id}")
        info.context["author"] = author
        return await f(*args, **kwargs)

    return decorated_function


def login_accepted(f):
    """
    Decorator that adds authorization data to the context.

    This decorator adds the authorization data to the context when available,
    but does not block access for unauthorized users.

    Parameters:
    - f: The function to decorate.

    Returns:
    - The wrapped function with the authorization check added.
    """

    @wraps(f)
    async def decorated_function(*args, **kwargs):
        info = args[1]
        req = info.context.get("request")

        logger.debug("login_accepted: checking user authorization.")
        user_id, user_roles = await check_auth(req)
        logger.debug(f"login_accepted: user_id={user_id}, user_roles={user_roles}")

        if user_id and user_roles:
            logger.info(f"login_accepted: user authorized: {user_id} with roles {user_roles}")
            info.context["user_id"] = user_id.strip()
            info.context["roles"] = user_roles

            # Try to fetch the author profile
            author = await get_cached_author_by_user_id(user_id, get_with_stat)
            if author:
                logger.debug(f"login_accepted: found author profile: {author}")
                # `author` is expected to be an object with an `id` attribute
                info.context["author"] = author.dict()
            else:
                logger.error(
                    f"login_accepted: author profile not found for user {user_id}. Falling back to basic data."
                )  # use the basic author info
        else:
            logger.debug("login_accepted: user not authorized. Clearing the context.")
            info.context["user_id"] = None
            info.context["roles"] = None
            info.context["author"] = None

        return await f(*args, **kwargs)

    return decorated_function
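For context, this is how a resolver would sit behind the decorators above; the field name is hypothetical, but the context keys match what login_required sets:

from services.auth import login_required
from services.schema import query  # import path as used in the resolvers above


@query.field("get_my_profile")  # hypothetical field, for illustration only
@login_required
async def get_my_profile(_, info):
    # check_auth() has already validated the token, and login_required
    # populated these context keys before this body runs.
    author = info.context["author"]
    roles = info.context["roles"]
    return {"author": author, "roles": roles}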
@@ -1,159 +0,0 @@
import json

from orm.topic import TopicFollower
from services.encoders import CustomJSONEncoder
from services.rediscache import redis
from services.db import local_session

DEFAULT_FOLLOWS = {
    "topics": [],
    "authors": [],
    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}


async def cache_author(author: dict):
    author_id = author.get("id")
    payload = json.dumps(author, cls=CustomJSONEncoder)
    await redis.execute("SET", f'user:{author.get("user")}', payload)
    await redis.execute("SET", f'author:{author_id}', payload)

    # update stat all field for followers' caches in <authors> list
    followers_str = await redis.execute("GET", f'author:{author_id}:followers')
    followers = []
    if isinstance(followers_str, str):
        followers = json.loads(followers_str)
    if isinstance(followers, list):
        for follower in followers:
            follower_follows_authors = []
            follower_follows_authors_str = await redis.execute("GET", f'author:{author_id}:follows-authors')
            if isinstance(follower_follows_authors_str, str):
                follower_follows_authors = json.loads(follower_follows_authors_str)
                c = 0
                for old_author in follower_follows_authors:
                    if int(old_author.get("id")) == int(author.get("id", 0)):
                        follower_follows_authors[c] = author
                        break  # exit the loop since we found and updated the author
                    c += 1
            else:
                # author not found in the list, so add the new author with the updated stat field
                follower_follows_authors.append(author)

    # update stat field for all authors' caches in <followers> list
    follows_str = await redis.execute("GET", f'author:{author_id}:follows-authors')
    follows_authors = []
    if isinstance(follows_str, str):
        follows_authors = json.loads(follows_str)
    if isinstance(follows_authors, list):
        for followed_author in follows_authors:
            followed_author_followers = []
            followed_author_followers_str = await redis.execute("GET", f'author:{author_id}:followers')
            if isinstance(followed_author_followers_str, str):
                followed_author_followers = json.loads(followed_author_followers_str)
                c = 0
                for old_follower in followed_author_followers:
                    old_follower_id = int(old_follower.get("id"))
                    if old_follower_id == author_id:
                        followed_author_followers[c] = author
                        break  # exit the loop since we found and updated the author
                    c += 1
            # author not found in the list, so add the new author with the updated stat field
            followed_author_followers.append(author)
            await redis.execute("SET", f'author:{author_id}:followers', followed_author_followers)


async def cache_follows(follower: dict, entity_type: str, entity: dict, is_insert=True):
    # prepare
    follows = []
    follower_id = follower.get("id")
    if follower_id:
        redis_key = f"author:{follower_id}:follows-{entity_type}s"
        follows_str = await redis.execute("GET", redis_key)
        if isinstance(follows_str, str):
            follows = json.loads(follows_str)
        if is_insert:
            follows.append(entity)
        else:
            entity_id = entity.get("id")
            if not entity_id:
                raise Exception("wrong entity")
            # Remove the entity from follows
            follows = [e for e in follows if e["id"] != entity_id]

        # update follows cache
        payload = json.dumps(follows, cls=CustomJSONEncoder)
        await redis.execute("SET", redis_key, payload)

        # update follower's stats everywhere
        author_str = await redis.execute("GET", f"author:{follower_id}")
        if isinstance(author_str, str):
            author = json.loads(author_str)
            author["stat"][f"{entity_type}s"] = len(follows)
            await cache_author(author)
    return follows


async def cache_follower(follower: dict, author: dict, is_insert=True):
    author_id = author.get("id")
    follower_id = follower.get("id")
    followers = []
    if author_id and follower_id:
        redis_key = f"author:{author_id}:followers"
        followers_str = await redis.execute("GET", redis_key)
        followers = []
        if isinstance(followers_str, str):
            followers = json.loads(followers_str)
        if is_insert:
            # Remove the entity from followers
            followers = [e for e in followers if e["id"] != author_id]
        else:
            followers.append(follower)
        payload = json.dumps(followers, cls=CustomJSONEncoder)
        await redis.execute("SET", redis_key, payload)
        author_str = await redis.execute("GET", f"author:{follower_id}")
        if isinstance(author_str, str):
            author = json.loads(author_str)
            author["stat"]["followers"] = len(followers)
            await cache_author(author)
    return followers


async def cache_topic(topic_dict: dict):
    # update stat all field for followers' caches in <topics> list
    followers = (
        local_session()
        .query(TopicFollower)
        .filter(TopicFollower.topic == topic_dict.get("id"))
        .all()
    )
    for tf in followers:
        follower_id = tf.follower
        follower_follows_topics = []
        follower_follows_topics_str = await redis.execute(
            "GET", f"author:{follower_id}:follows-topics"
        )
        if isinstance(follower_follows_topics_str, str):
            follower_follows_topics = json.loads(follower_follows_topics_str)
            c = 0
            for old_topic in follower_follows_topics:
                if int(old_topic.get("id")) == int(topic_dict.get("id", 0)):
                    follower_follows_topics[c] = topic_dict
                    break  # exit the loop since we found and updated the topic
                c += 1
        else:
            # topic not found in the list, so add the new topic with the updated stat field
            follower_follows_topics.append(topic_dict)

        await redis.execute(
            "SET",
            f"author:{follower_id}:follows-topics",
            json.dumps(follower_follows_topics),
        )

    # update topic's stat
    topic_dict["stat"]["followers"] = len(followers)

    # save in cache
    payload = json.dumps(topic_dict, cls=CustomJSONEncoder)
    await redis.execute("SET", f'topic:{topic_dict.get("slug")}', payload)
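The deleted module above also fixes the Redis key scheme that the new cache.cache module keeps using. Summarized here with an illustrative reader; the key names come from the code above, while the helper itself is not part of the codebase:

import json

from services.redis import redis  # new import path used elsewhere in this diff

# Key scheme:
#   user:{user_id}                     -> author payload keyed by auth user id
#   author:{author_id}                 -> author payload with stat
#   author:{author_id}:followers       -> list of follower author dicts
#   author:{author_id}:follows-authors -> list of followed author dicts
#   author:{author_id}:follows-topics  -> list of followed topic dicts
#   topic:{slug}                       -> topic payload with stat


async def read_cached(key: str):
    # Fetch and decode one of the JSON payloads listed above.
    raw = await redis.execute("GET", key)
    return json.loads(raw) if isinstance(raw, str) else None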
24 services/common_result.py (new file)
@@ -0,0 +1,24 @@
from dataclasses import dataclass
from typing import List, Optional

from orm.author import Author
from orm.community import Community
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic


@dataclass
class CommonResult:
    error: Optional[str] = None
    slugs: Optional[List[str]] = None
    shout: Optional[Shout] = None
    shouts: Optional[List[Shout]] = None
    author: Optional[Author] = None
    authors: Optional[List[Author]] = None
    reaction: Optional[Reaction] = None
    reactions: Optional[List[Reaction]] = None
    topic: Optional[Topic] = None
    topics: Optional[List[Topic]] = None
    community: Optional[Community] = None
    communities: Optional[List[Community]] = None
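A small usage sketch for the new dataclass, mirroring the CommonResult GraphQL type earlier in this changeset; the Topic construction is illustrative:

from orm.topic import Topic
from services.common_result import CommonResult

topic = Topic(slug="example", title="Example")  # illustrative construction
ok = CommonResult(topic=topic)
fail = CommonResult(error="topic not found")
assert ok.error is None and fail.topic is None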
226
services/db.py
226
services/db.py
@@ -1,23 +1,47 @@
import json
import math
import time
import traceback
import warnings
from typing import Any, Callable, Dict, TypeVar

from sqlalchemy import JSON, Column, Engine, Integer, create_engine, event, exc, inspect
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, configure_mappers
import orjson
import sqlalchemy
from sqlalchemy import (
    JSON,
    Column,
    Engine,
    Index,
    Integer,
    create_engine,
    event,
    exc,
    func,
    inspect,
    text,
)
from sqlalchemy.orm import Session, configure_mappers, declarative_base
from sqlalchemy.sql.schema import Table

from services.logger import root_logger as logger
from settings import DB_URL
from utils.logger import root_logger as logger

# from sqlalchemy_searchable import make_searchable
if DB_URL.startswith("postgres"):
    engine = create_engine(
        DB_URL,
        echo=False,
        pool_size=10,
        max_overflow=20,
        pool_timeout=30,  # Время ожидания свободного соединения
        pool_recycle=1800,  # Время жизни соединения
        pool_pre_ping=True,  # Добавить проверку соединений
        connect_args={
            "sslmode": "disable",
            "connect_timeout": 40,  # Добавить таймаут подключения
        },
    )
else:
    engine = create_engine(DB_URL, echo=False, connect_args={"check_same_thread": False})


# Подключение к базе данных SQLAlchemy
engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)
inspector = inspect(engine)
configure_mappers()
T = TypeVar("T")
@@ -25,6 +49,91 @@ REGISTRY: Dict[str, type] = {}
FILTERED_FIELDS = ["_sa_instance_state", "search_vector"]


def create_table_if_not_exists(engine, table):
    inspector = inspect(engine)
    if table and not inspector.has_table(table.__tablename__):
        table.__table__.create(engine)
        logger.info(f"Table '{table.__tablename__}' created.")
    else:
        logger.info(f"Table '{table.__tablename__}' ok.")


def sync_indexes():
    """
    Синхронизирует индексы в БД с индексами, определенными в моделях SQLAlchemy.
    Создает недостающие индексы, если они определены в моделях, но отсутствуют в БД.

    Использует pg_catalog для PostgreSQL для получения списка существующих индексов.
    """
    if not DB_URL.startswith("postgres"):
        logger.warning("Функция sync_indexes поддерживается только для PostgreSQL.")
        return

    logger.info("Начинаем синхронизацию индексов в базе данных...")

    # Получаем все существующие индексы в БД
    with local_session() as session:
        existing_indexes_query = text("""
            SELECT
                t.relname AS table_name,
                i.relname AS index_name
            FROM
                pg_catalog.pg_class i
            JOIN
                pg_catalog.pg_index ix ON ix.indexrelid = i.oid
            JOIN
                pg_catalog.pg_class t ON t.oid = ix.indrelid
            JOIN
                pg_catalog.pg_namespace n ON n.oid = i.relnamespace
            WHERE
                i.relkind = 'i'
                AND n.nspname = 'public'
                AND t.relkind = 'r'
            ORDER BY
                t.relname, i.relname;
        """)

        existing_indexes = {row[1].lower() for row in session.execute(existing_indexes_query)}
        logger.debug(f"Найдено {len(existing_indexes)} существующих индексов в БД")

        # Проверяем каждую модель и её индексы
        for _model_name, model_class in REGISTRY.items():
            if hasattr(model_class, "__table__") and hasattr(model_class, "__table_args__"):
                table_args = model_class.__table_args__

                # Если table_args - это кортеж, ищем в нём объекты Index
                if isinstance(table_args, tuple):
                    for arg in table_args:
                        if isinstance(arg, Index):
                            index_name = arg.name.lower()

                            # Проверяем, существует ли индекс в БД
                            if index_name not in existing_indexes:
                                logger.info(
                                    f"Создаем отсутствующий индекс {index_name} для таблицы {model_class.__tablename__}"
                                )

                                # Создаем индекс если он отсутствует
                                try:
                                    arg.create(engine)
                                    logger.info(f"Индекс {index_name} успешно создан")
                                except Exception as e:
                                    logger.error(f"Ошибка при создании индекса {index_name}: {e}")
                            else:
                                logger.debug(f"Индекс {index_name} уже существует")

        # Анализируем таблицы для оптимизации запросов
        for model_name, model_class in REGISTRY.items():
            if hasattr(model_class, "__tablename__"):
                try:
                    session.execute(text(f"ANALYZE {model_class.__tablename__}"))
                    logger.debug(f"Таблица {model_class.__tablename__} проанализирована")
                except Exception as e:
                    logger.error(f"Ошибка при анализе таблицы {model_class.__tablename__}: {e}")

    logger.info("Синхронизация индексов завершена.")


# noinspection PyUnusedLocal
def local_session(src=""):
    return Session(bind=engine, expire_on_commit=False)
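For context, a sketch of the kind of model declaration `sync_indexes` picks up; the model, column, and index names here are hypothetical, not from this changeset:

from sqlalchemy import Column, Index, Integer

class Example(Base):  # hypothetical model registered in REGISTRY
    __tablename__ = "example"
    __table_args__ = (Index("idx_example_created_at", "created_at"),)
    created_at = Column(Integer, nullable=False)

# sync_indexes() would create idx_example_created_at if it is missing
# from pg_catalog, then ANALYZE the table.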
@@ -45,25 +154,26 @@ class Base(declarative_base()):
        REGISTRY[cls.__name__] = cls

    def dict(self) -> Dict[str, Any]:
        column_names = filter(
            lambda x: x not in FILTERED_FIELDS, self.__table__.columns.keys()
        )
        column_names = filter(lambda x: x not in FILTERED_FIELDS, self.__table__.columns.keys())
        data = {}
        try:
            data = {}
            for c in column_names:
                value = getattr(self, c)
                if isinstance(value, JSON):
                    # save JSON column as dict
                    data[c] = json.loads(str(value))
            for column_name in column_names:
                value = getattr(self, column_name)
                # Check if the value is JSON and decode it if necessary
                if isinstance(value, (str, bytes)) and isinstance(self.__table__.columns[column_name].type, JSON):
                    try:
                        data[column_name] = orjson.loads(value)
                    except (TypeError, orjson.JSONDecodeError) as e:
                        logger.error(f"Error decoding JSON for column '{column_name}': {e}")
                        data[column_name] = value
                else:
                    data[c] = value
            # Add synthetic field .stat
                    data[column_name] = value
            # Add synthetic field .stat if it exists
            if hasattr(self, "stat"):
                data["stat"] = self.stat
            return data
        except Exception as e:
            logger.error(f"Error occurred while converting object to dictionary: {e}")
            return {}
        return data

    def update(self, values: Dict[str, Any]) -> None:
        for key, value in values.items():
@@ -72,13 +182,11 @@ class Base(declarative_base()):

# make_searchable(Base.metadata)
Base.metadata.create_all(bind=engine)
# Base.metadata.create_all(bind=engine)


# Функция для вывода полного трейсбека при предупреждениях
def warning_with_traceback(
    message: Warning | str, category, filename: str, lineno: int, file=None, line=None
):
def warning_with_traceback(message: Warning | str, category, filename: str, lineno: int, file=None, line=None):
    tb = traceback.format_stack()
    tb_str = "".join(tb)
    return f"{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}"
@@ -89,23 +197,65 @@ warnings.showwarning = warning_with_traceback
warnings.simplefilter("always", exc.SAWarning)


# Функция для извлечения SQL-запроса из контекста
def get_statement_from_context(context):
    query = ""
    compiled = context.compiled
    if compiled:
        compiled_statement = compiled.string
        compiled_parameters = compiled.params
        if compiled_statement:
            if compiled_parameters:
                try:
                    # Безопасное форматирование параметров
                    query = compiled_statement % compiled_parameters
                except Exception as e:
                    logger.error(f"Error formatting query: {e}")
            else:
                query = compiled_statement
        if query:
            query = query.replace("\n", " ").replace("  ", " ").replace("  ", " ").strip()
    return query


# Обработчик события перед выполнением запроса
@event.listens_for(Engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    conn.query_start_time = time.time()
    conn.last_statement = ""
    conn.cursor_id = id(cursor)  # Отслеживание конкретного курсора


# Обработчик события после выполнения запроса
@event.listens_for(Engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    compiled_statement = context.compiled.string
    compiled_parameters = context.compiled.params
    if compiled_statement:
        elapsed = time.time() - conn.query_start_time
        if compiled_parameters is not None:
            query = compiled_statement.format(*compiled_parameters)
        else:
            query = compiled_statement  # or handle this case in a way that makes sense for your application
    if hasattr(conn, "cursor_id") and conn.cursor_id == id(cursor):
        query = get_statement_from_context(context)
        if query:
            elapsed = time.time() - conn.query_start_time
            if elapsed > 1:
                query_end = query[-16:]
                query = query.split(query_end)[0] + query_end
                logger.debug(query)
                elapsed_n = math.floor(elapsed)
                logger.debug("*" * (elapsed_n))
                logger.debug(f"{elapsed:.3f} s")
        del conn.cursor_id  # Удаление идентификатора курсора после выполнения

            if elapsed > 1 and conn.last_statement != query:
                conn.last_statement = query
                logger.debug(f"\n{query}\n{'*' * math.floor(elapsed)} {elapsed:.3f} s\n")

def get_json_builder():
    """
    Возвращает подходящие функции для построения JSON объектов в зависимости от драйвера БД
    """
    dialect = engine.dialect.name
    json_cast = lambda x: x  # noqa: E731
    if dialect.startswith("postgres"):
        json_cast = lambda x: func.cast(x, sqlalchemy.Text)  # noqa: E731
        return func.json_build_object, func.json_agg, json_cast
    elif dialect.startswith("sqlite") or dialect.startswith("mysql"):
        return func.json_object, func.json_group_array, json_cast
    else:
        raise NotImplementedError(f"JSON builder not implemented for dialect {dialect}")


# Используем их в коде
json_builder, json_array_builder, json_cast = get_json_builder()
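A usage sketch for these helpers (hypothetical query; assumes a `Shout` model with `id` and `title` columns, which is not shown in this hunk):

from sqlalchemy import select

# Aggregate rows into a single JSON array server-side:
# json_agg(json_build_object(...)) on Postgres,
# json_group_array(json_object(...)) on SQLite/MySQL.
q = select(json_array_builder(json_builder("id", Shout.id, "title", Shout.title)))
with local_session() as session:
    shouts_json = session.execute(q).scalar()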
@@ -1,9 +0,0 @@
import json
from decimal import Decimal


class CustomJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, Decimal):
            return str(obj)
        return super().default(obj)
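Usage of this encoder, as seen elsewhere in the diff: Decimal values are serialized as strings instead of raising TypeError:

import json
from decimal import Decimal

payload = json.dumps({"price": Decimal("9.90")}, cls=CustomJSONEncoder)
# -> '{"price": "9.90"}'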
17  services/exception.py  Normal file
@@ -0,0 +1,17 @@
import logging

from starlette.middleware.base import BaseHTTPMiddleware
from starlette.responses import JSONResponse

logger = logging.getLogger("exception")
logging.basicConfig(level=logging.DEBUG)


class ExceptionHandlerMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request, call_next):
        try:
            response = await call_next(request)
            return response
        except Exception as exc:
            logger.exception(exc)
            return JSONResponse({"detail": "An error occurred. Please try again later."}, status_code=500)
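A wiring sketch for the new middleware, assuming a Starlette application object that is actually constructed elsewhere in the project:

from starlette.applications import Starlette

from services.exception import ExceptionHandlerMiddleware

app = Starlette()  # hypothetical; the real app lives in the entrypoint module
app.add_middleware(ExceptionHandlerMiddleware)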
@@ -1,11 +0,0 @@
from dogpile.cache import make_region

from settings import REDIS_URL

# Создание региона кэша с TTL
cache_region = make_region()
cache_region.configure(
    "dogpile.cache.redis",
    arguments={"url": f"{REDIS_URL}/1"},
    expiration_time=3600,  # Cache expiration time in seconds
)
@@ -1,9 +1,9 @@
import json
import orjson

from orm.notification import Notification
from services.db import local_session
from services.logger import root_logger as logger
from services.rediscache import redis
from services.redis import redis
from utils.logger import root_logger as logger


def save_notification(action: str, entity: str, payload):
@@ -18,7 +18,7 @@ async def notify_reaction(reaction, action: str = "create"):
    data = {"payload": reaction, "action": action}
    try:
        save_notification(action, channel_name, data.get("payload"))
        await redis.publish(channel_name, json.dumps(data))
        await redis.publish(channel_name, orjson.dumps(data))
    except Exception as e:
        logger.error(f"Failed to publish to channel {channel_name}: {e}")

@@ -28,7 +28,7 @@ async def notify_shout(shout, action: str = "update"):
    data = {"payload": shout, "action": action}
    try:
        save_notification(action, channel_name, data.get("payload"))
        await redis.publish(channel_name, json.dumps(data))
        await redis.publish(channel_name, orjson.dumps(data))
    except Exception as e:
        logger.error(f"Failed to publish to channel {channel_name}: {e}")

@@ -43,7 +43,7 @@ async def notify_follower(follower: dict, author_id: int, action: str = "follow"
    save_notification(action, channel_name, data.get("payload"))

    # Convert data to JSON string
    json_data = json.dumps(data)
    json_data = orjson.dumps(data)

    # Ensure the data is not empty before publishing
    if json_data:
170  services/pretopic.py  Normal file
@@ -0,0 +1,170 @@
import concurrent.futures
from typing import Dict, List, Tuple

from txtai.embeddings import Embeddings

from services.logger import root_logger as logger


class TopicClassifier:
    def __init__(self, shouts_by_topic: Dict[str, str], publications: List[Dict[str, str]]):
        """
        Инициализация классификатора тем и поиска публикаций.
        Args:
            shouts_by_topic: Словарь {тема: текст_всех_публикаций}
            publications: Список публикаций с полями 'id', 'title', 'text'
        """
        self.shouts_by_topic = shouts_by_topic
        self.topics = list(shouts_by_topic.keys())
        self.publications = publications
        self.topic_embeddings = None  # Для классификации тем
        self.search_embeddings = None  # Для поиска публикаций
        self._initialization_future = None
        self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)

    def initialize(self) -> None:
        """
        Асинхронная инициализация векторных представлений.
        """
        if self._initialization_future is None:
            self._initialization_future = self._executor.submit(self._prepare_embeddings)
            logger.info("Векторизация текстов начата в фоновом режиме...")

    def _prepare_embeddings(self) -> None:
        """
        Подготавливает векторные представления для тем и поиска.
        """
        logger.info("Начинается подготовка векторных представлений...")

        # Модель для русского языка
        # TODO: model local caching
        model_path = "sentence-transformers/paraphrase-multilingual-mpnet-base-v2"

        # Инициализируем embeddings для классификации тем
        self.topic_embeddings = Embeddings(path=model_path)
        topic_documents = [(topic, text) for topic, text in self.shouts_by_topic.items()]
        self.topic_embeddings.index(topic_documents)

        # Инициализируем embeddings для поиска публикаций
        self.search_embeddings = Embeddings(path=model_path)
        search_documents = [(str(pub["id"]), f"{pub['title']} {pub['text']}") for pub in self.publications]
        self.search_embeddings.index(search_documents)

        logger.info("Подготовка векторных представлений завершена.")

    def predict_topic(self, text: str) -> Tuple[float, str]:
        """
        Предсказывает тему для заданного текста из известного набора тем.
        Args:
            text: Текст для классификации
        Returns:
            Tuple[float, str]: (уверенность, тема)
        """
        if not self.is_ready():
            logger.error("Векторные представления не готовы. Вызовите initialize() и дождитесь завершения.")
            return 0.0, "unknown"

        try:
            # Ищем наиболее похожую тему
            results = self.topic_embeddings.search(text, 1)
            if not results:
                return 0.0, "unknown"

            score, topic = results[0]
            return float(score), topic

        except Exception as e:
            logger.error(f"Ошибка при определении темы: {str(e)}")
            return 0.0, "unknown"

    def search_similar(self, query: str, limit: int = 5) -> List[Dict[str, any]]:
        """
        Ищет публикации похожие на поисковый запрос.
        Args:
            query: Поисковый запрос
            limit: Максимальное количество результатов
        Returns:
            List[Dict]: Список найденных публикаций с оценкой релевантности
        """
        if not self.is_ready():
            logger.error("Векторные представления не готовы. Вызовите initialize() и дождитесь завершения.")
            return []

        try:
            # Ищем похожие публикации
            results = self.search_embeddings.search(query, limit)

            # Формируем результаты
            found_publications = []
            for score, pub_id in results:
                # Находим публикацию по id
                publication = next((pub for pub in self.publications if str(pub["id"]) == pub_id), None)
                if publication:
                    found_publications.append({**publication, "relevance": float(score)})

            return found_publications

        except Exception as e:
            logger.error(f"Ошибка при поиске публикаций: {str(e)}")
            return []

    def is_ready(self) -> bool:
        """
        Проверяет, готовы ли векторные представления.
        """
        return self.topic_embeddings is not None and self.search_embeddings is not None

    def wait_until_ready(self) -> None:
        """
        Ожидает завершения подготовки векторных представлений.
        """
        if self._initialization_future:
            self._initialization_future.result()

    def __del__(self):
        """
        Очистка ресурсов при удалении объекта.
        """
        if self._executor:
            self._executor.shutdown(wait=False)


# Пример использования:
"""
shouts_by_topic = {
    "Спорт": "... большой текст со всеми спортивными публикациями ...",
    "Технологии": "... большой текст со всеми технологическими публикациями ...",
    "Политика": "... большой текст со всеми политическими публикациями ..."
}

publications = [
    {
        'id': 1,
        'title': 'Новый процессор AMD',
        'text': 'Компания AMD представила новый процессор...'
    },
    {
        'id': 2,
        'title': 'Футбольный матч',
        'text': 'Вчера состоялся решающий матч...'
    }
]

# Создание классификатора
classifier = TopicClassifier(shouts_by_topic, publications)
classifier.initialize()
classifier.wait_until_ready()

# Определение темы текста
text = "Новый процессор показал высокую производительность"
score, topic = classifier.predict_topic(text)
print(f"Тема: {topic} (уверенность: {score:.4f})")

# Поиск похожих публикаций
query = "процессор AMD производительность"
similar_publications = classifier.search_similar(query, limit=3)
for pub in similar_publications:
    print(f"\nНайдена публикация (релевантность: {pub['relevance']:.4f}):")
    print(f"Заголовок: {pub['title']}")
    print(f"Текст: {pub['text'][:100]}...")
"""
@@ -1,6 +1,6 @@
import logging

import redis.asyncio as aredis
from redis.asyncio import Redis

from settings import REDIS_URL

@@ -9,30 +9,33 @@ logger = logging.getLogger("redis")
logger.setLevel(logging.WARNING)


class RedisCache:
class RedisService:
    def __init__(self, uri=REDIS_URL):
        self._uri: str = uri
        self.pubsub_channels = []
        self._client = None

    async def connect(self):
        self._client = aredis.Redis.from_url(self._uri, decode_responses=True)
        if self._uri:
            self._client = await Redis.from_url(self._uri, decode_responses=True)
            logger.info("Redis connection was established.")

    async def disconnect(self):
        if self._client:
        if isinstance(self._client, Redis):
            await self._client.close()
            logger.info("Redis connection was closed.")

    async def execute(self, command, *args, **kwargs):
        if self._client:
            try:
                logger.debug(f"{command} {args} {kwargs}")
                logger.debug(f"{command}")  # {args[0]}") # {args} {kwargs}")
                for arg in args:
                    if isinstance(arg, dict):
                        if arg.get("_sa_instance_state"):
                            del arg["_sa_instance_state"]
                r = await self._client.execute_command(command, *args, **kwargs)
                logger.debug(type(r))
                logger.debug(r)
                # logger.debug(type(r))
                # logger.debug(r)
                return r
            except Exception as e:
                logger.error(e)

@@ -57,7 +60,22 @@ class RedisCache:
            return
        await self._client.publish(channel, data)

    async def set(self, key, data, ex=None):
        # Prepare the command arguments
        args = [key, data]

redis = RedisCache()
        # If an expiration time is provided, add it to the arguments
        if ex is not None:
            args.append("EX")
            args.append(ex)

        # Execute the command with the provided arguments
        await self.execute("set", *args)

    async def get(self, key):
        return await self.execute("get", key)


redis = RedisService()

__all__ = ["redis"]
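A usage sketch for the new `set`/`get` helpers (the coroutine name is hypothetical; assumes `redis.connect()` was awaited at application startup):

from services.redis import redis

async def cache_token(token: str):
    await redis.set("auth:token", token, ex=3600)  # stored with SET ... EX 3600
    return await redis.get("auth:token")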
@@ -1,5 +1,87 @@
from asyncio.log import logger

import httpx
from ariadne import MutationType, QueryType

from services.db import create_table_if_not_exists, local_session
from settings import AUTH_URL

query = QueryType()
mutation = MutationType()
resolvers = [query, mutation]


async def request_graphql_data(gql, url=AUTH_URL, headers=None):
    """
    Выполняет GraphQL запрос к указанному URL

    :param gql: GraphQL запрос
    :param url: URL для запроса, по умолчанию AUTH_URL
    :param headers: Заголовки запроса
    :return: Результат запроса или None в случае ошибки
    """
    if not url:
        return None
    if headers is None:
        headers = {"Content-Type": "application/json"}
    try:
        async with httpx.AsyncClient() as client:
            response = await client.post(url, json=gql, headers=headers)
            if response.status_code == 200:
                data = response.json()
                errors = data.get("errors")
                if errors:
                    logger.error(f"{url} response: {data}")
                else:
                    return data
            else:
                logger.error(f"{url}: {response.status_code} {response.text}")
    except Exception as _e:
        import traceback

        logger.error(f"request_graphql_data error: {traceback.format_exc()}")
    return None


def create_all_tables():
    """Create all database tables in the correct order."""
    from orm import author, community, draft, notification, reaction, shout, topic

    # Порядок важен - сначала таблицы без внешних ключей, затем зависимые таблицы
    models_in_order = [
        # user.User,  # Базовая таблица auth
        author.Author,  # Базовая таблица
        community.Community,  # Базовая таблица
        topic.Topic,  # Базовая таблица
        # Связи для базовых таблиц
        author.AuthorFollower,  # Зависит от Author
        community.CommunityFollower,  # Зависит от Community
        topic.TopicFollower,  # Зависит от Topic
        # Черновики (теперь без зависимости от Shout)
        draft.Draft,  # Зависит только от Author
        draft.DraftAuthor,  # Зависит от Draft и Author
        draft.DraftTopic,  # Зависит от Draft и Topic
        # Основные таблицы контента
        shout.Shout,  # Зависит от Author и Draft
        shout.ShoutAuthor,  # Зависит от Shout и Author
        shout.ShoutTopic,  # Зависит от Shout и Topic
        # Реакции
        reaction.Reaction,  # Зависит от Author и Shout
        shout.ShoutReactionsFollower,  # Зависит от Shout и Reaction
        # Дополнительные таблицы
        author.AuthorRating,  # Зависит от Author
        notification.Notification,  # Зависит от Author
        notification.NotificationSeen,  # Зависит от Notification
        # collection.Collection,
        # collection.ShoutCollection,
        # invite.Invite
    ]

    with local_session() as session:
        for model in models_in_order:
            try:
                create_table_if_not_exists(session.get_bind(), model)
                # logger.info(f"Created or verified table: {model.__tablename__}")
            except Exception as e:
                logger.error(f"Error creating table {model.__tablename__}: {e}")
                raise
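A call sketch for `request_graphql_data`; the operation and fields below are hypothetical, modeled on the authorizer queries used elsewhere in this diff:

gql = {
    "query": "query GetSession { session { user { id email } } }",
    "variables": {},
    "operationName": "GetSession",
}
data = await request_graphql_data(gql)  # posts to AUTH_URL, returns a dict or None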
@@ -1,22 +1,27 @@
import asyncio
import json
import logging
import os

import orjson
from opensearchpy import OpenSearch

from services.encoders import CustomJSONEncoder
from services.logger import root_logger as logger
from services.rediscache import redis
from services.redis import redis
from utils.encoders import CustomJSONEncoder

# Set redis logging level to suppress DEBUG messages
logger = logging.getLogger("search")
logger.setLevel(logging.WARNING)

ELASTIC_HOST = os.environ.get("ELASTIC_HOST", "").replace("https://", "")
ELASTIC_USER = os.environ.get("ELASTIC_USER", "")
ELASTIC_PASSWORD = os.environ.get("ELASTIC_PASSWORD", "")
ELASTIC_PORT = os.environ.get("ELASTIC_PORT", 9200)
ELASTIC_AUTH = f"{ELASTIC_USER}:{ELASTIC_PASSWORD}" if ELASTIC_USER else ""
ELASTIC_URL = os.environ.get(
    "ELASTIC_URL", f"https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}"
    "ELASTIC_URL",
    f"https://{ELASTIC_USER}:{ELASTIC_PASSWORD}@{ELASTIC_HOST}:{ELASTIC_PORT}",
)
REDIS_TTL = 86400  # 1 day in seconds
REDIS_TTL = 86400  # 1 день в секундах

index_settings = {
    "settings": {
@@ -40,25 +45,54 @@ index_settings = {
            "title": {"type": "text", "analyzer": "ru"},
            "subtitle": {"type": "text", "analyzer": "ru"},
            "lead": {"type": "text", "analyzer": "ru"},
            # 'author': {'type': 'text'},
            "media": {"type": "text", "analyzer": "ru"},
        }
    },
}

expected_mapping = index_settings["mappings"]

# Create an event loop
# Создание цикла событий
search_loop = asyncio.get_event_loop()

# В начале файла добавим флаг
SEARCH_ENABLED = bool(os.environ.get("ELASTIC_HOST", ""))


def get_indices_stats():
    indices_stats = search_service.client.cat.indices(format="json")
    for index_info in indices_stats:
        index_name = index_info["index"]
        if not index_name.startswith("."):
            index_health = index_info["health"]
            index_status = index_info["status"]
            pri_shards = index_info["pri"]
            rep_shards = index_info["rep"]
            docs_count = index_info["docs.count"]
            docs_deleted = index_info["docs.deleted"]
            store_size = index_info["store.size"]
            pri_store_size = index_info["pri.store.size"]

            logger.info(f"Index: {index_name}")
            logger.info(f"Health: {index_health}")
            logger.info(f"Status: {index_status}")
            logger.info(f"Primary Shards: {pri_shards}")
            logger.info(f"Replica Shards: {rep_shards}")
            logger.info(f"Documents Count: {docs_count}")
            logger.info(f"Deleted Documents: {docs_deleted}")
            logger.info(f"Store Size: {store_size}")
            logger.info(f"Primary Store Size: {pri_store_size}")


class SearchService:
    def __init__(self, index_name="search_index"):
        logger.info("Инициализируем поиск...")
        self.index_name = index_name
        self.client = None
        self.lock = asyncio.Lock()  # Create an asyncio lock
        self.lock = asyncio.Lock()

        # Only initialize the instance if it's not already initialized
        if ELASTIC_HOST:
        # Инициализация клиента OpenSearch только если поиск включен
        if SEARCH_ENABLED:
            try:
                self.client = OpenSearch(
                    hosts=[{"host": ELASTIC_HOST, "port": ELASTIC_PORT}],
@@ -68,96 +102,118 @@ class SearchService:
                    verify_certs=False,
                    ssl_assert_hostname=False,
                    ssl_show_warn=False,
                    # ca_certs = ca_certs_path
                )
                logger.info(" Клиент OpenSearch.org подключен")

                # Create a task and run it in the event loop
                logger.info("Клиент OpenSearch.org подключен")
                search_loop.create_task(self.check_index())
            except Exception as exc:
                logger.error(f" {exc}")
                logger.warning(f"Поиск отключен из-за ошибки подключения: {exc}")
                self.client = None

    def info(self):
        if isinstance(self.client, OpenSearch):
            logger.info(" Поиск подключен")  # : {self.client.info()}')
        else:
            logger.info(" * Задайте переменные среды для подключения к серверу поиска")
            logger.info("Поиск отключен (ELASTIC_HOST не установлен)")

    async def info(self):
        if not SEARCH_ENABLED:
            return {"status": "disabled"}

        try:
            return get_indices_stats()
        except Exception as e:
            logger.error(f"Failed to get search info: {e}")
            return {"status": "error", "message": str(e)}

    def delete_index(self):
        if self.client:
            logger.debug(f" Удаляем индекс {self.index_name}")
            logger.warning(f"[!!!] Удаляем индекс {self.index_name}")
            self.client.indices.delete(index=self.index_name, ignore_unavailable=True)

    def create_index(self):
        if self.client:
            logger.debug(f"Создается индекс: {self.index_name}")
            self.delete_index()
            logger.info(f"Создается индекс: {self.index_name}")
            self.client.indices.create(index=self.index_name, body=index_settings)
            logger.debug(f"Индекс {self.index_name} создан")
            logger.info(f"Индекс {self.index_name} создан")

    async def check_index(self):
        if self.client:
            logger.debug(f" Проверяем индекс {self.index_name}...")
            logger.info(f"Проверяем индекс {self.index_name}...")
            if not self.client.indices.exists(index=self.index_name):
                self.create_index()
                self.client.indices.put_mapping(
                    index=self.index_name, body=expected_mapping
                )
                self.client.indices.put_mapping(index=self.index_name, body=expected_mapping)
            else:
                logger.info(f"найден существующий индекс {self.index_name}")
                # Check if the mapping is correct, and recreate the index if needed
                logger.info(f"Найден существующий индекс {self.index_name}")
                # Проверка и обновление структуры индекса, если необходимо
                result = self.client.indices.get_mapping(index=self.index_name)
                if isinstance(result, str):
                    result = json.loads(result)
                    result = orjson.loads(result)
                if isinstance(result, dict):
                    mapping = result.get("mapping")
                    if mapping and mapping != expected_mapping:
                        logger.debug(f" найдена структура индексации: {mapping}")
                        logger.warn(
                            " требуется другая структура индексации, переиндексация"
                        )
                        await self.recreate_index()

    async def recreate_index(self):
        if self.client:
            async with self.lock:
                self.client.indices.delete(
                    index=self.index_name, ignore_unavailable=True
                )
                await self.check_index()
                mapping = result.get(self.index_name, {}).get("mappings")
                logger.info(f"Найдена структура индексации: {mapping['properties'].keys()}")
                expected_keys = expected_mapping["properties"].keys()
                if mapping and mapping["properties"].keys() != expected_keys:
                    logger.info(f"Ожидаемая структура индексации: {expected_mapping}")
                    logger.warning("[!!!] Требуется переиндексация всех данных")
                    self.delete_index()
                    self.client = None
        else:
            logger.error("клиент не инициализован, невозможно проверить индекс")

    def index(self, shout):
        if self.client:
            id_ = str(shout.id)
            logger.debug(f" Индексируем пост {id_}")
            asyncio.create_task(self.perform_index(shout))
        if not SEARCH_ENABLED:
            return

    async def perform_index(self, shout):
        if self.client:
            self.client.index(
                index=self.index_name, id=str(shout.id), body=shout.dict()
            )
        logger.info(f"Индексируем пост {shout.id}")
        index_body = {
            "body": shout.body,
            "title": shout.title,
            "subtitle": shout.subtitle,
            "lead": shout.lead,
            "media": shout.media,
        }
        asyncio.create_task(self.perform_index(shout, index_body))

    async def perform_index(self, shout, index_body):
        if self.client:
            try:
                await asyncio.wait_for(
                    self.client.index(index=self.index_name, id=str(shout.id), body=index_body), timeout=40.0
                )
            except asyncio.TimeoutError:
                logger.error(f"Indexing timeout for shout {shout.id}")
            except Exception as e:
                logger.error(f"Indexing error for shout {shout.id}: {e}")

    async def search(self, text, limit, offset):
        logger.debug(f" Ищем: {text}")
        search_body = {"query": {"match": {"_all": text}}}
        if not SEARCH_ENABLED:
            return []

        logger.info(f"Ищем: {text} {offset}+{limit}")
        search_body = {
            "query": {"multi_match": {"query": text, "fields": ["title", "lead", "subtitle", "body", "media"]}}
        }

        if self.client:
            search_response = self.client.search(
                index=self.index_name, body=search_body, size=limit, from_=offset
                index=self.index_name,
                body=search_body,
                size=limit,
                from_=offset,
                _source=False,
                _source_excludes=["title", "body", "subtitle", "media", "lead", "_index"],
            )
            hits = search_response["hits"]["hits"]
            results = [{"id": hit["_id"], "score": hit["_score"]} for hit in hits]

            results = [{**hit["_source"], "score": hit["_score"]} for hit in hits]

            # Use Redis as cache with TTL
            redis_key = f"search:{text}"
            await redis.execute(
                "SETEX",
                redis_key,
                REDIS_TTL,
                json.dumps(results, cls=CustomJSONEncoder),
            )
            # если результаты не пустые
            if results:
                # Кэширование в Redis с TTL
                redis_key = f"search:{text}:{offset}+{limit}"
                await redis.execute(
                    "SETEX",
                    redis_key,
                    REDIS_TTL,
                    json.dumps(results, cls=CustomJSONEncoder),
                )
            return results
        return []
@@ -167,6 +223,10 @@ search_service = SearchService()
async def search_text(text: str, limit: int = 50, offset: int = 0):
    payload = []
    if search_service.client:
        # Use OpenSearchService.search_post method
        # Использование метода search_post из OpenSearchService
        payload = await search_service.search(text, limit, offset)
    return payload


# Проверить что URL корректный
OPENSEARCH_URL = os.getenv("OPENSEARCH_URL", "rc1a-3n5pi3bhuj9gieel.mdb.yandexcloud.net")
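A caller sketch (hypothetical query string); note that `SearchService.search` also caches non-empty result lists in Redis under `search:{text}:{offset}+{limit}` with a one-day TTL:

results = await search_text("культура", limit=10, offset=0)
for hit in results:
    print(hit.get("score"), hit.get("id"))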
@@ -1,3 +1,5 @@
import logging

import sentry_sdk
from sentry_sdk.integrations.ariadne import AriadneIntegration
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
@@ -5,26 +7,24 @@ from sentry_sdk.integrations.starlette import StarletteIntegration

from settings import GLITCHTIP_DSN

logger = logging.getLogger(__name__)
# Настройка логирования для отправки логов в Sentry
sentry_logging_handler = sentry_sdk.integrations.logging.SentryHandler(level=logging.WARNING)
logger.addHandler(sentry_logging_handler)
logger.setLevel(logging.DEBUG)  # Более подробное логирование


def start_sentry():
    # sentry monitoring
    try:
        logger.info("[services.sentry] Sentry init started...")
        sentry_sdk.init(
            GLITCHTIP_DSN,
            # Set traces_sample_rate to 1.0 to capture 100%
            # of transactions for performance monitoring.
            traces_sample_rate=1.0,
            # Set profiles_sample_rate to 1.0 to profile 100%
            # of sampled transactions.
            # We recommend adjusting this value in production.
            profiles_sample_rate=1.0,
            dsn=GLITCHTIP_DSN,
            traces_sample_rate=1.0,  # Захват 100% транзакций
            profiles_sample_rate=1.0,  # Профилирование 100% транзакций
            enable_tracing=True,
            integrations=[
                StarletteIntegration(),
                AriadneIntegration(),
                SqlalchemyIntegration(),
            ],
            integrations=[StarletteIntegration(), AriadneIntegration(), SqlalchemyIntegration()],
            send_default_pii=True,  # Отправка информации о пользователе (PII)
        )
    except Exception as e:
        print("[services.sentry] init error")
        print(e)
        logger.info("[services.sentry] Sentry initialized successfully.")
    except Exception as _e:
        logger.warning("[services.sentry] Failed to initialize Sentry", exc_info=True)
@@ -1,160 +0,0 @@
import asyncio
import json

from sqlalchemy import event, select

from orm.author import Author, AuthorFollower
from orm.reaction import Reaction
from orm.shout import Shout, ShoutAuthor
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
from services.cache import cache_author, cache_follower, cache_follows
from services.encoders import CustomJSONEncoder
from services.logger import root_logger as logger
from services.rediscache import redis

DEFAULT_FOLLOWS = {
    "topics": [],
    "authors": [],
    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
}


async def handle_author_follower_change(
    author_id: int, follower_id: int, is_insert: bool
):
    logger.info(author_id)
    author_query = select(Author).select_from(Author).filter(Author.id == author_id)
    [author] = get_with_stat(author_query)
    follower_query = select(Author).select_from(Author).filter(Author.id == follower_id)
    [follower] = get_with_stat(follower_query)
    if follower and author:
        await cache_author(author.dict())
        await cache_author(follower.dict())
        await cache_follows(follower.dict(), "author", author.dict(), is_insert)
        await cache_follower(follower.dict(), author.dict(), is_insert)


async def handle_topic_follower_change(
    topic_id: int, follower_id: int, is_insert: bool
):
    logger.info(topic_id)
    topic_query = select(Topic).filter(Topic.id == topic_id)
    [topic] = get_with_stat(topic_query)
    follower_query = select(Author).filter(Author.id == follower_id)
    [follower] = get_with_stat(follower_query)
    if follower and topic:
        await cache_author(follower.dict())
        await redis.execute(
            "SET", f"topic:{topic.id}", json.dumps(topic.dict(), cls=CustomJSONEncoder)
        )
        await cache_follows(follower.dict(), "topic", topic.dict(), is_insert)


# handle_author_follow and handle_topic_follow -> cache_author, cache_follows, cache_followers


def after_shout_update(_mapper, _connection, shout: Shout):
    logger.info("after shout update")
    # Main query to get authors associated with the shout through ShoutAuthor
    authors_query = (
        select(Author)
        .select_from(ShoutAuthor)  # Select from ShoutAuthor
        .join(Author, Author.id == ShoutAuthor.author)  # Join with Author
        .filter(ShoutAuthor.shout == shout.id)  # Filter by shout.id
    )

    for author_with_stat in get_with_stat(authors_query):
        asyncio.create_task(cache_author(author_with_stat.dict()))


def after_reaction_update(mapper, connection, reaction: Reaction):
    logger.info("after reaction update")
    try:
        author_subquery = select(Author).where(Author.id == reaction.created_by)
        replied_author_subquery = (
            select(Author)
            .join(Reaction, Author.id == Reaction.created_by)
            .where(Reaction.id == reaction.reply_to)
        )

        author_query = (
            select(author_subquery.subquery())
            .select_from(author_subquery.subquery())
            .union(
                select(replied_author_subquery.subquery()).select_from(
                    replied_author_subquery.subquery()
                )
            )
        )

        for author_with_stat in get_with_stat(author_query):
            asyncio.create_task(cache_author(author_with_stat.dict()))

        shout = connection.execute(
            select(Shout).select_from(Shout).where(Shout.id == reaction.shout)
        ).first()
        if shout:
            after_shout_update(mapper, connection, shout)
    except Exception as exc:
        logger.error(exc)
        import traceback

        traceback.print_exc()


def after_author_update(_mapper, _connection, author: Author):
    logger.info("after author update")
    q = select(Author).where(Author.id == author.id)
    result = get_with_stat(q)
    if result:
        [author_with_stat] = result
        if author_with_stat:
            _task = asyncio.create_task(cache_author(author_with_stat.dict()))


def after_topic_follower_insert(_mapper, _connection, target: TopicFollower):
    logger.info(target)
    asyncio.create_task(
        handle_topic_follower_change(target.topic, target.follower, True)  # type: ignore
    )


def after_topic_follower_delete(_mapper, _connection, target: TopicFollower):
    logger.info(target)
    asyncio.create_task(
        handle_topic_follower_change(target.topic, target.follower, False)  # type: ignore
    )


def after_author_follower_insert(_mapper, _connection, target: AuthorFollower):
    logger.info(target)
    asyncio.create_task(
        handle_author_follower_change(target.author, target.follower, True)  # type: ignore
    )


def after_author_follower_delete(_mapper, _connection, target: AuthorFollower):
    logger.info(target)
    asyncio.create_task(
        handle_author_follower_change(target.author, target.follower, False)  # type: ignore
    )


def events_register():
    event.listen(Shout, "after_insert", after_shout_update)
    event.listen(Shout, "after_update", after_shout_update)

    event.listen(Reaction, "after_insert", after_reaction_update)
    event.listen(Reaction, "after_update", after_reaction_update)

    event.listen(Author, "after_insert", after_author_update)
    event.listen(Author, "after_update", after_author_update)

    event.listen(AuthorFollower, "after_insert", after_author_follower_insert)
    event.listen(AuthorFollower, "after_delete", after_author_follower_delete)

    event.listen(TopicFollower, "after_insert", after_topic_follower_insert)
    event.listen(TopicFollower, "after_delete", after_topic_follower_delete)

    logger.info("cache events were registered!")
@@ -1,24 +0,0 @@
import json

from services.rediscache import redis


async def get_unread_counter(chat_id: str, author_id: int) -> int:
    r = await redis.execute("LLEN", f"chats/{chat_id}/unread/{author_id}")
    if isinstance(r, str):
        return int(r)
    elif isinstance(r, int):
        return r
    else:
        return 0


async def get_total_unread_counter(author_id: int) -> int:
    chats_set = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")
    s = 0
    if isinstance(chats_set, str):
        chats_set = json.loads(chats_set)
    if isinstance(chats_set, list):
        for chat_id in chats_set:
            s += await get_unread_counter(chat_id, author_id)
    return s
@@ -1,10 +1,11 @@
import asyncio
import json
import os
import time
from datetime import datetime, timedelta, timezone
from typing import Dict

import orjson

# ga
from google.analytics.data_v1beta import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import (
@@ -13,12 +14,13 @@ from google.analytics.data_v1beta.types import (
    Metric,
    RunReportRequest,
)
from google.analytics.data_v1beta.types import Filter as GAFilter

from orm.author import Author
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from services.db import local_session
from services.logger import root_logger as logger
from utils.logger import root_logger as logger

GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", "/dump/google-service.json")
GOOGLE_PROPERTY_ID = os.environ.get("GOOGLE_PROPERTY_ID", "")
@@ -27,6 +29,7 @@ VIEWS_FILEPATH = "/dump/views.json"

class ViewedStorage:
    lock = asyncio.Lock()
    precounted_by_slug = {}
    views_by_shout = {}
    shouts_by_topic = {}
    shouts_by_author = {}
@@ -34,7 +37,7 @@ class ViewedStorage:
    period = 60 * 60  # каждый час
    analytics_client: BetaAnalyticsDataClient | None = None
    auth_result = None
    disabled = False
    running = False
    start_date = datetime.now().strftime("%Y-%m-%d")

    @staticmethod
@@ -50,52 +53,52 @@ class ViewedStorage:
        # Using a default constructor instructs the client to use the credentials
        # specified in GOOGLE_APPLICATION_CREDENTIALS environment variable.
        self.analytics_client = BetaAnalyticsDataClient()
        logger.info(" * Клиент Google Analytics успешно авторизован")
        logger.info(" * Google Analytics credentials accepted")

        # Запуск фоновой задачи
        _task = asyncio.create_task(self.worker())
    else:
        logger.info(" * Пожалуйста, добавьте ключевой файл Google Analytics")
        self.disabled = True
        logger.warning(" * please, add Google Analytics credentials file")
        self.running = False

    @staticmethod
    def load_precounted_views():
        """Загрузка предварительно подсчитанных просмотров из файла JSON"""
        self = ViewedStorage
        viewfile_path = VIEWS_FILEPATH
        if not os.path.exists(viewfile_path):
            viewfile_path = os.path.join(os.path.curdir, "views.json")
            if not os.path.exists(viewfile_path):
                logger.warning(" * views.json not found")
                return

        logger.info(f" * loading views from {viewfile_path}")
        try:
            if os.path.exists(VIEWS_FILEPATH):
                start_date_int = os.path.getmtime(VIEWS_FILEPATH)
                start_date_str = datetime.fromtimestamp(start_date_int).strftime(
                    "%Y-%m-%d"
                )
                self.start_date = start_date_str
                now_date = datetime.now().strftime("%Y-%m-%d")
            start_date_int = os.path.getmtime(viewfile_path)
            start_date_str = datetime.fromtimestamp(start_date_int).strftime("%Y-%m-%d")
            self.start_date = start_date_str
            now_date = datetime.now().strftime("%Y-%m-%d")

            if now_date == self.start_date:
                logger.info(" * Данные актуализованы!")
            else:
                logger.warn(
                    f" * Файл просмотров {VIEWS_FILEPATH} устарел: {self.start_date}"
                )

            with open(VIEWS_FILEPATH, "r") as file:
                precounted_views = json.load(file)
                self.views_by_shout.update(precounted_views)
                logger.info(
                    f" * {len(precounted_views)} публикаций с просмотрами успешно загружены."
                )
            if now_date == self.start_date:
                logger.info(" * views data is up to date!")
            else:
                logger.info(" * Файл просмотров не найден.")
                logger.warn(f" * {viewfile_path} is too old: {self.start_date}")

            with open(viewfile_path, "r") as file:
                precounted_views = orjson.loads(file.read())
                self.precounted_by_slug.update(precounted_views)
                logger.info(f" * {len(precounted_views)} shouts with views was loaded.")

        except Exception as e:
            logger.error(f"Ошибка загрузки предварительно подсчитанных просмотров: {e}")
            logger.error(f"precounted views loading error: {e}")

    # noinspection PyTypeChecker
    @staticmethod
    async def update_pages():
        """Запрос всех страниц от Google Analytics, отсортированных по количеству просмотров"""
        self = ViewedStorage
        logger.info(" ⎧ Обновление данных просмотров от Google Analytics ---")
        if not self.disabled:
        logger.info(" ⎧ views update from Google Analytics ---")
        if self.running:
            try:
                start = time.time()
                async with self.lock:
@@ -104,9 +107,7 @@ class ViewedStorage:
                        property=f"properties/{GOOGLE_PROPERTY_ID}",
                        dimensions=[Dimension(name="pagePath")],
                        metrics=[Metric(name="screenPageViews")],
                        date_ranges=[
                            DateRange(start_date=self.start_date, end_date="today")
                        ],
                        date_ranges=[DateRange(start_date=self.start_date, end_date="today")],
                    )
                    response = self.analytics_client.run_report(request)
                    if response and isinstance(response.rows, list):
@@ -120,114 +121,151 @@ class ViewedStorage:
                        if isinstance(row.dimension_values, list):
                            page_path = row.dimension_values[0].value
                            slug = page_path.split("discours.io/")[-1]
                            views_count = int(row.metric_values[0].value)
                            fresh_views = int(row.metric_values[0].value)

                            # Обновление данных в хранилище
                            self.views_by_shout[slug] = self.views_by_shout.get(
                                slug, 0
                            )
                            self.views_by_shout[slug] += views_count
                            self.views_by_shout[slug] = self.views_by_shout.get(slug, 0)
                            self.views_by_shout[slug] += fresh_views
                            self.update_topics(slug)

                            # Запись путей страниц для логирования
                            slugs.add(slug)

                    logger.info(f" ⎪ Собрано страниц: {len(slugs)} ")
                    logger.info(f" ⎪ collected pages: {len(slugs)} ")

                end = time.time()
                logger.info(" ⎪ Обновление страниц заняло %fs " % (end - start))
                logger.info(" ⎪ views update time: %fs " % (end - start))
            except Exception as error:
                logger.error(error)
                self.running = False

    @staticmethod
    async def get_shout(shout_slug) -> int:
        """Получение метрики просмотров shout по slug"""
    def get_shout(shout_slug="", shout_id=0) -> int:
        """Получение метрики просмотров shout по slug или id."""
        self = ViewedStorage
        async with self.lock:
            return self.views_by_shout.get(shout_slug, 0)
        fresh_views = self.views_by_shout.get(shout_slug, 0)
        precounted_views = self.precounted_by_slug.get(shout_slug, 0)
        return fresh_views + precounted_views

    @staticmethod
    async def get_shout_media(shout_slug) -> Dict[str, int]:
        """Получение метрики воспроизведения shout по slug"""
    def get_shout_media(shout_slug) -> Dict[str, int]:
        """Получение метрики воспроизведения shout по slug."""
        self = ViewedStorage
        async with self.lock:
            return self.views_by_shout.get(shout_slug, 0)

        # TODO: get media plays from Google Analytics

        return self.views_by_shout.get(shout_slug, 0)

    @staticmethod
    async def get_topic(topic_slug) -> int:
        """Получение суммарного значения просмотров темы"""
    def get_topic(topic_slug) -> int:
        """Получение суммарного значения просмотров темы."""
        self = ViewedStorage
        topic_views = 0
        async with self.lock:
            for shout_slug in self.shouts_by_topic.get(topic_slug, []):
                topic_views += self.views_by_shout.get(shout_slug, 0)
        return topic_views
        return sum(self.views_by_shout.get(shout_slug, 0) for shout_slug in self.shouts_by_topic.get(topic_slug, []))

    @staticmethod
    async def get_author(author_slug) -> int:
        """Получение суммарного значения просмотров автора"""
    def get_author(author_slug) -> int:
        """Получение суммарного значения просмотров автора."""
        self = ViewedStorage
        author_views = 0
        async with self.lock:
            for shout_slug in self.shouts_by_author.get(author_slug, []):
                author_views += self.views_by_shout.get(shout_slug, 0)
        return author_views
        return sum(self.views_by_shout.get(shout_slug, 0) for shout_slug in self.shouts_by_author.get(author_slug, []))

    @staticmethod
    def update_topics(shout_slug):
        """Обновление счетчиков темы по slug shout"""
        self = ViewedStorage
        with local_session() as session:
            # Определение вспомогательной функции для избежания повторения кода
            def update_groups(dictionary, key, value):
                dictionary[key] = list(set(dictionary.get(key, []) + [value]))

            # Обновление тем и авторов с использованием вспомогательной функции
            for [_shout_topic, topic] in (
                session.query(ShoutTopic, Topic)
                .join(Topic)
                .join(Shout)
                .where(Shout.slug == shout_slug)
                .all()
            for [_st, topic] in (
                session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(Shout.slug == shout_slug).all()
            ):
                update_groups(self.shouts_by_topic, topic.slug, shout_slug)

            for [_shout_topic, author] in (
                session.query(ShoutAuthor, Author)
                .join(Author)
                .join(Shout)
                .where(Shout.slug == shout_slug)
                .all()
            for [_st, author] in (
                session.query(ShoutAuthor, Author).join(Author).join(Shout).where(Shout.slug == shout_slug).all()
            ):
                update_groups(self.shouts_by_author, author.slug, shout_slug)

    @staticmethod
    async def stop():
        """Остановка фоновой задачи"""
        self = ViewedStorage
        async with self.lock:
            self.running = False
            logger.info("ViewedStorage worker was stopped.")

    @staticmethod
    async def worker():
        """Асинхронная задача обновления"""
        failed = 0
        self = ViewedStorage
        if self.disabled:
            return

        while True:
        while self.running:
            try:
                await self.update_pages()
                failed = 0
            except Exception as exc:
                failed += 1
                logger.debug(exc)
                logger.info(" - Обновление не удалось #%d, ожидание 10 секунд" % failed)
                logger.info(" - update failed #%d, wait 10 secs" % failed)
                if failed > 3:
                    logger.info(" - Больше не пытаемся обновить")
                    logger.info(" - views update failed, not trying anymore")
                    self.running = False
                    break
            if failed == 0:
                when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
                t = format(when.astimezone().isoformat())
                logger.info(
                    " ⎩ Следующее обновление: %s"
                    % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
                )
                logger.info(" ⎩ next update: %s" % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0]))
                await asyncio.sleep(self.period)
            else:
                await asyncio.sleep(10)
                logger.info(" - Попытка снова обновить данные")
                logger.info(" - try to update views again")

    @staticmethod
    async def update_slug_views(slug: str) -> int:
        """
        Получает свежую статистику просмотров для указанного slug.

        Args:
            slug: Идентификатор страницы

        Returns:
            int: Количество просмотров
        """
        self = ViewedStorage
        if not self.analytics_client:
            logger.warning("Google Analytics client not initialized")
            return 0

        try:
            # Создаем фильтр для точного совпадения конца URL
            request = RunReportRequest(
                property=f"properties/{GOOGLE_PROPERTY_ID}",
                date_ranges=[DateRange(start_date=self.start_date, end_date="today")],
                dimensions=[Dimension(name="pagePath")],
                dimension_filter=GAFilter(
                    field_name="pagePath",
                    string_filter=GAFilter.StringFilter(
                        value=f".*/{slug}$",  # Используем регулярное выражение для точного совпадения конца URL
                        match_type=GAFilter.StringFilter.MatchType.FULL_REGEXP,
                        case_sensitive=False,  # Регистр не учитывается
                    ),
                ),
                metrics=[Metric(name="screenPageViews")],
            )

            response = self.analytics_client.run_report(request)

            if not response.rows:
                return 0

            views = int(response.rows[0].metric_values[0].value)
            # Кэшируем результат
            self.views_by_shout[slug] = views
            return views

        except Exception as e:
            logger.error(f"Google Analytics API Error: {e}")
            return 0
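A call sketch for the new method (the slug is hypothetical); the fetched count is also cached back into `views_by_shout`:

views = await ViewedStorage.update_slug_views("example-slug")
print(f"GA views since {ViewedStorage.start_date}: {views}")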
@@ -1,13 +1,124 @@
import asyncio
import os
import re
from asyncio.log import logger

from sqlalchemy import select
from starlette.endpoints import HTTPEndpoint
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse

from cache.cache import cache_author
from orm.author import Author
from resolvers.stat import get_with_stat
from services.db import local_session
from services.schema import request_graphql_data
from settings import ADMIN_SECRET, WEBHOOK_SECRET


async def check_webhook_existence():
    """
    Check whether a webhook for the user.login event exists.

    Returns:
        tuple: (bool, str, str) - whether the webhook exists, plus its id and endpoint if it does
    """
    logger.info("check_webhook_existence called")
    if not ADMIN_SECRET:
        logger.error("ADMIN_SECRET is not set")
        return False, None, None

    headers = {"Content-Type": "application/json", "X-Authorizer-Admin-Secret": ADMIN_SECRET}

    operation = "GetWebhooks"
    query_name = "_webhooks"
    variables = {"params": {}}
    # https://docs.authorizer.dev/core/graphql-api#_webhooks
    gql = {
        "query": f"query {operation}($params: PaginatedInput!)"
        + "{"
        + f"{query_name}(params: $params) {{ webhooks {{ id event_name endpoint }} }} "
        + "}",
        "variables": variables,
        "operationName": operation,
    }
    result = await request_graphql_data(gql, headers=headers)
    if result:
        webhooks = result.get("data", {}).get(query_name, {}).get("webhooks", [])
        logger.info(webhooks)
        for webhook in webhooks:
            if webhook["event_name"].startswith("user.login"):
                return True, webhook["id"], webhook["endpoint"]
    return False, None, None


async def create_webhook_endpoint():
    """
    Create a webhook for the user.login event.
    If an old webhook exists, delete it and create a new one.
    """
    logger.info("create_webhook_endpoint called")

    headers = {"Content-Type": "application/json", "X-Authorizer-Admin-Secret": ADMIN_SECRET}

    exists, webhook_id, current_endpoint = await check_webhook_existence()

    # Pick the endpoint depending on the environment
    host = os.environ.get("HOST", "core.dscrs.site")
    endpoint = f"https://{host}/new-author"

    if exists:
        # The webhook exists but with a different endpoint or a modified name
        if current_endpoint != endpoint or webhook_id:
            # https://docs.authorizer.dev/core/graphql-api#_delete_webhook
            operation = "DeleteWebhook"
            query_name = "_delete_webhook"
            variables = {"params": {"id": webhook_id}}  # renamed from id to webhook_id
            gql = {
                "query": f"mutation {operation}($params: WebhookRequest!)"
                + "{"
                + f"{query_name}(params: $params) {{ message }} "
                + "}",
                "variables": variables,
                "operationName": operation,
            }
            try:
                await request_graphql_data(gql, headers=headers)
                exists = False
            except Exception as e:
                logger.error(f"Failed to delete webhook: {e}")
                # Keep going even if deletion failed
                exists = False
        else:
            logger.info(f"Webhook already exists and configured correctly: {webhook_id}")
            return

    if not exists:
        # https://docs.authorizer.dev/core/graphql-api#_add_webhook
        operation = "AddWebhook"
        query_name = "_add_webhook"
        variables = {
            "params": {
                "event_name": "user.login",
                "endpoint": endpoint,
                "enabled": True,
                "headers": {"Authorization": WEBHOOK_SECRET},
            }
        }
        gql = {
            "query": f"mutation {operation}($params: AddWebhookRequest!)"
            + "{"
            + f"{query_name}(params: $params) {{ message }} "
            + "}",
            "variables": variables,
            "operationName": operation,
        }
        try:
            result = await request_graphql_data(gql, headers=headers)
            logger.info(result)
        except Exception as e:
            logger.error(f"Failed to create webhook: {e}")


class WebhookEndpoint(HTTPEndpoint):
@@ -18,15 +129,11 @@ class WebhookEndpoint(HTTPEndpoint):
                raise HTTPException(status_code=400, detail="Request body is empty")
            auth = request.headers.get("Authorization")
            if not auth or auth != os.environ.get("WEBHOOK_SECRET"):
                raise HTTPException(status_code=401, detail="Invalid Authorization header")
            # logger.debug(data)
            user = data.get("user")
            if not isinstance(user, dict):
                raise HTTPException(status_code=400, detail="User data is not a dictionary")
            name: str = (
                f"{user.get('given_name', user.get('slug'))} {user.get('middle_name', '')}"
@@ -37,31 +144,30 @@ class WebhookEndpoint(HTTPEndpoint):
            pic: str = user.get("picture", "")
            if user_id:
                with local_session() as session:
                    author = session.query(Author).filter(Author.user == user_id).first()
                    if not author:
                        # If the author does not exist, create a new one
                        slug: str = email.split("@")[0].replace(".", "-").lower()
                        slug: str = re.sub("[^0-9a-z]+", "-", slug)
                        while True:
                            author = session.query(Author).filter(Author.slug == slug).first()
                            if not author:
                                break
                            slug = f"{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}"
                        author = Author(user=user_id, slug=slug, name=name, pic=pic)
                        session.add(author)
                        session.commit()
                    author_query = select(Author).filter(Author.user == user_id)
                    result = get_with_stat(author_query)
                    if result:
                        author_with_stat = result[0]
                        author_dict = author_with_stat.dict()
                        # await cache_author(author_with_stat)
                        asyncio.create_task(cache_author(author_dict))

            return JSONResponse({"status": "success"})
        except HTTPException as e:
            return JSONResponse({"status": "error", "message": str(e.detail)}, status_code=e.status_code)
        except Exception as e:
            import traceback
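A rough local-testing sketch (not part of the diff): mounting WebhookEndpoint in a throwaway Starlette app and posting a fake user.login payload. The module path services.webhook and the field names in the fake payload are assumptions about the Authorizer event schema; the route path "/new-author" matches the endpoint registered above, and the full handler still needs the database and cache services to be available.

import os

from starlette.applications import Starlette
from starlette.routing import Route
from starlette.testclient import TestClient

from services.webhook import WebhookEndpoint  # assumed module path

os.environ["WEBHOOK_SECRET"] = "test-secret"  # must match the Authorization header below

app = Starlette(routes=[Route("/new-author", WebhookEndpoint)])
client = TestClient(app)

response = client.post(
    "/new-author",
    json={"user": {"id": "user-1", "given_name": "Test", "email": "test@example.com"}},
    headers={"Authorization": "test-secret"},
)
print(response.status_code, response.json())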
26  settings.py
@@ -1,17 +1,29 @@
import sys
from os import environ

PORT = environ.get("PORT") or 8000

# storages
DB_URL = (
    environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://")
    or environ.get("DB_URL", "").replace("postgres://", "postgresql://")
    or "postgresql://postgres@localhost:5432/discoursio"
    or "sqlite:///discoursio.db"  # note: unreachable, the previous literal is always truthy
)
REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1"
API_BASE = environ.get("API_BASE") or ""

DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
MODE = "development" if "dev" in sys.argv else "production"

# debug
GLITCHTIP_DSN = environ.get("GLITCHTIP_DSN")

# authorizer.dev
AUTH_URL = environ.get("AUTH_URL") or "https://auth.discours.io/graphql"
ADMIN_SECRET = environ.get("AUTH_SECRET") or "nothing"
WEBHOOK_SECRET = environ.get("WEBHOOK_SECRET") or "nothing-else"

# own auth
ONETIME_TOKEN_LIFE_SPAN = 60 * 60 * 24 * 3  # 3 days
SESSION_TOKEN_LIFE_SPAN = 60 * 60 * 24 * 30  # 30 days
JWT_ALGORITHM = "HS256"
JWT_SECRET_KEY = environ.get("JWT_SECRET") or "nothing-else-jwt-secret-matters"
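For illustration (not from the diff), the DB_URL chain resolves to the first truthy operand, with the postgres:// scheme rewritten to postgresql:// for SQLAlchemy 1.4+, which dropped the old alias:

from os import environ

environ["DATABASE_URL"] = "postgres://user:pass@db:5432/discoursio"

DB_URL = (
    environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://")
    or environ.get("DB_URL", "").replace("postgres://", "postgresql://")
    or "postgresql://postgres@localhost:5432/discoursio"
)
print(DB_URL)  # postgresql://user:pass@db:5432/discoursio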
60  tests/conftest.py (new file)
@@ -0,0 +1,60 @@
import asyncio
import os

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from starlette.testclient import TestClient

from main import app
from services.db import Base
from services.redis import redis

# Use SQLite for testing
TEST_DB_URL = "sqlite:///test.db"


@pytest.fixture(scope="session")
def event_loop():
    """Create an instance of the default event loop for the test session."""
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    loop.close()


@pytest.fixture(scope="session")
def test_engine():
    """Create a test database engine."""
    engine = create_engine(TEST_DB_URL)
    Base.metadata.create_all(engine)
    yield engine
    Base.metadata.drop_all(engine)
    os.remove("test.db")


@pytest.fixture
def db_session(test_engine):
    """Create a new database session for a test."""
    connection = test_engine.connect()
    transaction = connection.begin()
    session = Session(bind=connection)

    yield session

    session.close()
    transaction.rollback()
    connection.close()


@pytest.fixture
async def redis_client():
    """Create a test Redis client."""
    await redis.connect()
    yield redis
    await redis.disconnect()


@pytest.fixture
def test_client():
    """Create a TestClient instance."""
    return TestClient(app)
94  tests/test_drafts.py (new file)
@@ -0,0 +1,94 @@
import pytest

from orm.author import Author
from orm.shout import Shout


@pytest.fixture
def test_author(db_session):
    """Create a test author."""
    author = Author(name="Test Author", slug="test-author", user="test-user-id")
    db_session.add(author)
    db_session.commit()
    return author


@pytest.fixture
def test_shout(db_session):
    """Create test shout with required fields."""
    author = Author(name="Test Author", slug="test-author", user="test-user-id")
    db_session.add(author)
    db_session.flush()

    shout = Shout(
        title="Test Shout",
        slug="test-shout",
        created_by=author.id,  # required field
        body="Test body",
        layout="article",
        lang="ru",
    )
    db_session.add(shout)
    db_session.commit()
    return shout


@pytest.mark.asyncio
async def test_create_shout(test_client, db_session, test_author):
    """Test creating a new shout."""
    response = test_client.post(
        "/",
        json={
            "query": """
            mutation CreateDraft($draft_input: DraftInput!) {
                create_draft(draft_input: $draft_input) {
                    error
                    draft {
                        id
                        title
                        body
                    }
                }
            }
            """,
            "variables": {
                # key must match the $draft_input variable declared in the mutation
                "draft_input": {
                    "title": "Test Shout",
                    "body": "This is a test shout",
                }
            },
        },
    )

    assert response.status_code == 200
    data = response.json()
    assert "errors" not in data
    assert data["data"]["create_draft"]["draft"]["title"] == "Test Shout"


@pytest.mark.asyncio
async def test_load_drafts(test_client, db_session):
    """Test loading drafts."""
    response = test_client.post(
        "/",
        json={
            "query": """
            query {
                load_drafts {
                    error
                    drafts {
                        id
                        title
                        body
                    }
                }
            }
            """,
        },
    )

    assert response.status_code == 200
    data = response.json()
    assert "errors" not in data
    assert data["data"]["load_drafts"]["drafts"] == []
64  tests/test_reactions.py (new file)
@@ -0,0 +1,64 @@
from datetime import datetime

import pytest

from orm.author import Author
from orm.reaction import ReactionKind
from orm.shout import Shout


@pytest.fixture
def test_setup(db_session):
    """Set up test data."""
    now = int(datetime.now().timestamp())
    author = Author(name="Test Author", slug="test-author", user="test-user-id")
    db_session.add(author)
    db_session.flush()

    shout = Shout(
        title="Test Shout",
        slug="test-shout",
        created_by=author.id,
        body="This is a test shout",
        layout="article",
        lang="ru",
        community=1,
        created_at=now,
        updated_at=now,
    )
    db_session.add_all([author, shout])
    db_session.commit()
    return {"author": author, "shout": shout}


@pytest.mark.asyncio
async def test_create_reaction(test_client, db_session, test_setup):
    """Test creating a reaction on a shout."""
    response = test_client.post(
        "/",
        json={
            "query": """
            mutation CreateReaction($reaction: ReactionInput!) {
                create_reaction(reaction: $reaction) {
                    error
                    reaction {
                        id
                        kind
                        body
                        created_by {
                            name
                        }
                    }
                }
            }
            """,
            "variables": {
                "reaction": {"shout": test_setup["shout"].id, "kind": ReactionKind.LIKE.value, "body": "Great post!"}
            },
        },
    )

    assert response.status_code == 200
    data = response.json()
    assert "errors" not in data  # the GraphQL top-level key is "errors"
    assert data["data"]["create_reaction"]["reaction"]["kind"] == ReactionKind.LIKE.value
85  tests/test_shouts.py (new file)
@@ -0,0 +1,85 @@
from datetime import datetime

import pytest

from orm.author import Author
from orm.shout import Shout


@pytest.fixture
def test_shout(db_session):
    """Create test shout with required fields."""
    author = Author(name="Test Author", slug="test-author", user="test-user-id")
    db_session.add(author)
    db_session.flush()

    now = int(datetime.now().timestamp())

    shout = Shout(
        title="Test Shout",
        slug="test-shout",
        created_by=author.id,
        body="Test body",
        layout="article",
        lang="ru",
        community=1,
        created_at=now,
        updated_at=now,
    )
    db_session.add(shout)
    db_session.commit()
    return shout


@pytest.mark.asyncio
async def test_get_shout(test_client, db_session):
    """Test retrieving a shout."""
    # Create an author
    author = Author(name="Test Author", slug="test-author", user="test-user-id")
    db_session.add(author)
    db_session.flush()
    now = int(datetime.now().timestamp())

    # Create a publication with all required fields
    shout = Shout(
        title="Test Shout",
        body="This is a test shout",
        slug="test-shout",
        created_by=author.id,
        layout="article",
        lang="ru",
        community=1,
        created_at=now,
        updated_at=now,
    )
    db_session.add(shout)
    db_session.commit()

    response = test_client.post(
        "/",
        json={
            "query": """
            query GetShout($slug: String!) {
                get_shout(slug: $slug) {
                    id
                    title
                    body
                    created_at
                    updated_at
                    created_by {
                        id
                        name
                        slug
                    }
                }
            }
            """,
            "variables": {"slug": "test-shout"},
        },
    )

    data = response.json()
    assert response.status_code == 200
    assert "errors" not in data
    assert data["data"]["get_shout"]["title"] == "Test Shout"
70  tests/test_validations.py (new file)
@@ -0,0 +1,70 @@
from datetime import datetime, timedelta

import pytest
from pydantic import ValidationError

from auth.validations import (
    AuthInput,
    AuthResponse,
    TokenPayload,
    UserRegistrationInput,
)


class TestAuthValidations:
    def test_auth_input(self):
        """Test basic auth input validation"""
        # Valid case
        auth = AuthInput(user_id="123", username="testuser", token="1234567890abcdef1234567890abcdef")
        assert auth.user_id == "123"
        assert auth.username == "testuser"

        # Invalid cases
        with pytest.raises(ValidationError):
            AuthInput(user_id="", username="test", token="x" * 32)

        with pytest.raises(ValidationError):
            AuthInput(user_id="123", username="t", token="x" * 32)

    def test_user_registration(self):
        """Test user registration validation"""
        # Valid case
        user = UserRegistrationInput(email="test@example.com", password="SecurePass123!", name="Test User")
        assert user.email == "test@example.com"
        assert user.name == "Test User"

        # Test email validation
        with pytest.raises(ValidationError) as exc:
            UserRegistrationInput(email="invalid-email", password="SecurePass123!", name="Test")
        assert "Invalid email format" in str(exc.value)

        # Test password validation
        with pytest.raises(ValidationError) as exc:
            UserRegistrationInput(email="test@example.com", password="weak", name="Test")
        assert "String should have at least 8 characters" in str(exc.value)

    def test_token_payload(self):
        """Test token payload validation"""
        now = datetime.utcnow()
        exp = now + timedelta(hours=1)

        payload = TokenPayload(user_id="123", username="testuser", exp=exp, iat=now)
        assert payload.user_id == "123"
        assert payload.username == "testuser"
        assert payload.scopes == []  # Default empty list

    def test_auth_response(self):
        """Test auth response validation"""
        # Success case
        success_resp = AuthResponse(success=True, token="valid_token", user={"id": "123", "name": "Test"})
        assert success_resp.success is True
        assert success_resp.token == "valid_token"

        # Error case
        error_resp = AuthResponse(success=False, error="Invalid credentials")
        assert error_resp.success is False
        assert error_resp.error == "Invalid credentials"

        # Invalid case: the token field is required when success=True
        with pytest.raises(ValidationError):
            AuthResponse(success=True, user={"id": "123", "name": "Test"})
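The validation module itself (auth/validations.py) is not part of this diff, so for orientation here is a minimal pydantic v2 sketch consistent with the assertions above; field names, the regex, and the error messages are inferred from the tests, not copied from the source.

import re

from pydantic import BaseModel, Field, field_validator


class UserRegistrationInput(BaseModel):
    email: str
    name: str
    # min_length=8 yields pydantic's "String should have at least 8 characters"
    password: str = Field(min_length=8)

    @field_validator("email")
    @classmethod
    def validate_email(cls, v: str) -> str:
        if not re.match(r"[^@\s]+@[^@\s]+\.[^@\s]+$", v):
            raise ValueError("Invalid email format")
        return v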
28  utils/encoders.py (new file)
@@ -0,0 +1,28 @@
from decimal import Decimal
from json import JSONEncoder


class CustomJSONEncoder(JSONEncoder):
    """
    Extended JSON encoder with support for serializing SQLAlchemy objects.

    Examples:
        >>> import json
        >>> from decimal import Decimal
        >>> from orm.topic import Topic
        >>> json.dumps(Decimal("10.50"), cls=CustomJSONEncoder)
        '"10.50"'
        >>> topic = Topic(id=1, slug="test")
        >>> json.dumps(topic, cls=CustomJSONEncoder)
        '{"id": 1, "slug": "test", ...}'
    """

    def default(self, obj):
        if isinstance(obj, Decimal):
            return str(obj)

        # Check whether the object has a callable dict() method (as these SQLAlchemy models do)
        if hasattr(obj, "dict") and callable(obj.dict):
            return obj.dict()

        return super().default(obj)
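A hedged usage sketch (not part of the diff): wiring CustomJSONEncoder into a Starlette JSONResponse subclass so API payloads can carry Decimal values or models exposing a dict() method. The subclass name is illustrative; CustomJSONEncoder and its module path are taken from the file above.

import json
from decimal import Decimal

from starlette.responses import JSONResponse

from utils.encoders import CustomJSONEncoder  # module path as in the diff


class CustomJSONResponse(JSONResponse):
    def render(self, content) -> bytes:
        # Delegate serialization to the custom encoder instead of the default one
        return json.dumps(content, cls=CustomJSONEncoder).encode("utf-8")


print(CustomJSONResponse({"price": Decimal("10.50")}).body)  # b'{"price": "10.50"}'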
@@ -1,15 +1,38 @@
import logging
from pathlib import Path

import colorlog

_lib_path = Path(__file__).parents[1]
_leng_path = len(_lib_path.as_posix())


def filter(record: logging.LogRecord):
    # Define `package` attribute with the relative path.
    record.package = record.pathname[_leng_path + 1 :].replace(".py", "")
    record.emoji = (
        "🔍"
        if record.levelno == logging.DEBUG
        else "ℹ︎"
        if record.levelno == logging.INFO
        else "🚧"
        if record.levelno == logging.WARNING
        else "❌"
        if record.levelno == logging.ERROR
        else "🧨"
        if record.levelno == logging.CRITICAL
        else ""
    )
    return record


# Define the color scheme
color_scheme = {
    "DEBUG": "light_black",
    "INFO": "green",
    "WARNING": "yellow",
    "ERROR": "red",
    "CRITICAL": "red,bg_white",
    "DEFAULT": "white",
}

# Define secondary log colors
@@ -17,12 +40,12 @@ secondary_colors = {
    "log_name": {"DEBUG": "blue"},
    "asctime": {"DEBUG": "cyan"},
    "process": {"DEBUG": "purple"},
    "module": {"DEBUG": "light_black,bg_blue"},
    "funcName": {"DEBUG": "light_white,bg_blue"},
}

# Define the log format string
fmt_string = "%(emoji)s%(log_color)s%(package)s.%(funcName)s%(reset)s %(white)s%(message)s"

# Define formatting configuration
fmt_config = {
@@ -40,6 +63,10 @@ class MultilineColoredFormatter(colorlog.ColoredFormatter):
        self.secondary_log_colors = kwargs.pop("secondary_log_colors", {})

    def format(self, record):
        # Add default emoji if not present
        if not hasattr(record, "emoji"):
            record = filter(record)

        message = record.getMessage()
        if "\n" in message:
            lines = message.split("\n")
@@ -61,20 +88,24 @@ formatter = MultilineColoredFormatter(fmt_string, **fmt_config)
stream = logging.StreamHandler()
stream.setFormatter(formatter)


def get_colorful_logger(name="main"):
    # Create and configure the logger
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(stream)
    logger.addFilter(filter)

    return logger


# Set up the root logger with the same formatting
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
root_logger.addHandler(stream)
root_logger.addFilter(filter)

ignore_logs = ["_trace", "httpx", "_client", "atrace", "aiohttp"]  # "_client" deduplicated
for lgr in ignore_logs:
    loggr = logging.getLogger(lgr)
    loggr.setLevel(logging.INFO)
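A short usage sketch for the logging module above (the import path utils.logger is an assumption based on this branch's file layout):

from utils.logger import get_colorful_logger

logger = get_colorful_logger("services.viewed")
logger.debug("cache warmed")       # 🔍 prefix, light_black
logger.info("update scheduled")    # ℹ︎ prefix, green
logger.error("GA client missing")  # ❌ prefix, red
# Note: the module also attaches the same handler to the root logger, so
# records propagating upward may print twice unless propagation is disabled
# (logger.propagate = False).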