mirror of
https://gitlab.archlinux.org/archlinux/aurweb.git
synced 2025-02-03 10:43:03 +01:00
Compare commits
1493 commits
Author | SHA1 | Date | |
---|---|---|---|
|
8ca61eded2 | ||
|
a9bf714dae | ||
|
3e3173b5c9 | ||
|
eca8bbf515 | ||
|
edc1ab949a | ||
|
97cc6196eb | ||
|
77ef87c882 | ||
|
a40283cdb2 | ||
|
4f68532ee2 | ||
|
439ccd4aa3 | ||
|
8dcf0b2d97 | ||
|
88e8db4404 | ||
|
b730f6447d | ||
|
92f5bbd37f | ||
|
6c6ecd3971 | ||
|
9b12eaf2b9 | ||
|
d1a66a743e | ||
|
b65d6c5e3a | ||
|
d393ed2352 | ||
|
a16fac9b95 | ||
|
5dd65846d1 | ||
|
a1b2d231c3 | ||
|
f306b6df7a | ||
|
0d17895647 | ||
|
36a56e9d3c | ||
|
80d3e5f7b6 | ||
|
2df5a2d5a8 | ||
|
a54b6935a1 | ||
|
4d5909256f | ||
|
a5b94a47f3 | ||
|
33d31d4117 | ||
|
ed878c8c5e | ||
|
77e4979f79 | ||
|
85af7d6f04 | ||
|
ef0619dc2f | ||
|
43b322e739 | ||
|
afb7af3e27 | ||
|
ffddf63975 | ||
|
c6a530f24f | ||
|
3220cf886e | ||
|
21e2ef5ecb | ||
|
6ba06801f7 | ||
|
21a23c9abe | ||
|
d050b626db | ||
|
057685f304 | ||
|
319c565cb9 | ||
|
db6bba8bc8 | ||
|
a37b9685de | ||
|
6e32cf4275 | ||
|
76b6971267 | ||
|
9818c3f48c | ||
|
f967c3565a | ||
|
2fcd793a58 | ||
|
22e1577324 | ||
|
baf97bd159 | ||
|
a0b2e826be | ||
|
1ba9e6eb44 | ||
|
1b82887cd6 | ||
|
783422369e | ||
|
4637b2edba | ||
|
027dfbd970 | ||
|
8b234c580d | ||
|
9bf0c61051 | ||
|
9d5b9c4795 | ||
|
765f989b7d | ||
|
029ce3b418 | ||
|
3241391af0 | ||
|
5d302ae00c | ||
|
933654fcbb | ||
|
40c1d3e8ee | ||
|
2b8c8fc92a | ||
|
27c51430fb | ||
|
27cd533654 | ||
|
2166426d4c | ||
|
fd3022ff6c | ||
|
9e9ba15813 | ||
|
d2d47254b4 | ||
|
87f6791ea8 | ||
|
61f1e5b399 | ||
|
148c882501 | ||
|
f540c79580 | ||
|
1702075875 | ||
|
7466e96449 | ||
|
0a7b02956f | ||
|
1433553c05 | ||
|
5699e9bb41 | ||
|
9eda6a42c6 | ||
|
6c610b26a3 | ||
|
3005e82f60 | ||
|
f05f1dbac7 | ||
|
8ad03522de | ||
|
94b62d2949 | ||
|
7a44f37968 | ||
|
969b84afe4 | ||
|
f74f94b501 | ||
|
375895f080 | ||
|
e45878a058 | ||
|
6cd70a5c9f | ||
|
8699457917 | ||
|
44c158b8c2 | ||
|
347c2ce721 | ||
|
bc03d8b8f2 | ||
|
5729d6787f | ||
|
862221f5ce | ||
|
27819b4465 | ||
|
fa1212f2de | ||
|
c0bbe21d81 | ||
|
5ccfa7c0fd | ||
|
225ce23761 | ||
|
4821fc1312 | ||
|
1f40f6c5a0 | ||
|
81d29b4c66 | ||
|
7cde1ca560 | ||
|
f3f8c0a871 | ||
|
9fe8d524ff | ||
|
814ccf6b04 | ||
|
3acfb08a0f | ||
|
7c8b9ba6bc | ||
|
c41f2e854a | ||
|
e2c113caee | ||
|
143575c9de | ||
|
c6c81f0789 | ||
|
32461f28ea | ||
|
58158505b0 | ||
|
ed17486da6 | ||
|
1c11c901a2 | ||
|
26b2566b3f | ||
|
e9cc2fb437 | ||
|
ed2f85ad04 | ||
|
2709585a70 | ||
|
d1a3fee9fe | ||
|
49e98d64f4 | ||
|
a7882c7533 | ||
|
22fe4a988a | ||
|
0807ae6b7c | ||
|
d366377231 | ||
|
57c154a72c | ||
|
638ca7b1d0 | ||
|
edc4ac332d | ||
|
2eacc84cd0 | ||
|
5fe375bdc3 | ||
|
1b41e8572a | ||
|
7a88aeb673 | ||
|
f24fae0ce6 | ||
|
acdb2864de | ||
|
146943b3b6 | ||
|
d0b0e4d88b | ||
|
3253a6ad29 | ||
|
d2e8fa0249 | ||
|
1d627edbe7 | ||
|
b115aedf97 | ||
|
af4239bcac | ||
|
a8d14e0194 | ||
|
8c5b85db5c | ||
|
b3fcfb7679 | ||
|
e896edaccc | ||
|
bab17a9d26 | ||
|
ad61c443f4 | ||
|
8ca63075e9 | ||
|
97d0eac303 | ||
|
1325c71712 | ||
|
6ede837b4f | ||
|
174af5f025 | ||
|
993a044680 | ||
|
bf0d4a2be7 | ||
|
b9df7541b3 | ||
|
7d1827ffc5 | ||
|
52c962a590 | ||
|
c0390240bc | ||
|
7d06c9ab97 | ||
|
8aac842307 | ||
|
0c5b4721d6 | ||
|
8d2e176c2f | ||
|
b1a9efd552 | ||
|
68813abcf0 | ||
|
45218c4ce7 | ||
|
cb16f42a27 | ||
|
f9a5188fb7 | ||
|
2373bdf400 | ||
|
8b25d11a3a | ||
|
ef2baad7b3 | ||
|
137ed04d34 | ||
|
97e1f07f71 | ||
|
2b76b90885 | ||
|
7f9ac28f6e | ||
|
255cdcf667 | ||
|
ec239ceeb3 | ||
|
becce1aac4 | ||
|
6c9be9eb97 | ||
|
c176b2b611 | ||
|
ff0123b54a | ||
|
36fd58d7a6 | ||
|
65ba735f18 | ||
|
a2487c20d8 | ||
|
f41f090ed7 | ||
|
0e44687ab1 | ||
|
4d0a982c51 | ||
|
f6c4891415 | ||
|
2150f8bc19 | ||
|
ff44eb02de | ||
|
154bb239bf | ||
|
65d364fe90 | ||
|
ef0e3b9f35 | ||
|
2770952dfb | ||
|
4cff1e500b | ||
|
b36cbd526b | ||
|
5609ddf791 | ||
|
8592bada16 | ||
|
46c925bc82 | ||
|
8ee843b7b1 | ||
|
ebae0d4304 | ||
|
fa20a3b5d8 | ||
|
e7bcf2fc97 | ||
|
bb00a4ecfd | ||
|
6ee7598211 | ||
|
e572b86fd3 | ||
|
05c6266986 | ||
|
57a2b4b516 | ||
|
d20dbbcf74 | ||
|
e5137e0c42 | ||
|
e6d36101d9 | ||
|
08af8cad8d | ||
|
a12dbd191a | ||
|
0d950a0c9f | ||
|
3a460faa6e | ||
|
28e8b31211 | ||
|
5f71e58db1 | ||
|
bf348fa572 | ||
|
b209cd962c | ||
|
9385c14f77 | ||
|
ff01947f3d | ||
|
3fa9047864 | ||
|
bce9bedaf4 | ||
|
076245e061 | ||
|
aeb38b599d | ||
|
6bf408775c | ||
|
791e715aee | ||
|
5a7a9c2c9f | ||
|
da458ae70a | ||
|
618a382e6c | ||
|
d6661403aa | ||
|
9229220e21 | ||
|
b89fe9eb13 | ||
|
3a13eeb744 | ||
|
65266d752b | ||
|
413de914ca | ||
|
7a9448a3e5 | ||
|
d8e91d058c | ||
|
2b8dedb3a2 | ||
|
8027ff936c | ||
|
c74772cb36 | ||
|
7864ac6dfe | ||
|
a08681ba23 | ||
|
a832b3cddb | ||
|
1216399d53 | ||
|
512ba02389 | ||
|
6b0978b9a5 | ||
|
d5e102e3f4 | ||
|
ff92e95f7a | ||
|
bce5b81acd | ||
|
500d6b403b | ||
|
bcd808ddc1 | ||
|
efd20ed2c7 | ||
|
5484e68b42 | ||
|
0583f30a53 | ||
|
50287cb066 | ||
|
73f0bddf0b | ||
|
c248a74f80 | ||
|
4f56a01662 | ||
|
c0e806072e | ||
|
d00371f444 | ||
|
f10c1a0505 | ||
|
5669821b29 | ||
|
286834bab1 | ||
|
6ee34ab3cb | ||
|
333051ab1f | ||
|
48e5dc6763 | ||
|
7e06823e58 | ||
|
d793193fdf | ||
|
3dcbee5a4f | ||
|
524334409a | ||
|
0417603499 | ||
|
8555e232ae | ||
|
9c0f8f053e | ||
|
b757e66997 | ||
|
da5a646a73 | ||
|
18f5e142b9 | ||
|
3ae6323a7c | ||
|
8657fd336e | ||
|
1180565d0c | ||
|
eb0c5605e4 | ||
|
e00b0059f7 | ||
|
0dddaeeb98 | ||
|
137644e919 | ||
|
30e72d2db5 | ||
|
ec3152014b | ||
|
f450b5dfc7 | ||
|
adc3a21863 | ||
|
37c7dee099 | ||
|
624954042b | ||
|
17f2c05fd3 | ||
|
8e8b746a5b | ||
|
5e75a00c17 | ||
|
9faa7b801d | ||
|
df0a4a2be2 | ||
|
bb6e602e13 | ||
|
4e0618469d | ||
|
b3853e01b8 | ||
|
03776c4663 | ||
|
a2d08e441e | ||
|
6ad24fc950 | ||
|
69d6724749 | ||
|
307d944cf1 | ||
|
3de17311cf | ||
|
7ad22d8143 | ||
|
6ab9663b76 | ||
|
486f8bd61c | ||
|
a39f34d695 | ||
|
bb310bdf65 | ||
|
a73af3e76d | ||
|
a981ae4052 | ||
|
cdc7bd618c | ||
|
b38e765dfe | ||
|
655402a509 | ||
|
a84d115fa1 | ||
|
310c469ba8 | ||
|
25e05830a6 | ||
|
0388b12896 | ||
|
83ddbd220f | ||
|
a629098b92 | ||
|
7fed5742b8 | ||
|
6435c2b1f1 | ||
|
b8a4ce4ceb | ||
|
8a3a7e31ac | ||
|
929bb756a8 | ||
|
fbb3e052fe | ||
|
57c0409958 | ||
|
ce5dbf0eeb | ||
|
de5538a40f | ||
|
505eb90479 | ||
|
9c6c13b78a | ||
|
b47882b114 | ||
|
08d485206c | ||
|
ab2956eef7 | ||
|
93b4cec932 | ||
|
fd4aaed208 | ||
|
8e43932aa6 | ||
|
4303086c0e | ||
|
f10732960c | ||
|
fb1fb2ef3b | ||
|
33bf5df236 | ||
|
15d016eb70 | ||
|
7a52da5587 | ||
|
7b047578fd | ||
|
801df832e5 | ||
|
edacde48e5 | ||
|
b4e0aea2b7 | ||
|
9497f6e671 | ||
|
4565aa38cf | ||
|
a82d552e1b | ||
|
d63615a994 | ||
|
6f7ac33166 | ||
|
829a8b4b81 | ||
|
952c24783b | ||
|
6c7e274968 | ||
|
5abd5db313 | ||
|
b3d09a4b77 | ||
|
1a7f6e1fa9 | ||
|
913ce8a4f0 | ||
|
0e82916b0a | ||
|
9648628a2c | ||
|
2c080b2ea9 | ||
|
1d6335363c | ||
|
a509e40474 | ||
|
d6fa4ec5a8 | ||
|
28970ccc91 | ||
|
034e47bc28 | ||
|
0b03a6871e | ||
|
4a58e1349c | ||
|
edef6cc6ac | ||
|
ade624c215 | ||
|
98f55879d3 | ||
|
8598ea6f74 | ||
|
4ddd1dec9c | ||
|
0b54488563 | ||
|
02d114d575 | ||
|
7a525d7693 | ||
|
a553d5d95a | ||
|
cf4295a13e | ||
|
ed41a4fe19 | ||
|
d8564e446b | ||
|
afd25c248f | ||
|
790ca4194a | ||
|
7ddce6bb2d | ||
|
c149afb1f1 | ||
|
d7cb04b93d | ||
|
49c5a3facf | ||
|
0afa07ed3b | ||
|
a1a88ea872 | ||
|
9791704632 | ||
|
2a393f95fa | ||
|
e00cf5f124 | ||
|
13217be939 | ||
|
e2a17fef95 | ||
|
0f0a2f18ad | ||
|
5045f0f3e4 | ||
|
f11e8de251 | ||
|
6a243e90db | ||
|
b80d914cba | ||
|
c7c79a152b | ||
|
95c191fb31 | ||
|
9204b76110 | ||
|
1bb4daa36a | ||
|
25d74d02c7 | ||
|
d92f183840 | ||
|
b63ac7ce91 | ||
|
07e479ab50 | ||
|
51d4b7f993 | ||
|
3aa8d523f5 | ||
|
27f30212e8 | ||
|
7c36379715 | ||
|
9f452a62e5 | ||
|
6e837e0c02 | ||
|
1e31db47ab | ||
|
80622cc966 | ||
|
4a4fd01563 | ||
|
c83c5cdc42 | ||
|
388e64d0af | ||
|
7cc20cd9a4 | ||
|
e43e1c6d20 | ||
|
14347232fd | ||
|
8387f325f6 | ||
|
1d86b3e210 | ||
|
4e641d945c | ||
|
b2508e5bf8 | ||
|
dcaf407536 | ||
|
bfd592299c | ||
|
0bfecb9844 | ||
|
2fd9f3436d | ||
|
e3864d4b7c | ||
|
361163098f | ||
|
040c9bc3e6 | ||
|
640630faff | ||
|
da0e74a648 | ||
|
9327594926 | ||
|
29061c000c | ||
|
1671868956 | ||
|
708ade4dbf | ||
|
35e7486ea3 | ||
|
50b726d739 | ||
|
a2e993119e | ||
|
bd13d6904b | ||
|
41a6e9740f | ||
|
7485cc231e | ||
|
e2eb3a7ded | ||
|
0c20e4056e | ||
|
e80891f2f2 | ||
|
3af66cafbe | ||
|
86caee74c5 | ||
|
f928a49c8a | ||
|
3f95ac7db3 | ||
|
c883c71053 | ||
|
2cb53411c0 | ||
|
4ae72af4b5 | ||
|
b6321bbdc5 | ||
|
b119db251b | ||
|
40a0e866e7 | ||
|
acc8885844 | ||
|
d79d7bdd1e | ||
|
bf0623d8c7 | ||
|
310484a8cc | ||
|
a21c48afe7 | ||
|
477e814cd8 | ||
|
95bbdfc3bb | ||
|
4c14a10b91 | ||
|
660d57340a | ||
|
957803a70b | ||
|
828847cfcd | ||
|
33cddb36ff | ||
|
2dfa41c9a5 | ||
|
26f0b014f9 | ||
|
83f5d9e460 | ||
|
750653361f | ||
|
1545eab81d | ||
|
e777b7052e | ||
|
2d6c09bec5 | ||
|
d5a1c16458 | ||
|
39d6f927e6 | ||
|
7618101b1b | ||
|
a445a40bea | ||
|
c1420b52fb | ||
|
28549b47bb | ||
|
c80a16c254 | ||
|
f3360d1249 | ||
|
ac68f74c69 | ||
|
6986d1bb1e | ||
|
3cb106bc9d | ||
|
b7bf83c5f0 | ||
|
c783ce17be | ||
|
101de8e7b1 | ||
|
0c1bd982ea | ||
|
987f9eab3b | ||
|
ab1479925b | ||
|
2c08672f15 | ||
|
2f8e2288ad | ||
|
164037da43 | ||
|
8310357029 | ||
|
bfe48a7d76 | ||
|
c39a648bf2 | ||
|
9111f645b7 | ||
|
ef0285bc7c | ||
|
4659b5f941 | ||
|
ad1d5a1217 | ||
|
16bdbee520 | ||
|
85012bb100 | ||
|
82972d28e2 | ||
|
ba6ba4c367 | ||
|
f7c81ce855 | ||
|
2f294480a9 | ||
|
8cca03a3f6 | ||
|
34c2692193 | ||
|
01a0c286c9 | ||
|
05e3e4bda7 | ||
|
5fade479a3 | ||
|
62388b4161 | ||
|
2c4f4155d6 | ||
|
fee7e41ae4 | ||
|
a2dfb97b6b | ||
|
5e52bafb5c | ||
|
8c665d1651 | ||
|
18b18bf667 | ||
|
57bc9b6b73 | ||
|
d7c19ee6ce | ||
|
a467b18474 | ||
|
d3d4424bc5 | ||
|
621f030977 | ||
|
8d8f7954e9 | ||
|
12f74fc40a | ||
|
2feb9b90b2 | ||
|
dbbae97038 | ||
|
e1a87c3407 | ||
|
7f6c23d4cb | ||
|
7bcc8d7ce7 | ||
|
7f1de72e08 | ||
|
211ca5e49c | ||
|
fca175ed84 | ||
|
3102736b13 | ||
|
ce7c44758e | ||
|
8a81eae8f4 | ||
|
ebb333565e | ||
|
e5dfd53b9a | ||
|
290ef1a2ed | ||
|
bf4662e26f | ||
|
cce9385fb1 | ||
|
c07c40bcb6 | ||
|
d94e2dc9d7 | ||
|
eb59cbaa39 | ||
|
64069b9b5d | ||
|
9441f4f904 | ||
|
3e3706911c | ||
|
42aa12d075 | ||
|
b092e247fc | ||
|
34a29df1a8 | ||
|
0f4ead759c | ||
|
ec3295ffd4 | ||
|
88cb1096c0 | ||
|
b4495a49bf | ||
|
d31a51742b | ||
|
c4ea1171cd | ||
|
43b7fdb61d | ||
|
60ae676075 | ||
|
1ee8d177b4 | ||
|
6d4e8028eb | ||
|
4edae5015a | ||
|
9f9b1c1732 | ||
|
d675c0dc26 | ||
|
c775e8a692 | ||
|
e6679e4c4e | ||
|
6f6f067597 | ||
|
9e7ae5904f | ||
|
bf371c447f | ||
|
a6faf9bd2e | ||
|
b5ff8581f3 | ||
|
6e27f62e1b | ||
|
efd61979f7 | ||
|
d49886f44f | ||
|
059733cb8c | ||
|
9d221604b4 | ||
|
902c4d7a9c | ||
|
0df57debb8 | ||
|
ae7621fb54 | ||
|
0988415931 | ||
|
8ffff6261b | ||
|
2cb9de0800 | ||
|
1af61b0c50 | ||
|
b0eea00181 | ||
|
71e73ca654 | ||
|
83dc26ccde | ||
|
6c6eb2c21b | ||
|
e126d431d7 | ||
|
51b60f4210 | ||
|
3e048e9675 | ||
|
a1f46611e1 | ||
|
a77d44e919 | ||
|
bd2ad9b616 | ||
|
c735f9868b | ||
|
53fabdfaea | ||
|
278490e103 | ||
|
67dd432e86 | ||
|
cab86035e9 | ||
|
8f8929f324 | ||
|
be7a96076e | ||
|
6fdaeee026 | ||
|
fc229d755b | ||
|
9d3e77bab1 | ||
|
3a771fc807 | ||
|
34cb8ec268 | ||
|
d55dab93da | ||
|
80ee7f3d4b | ||
|
260b67c49e | ||
|
b27dab99d8 | ||
|
2baf061b96 | ||
|
84a54bb6e6 | ||
|
56bd60559c | ||
|
e75aa386ea | ||
|
50eec96dd0 | ||
|
5142447b7e | ||
|
22093c5c38 | ||
|
0c07c14860 | ||
|
36bc9ae29b | ||
|
d6d41cdbad | ||
|
94e8d34948 | ||
|
e17389485b | ||
|
f273cfc87d | ||
|
e1543f2e91 | ||
|
c86f71a4b4 | ||
|
703d655a5e | ||
|
3b878da59a | ||
|
f357615bfb | ||
|
2e12417a6c | ||
|
48973fe036 | ||
|
c7751d5d63 | ||
|
02a62532da | ||
|
918593c3e6 | ||
|
95a215ec58 | ||
|
de671e9b9c | ||
|
c47578f158 | ||
|
d0e183a738 | ||
|
19bd3766d2 | ||
|
3a43e2b98c | ||
|
1fede8d2a3 | ||
|
26b1674c9e | ||
|
bad57ba502 | ||
|
85e6ad03db | ||
|
60b098a2f2 | ||
|
32660881f6 | ||
|
c3d962a0d0 | ||
|
061e828f16 | ||
|
7831503c19 | ||
|
409229739e | ||
|
50d6a9b5c8 | ||
|
73034c7998 | ||
|
1b203f0d30 | ||
|
452f5d160a | ||
|
31d82fb1af | ||
|
2df54bd7a0 | ||
|
de7e3ab607 | ||
|
a9a0adaead | ||
|
8b350066c1 | ||
|
4667993dad | ||
|
0447afa2e5 | ||
|
51b4709ea4 | ||
|
57df6db609 | ||
|
27f8603dc5 | ||
|
cf978e23aa | ||
|
0ed752277c | ||
|
8501bba0ac | ||
|
224a0de784 | ||
|
2ea4559b60 | ||
|
f8bef16d32 | ||
|
973dbf0482 | ||
|
d0fc56d53f | ||
|
bfa916c7b2 | ||
|
522177e813 | ||
|
aa717a4ef9 | ||
|
b0b5e4c9d1 | ||
|
75ad2fb53d | ||
|
81f8c23265 | ||
|
806a19b91a | ||
|
abfd41f31e | ||
|
a747548254 | ||
|
e1bf6dd562 | ||
|
42701514e7 | ||
|
0435c56a41 | ||
|
c09784d58f | ||
|
112837e0e9 | ||
|
043ac7fe92 | ||
|
fccd8b63d2 | ||
|
7ef3e34386 | ||
|
de0f919077 | ||
|
eb396813a8 | ||
|
5b14ad4065 | ||
|
140f9b1fb2 | ||
|
05bd6e9076 | ||
|
150c944758 | ||
|
df530d8a73 | ||
|
171b347dad | ||
|
93bc91cce2 | ||
|
ae72817950 | ||
|
ca25595022 | ||
|
a0e1a1641d | ||
|
31a093ba06 | ||
|
14d80d756f | ||
|
ff3931e435 | ||
|
655b98d19e | ||
|
a082de5244 | ||
|
b20ec9925a | ||
|
91f6591141 | ||
|
d6cb3b9fac | ||
|
735c5f57cb | ||
|
adafa6ebc1 | ||
|
012dd24fd8 | ||
|
604df50b88 | ||
|
2fee6205a6 | ||
|
867825491b | ||
|
0b30216229 | ||
|
429d8059e1 | ||
|
a10f8663fd | ||
|
125b244f44 | ||
|
ecbab8546b | ||
|
a6ac5f0dbf | ||
|
274682f040 | ||
|
001e86317f | ||
|
9bfe2b07ba | ||
|
fd8d23a379 | ||
|
69eb17cb0d | ||
|
44f2366675 | ||
|
436d742017 | ||
|
4426c639ce | ||
|
67a6b8360e | ||
|
bc1cf8b1f6 | ||
|
2d0e09cd63 | ||
|
4b0cb0721d | ||
|
155aa47a1a | ||
|
d8e3ca1abb | ||
|
9fb1fbe32c | ||
|
b72bd38f76 | ||
|
f4ef02fa5b | ||
|
d097799b34 | ||
|
8d5683d3f1 | ||
|
29c2d0de6b | ||
|
c59acbf6d6 | ||
|
29989b7fdb | ||
|
3efb9a57b5 | ||
|
3a65e33abe | ||
|
dbeebd3b01 | ||
|
343a306bb8 | ||
|
84beacd427 | ||
|
5b350bc361 | ||
|
0726a08677 | ||
|
f3efc18b50 | ||
|
0e938209af | ||
|
199622c53f | ||
|
232594ae44 | ||
|
7b0d664bc0 | ||
|
47feb72f48 | ||
|
d658627e99 | ||
|
a87973e0c7 | ||
|
759f18ea75 | ||
|
b98159d5b9 | ||
|
e558e979ff | ||
|
1aab960401 | ||
|
6bb002e708 | ||
|
47d83244bb | ||
|
3b686c475d | ||
|
39fd3b891e | ||
|
e891d7c8e8 | ||
|
34747359ba | ||
|
41e0eaaece | ||
|
ffb450db71 | ||
|
bc7bf9866a | ||
|
e8f4c9cf69 | ||
|
d4d9f50b8f | ||
|
604901fe74 | ||
|
c7feecd4b8 | ||
|
a1e547c057 | ||
|
ba3ef742ce | ||
|
233d25b1c3 | ||
|
19191fa8b5 | ||
|
47d0df76e6 | ||
|
82ca4ad9a0 | ||
|
191198ca41 | ||
|
0b5d088016 | ||
|
008a8824ce | ||
|
f897411ddf | ||
|
7f981b9ed7 | ||
|
a348cdaac3 | ||
|
7739b2178e | ||
|
dbe5cb4a33 | ||
|
672af707ad | ||
|
2df7187514 | ||
|
2892d21ff1 | ||
|
303585cdbf | ||
|
94972841d6 | ||
|
ccf50cbdf5 | ||
|
abe8c0630c | ||
|
912b7e0c11 | ||
|
a5c0c47e5b | ||
|
fb92fb509b | ||
|
60f63876c4 | ||
|
a025118344 | ||
|
fa26c8078b | ||
|
fa43f6bc3e | ||
|
07aac768d6 | ||
|
0abdf8d468 | ||
|
40b21203ed | ||
|
cea9104efb | ||
|
b0b05df193 | ||
|
e3fff9e357 | ||
|
91b570ff0d | ||
|
7f6d9966e5 | ||
|
9424341b55 | ||
|
12400147fc | ||
|
4103ab49c9 | ||
|
f8ba2c5342 | ||
|
cee7512e4d | ||
|
bd59adc886 | ||
|
686c032290 | ||
|
7aa959150e | ||
|
e8e9edbb21 | ||
|
a33e9bd571 | ||
|
50a9690c2d | ||
|
2016b80ea9 | ||
|
2dc6cfec23 | ||
|
20f5519b99 | ||
|
363afff332 | ||
|
5f5fa44d0d | ||
|
cef217388a | ||
|
0da11f068b | ||
|
567090547d | ||
|
66978e40a4 | ||
|
8788f99005 | ||
|
6e344ce9da | ||
|
52110b7db5 | ||
|
daef98080e | ||
|
4b2be7fff8 | ||
|
0c57c53da1 | ||
|
4f7aeafa8d | ||
|
abbecf5194 | ||
|
107367f958 | ||
|
068b067e14 | ||
|
0403b89f53 | ||
|
0155f4ea84 | ||
|
d62af4ceb5 | ||
|
f3f662c696 | ||
|
f606140050 | ||
|
10fcf93991 | ||
|
4b8963b7ba | ||
|
338a44839f | ||
|
b8d7619dbc | ||
|
464540c9a9 | ||
|
e9cc133005 | ||
|
85ebc72e8a | ||
|
3517862ecd | ||
|
446a082352 | ||
|
9f1f399957 | ||
|
cdca8bd295 | ||
|
51fb24ab73 | ||
|
69773a5b58 | ||
|
020409ef46 | ||
|
e4a5b7fae9 | ||
|
16e6fa2cdd | ||
|
9aa8decf40 | ||
|
cdb854259a | ||
|
dc397f6bd8 | ||
|
1be4ac2fde | ||
|
f21765bfe4 | ||
|
cc45290ec2 | ||
|
4d214b9cd9 | ||
|
a82879210c | ||
|
451eec0c28 | ||
|
f26cd1e994 | ||
|
cef69b6342 | ||
|
b7475a5bd0 | ||
|
61f3cb938c | ||
|
2cc44e8f28 | ||
|
12b4269ba8 | ||
|
05e6cfca62 | ||
|
9fef8b0611 | ||
|
af2f3694e7 | ||
|
ece25e0499 | ||
|
c28f1695ed | ||
|
9d6dbaf0ec | ||
|
a38e126f49 | ||
|
6d376fed15 | ||
|
0af6a2c32f | ||
|
b3b31394e8 | ||
|
9464de108f | ||
|
01e27fa347 | ||
|
7f4c011dc3 | ||
|
8dcdc7ff38 | ||
|
46c39399ff | ||
|
348128fada | ||
|
691b7b9091 | ||
|
adb6252f85 | ||
|
9fd07c36eb | ||
|
a3a5ec678c | ||
|
7ee32a4ea1 | ||
|
8239dcdd1b | ||
|
b49b629395 | ||
|
1c0543c07e | ||
|
1656f5824d | ||
|
651c1cd8c6 | ||
|
1f2347c6b4 | ||
|
034288711b | ||
|
64ba18e417 | ||
|
7418c33a30 | ||
|
d7ac95a707 | ||
|
65be8b8e07 | ||
|
7e7a1ead88 | ||
|
0d734eb07d | ||
|
da55aa6491 | ||
|
94d494866f | ||
|
5fb75b9614 | ||
|
bc9bb045ed | ||
|
6b065956f7 | ||
|
c6c04f4952 | ||
|
60bffa4fb6 | ||
|
81417ea8b2 | ||
|
13b344d238 | ||
|
f1ad1b9aed | ||
|
9b5eeb7652 | ||
|
4ae3fbd5d1 | ||
|
d4210c53cf | ||
|
d5520c9ed2 | ||
|
b277d94e0b | ||
|
e9fc27a33b | ||
|
fb85cb60a0 | ||
|
0b1c3ea539 | ||
|
db730ad8cb | ||
|
c4163547f6 | ||
|
65240c8343 | ||
|
6662975005 | ||
|
a06f4ec19c | ||
|
5ae9d09e98 | ||
|
f5e38e9979 | ||
|
7c4fb539d8 | ||
|
30ab45f459 | ||
|
2b9840149e | ||
|
990f4d182b | ||
|
c8f3ea2eba | ||
|
fb0f252b39 | ||
|
ddc51dd5eb | ||
|
fd58e4df04 | ||
|
4cb0994fee | ||
|
beed64e001 | ||
|
b4092fe77d | ||
|
3b28be1741 | ||
|
f4cfc7c5ca | ||
|
4f505ca6c1 | ||
|
1cb1ce0d99 | ||
|
37f0c352f6 | ||
|
210d92e382 | ||
|
4e3cc1dfe2 | ||
|
36c1ee35a7 | ||
|
be64ca7b0e | ||
|
37232f71ee | ||
|
d22580fa74 | ||
|
267f2cb2c4 | ||
|
e05cfc3375 | ||
|
c588a4e82e | ||
|
db67e83bb8 | ||
|
7f72d78dcc | ||
|
927f5e8567 | ||
|
28c4e9697b | ||
|
bfdc85d7d6 | ||
|
6d59a97955 | ||
|
c572a97d1c | ||
|
6ddf888b67 | ||
|
56eefabc6d | ||
|
51320ab22a | ||
|
0c37216626 | ||
|
7b7e571e93 | ||
|
50634d30b3 | ||
|
27a6563302 | ||
|
8040ef5a9c | ||
|
2d46811c45 | ||
|
71b3f781f7 | ||
|
81c9312606 | ||
|
dd420f8c41 | ||
|
b631dcb756 | ||
|
040bb0d7f4 | ||
|
5bfc1e9094 | ||
|
22b3af61b5 | ||
|
748faca87d | ||
|
3d971bfc8d | ||
|
68383b79e2 | ||
|
4525a11d92 | ||
|
27fbda5e7b | ||
|
34c96ed81b | ||
|
d9ab65cb6f | ||
|
5bbc94f2ef | ||
|
305d077973 | ||
|
72d6016da4 | ||
|
63498f5edd | ||
|
27c5d17fc8 | ||
|
4b95ec41ed | ||
|
01fb42c5d9 | ||
|
d38abd7832 | ||
|
4e7d2295da | ||
|
0ddc969bdc | ||
|
ed68fa2b57 | ||
|
c8d01cc5e8 | ||
|
16d516c221 | ||
|
0a02df363a | ||
|
0dfff2bcb2 | ||
|
8eadb4251d | ||
|
55ebfa0d01 | ||
|
2e6f8cb9f4 | ||
|
a756691d08 | ||
|
dc11a88ed3 | ||
|
8bc1fab74d | ||
|
75c49e4f8a | ||
|
e5299b5ed4 | ||
|
33b18907eb | ||
|
889c5b1e21 | ||
|
a54a09f61d | ||
|
1bce53bbb7 | ||
|
1956be0f46 | ||
|
82a3349649 | ||
|
aac13cd123 | ||
|
f392b3607e | ||
|
9af76a73a3 | ||
|
5c179dc4d3 | ||
|
08068e0a5c | ||
|
7bfc2bf9b4 | ||
|
b5f8e69b8a | ||
|
f6141ff177 | ||
|
1c031638c6 | ||
|
ad8369395e | ||
|
1cf9420997 | ||
|
99482f9962 | ||
|
c164abe256 | ||
|
4d191b51f9 | ||
|
986fa9ee30 | ||
|
2efd254974 | ||
|
0895dd07ee | ||
|
bb45ae7ac3 | ||
|
40cd1b9029 | ||
|
d3be30744c | ||
|
6644c42922 | ||
|
59d04d6e0c | ||
|
fc28aad245 | ||
|
0d8216e8ea | ||
|
7961fa932a | ||
|
5e95cfbc8a | ||
|
f849e8b696 | ||
|
4abbf9a917 | ||
|
a3cb81962f | ||
|
438080827a | ||
|
eaf012963a | ||
|
ef0c2d5a28 | ||
|
3b1809e2ea | ||
|
ad9997c48f | ||
|
fbd91f346a | ||
|
836af2d588 | ||
|
dc5dc233ec | ||
|
7e58986356 | ||
|
5cf7062092 | ||
|
6298b1228a | ||
|
741cbfaa4e | ||
|
c006386079 | ||
|
b59601a8b7 | ||
|
aee1390e2c | ||
|
4de18d8134 | ||
|
fd9b07c429 | ||
|
f7818e26b5 | ||
|
0bbb3cc4d0 | ||
|
06ec6388b4 | ||
|
6afcaf665e | ||
|
c56a23d21c | ||
|
25aea216c5 | ||
|
a4f5c8bef6 | ||
|
3ea515d705 | ||
|
ab8a44cede | ||
|
c2d3dc1daf | ||
|
db2718fcba | ||
|
ad3016ef4f | ||
|
0fd31b8d36 | ||
|
2e3f69ab12 | ||
|
0386e0dbc2 | ||
|
95357687f9 | ||
|
e93b0a9b45 | ||
|
fa07f94051 | ||
|
3f034ac128 | ||
|
2f9994807b | ||
|
4e5b67f0a6 | ||
|
5e6f0cb8d7 | ||
|
5c7e76ef89 | ||
|
1b452d1264 | ||
|
a5943bf2ad | ||
|
cfa95ef80a | ||
|
b52059d437 | ||
|
210e459ba9 | ||
|
c9374732c0 | ||
|
a114bd3e16 | ||
|
49cc12f99d | ||
|
e15a18e9fb | ||
|
718ae1acba | ||
|
55c29c4519 | ||
|
45fbf214b4 | ||
|
1c26ce52a5 | ||
|
a0be018547 | ||
|
eff7d478ab | ||
|
85b1a05d01 | ||
|
e61050adcf | ||
|
e69004bc4a | ||
|
0075ba3c33 | ||
|
b88fa8386a | ||
|
fb908189b6 | ||
|
f147ef3476 | ||
|
a72ab61902 | ||
|
6eafb457ec | ||
|
f086457741 | ||
|
5a175bd92a | ||
|
af51b5c460 | ||
|
5bd3a7bbab | ||
|
eb8ea53a44 | ||
|
469c141f6b | ||
|
d9cdd5faef | ||
|
9e73936c4e | ||
|
968ed736c1 | ||
|
91e769f603 | ||
|
96d1af9363 | ||
|
35851d5533 | ||
|
ae0f69a5e4 | ||
|
4ade8b0539 | ||
|
bace345da4 | ||
|
04d1c81d3d | ||
|
88569b6d09 | ||
|
ae3d302c47 | ||
|
2d3d03e01e | ||
|
1e1c0c3fe5 | ||
|
9197f86a03 | ||
|
565f62471b | ||
|
53391bec1a | ||
|
4959f62cf5 | ||
|
a6ca345af4 | ||
|
d57dfd4d36 | ||
|
c05fafea0e | ||
|
13b4dbf541 | ||
|
b4e46450b5 | ||
|
ec38d2f5a0 | ||
|
8bf6384504 | ||
|
3f1ea7d31a | ||
|
ae953ce19b | ||
|
77d54b5e1b | ||
|
07e70690e1 | ||
|
e0ee881b67 | ||
|
c54000045c | ||
|
7542798335 | ||
|
80ce10eb0f | ||
|
eec09dec3e | ||
|
8d6e782ba1 | ||
|
021a1c8fb6 | ||
|
450469e3d6 | ||
|
bdc913d088 | ||
|
96bc86d153 | ||
|
3a74f76ff9 | ||
|
2835dd89ea | ||
|
427a30ef8a | ||
|
3f60f5048e | ||
|
f4406ccf5c | ||
|
6c7bb04b93 | ||
|
4442ba6703 | ||
|
a120af5a00 | ||
|
3bacfe6cd9 | ||
|
af96be7d09 | ||
|
2a47aa09cd | ||
|
c3a29171cd | ||
|
2f5d9c63c4 | ||
|
dbbafc15fa | ||
|
719fa82ae5 | ||
|
7d695f0c6a | ||
|
f8d2d4c82a | ||
|
3c6b2203e9 | ||
|
28300ee889 | ||
|
a26e703343 | ||
|
987f825714 | ||
|
222d995e95 | ||
|
b2491ddc07 | ||
|
acc100eb52 | ||
|
12911a101e | ||
|
0a3aa40f20 | ||
|
83f93c8dbb | ||
|
97c1247b57 | ||
|
04ab98907a | ||
|
bfffdd4d91 | ||
|
85ba4a33a8 | ||
|
83c038a42a | ||
|
ac1779b705 | ||
|
dc4cc9b604 | ||
|
e534704a98 | ||
|
d674aaf736 | ||
|
a6bba601a9 | ||
|
d606ebc0f1 | ||
|
ef4a7308ee | ||
|
4927a61378 | ||
|
07c4be0afb | ||
|
9ee7be4a1c | ||
|
ff3519ae11 | ||
|
cec07c76b6 | ||
|
d8556b0d86 | ||
|
a702f7bc0b | ||
|
eb56305091 | ||
|
201a04ffb9 | ||
|
d95e4ec443 | ||
|
c8d88464b1 | ||
|
42bd0027b3 | ||
|
512f8064c1 | ||
|
61c473405f | ||
|
df161ef38e | ||
|
495dd2d821 | ||
|
565b928a59 | ||
|
a36cc0c00a | ||
|
adb42882c5 | ||
|
e624e25c0f | ||
|
2a3df086d3 | ||
|
5bd46d18a3 | ||
|
3b8e3f3e4b | ||
|
8abb096d7b | ||
|
55c0637b98 | ||
|
865c414504 | ||
|
13456fea1e | ||
|
91dc3efc75 | ||
|
ec632a7091 | ||
|
763b84d0b9 | ||
|
959e535126 | ||
|
af76e660d0 | ||
|
06fa8ab5f3 | ||
|
d7603fa4d3 | ||
|
8b6f92f9e9 | ||
|
d7941e6bed | ||
|
d5e650a339 | ||
|
b1baf76998 | ||
|
ac67268a28 | ||
|
f89d06d092 | ||
|
b7d67bf5fc | ||
|
7ae95ac908 | ||
|
40448ccd34 | ||
|
bd8f528011 | ||
|
be3bab2ce0 | ||
|
8d9f20939c | ||
|
4330fe4f33 | ||
|
e7db894eb7 | ||
|
537349e124 | ||
|
2bb30f9bf5 | ||
|
18ec8e3cc8 | ||
|
0c1241f8bb | ||
|
8c345a0448 | ||
|
541c978ac4 | ||
|
809939ab03 | ||
|
65ff0e76da | ||
|
3bf4b3717a | ||
|
511f174c8b | ||
|
163e4d7389 | ||
|
5b856c7af2 | ||
|
229df1adef | ||
|
ebd216edfd | ||
|
fc28c1e5fd | ||
|
11c4926502 | ||
|
d18cfad63e | ||
|
5de7ff64df | ||
|
888cf5118a | ||
|
b32022a176 | ||
|
a625df07e2 | ||
|
7f7a975614 | ||
|
4f09e939ae | ||
|
83887b97df | ||
|
25937d9543 | ||
|
f9f41dc99b | ||
|
889d358a6d | ||
|
1874e821f5 | ||
|
e865a6347f | ||
|
5ceeb88bee | ||
|
4d1faca447 | ||
|
62e58b122f | ||
|
228bc8fe7c | ||
|
aecb649473 | ||
|
d7481b9649 | ||
|
e5df083d45 | ||
|
a65a60604a | ||
|
2b83d2fb6b | ||
|
a9cfbce11e | ||
|
e401b92acb | ||
|
068c8ba638 | ||
|
4201348dea | ||
|
a8a9c28783 | ||
|
75cc0be189 | ||
|
943d97efac | ||
|
38dc2bb99d | ||
|
f2121fb833 | ||
|
15b1332656 | ||
|
621e459dfb | ||
|
f8a6049de2 | ||
|
794868b20f | ||
|
b692b11f62 | ||
|
e1ab02c2bf | ||
|
718fa48a5c | ||
|
29db2ee513 | ||
|
fb21015811 | ||
|
a7e5498197 | ||
|
822905be7d | ||
|
32abdbafae | ||
|
4f928b4577 | ||
|
4e9ef6fb00 | ||
|
d323c1f95b | ||
|
c94793b0b1 | ||
|
19b4a896f1 | ||
|
df0a637d2b | ||
|
7a6a38592e | ||
|
a5be6fc9be | ||
|
9052688ed2 | ||
|
07d5907ecd | ||
|
670f711b59 | ||
|
9fdbe3f775 | ||
|
a33d076d8b | ||
|
4423326cec | ||
|
5d4a5deddf | ||
|
56f2798279 | ||
|
137c050f99 | ||
|
1922e5380d | ||
|
adc9fccb7d | ||
|
a836892cde | ||
|
5185df629e | ||
|
64bc93926f | ||
|
ac31f520ea | ||
|
7b7c3abbe2 | ||
|
72f755817c | ||
|
66189c4460 | ||
|
3f1f03e03c | ||
|
e0eb6b0e76 | ||
|
1d5827007f | ||
|
f6744d3e39 | ||
|
cdf75ced9a | ||
|
82f3871a83 | ||
|
81856f3b64 | ||
|
02311eab76 | ||
|
8a47afd2ea | ||
|
e860d828b6 | ||
|
32f2893095 | ||
|
4238a9fc68 | ||
|
7c65604dad | ||
|
2df90ce280 | ||
|
1ff822bb14 | ||
|
bda9256ab1 | ||
|
c1e29e90ca | ||
|
21140e28a8 | ||
|
cd3e880264 | ||
|
52ab056e18 | ||
|
57c11ae13f | ||
|
6d08789ac1 | ||
|
4b7609681d | ||
|
e800cefe95 | ||
|
4230772e3b | ||
|
bac38edd48 | ||
|
b1121dc6ca | ||
|
0d68b914bf | ||
|
82f6d2ce75 | ||
|
b41422450a | ||
|
25393dc326 | ||
|
4fa220850f | ||
|
e454a0200c | ||
|
19652d6cbe | ||
|
bab74dd307 | ||
|
8ec170b3e0 | ||
|
c3035a9039 | ||
|
d668ef0bcd | ||
|
21c457817f | ||
|
933d2705f9 | ||
|
62b413f6b7 | ||
|
d5d333005e | ||
|
eb11943fed | ||
|
1d0c6ffe24 | ||
|
78dbbd3dfa | ||
|
568e0d2fa3 | ||
|
bc972089a1 | ||
|
3062a78a92 | ||
|
51a3535820 | ||
|
879c0622d6 | ||
|
92e315465b | ||
|
db75a5528e | ||
|
e62d472708 | ||
|
4e4f5855f1 | ||
|
83d228d9e8 | ||
|
8c28ba6e7f | ||
|
87815d37c0 | ||
|
be31675b65 | ||
|
5fb4fc12de | ||
|
202ffd8923 | ||
|
9290eee138 | ||
|
a1a742b518 | ||
|
445a991ef1 | ||
|
efe99dc16f | ||
|
239988def7 | ||
|
e323156947 | ||
|
0e08b151e5 | ||
|
357dba87b3 | ||
|
4d0f2d2279 | ||
|
d12ea08fca | ||
|
4bf8228324 | ||
|
8d5244d0c0 | ||
|
42f8f160b6 | ||
|
c77e9d1de0 | ||
|
a5554c19a9 | ||
|
3f31d149a6 | ||
|
2b439b8199 | ||
|
3b347d3989 | ||
|
b1300117ac | ||
|
0e3bd8b596 | ||
|
8c868e088c | ||
|
5be07a8a9e | ||
|
d4abe0b72d | ||
|
41a8493411 | ||
|
8f47b8d731 | ||
|
4cf94816ae | ||
|
8d1be7ea8a | ||
|
48b58b1c2f | ||
|
8a13500535 | ||
|
23f6dd16a7 | ||
|
db02227cc4 | ||
|
71740a75a2 | ||
|
be5197a5fe | ||
|
d92dd69aa3 | ||
|
d5e308550a | ||
|
613364b773 | ||
|
c4f4ac510b | ||
|
03a6fa2f7e | ||
|
169607f153 | ||
|
1369eb87b3 | ||
|
853ed9a950 | ||
|
279d8042e3 | ||
|
a09c4d8168 | ||
|
31a5b40b5c | ||
|
28ba3f77dc | ||
|
bf7c49158c | ||
|
90c0a361b5 | ||
|
e374a91feb | ||
|
81d55e70ee | ||
|
e4cbe264cf | ||
|
a8a1f74a92 | ||
|
7188743fc3 | ||
|
4b2102ceb2 | ||
|
cbab9870c1 | ||
|
afe3f5d0e5 | ||
|
33d8fe035e | ||
|
050b08081a | ||
|
5ca1e271f9 | ||
|
65c98d1216 | ||
|
b855ce9452 | ||
|
de549fb2d5 | ||
|
3f2654e79e | ||
|
d4632aaffa | ||
|
e15d5c8180 | ||
|
81faab9978 | ||
|
127bb4c84c | ||
|
199f34e42e | ||
|
0fc69e96bd | ||
|
c277a3de8f | ||
|
8ff21fd39c | ||
|
aa555f9ae5 | ||
|
e5f8fe5528 | ||
|
ee2aa9755f | ||
|
e5a839bf0b | ||
|
23c0c9c372 | ||
|
def2787b45 | ||
|
8fc8898fef | ||
|
7aa420d24d | ||
|
f090896fa1 | ||
|
d0e5c3db69 | ||
|
4ececd6041 | ||
|
daee20c694 | ||
|
eeaa1c3a32 | ||
|
58aa0a9e45 | ||
|
f7f5152be5 | ||
|
ee959c9907 | ||
|
2422fb020b | ||
|
4b97789bab | ||
|
882c011e74 | ||
|
771ced3236 | ||
|
86e4cd0731 | ||
|
a29155ac5b | ||
|
99a3ced73b |
567 changed files with 98838 additions and 19080 deletions
9
.coveragerc
Normal file
9
.coveragerc
Normal file
|
@ -0,0 +1,9 @@
|
|||
[run]
|
||||
disable_warnings = already-imported
|
||||
|
||||
[report]
|
||||
include = aurweb/*
|
||||
fail_under = 95
|
||||
exclude_lines =
|
||||
if __name__ == .__main__.:
|
||||
pragma: no cover
|
23
.dockerignore
Normal file
23
.dockerignore
Normal file
|
@ -0,0 +1,23 @@
|
|||
# Config files
|
||||
conf/config
|
||||
conf/config.sqlite
|
||||
conf/config.sqlite.defaults
|
||||
conf/docker
|
||||
conf/docker.defaults
|
||||
|
||||
# Compiled translation files
|
||||
**/*.mo
|
||||
|
||||
# Typical virtualenv directories
|
||||
env/
|
||||
venv/
|
||||
.venv/
|
||||
|
||||
# Test output
|
||||
htmlcov/
|
||||
test-emails/
|
||||
test/__pycache__
|
||||
test/test-results
|
||||
test/trash_directory*
|
||||
.coverage
|
||||
.pytest_cache
|
10
.editorconfig
Normal file
10
.editorconfig
Normal file
|
@ -0,0 +1,10 @@
|
|||
# EditorConfig configuration for aurweb
|
||||
# https://editorconfig.org
|
||||
|
||||
# Top-most EditorConfig file
|
||||
root = true
|
||||
|
||||
[*]
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
charset = utf-8
|
8
.env
Normal file
8
.env
Normal file
|
@ -0,0 +1,8 @@
|
|||
FASTAPI_BACKEND="uvicorn"
|
||||
FASTAPI_WORKERS=2
|
||||
MARIADB_SOCKET_DIR="/var/run/mysqld/"
|
||||
AURWEB_FASTAPI_PREFIX=https://localhost:8444
|
||||
AURWEB_SSHD_PREFIX=ssh://aur@localhost:2222
|
||||
GIT_DATA_DIR="./aur.git/"
|
||||
TEST_RECURSION_LIMIT=10000
|
||||
COMMIT_HASH=
|
2
.git-blame-ignore-revs
Normal file
2
.git-blame-ignore-revs
Normal file
|
@ -0,0 +1,2 @@
|
|||
# style: Run pre-commit
|
||||
9c6c13b78a30cb9d800043410799e29631f803d2
|
62
.gitignore
vendored
62
.gitignore
vendored
|
@ -1,9 +1,63 @@
|
|||
/data/
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
.vim/
|
||||
.pylintrc
|
||||
.coverage
|
||||
.idea
|
||||
/cache/*
|
||||
/logs/*
|
||||
/build/
|
||||
/dist/
|
||||
/aurweb.egg-info/
|
||||
/personal/
|
||||
/notes/
|
||||
/vendor/
|
||||
/pyrightconfig.json
|
||||
/taskell.md
|
||||
aur.git/
|
||||
aurweb.sqlite3
|
||||
conf/config
|
||||
conf/config.sqlite
|
||||
conf/config.sqlite.defaults
|
||||
conf/docker
|
||||
conf/docker.defaults
|
||||
data.sql
|
||||
dummy-data.sql*
|
||||
fastapi_aw/
|
||||
htmlcov/
|
||||
po/*.mo
|
||||
po/*.po~
|
||||
po/POTFILES
|
||||
web/locale/*/
|
||||
aur.git/
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
schema/aur-schema-sqlite.sql
|
||||
test/test-results/
|
||||
test/trash_directory*
|
||||
web/locale/*/
|
||||
web/html/*.gz
|
||||
|
||||
# Do not stage compiled asciidoc: make -C doc
|
||||
doc/rpc.html
|
||||
|
||||
# Ignore any user-configured .envrc files at the root.
|
||||
/.envrc
|
||||
|
||||
# Ignore .python-version file from Pyenv
|
||||
.python-version
|
||||
|
||||
# Ignore coverage report
|
||||
coverage.xml
|
||||
|
||||
# Ignore pytest report
|
||||
report.xml
|
||||
|
||||
# Ignore test emails
|
||||
test-emails/
|
||||
|
||||
# Ignore typical virtualenv directories
|
||||
env/
|
||||
venv/
|
||||
.venv/
|
||||
|
||||
# Ignore some terraform files
|
||||
/ci/tf/.terraform
|
||||
/ci/tf/terraform.tfstate*
|
||||
|
|
161
.gitlab-ci.yml
Normal file
161
.gitlab-ci.yml
Normal file
|
@ -0,0 +1,161 @@
|
|||
image: archlinux:base-devel
|
||||
cache:
|
||||
key: system-v1
|
||||
paths:
|
||||
# For some reason Gitlab CI only supports storing cache/artifacts in a path relative to the build directory
|
||||
- .pkg-cache
|
||||
- .venv
|
||||
- .pre-commit
|
||||
|
||||
variables:
|
||||
AUR_CONFIG: conf/config # Default MySQL config setup in before_script.
|
||||
DB_HOST: localhost
|
||||
TEST_RECURSION_LIMIT: 10000
|
||||
CURRENT_DIR: "$(pwd)"
|
||||
LOG_CONFIG: logging.test.conf
|
||||
DEV_FQDN: aurweb-$CI_COMMIT_REF_SLUG.sandbox.archlinux.page
|
||||
INFRASTRUCTURE_REPO: https://gitlab.archlinux.org/archlinux/infrastructure.git
|
||||
|
||||
lint:
|
||||
stage: .pre
|
||||
before_script:
|
||||
- pacman -Sy --noconfirm --noprogressbar
|
||||
archlinux-keyring
|
||||
- pacman -Syu --noconfirm --noprogressbar
|
||||
git python python-pre-commit
|
||||
script:
|
||||
- export XDG_CACHE_HOME=.pre-commit
|
||||
- pre-commit run -a
|
||||
|
||||
test:
|
||||
stage: test
|
||||
before_script:
|
||||
- export PATH="$HOME/.poetry/bin:${PATH}"
|
||||
- ./docker/scripts/install-deps.sh
|
||||
- virtualenv -p python3 .venv
|
||||
- source .venv/bin/activate # Enable our virtualenv cache
|
||||
- ./docker/scripts/install-python-deps.sh
|
||||
- useradd -U -d /aurweb -c 'AUR User' aur
|
||||
- ./docker/mariadb-entrypoint.sh
|
||||
- (cd '/usr' && /usr/bin/mysqld_safe --datadir='/var/lib/mysql') &
|
||||
- 'until : > /dev/tcp/127.0.0.1/3306; do sleep 1s; done'
|
||||
- cp -v conf/config.dev conf/config
|
||||
- sed -i "s;YOUR_AUR_ROOT;$(pwd);g" conf/config
|
||||
- ./docker/test-mysql-entrypoint.sh # Create mysql AUR_CONFIG.
|
||||
- make -C po all install # Compile translations.
|
||||
- make -C doc # Compile asciidoc.
|
||||
- make -C test clean # Cleanup coverage.
|
||||
script:
|
||||
# Run sharness.
|
||||
- make -C test sh
|
||||
# Run pytest.
|
||||
- pytest --junitxml="pytest-report.xml"
|
||||
- make -C test coverage # Produce coverage reports.
|
||||
coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
|
||||
artifacts:
|
||||
reports:
|
||||
junit: pytest-report.xml
|
||||
coverage_report:
|
||||
coverage_format: cobertura
|
||||
path: coverage.xml
|
||||
|
||||
.init_tf: &init_tf
|
||||
- pacman -Syu --needed --noconfirm terraform
|
||||
- export TF_VAR_name="aurweb-${CI_COMMIT_REF_SLUG}"
|
||||
- TF_ADDRESS="${CI_API_V4_URL}/projects/${TF_STATE_PROJECT}/terraform/state/${CI_COMMIT_REF_SLUG}"
|
||||
- cd ci/tf
|
||||
- >
|
||||
terraform init \
|
||||
-backend-config="address=${TF_ADDRESS}" \
|
||||
-backend-config="lock_address=${TF_ADDRESS}/lock" \
|
||||
-backend-config="unlock_address=${TF_ADDRESS}/lock" \
|
||||
-backend-config="username=x-access-token" \
|
||||
-backend-config="password=${TF_STATE_GITLAB_ACCESS_TOKEN}" \
|
||||
-backend-config="lock_method=POST" \
|
||||
-backend-config="unlock_method=DELETE" \
|
||||
-backend-config="retry_wait_min=5"
|
||||
|
||||
deploy_review:
|
||||
stage: deploy
|
||||
script:
|
||||
- *init_tf
|
||||
- terraform apply -auto-approve
|
||||
environment:
|
||||
name: review/$CI_COMMIT_REF_NAME
|
||||
url: https://$DEV_FQDN
|
||||
on_stop: stop_review
|
||||
auto_stop_in: 1 week
|
||||
rules:
|
||||
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
|
||||
when: never
|
||||
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
|
||||
when: manual
|
||||
|
||||
provision_review:
|
||||
stage: deploy
|
||||
needs:
|
||||
- deploy_review
|
||||
script:
|
||||
- *init_tf
|
||||
- pacman -Syu --noconfirm --needed ansible git openssh jq
|
||||
# Get ssh key from terraform state file
|
||||
- mkdir -p ~/.ssh
|
||||
- chmod 700 ~/.ssh
|
||||
- terraform show -json |
|
||||
jq -r '.values.root_module.resources[] |
|
||||
select(.address == "tls_private_key.this") |
|
||||
.values.private_key_openssh' > ~/.ssh/id_ed25519
|
||||
- chmod 400 ~/.ssh/id_ed25519
|
||||
# Clone infra repo
|
||||
- git clone $INFRASTRUCTURE_REPO
|
||||
- cd infrastructure
|
||||
# Remove vault files
|
||||
- rm $(git grep -l 'ANSIBLE_VAULT;1.1;AES256$')
|
||||
# Remove vault config
|
||||
- sed -i '/^vault/d' ansible.cfg
|
||||
# Add host config
|
||||
- mkdir -p host_vars/$DEV_FQDN
|
||||
- 'echo "filesystem: btrfs" > host_vars/$DEV_FQDN/misc'
|
||||
# Add host
|
||||
- echo "$DEV_FQDN" > hosts
|
||||
# Add our pubkey and hostkeys
|
||||
- ssh-keyscan $DEV_FQDN >> ~/.ssh/known_hosts
|
||||
- ssh-keygen -f ~/.ssh/id_ed25519 -y > pubkeys/aurweb-dev.pub
|
||||
# Run our ansible playbook
|
||||
- >
|
||||
ansible-playbook playbooks/aur-dev.archlinux.org.yml \
|
||||
-e "aurdev_fqdn=$DEV_FQDN" \
|
||||
-e "aurweb_repository=$CI_REPOSITORY_URL" \
|
||||
-e "aurweb_version=$CI_COMMIT_SHA" \
|
||||
-e "{\"vault_mariadb_users\":{\"root\":\"aur\"}}" \
|
||||
-e "vault_aurweb_db_password=aur" \
|
||||
-e "vault_aurweb_gitlab_instance=https://does.not.exist" \
|
||||
-e "vault_aurweb_error_project=set-me" \
|
||||
-e "vault_aurweb_error_token=set-me" \
|
||||
-e "vault_aurweb_secret=aur" \
|
||||
-e "vault_goaurrpc_metrics_token=aur" \
|
||||
-e '{"root_additional_keys": ["moson.pub", "aurweb-dev.pub"]}'
|
||||
environment:
|
||||
name: review/$CI_COMMIT_REF_NAME
|
||||
action: access
|
||||
rules:
|
||||
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
|
||||
when: never
|
||||
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
|
||||
|
||||
stop_review:
|
||||
stage: deploy
|
||||
needs:
|
||||
- deploy_review
|
||||
script:
|
||||
- *init_tf
|
||||
- terraform destroy -auto-approve
|
||||
- 'curl --silent --show-error --fail --header "Private-Token: ${TF_STATE_GITLAB_ACCESS_TOKEN}" --request DELETE "${CI_API_V4_URL}/projects/${TF_STATE_PROJECT}/terraform/state/${CI_COMMIT_REF_SLUG}"'
|
||||
environment:
|
||||
name: review/$CI_COMMIT_REF_NAME
|
||||
action: stop
|
||||
rules:
|
||||
- if: $CI_COMMIT_REF_NAME =~ /^renovate\//
|
||||
when: never
|
||||
- if: $CI_MERGE_REQUEST_ID && $CI_PROJECT_PATH == "archlinux/aurweb"
|
||||
when: manual
|
60
.gitlab/issue_templates/Bug.md
Normal file
60
.gitlab/issue_templates/Bug.md
Normal file
|
@ -0,0 +1,60 @@
|
|||
<!--
|
||||
This template is used to report potential bugs with the AURweb website.
|
||||
|
||||
NOTE: All comment sections with a MODIFY note need to be edited. All checkboxes
|
||||
in the "Checklist" section need to be checked by the owner of the issue.
|
||||
-->
|
||||
/label ~bug ~unconfirmed
|
||||
/title [BUG] <!-- MODIFY: add subject -->
|
||||
<!--
|
||||
Please do not remove the above quick actions, which automatically label the
|
||||
issue and assign relevant users.
|
||||
-->
|
||||
|
||||
### Checklist
|
||||
|
||||
**NOTE:** This bug template is meant to provide bug issues for code existing in
|
||||
the aurweb repository.
|
||||
|
||||
**This bug template is not meant to handle bugs with user-uploaded packages.**
|
||||
To report issues you might have found in a user-uploaded package, contact
|
||||
the package's maintainer in comments.
|
||||
|
||||
- [ ] I confirm that this is an issue with aurweb's code and not a
|
||||
user-uploaded package.
|
||||
- [ ] I have described the bug in complete detail in the
|
||||
[Description](#description) section.
|
||||
- [ ] I have specified steps in the [Reproduction](#reproduction) section.
|
||||
- [ ] I have included any logs related to the bug in the
|
||||
[Logs](#logs) section.
|
||||
- [ ] I have included the versions which are affected in the
|
||||
[Version(s)](#versions) section.
|
||||
|
||||
### Description
|
||||
|
||||
Describe the bug in full detail.
|
||||
|
||||
### Reproduction
|
||||
|
||||
Describe a specific set of actions that can be used to reproduce
|
||||
this bug.
|
||||
|
||||
### Logs
|
||||
|
||||
If you have any logs relevant to the bug, include them here in
|
||||
quoted or code blocks.
|
||||
|
||||
### Version(s)
|
||||
|
||||
In this section, please include a list of versions you have found
|
||||
to be affected by this program. This can either come in the form
|
||||
of `major.minor.patch` (if it affects a release tarball), or a
|
||||
commit hash if the bug does not directly affect a release version.
|
||||
|
||||
All development is done without modifying version displays in
|
||||
aurweb's HTML render output. If you're testing locally, use the
|
||||
commit on which you are experiencing the bug. If you have found
|
||||
a bug which exists on live aur.archlinux.org, include the version
|
||||
located at the bottom of the webpage.
|
||||
|
||||
/label bug unconfirmed
|
52
.gitlab/issue_templates/Feature.md
Normal file
52
.gitlab/issue_templates/Feature.md
Normal file
|
@ -0,0 +1,52 @@
|
|||
<!--
|
||||
This template is used to feature request for AURweb website.
|
||||
|
||||
NOTE: All comment sections with a MODIFY note need to be edited. All checkboxes
|
||||
in the "Checklist" section need to be checked by the owner of the issue.
|
||||
-->
|
||||
/label ~feature ~unconfirmed
|
||||
/title [FEATURE] <!-- MODIFY: add subject -->
|
||||
<!--
|
||||
Please do not remove the above quick actions, which automatically label the
|
||||
issue and assign relevant users.
|
||||
-->
|
||||
|
||||
### Checklist
|
||||
|
||||
**NOTE:** This bug template is meant to provide bug issues for code existing in
|
||||
the aurweb repository.
|
||||
|
||||
**This bug template is not meant to handle bugs with user-uploaded packages.**
|
||||
To report issues you might have found in a user-uploaded package, contact
|
||||
the package's maintainer in comments.
|
||||
|
||||
- [ ] I have summed up the feature in concise words in the [Summary](#summary) section.
|
||||
- [ ] I have completely described the feature in the [Description](#description) section.
|
||||
- [ ] I have completed the [Blockers](#blockers) section.
|
||||
|
||||
### Summary
|
||||
|
||||
Fill this section out with a concise wording about the feature being
|
||||
requested.
|
||||
|
||||
Example: _A new `Tyrant` account type for users_.
|
||||
|
||||
### Description
|
||||
|
||||
Describe your feature in full detail.
|
||||
|
||||
Example: _The `Tyrant` account type should be used to allow a user to be
|
||||
tyrannical. When a user is a `Tyrant`, they should be able to assassinate
|
||||
users due to not complying with their laws. Laws can be configured by updating
|
||||
the Tyrant laws page at https://aur.archlinux.org/account/{username}/laws.
|
||||
More specifics about laws._
|
||||
|
||||
### Blockers
|
||||
|
||||
Include any blockers in a list. If there are no blockers, this section
|
||||
should be omitted from the issue.
|
||||
|
||||
Example:
|
||||
|
||||
- [Feature] Do not allow users to be Tyrants
|
||||
- \<(issue|merge_request)_link\>
|
36
.pre-commit-config.yaml
Normal file
36
.pre-commit-config.yaml
Normal file
|
@ -0,0 +1,36 @@
|
|||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.5.0
|
||||
hooks:
|
||||
- id: check-added-large-files
|
||||
- id: check-case-conflict
|
||||
- id: check-merge-conflict
|
||||
- id: check-toml
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
exclude: ^po/
|
||||
- id: debug-statements
|
||||
|
||||
- repo: https://github.com/myint/autoflake
|
||||
rev: v2.3.1
|
||||
hooks:
|
||||
- id: autoflake
|
||||
args:
|
||||
- --in-place
|
||||
- --remove-all-unused-imports
|
||||
- --ignore-init-module-imports
|
||||
|
||||
- repo: https://github.com/pycqa/isort
|
||||
rev: 5.13.2
|
||||
hooks:
|
||||
- id: isort
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 24.4.1
|
||||
hooks:
|
||||
- id: black
|
||||
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 7.0.0
|
||||
hooks:
|
||||
- id: flake8
|
23
.travis.yml
23
.travis.yml
|
@ -1,23 +0,0 @@
|
|||
language: python
|
||||
|
||||
python: 3.6
|
||||
|
||||
addons:
|
||||
apt:
|
||||
packages:
|
||||
- bsdtar
|
||||
- libarchive-dev
|
||||
- libgpgme11-dev
|
||||
- libprotobuf-dev
|
||||
|
||||
install:
|
||||
- curl https://codeload.github.com/libgit2/libgit2/tar.gz/v0.26.0 | tar -xz
|
||||
- curl https://sources.archlinux.org/other/pacman/pacman-5.0.2.tar.gz | tar -xz
|
||||
- curl https://git.archlinux.org/pyalpm.git/snapshot/pyalpm-0.8.1.tar.gz | tar -xz
|
||||
- ( cd libgit2-0.26.0 && cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr && make && sudo make install )
|
||||
- ( cd pacman-5.0.2 && ./configure --prefix=/usr && make && sudo make install )
|
||||
- ( cd pyalpm-0.8.1 && python setup.py build && python setup.py install )
|
||||
- pip install mysql-connector-python-rf pygit2==0.26 srcinfo
|
||||
- pip install bleach Markdown
|
||||
|
||||
script: make -C test
|
|
@ -1,7 +1,7 @@
|
|||
[main]
|
||||
host = https://www.transifex.com
|
||||
host = https://app.transifex.com
|
||||
|
||||
[aurweb.aurwebpot]
|
||||
[o:lfleischer:p:aurweb:r:aurwebpot]
|
||||
file_filter = po/<lang>.po
|
||||
source_file = po/aurweb.pot
|
||||
source_lang = en
|
||||
|
|
105
CONTRIBUTING.md
Normal file
105
CONTRIBUTING.md
Normal file
|
@ -0,0 +1,105 @@
|
|||
# Contributing
|
||||
|
||||
Patches should be sent to the [aur-dev@lists.archlinux.org][1] mailing list
|
||||
or included in a merge request on the [aurweb repository][2].
|
||||
|
||||
Before sending patches, you are recommended to run `flake8` and `isort`.
|
||||
|
||||
You can add a git hook to do this by installing `python-pre-commit` and running
|
||||
`pre-commit install`.
|
||||
|
||||
[1]: https://lists.archlinux.org/mailman3/lists/aur-dev.lists.archlinux.org/
|
||||
[2]: https://gitlab.archlinux.org/archlinux/aurweb
|
||||
|
||||
### Coding Guidelines
|
||||
|
||||
DISCLAIMER: We realise the code doesn't necessarily follow all the rules.
|
||||
This is an attempt to establish a standard coding style for future
|
||||
development.
|
||||
|
||||
1. All source modified or added within a patchset **must** maintain equivalent
|
||||
or increased coverage by providing tests that use the functionality
|
||||
2. Please keep your source within an 80 column width
|
||||
3. Use four space indentation
|
||||
4. Use [conventional commits](https://www.conventionalcommits.org/en/v1.0.0/)
|
||||
5. DRY: Don't Repeat Yourself
|
||||
6. All code should be tested for good _and_ bad cases (see [test/README.md][3])
|
||||
|
||||
[3]: https://gitlab.archlinux.org/archlinux/aurweb/-/blob/master/test/README.md
|
||||
|
||||
Test patches that increase coverage in the codebase are always welcome.
|
||||
|
||||
### Coding Style
|
||||
|
||||
We use `autoflake`, `isort`, `black` and `flake8` to enforce coding style in a
|
||||
PEP-8 compliant way. These tools run in GitLab CI using `pre-commit` to verify
|
||||
that any pushed code changes comply with this.
|
||||
|
||||
To enable the `pre-commit` git hook, install the `pre-commit` package either
|
||||
with `pacman` or `pip` and then run `pre-commit install --install-hooks`. This
|
||||
will ensure formatting is done before any code is commited to the git
|
||||
repository.
|
||||
|
||||
There are plugins for editors or IDEs which automate this process. Some
|
||||
example plugins:
|
||||
|
||||
- [tenfyzhong/autoflake.vim](https://github.com/tenfyzhong/autoflake.vim)
|
||||
- [fisadev/vim-isort](https://github.com/fisadev/vim-isort)
|
||||
- [psf/black](https://github.com/psf/black)
|
||||
- [nvie/vim-flake8](https://github.com/nvie/vim-flake8)
|
||||
- [prabirshrestha/vim-lsp](https://github.com/prabirshrestha/vim-lsp)
|
||||
- [dense-analysis/ale](https://github.com/dense-analysis/ale)
|
||||
|
||||
See `setup.cfg`, `pyproject.toml` and `.pre-commit-config.yaml` for tool
|
||||
specific configurations.
|
||||
|
||||
### Development Environment
|
||||
|
||||
To get started with local development, an instance of aurweb must be
|
||||
brought up. This can be done using the following sections:
|
||||
|
||||
- [Using Docker](#using-docker)
|
||||
- [Using INSTALL](#using-install)
|
||||
|
||||
There are a number of services aurweb employs to run the application
|
||||
in its entirety:
|
||||
|
||||
- ssh
|
||||
- cron jobs
|
||||
- starlette/fastapi asgi server
|
||||
|
||||
Project structure:
|
||||
|
||||
- `./aurweb`: `aurweb` Python package
|
||||
- `./templates`: Jinja2 templates
|
||||
- `./docker`: Docker scripts and configuration files
|
||||
|
||||
#### Using Docker
|
||||
|
||||
Using Docker, we can run the entire infrastructure in two steps:
|
||||
|
||||
# Build the aurweb:latest image
|
||||
$ docker-compose build
|
||||
|
||||
# Start all services in the background
|
||||
$ docker-compose up -d nginx
|
||||
|
||||
`docker-compose` services will generate a locally signed root certificate
|
||||
at `./data/root_ca.crt`. Users can import this into ca-certificates or their
|
||||
browser if desired.
|
||||
|
||||
Accessible services (on the host):
|
||||
|
||||
- https://localhost:8444 (python via nginx)
|
||||
- localhost:13306 (mariadb)
|
||||
- localhost:16379 (redis)
|
||||
|
||||
Docker services, by default, are setup to be hot reloaded when source code
|
||||
is changed.
|
||||
|
||||
For detailed setup instructions have a look at [TESTING](TESTING)
|
||||
|
||||
#### Using INSTALL
|
||||
|
||||
The [INSTALL](INSTALL) file describes steps to install the application on
|
||||
bare-metal systems.
|
47
Dockerfile
Normal file
47
Dockerfile
Normal file
|
@ -0,0 +1,47 @@
|
|||
FROM archlinux:base-devel
|
||||
|
||||
VOLUME /root/.cache/pypoetry/cache
|
||||
VOLUME /root/.cache/pypoetry/artifacts
|
||||
VOLUME /root/.cache/pre-commit
|
||||
|
||||
ENV PATH="/root/.poetry/bin:${PATH}"
|
||||
ENV PYTHONPATH=/aurweb
|
||||
ENV AUR_CONFIG=conf/config
|
||||
ENV COMPOSE=1
|
||||
|
||||
# Install system-wide dependencies.
|
||||
COPY ./docker/scripts/install-deps.sh /install-deps.sh
|
||||
RUN /install-deps.sh
|
||||
|
||||
# Copy Docker scripts
|
||||
COPY ./docker /docker
|
||||
COPY ./docker/scripts/* /usr/local/bin/
|
||||
|
||||
|
||||
# Copy over all aurweb files.
|
||||
COPY . /aurweb
|
||||
|
||||
# Working directory is aurweb root @ /aurweb.
|
||||
WORKDIR /aurweb
|
||||
|
||||
# Copy initial config to conf/config.
|
||||
RUN cp -vf conf/config.dev conf/config
|
||||
RUN sed -i "s;YOUR_AUR_ROOT;/aurweb;g" conf/config
|
||||
|
||||
# Install Python dependencies.
|
||||
RUN /docker/scripts/install-python-deps.sh compose
|
||||
|
||||
# Compile asciidocs.
|
||||
RUN make -C doc
|
||||
|
||||
# Add our aur user.
|
||||
RUN useradd -U -d /aurweb -c 'AUR User' aur
|
||||
|
||||
# Setup some default system stuff.
|
||||
RUN ln -sf /usr/share/zoneinfo/UTC /etc/localtime
|
||||
|
||||
# Install translations.
|
||||
RUN make -C po all install
|
||||
|
||||
# Install pre-commit repositories and run lint check.
|
||||
RUN pre-commit run -a
|
163
INSTALL
163
INSTALL
|
@ -4,62 +4,129 @@ Setup on Arch Linux
|
|||
For testing aurweb patches before submission, you can use the instructions in
|
||||
TESTING for testing the web interface only.
|
||||
|
||||
Note that you can only do limited testing using the PHP built-in web server.
|
||||
In particular, the cgit interface will be unusable as well as the ssh+git
|
||||
interface. For a detailed description on how to setup a full aurweb server,
|
||||
For a detailed description on how to setup a full aurweb server,
|
||||
read the instructions below.
|
||||
|
||||
1) Clone the aurweb project:
|
||||
1) Clone the aurweb project and install it (via `python-poetry`):
|
||||
|
||||
$ cd /srv/http/
|
||||
$ git clone git://git.archlinux.org/aurweb.git
|
||||
$ cd /srv/http/
|
||||
$ git clone git://git.archlinux.org/aurweb.git
|
||||
$ cd aurweb
|
||||
$ poetry install
|
||||
|
||||
2) Setup a web server with PHP and MySQL. Configure the web server to redirect
|
||||
all URLs to /index.php/foo/bar/. The following block can be used with nginx:
|
||||
2) Setup a web server with MySQL. The following block can be used with nginx:
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
# https is preferred and can be done easily with LetsEncrypt
|
||||
# or self-CA signing. Users can still listen over 80 for plain
|
||||
# http, for which the [options] disable_http_login used to toggle
|
||||
# the authentication feature.
|
||||
listen 443 ssl http2;
|
||||
server_name aur.local aur;
|
||||
|
||||
root /srv/http/aurweb/web/html;
|
||||
index index.php;
|
||||
# To enable SSL proxy properly, make sure gunicorn and friends
|
||||
# are supporting forwarded headers over 127.0.0.1 or any if
|
||||
# the asgi server is contacted by non-localhost hosts.
|
||||
ssl_certificate /etc/ssl/certs/aur.cert.pem;
|
||||
ssl_certificate_key /etc/ssl/private/aur.key.pem;
|
||||
|
||||
location ~ ^/[^/]+\.php($|/) {
|
||||
fastcgi_pass unix:/var/run/php-fpm/php-fpm.sock;
|
||||
fastcgi_index index.php;
|
||||
fastcgi_split_path_info ^(/[^/]+\.php)(/.*)$;
|
||||
fastcgi_param SCRIPT_FILENAME $document_root$fastcgi_script_name;
|
||||
fastcgi_param PATH_INFO $fastcgi_path_info;
|
||||
include fastcgi_params;
|
||||
# smartgit location.
|
||||
location ~ "^/([a-z0-9][a-z0-9.+_-]*?)(\.git)?/(git-(receive|upload)-pack|HEAD|info/refs|objects/(info/(http-)?alternates|packs)|[0-9a-f]{2}/[0-9a-f]{38}|pack/pack-[0-9a-f]{40}\.(pack|idx))$" {
|
||||
include uwsgi_params;
|
||||
uwsgi_pass smartgit;
|
||||
uwsgi_modifier1 9;
|
||||
uwsgi_param SCRIPT_FILENAME /usr/lib/git-core/git-http-backend;
|
||||
uwsgi_param PATH_INFO /aur.git/$3;
|
||||
uwsgi_param GIT_HTTP_EXPORT_ALL "";
|
||||
uwsgi_param GIT_NAMESPACE $1;
|
||||
uwsgi_param GIT_PROJECT_ROOT /srv/http/aurweb;
|
||||
}
|
||||
|
||||
location ~ .* {
|
||||
rewrite ^/(.*)$ /index.php/$1 last;
|
||||
# cgitrc.proto should be configured and located somewhere
|
||||
# of your choosing.
|
||||
location ~ ^/cgit {
|
||||
include uwsgi_params;
|
||||
rewrite ^/cgit/([^?/]+/[^?]*)?(?:\?(.*))?$ /cgit.cgi?url=$1&$2 last;
|
||||
uwsgi_modifier1 9;
|
||||
uwsgi_param CGIT_CONFIG /srv/http/aurweb/conf/cgitrc.proto;
|
||||
uwsgi_pass cgit;
|
||||
}
|
||||
|
||||
# Static archive assets.
|
||||
location ~ \.gz$ {
|
||||
# Asset root. This is used to match against gzip archives.
|
||||
root /srv/http/aurweb/archives;
|
||||
|
||||
types { application/gzip text/plain }
|
||||
default_type text/plain;
|
||||
add_header Content-Encoding gzip;
|
||||
expires 5m;
|
||||
}
|
||||
|
||||
# For everything else, proxy the http request to (guni|uvi|hyper)corn.
|
||||
# The ASGI server application should allow this request's IP to be
|
||||
# forwarded via the headers used below.
|
||||
# https://docs.gunicorn.org/en/stable/settings.html#forwarded-allow-ips
|
||||
location / {
|
||||
proxy_pass http://127.0.0.1:8000;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Protocol ssl;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-Ssl on;
|
||||
}
|
||||
}
|
||||
|
||||
Ensure to enable the pdo_mysql extension in php.ini.
|
||||
|
||||
3) Optionally copy conf/config.defaults to /etc/aurweb/. Create or copy
|
||||
/etc/aurweb/config (this is expected to contain all configuration settings
|
||||
if the defaults file does not exist) and adjust the configuration (pay
|
||||
attention to disable_http_login, enable_maintenance and aur_location).
|
||||
|
||||
4) Create a new MySQL database and a user and import the aurweb SQL schema:
|
||||
4) Install system-wide dependencies:
|
||||
|
||||
$ mysql -uaur -p AUR </srv/http/aurweb/schema/aur-schema.sql
|
||||
# pacman -S git gpgme cgit curl openssh uwsgi uwsgi-plugin-cgi \
|
||||
python-poetry
|
||||
|
||||
5) Install Python modules and dependencies:
|
||||
|
||||
# pacman -S python-mysql-connector python-pygit2 python-srcinfo
|
||||
# pacman -S python-bleach python-markdown
|
||||
# python3 setup.py install
|
||||
|
||||
6) Create a new user:
|
||||
5) Create a new user:
|
||||
|
||||
# useradd -U -d /srv/http/aurweb -c 'AUR user' aur
|
||||
# su - aur
|
||||
|
||||
7) Initialize the Git repository:
|
||||
6a) Install Python dependencies via poetry:
|
||||
|
||||
# Install the package and scripts as the aur user.
|
||||
$ poetry install
|
||||
|
||||
6b) Setup Services
|
||||
|
||||
aurweb utilizes the following systemd services:
|
||||
- mariadb
|
||||
- redis (optional, requires [options] cache 'redis')
|
||||
- `examples/aurweb.service`
|
||||
|
||||
6c) Setup Cron
|
||||
|
||||
Using [cronie](https://archlinux.org/packages/core/x86_64/cronie/):
|
||||
|
||||
# su - aur
|
||||
$ crontab -e
|
||||
|
||||
The following crontab file uses every script meant to be run on an
|
||||
interval:
|
||||
|
||||
AUR_CONFIG='/etc/aurweb/config'
|
||||
*/5 * * * * bash -c 'poetry run aurweb-mkpkglists --extended'
|
||||
*/2 * * * * bash -c 'poetry run aurweb-aurblup'
|
||||
*/2 * * * * bash -c 'poetry run aurweb-pkgmaint'
|
||||
*/2 * * * * bash -c 'poetry run aurweb-usermaint'
|
||||
*/2 * * * * bash -c 'poetry run aurweb-popupdate'
|
||||
*/12 * * * * bash -c 'poetry run aurweb-votereminder'
|
||||
|
||||
7) Create a new database and a user and import the aurweb SQL schema:
|
||||
|
||||
$ poetry run python -m aurweb.initdb
|
||||
|
||||
8) Initialize the Git repository:
|
||||
|
||||
# mkdir /srv/http/aurweb/aur.git/
|
||||
# cd /srv/http/aurweb/aur.git/
|
||||
|
@ -67,19 +134,26 @@ read the instructions below.
|
|||
# git config --local transfer.hideRefs '^refs/'
|
||||
# git config --local --add transfer.hideRefs '!refs/'
|
||||
# git config --local --add transfer.hideRefs '!HEAD'
|
||||
# ln -s /usr/local/bin/aurweb-git-update hooks/update
|
||||
# chown -R aur .
|
||||
|
||||
Link to `aurweb-git-update` poetry wrapper provided at
|
||||
`examples/aurweb-git-update.sh` which should be installed
|
||||
somewhere as executable.
|
||||
|
||||
# ln -s /path/to/aurweb-git-update.sh hooks/update
|
||||
|
||||
It is recommended to read doc/git-interface.txt for more information on the
|
||||
administration of the package Git repository.
|
||||
|
||||
8) Configure sshd(8) for the AUR. Add the following lines at the end of your
|
||||
sshd_config(5) and restart the sshd. Note that OpenSSH 6.9 or newer is
|
||||
needed!
|
||||
9) Configure sshd(8) for the AUR. Add the following lines at the end of your
|
||||
sshd_config(5) and restart the sshd.
|
||||
|
||||
If using a virtualenv, copy `examples/aurweb-git-auth.sh` to a location
|
||||
and call it below:
|
||||
|
||||
Match User aur
|
||||
PasswordAuthentication no
|
||||
AuthorizedKeysCommand /usr/local/bin/aurweb-git-auth "%t" "%k"
|
||||
AuthorizedKeysCommand /path/to/aurweb-git-auth.sh "%t" "%k"
|
||||
AuthorizedKeysCommandUser aur
|
||||
AcceptEnv AUR_OVERWRITE
|
||||
|
||||
|
@ -98,8 +172,17 @@ read the instructions below.
|
|||
|
||||
Sample systemd unit files for fcgiwrap can be found under conf/.
|
||||
|
||||
10) If you want memcache to cache MySQL data.
|
||||
10) If you want Redis to cache data.
|
||||
|
||||
# pacman -S php-memcached
|
||||
# pacman -S redis
|
||||
# systemctl enable --now redis
|
||||
|
||||
And edit the configuration file to enabled memcache caching.
|
||||
And edit the configuration file to enabled redis caching
|
||||
(`[options] cache = redis`).
|
||||
|
||||
11) Start `aurweb.service`.
|
||||
|
||||
An example systemd unit has been included at `examples/aurweb.service`.
|
||||
This unit can be used to manage the aurweb asgi backend. By default,
|
||||
it is configured to use `poetry` as the `aur` user; this should be
|
||||
configured as needed.
|
||||
|
|
201
LICENSES/starlette_exporter
Normal file
201
LICENSES/starlette_exporter
Normal file
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
58
README
58
README
|
@ -1,58 +0,0 @@
|
|||
aurweb
|
||||
======
|
||||
|
||||
aurweb is a hosting platform for the Arch User Repository (AUR), a collection
|
||||
of packaging scripts that are created and submitted by the Arch Linux
|
||||
community. The scripts contained in the repository can be built using `makepkg`
|
||||
and installed using the Arch Linux package manager `pacman`.
|
||||
|
||||
The aurweb project includes
|
||||
|
||||
* A web interface to search for packaging scripts and display package details.
|
||||
* A SSH/Git interface to submit and update packages and package meta data.
|
||||
* Community features such as comments, votes, package flagging and requests.
|
||||
* Editing/deletion of packages and accounts by Trusted Users and Developers.
|
||||
* Area for Trusted Users to post AUR-related proposals and vote on them.
|
||||
|
||||
Directory Layout
|
||||
----------------
|
||||
|
||||
aurweb::
|
||||
aurweb Python modules.
|
||||
|
||||
conf::
|
||||
Configuration and configuration templates.
|
||||
|
||||
doc::
|
||||
Project documentation.
|
||||
|
||||
po::
|
||||
Translation files for strings in the aurweb interface.
|
||||
|
||||
schema::
|
||||
Schema for the SQL database. Script for dummy data generation.
|
||||
|
||||
scripts::
|
||||
Scripts for AUR maintenance.
|
||||
|
||||
test::
|
||||
Test suite and test cases.
|
||||
|
||||
upgrading::
|
||||
Instructions for upgrading setups from one release to another.
|
||||
|
||||
web::
|
||||
Web interface for the AUR.
|
||||
|
||||
Links
|
||||
-----
|
||||
|
||||
* The repository is hosted at git://git.archlinux.org/aurweb.git -- see
|
||||
doc/CodingGuidelines for information on the patch submission process.
|
||||
|
||||
* Bugs can (and should) be submitted to the aurweb bug tracker:
|
||||
https://bugs.archlinux.org/index.php?project=2
|
||||
|
||||
* Questions, comments, and patches related to aurweb can be sent to the AUR
|
||||
development mailing list: aur-dev@archlinux.org -- mailing list archives:
|
||||
https://mailman.archlinux.org/mailman/listinfo/aur-dev
|
66
README.md
Normal file
66
README.md
Normal file
|
@ -0,0 +1,66 @@
|
|||
aurweb
|
||||
======
|
||||
|
||||
aurweb is a hosting platform for the Arch User Repository (AUR), a collection
|
||||
of packaging scripts that are created and submitted by the Arch Linux
|
||||
community. The scripts contained in the repository can be built using `makepkg`
|
||||
and installed using the Arch Linux package manager `pacman`.
|
||||
|
||||
The aurweb project includes
|
||||
|
||||
* A web interface to search for packaging scripts and display package details.
|
||||
* An SSH/Git interface to submit and update packages and package meta data.
|
||||
* Community features such as comments, votes, package flagging and requests.
|
||||
* Editing/deletion of packages and accounts by Package Maintainers and Developers.
|
||||
* Area for Package Maintainers to post AUR-related proposals and vote on them.
|
||||
|
||||
Directory Layout
|
||||
----------------
|
||||
|
||||
* `aurweb`: aurweb Python modules, Git interface and maintenance scripts
|
||||
* `conf`: configuration and configuration templates
|
||||
* `static`: static resource files
|
||||
* `templates`: jinja2 template collection
|
||||
* `doc`: project documentation
|
||||
* `po`: translation files for strings in the aurweb interface
|
||||
* `schema`: schema for the SQL database
|
||||
* `test`: test suite and test cases
|
||||
* `upgrading`: instructions for upgrading setups from one release to another
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
|
||||
| What | Link |
|
||||
|--------------|--------------------------------------------------|
|
||||
| Installation | [INSTALL](./INSTALL) |
|
||||
| Testing | [test/README.md](./test/README.md) |
|
||||
| Git | [doc/git-interface.txt](./doc/git-interface.txt) |
|
||||
| Maintenance | [doc/maintenance.txt](./doc/maintenance.txt) |
|
||||
| RPC | [doc/rpc.txt](./doc/rpc.txt) |
|
||||
| Docker | [doc/docker.md](./doc/docker.md) |
|
||||
|
||||
Links
|
||||
-----
|
||||
|
||||
* The repository is hosted at https://gitlab.archlinux.org/archlinux/aurweb
|
||||
-- see [CONTRIBUTING.md](./CONTRIBUTING.md) for information on the patch submission process.
|
||||
|
||||
* Bugs can (and should) be submitted to the aurweb bug tracker:
|
||||
https://gitlab.archlinux.org/archlinux/aurweb/-/issues/new?issuable_template=Bug
|
||||
|
||||
* Questions, comments, and patches related to aurweb can be sent to the AUR
|
||||
development mailing list: aur-dev@archlinux.org -- mailing list archives:
|
||||
https://mailman.archlinux.org/mailman/listinfo/aur-dev
|
||||
|
||||
Translations
|
||||
------------
|
||||
|
||||
Translations are welcome via our Transifex project at
|
||||
https://www.transifex.com/lfleischer/aurweb; see [doc/i18n.md](./doc/i18n.md) for details.
|
||||
|
||||

|
||||
|
||||
Testing
|
||||
-------
|
||||
|
||||
See [test/README.md](test/README.md) for details on dependencies and testing.
|
181
TESTING
181
TESTING
|
@ -1,6 +1,56 @@
|
|||
Setup Testing Environment
|
||||
=========================
|
||||
|
||||
The quickest way to get you hacking on aurweb is to utilize docker.
|
||||
In case you prefer to run it bare-metal see instructions further below.
|
||||
|
||||
Containerized environment
|
||||
-------------------------
|
||||
|
||||
1) Clone the aurweb project:
|
||||
|
||||
$ git clone https://gitlab.archlinux.org/archlinux/aurweb.git
|
||||
$ cd aurweb
|
||||
|
||||
2) Install the necessary packages:
|
||||
|
||||
# pacman -S --needed docker docker-compose
|
||||
|
||||
3) Build the aurweb:latest image:
|
||||
|
||||
# systemctl start docker
|
||||
# docker compose build
|
||||
|
||||
4) Run local Docker development instance:
|
||||
|
||||
# docker compose up -d
|
||||
|
||||
5) Browse to local aurweb development server.
|
||||
|
||||
https://localhost:8444/
|
||||
|
||||
6) [Optionally] populate the database with dummy data:
|
||||
|
||||
# docker compose exec mariadb /bin/bash
|
||||
# pacman -S --noconfirm words fortune-mod
|
||||
# poetry run schema/gendummydata.py dummy_data.sql
|
||||
# mariadb -uaur -paur aurweb < dummy_data.sql
|
||||
# exit
|
||||
|
||||
Inspect `dummy_data.sql` for test credentials.
|
||||
Passwords match usernames.
|
||||
|
||||
We now have a fully set up environment which we can start and stop with:
|
||||
|
||||
# docker compose start
|
||||
# docker compose stop
|
||||
|
||||
Proceed with topic "Setup for running tests"
|
||||
|
||||
|
||||
Bare Metal installation
|
||||
-----------------------
|
||||
|
||||
Note that this setup is only to test the web interface. If you need to have a
|
||||
full aurweb instance with cgit, ssh interface, etc, follow the directions in
|
||||
INSTALL.
|
||||
|
@ -8,27 +58,128 @@ INSTALL.
|
|||
1) Clone the aurweb project:
|
||||
|
||||
$ git clone git://git.archlinux.org/aurweb.git
|
||||
$ cd aurweb
|
||||
|
||||
2) Install php and necessary modules:
|
||||
2) Install the necessary packages:
|
||||
|
||||
# pacman -S php php-sqlite sqlite words fortune-mod
|
||||
# pacman -S --needed python-poetry mariadb words fortune-mod nginx
|
||||
|
||||
Ensure to enable the pdo_sqlite extension in php.ini.
|
||||
3) Install the package/dependencies via `poetry`:
|
||||
|
||||
3) Prepare the testing database:
|
||||
$ poetry install
|
||||
|
||||
$ cd /path/to/aurweb/schema
|
||||
$ make
|
||||
$ ./gendummydata.py out.sql
|
||||
$ sqlite3 ../aurweb.sqlite3 < aur-schema-sqlite.sql
|
||||
$ sqlite3 ../aurweb.sqlite3 < out.sql
|
||||
4) Copy conf/config.dev to conf/config and replace YOUR_AUR_ROOT by the absolute
|
||||
path to the root of your aurweb clone. sed can do both tasks for you:
|
||||
|
||||
4) Copy conf/config.defaults to conf/config and adjust the configuration
|
||||
(pay attention to disable_http_login, enable_maintenance and aur_location).
|
||||
$ sed -e "s;YOUR_AUR_ROOT;$PWD;g" conf/config.dev > conf/config
|
||||
|
||||
Be sure to change backend to sqlite and name to the file location of your
|
||||
created test database.
|
||||
Note that when the upstream config.dev is updated, you should compare it to
|
||||
your conf/config, or regenerate your configuration with the command above.
|
||||
|
||||
5) Run the PHP built-in web server:
|
||||
5) Set up mariadb:
|
||||
|
||||
$ AUR_CONFIG='/path/to/aurweb/conf/config' php -S localhost:8080 -t /path/to/aurweb/web/html
|
||||
# mariadb-install-db --user=mysql --basedir=/usr --datadir=/var/lib/mysql
|
||||
# systemctl start mariadb
|
||||
# mariadb -u root
|
||||
> CREATE USER 'aur'@'localhost' IDENTIFIED BY 'aur';
|
||||
> GRANT ALL ON *.* TO 'aur'@'localhost' WITH GRANT OPTION;
|
||||
> CREATE DATABASE aurweb;
|
||||
> exit
|
||||
|
||||
6) Prepare a database and insert dummy data:
|
||||
|
||||
$ AUR_CONFIG=conf/config poetry run python -m aurweb.initdb
|
||||
$ poetry run schema/gendummydata.py dummy_data.sql
|
||||
$ mariadb -uaur -paur aurweb < dummy_data.sql
|
||||
|
||||
7) Run the test server:
|
||||
|
||||
## set AUR_CONFIG to our locally created config
|
||||
$ export AUR_CONFIG=conf/config
|
||||
|
||||
## with aurweb.spawn
|
||||
$ poetry run python -m aurweb.spawn
|
||||
|
||||
## with systemd service
|
||||
$ sudo install -m644 examples/aurweb.service /etc/systemd/system/
|
||||
# systemctl enable --now aurweb.service
|
||||
|
||||
|
||||
Setup for running tests
|
||||
-----------------------
|
||||
|
||||
If you've set up a docker environment, you can run the full test-suite with:
|
||||
# docker compose run test
|
||||
|
||||
You can collect code-coverage data with:
|
||||
$ ./util/fix-coverage data/.coverage
|
||||
|
||||
See information further below on how to visualize the data.
|
||||
|
||||
For running individual tests, we need to perform a couple of additional steps.
|
||||
In case you did the bare-metal install, steps 2, 3, 4 and 5 should be skipped.
|
||||
|
||||
1) Install the necessary packages:
|
||||
|
||||
# pacman -S --needed python-poetry mariadb-libs asciidoc openssh
|
||||
|
||||
2) Install the package/dependencies via `poetry`:
|
||||
|
||||
$ poetry install
|
||||
|
||||
3) Copy conf/config.dev to conf/config and replace YOUR_AUR_ROOT by the absolute
|
||||
path to the root of your aurweb clone. sed can do both tasks for you:
|
||||
|
||||
$ sed -e "s;YOUR_AUR_ROOT;$PWD;g" conf/config.dev > conf/config
|
||||
|
||||
Note that when the upstream config.dev is updated, you should compare it to
|
||||
your conf/config, or regenerate your configuration with the command above.
|
||||
|
||||
4) Edit the config file conf/config and change the mysql/mariadb portion
|
||||
|
||||
We can make use of our mariadb docker container instead of having to install
|
||||
mariadb. Change the config as follows:
|
||||
|
||||
---------------------------------------------------------------------
|
||||
; MySQL database information. User defaults to root for containerized
|
||||
; testing with mysqldb. This should be set to a non-root user.
|
||||
user = root
|
||||
password = aur
|
||||
host = 127.0.0.1
|
||||
port = 13306
|
||||
;socket = /var/run/mysqld/mysqld.sock
|
||||
---------------------------------------------------------------------
|
||||
|
||||
5) Start our mariadb docker container
|
||||
|
||||
# docker compose start mariadb
|
||||
|
||||
6) Set environment variables
|
||||
|
||||
$ export AUR_CONFIG=conf/config
|
||||
$ export LOG_CONFIG=logging.test.conf
|
||||
|
||||
7) Compile translation & doc files
|
||||
|
||||
$ make -C po install
|
||||
$ make -C doc
|
||||
|
||||
Now we can run our python test-suite or individual tests with:
|
||||
|
||||
$ poetry run pytest test/
|
||||
$ poetry run pytest test/test_whatever.py
|
||||
|
||||
To run Sharness tests:
|
||||
|
||||
$ poetry run make -C test sh
|
||||
|
||||
The e-Mails that have been generated can be found at test-emails/
|
||||
|
||||
After test runs, code-coverage reports can be created with:
|
||||
## CLI report
|
||||
$ coverage report
|
||||
|
||||
## HTML version stored at htmlcov/
|
||||
$ coverage html
|
||||
|
||||
More information about tests can be found at test/README.md
|
||||
|
|
86
alembic.ini
Normal file
86
alembic.ini
Normal file
|
@ -0,0 +1,86 @@
|
|||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations
|
||||
|
||||
# template used to generate migration files
|
||||
# file_template = %%(rev)s_%%(slug)s
|
||||
|
||||
# timezone to use when rendering the date
|
||||
# within the migration file as well as the filename.
|
||||
# string value is passed to dateutil.tz.gettz()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; this defaults
|
||||
# to alembic/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path
|
||||
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
# the database URL is generated in env.py
|
||||
# sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks=black
|
||||
# black.type=console_scripts
|
||||
# black.entrypoint=black
|
||||
# black.options=-l 79
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
1
aurweb/archives/__init__.py
Normal file
1
aurweb/archives/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
# aurweb.archives
|
1
aurweb/archives/spec/__init__.py
Normal file
1
aurweb/archives/spec/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
# aurweb.archives.spec
|
77
aurweb/archives/spec/base.py
Normal file
77
aurweb/archives/spec/base.py
Normal file
|
@ -0,0 +1,77 @@
|
|||
from pathlib import Path
|
||||
from typing import Any, Dict, Iterable, List, Set
|
||||
|
||||
|
||||
class GitInfo:
    """Information about a Git repository."""

    # Path to the Git repository; normalized to a pathlib.Path in __init__.
    path: Path

    # Local Git repository configuration (setting name -> value).
    config: Dict[str, Any]

    def __init__(self, path: str, config: Dict[str, Any] = None) -> None:
        """
        :param path: Path to a Git repository; stored as a `pathlib.Path`
        :param config: Optional local Git configuration mapping; defaults
                       to a fresh empty dict
        """
        self.path = Path(path)
        # The upstream signature used `config: Dict[str, Any] = dict()`,
        # a mutable default shared by every call that omits `config`.
        # Use a None sentinel so each instance gets its own dict.
        self.config = config if config is not None else {}
|
||||
|
||||
|
||||
class SpecOutput:
    """Class used for git_archive.py output details."""

    # Filename relative to the Git repository root.
    # NOTE(review): annotated as Path, but __init__ stores the argument
    # unconverted, so at runtime this is whatever the caller passes
    # (typically str) — confirm before relying on Path methods.
    filename: Path

    # Git repository information for the repo this output belongs to.
    git_info: GitInfo

    # Bytes bound for `SpecOutput.filename`.
    data: bytes

    def __init__(self, filename: str, git_info: GitInfo, data: bytes) -> None:
        """
        :param filename: Filename relative to the Git repository root
        :param git_info: GitInfo instance describing the target repository
        :param data: Binary data bound for `filename`
        """
        self.filename = filename
        self.git_info = git_info
        self.data = data
|
||||
|
||||
|
||||
class SpecBase:
    """
    Base for Spec classes defined in git_archive.py --spec modules.

    All supported --spec modules must contain the following classes:
        - Spec(SpecBase)
    """

    # A list of SpecOutputs, each of which contains output file data.
    # NOTE(review): these class-level mutable defaults are shared by any
    # instance that does not run SpecBase.__init__(); they are kept only
    # for backward compatibility with subclasses that define their own
    # __init__ without calling super().__init__().
    outputs: List[SpecOutput] = list()

    # A set of repository paths to commit changes to.
    repos: Set[str] = set()

    def __init__(self) -> None:
        # Give each instance its own containers so accumulated outputs
        # and repos never leak between Spec instances.
        self.outputs = []
        self.repos = set()

    def generate(self) -> Iterable[SpecOutput]:
        """
        "Pure virtual" output generator.

        `SpecBase.outputs` and `SpecBase.repos` should be populated within an
        overridden version of this function in SpecBase derivatives.

        :raises NotImplementedError: Always; derivatives must override.
        """
        raise NotImplementedError()

    def add_output(self, filename: str, git_info: GitInfo, data: bytes) -> None:
        """
        Add a SpecOutput instance to the set of outputs.

        Also records `git_info.path` in `self.repos` so the caller knows
        which repositories need a commit.

        :param filename: Filename relative to the git repository root
        :param git_info: GitInfo instance
        :param data: Binary data bound for `filename`
        """
        if git_info.path not in self.repos:
            self.repos.add(git_info.path)

        self.outputs.append(
            SpecOutput(
                filename,
                git_info,
                data,
            )
        )
|
85
aurweb/archives/spec/metadata.py
Normal file
85
aurweb/archives/spec/metadata.py
Normal file
|
@ -0,0 +1,85 @@
|
|||
from typing import Iterable
|
||||
|
||||
import orjson
|
||||
|
||||
from aurweb import config, db
|
||||
from aurweb.models import Package, PackageBase, User
|
||||
from aurweb.rpc import RPC
|
||||
|
||||
from .base import GitInfo, SpecBase, SpecOutput
|
||||
|
||||
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
|
||||
|
||||
|
||||
class Spec(SpecBase):
    """--spec module producing RPC-style package metadata archives.

    Generates two JSON outputs bound for the metadata Git repository:
    `pkgname.json` (per-package info) and `pkgbase.json` (per-pkgbase info).
    """

    def __init__(self) -> None:
        # Target repository for both metadata outputs, taken from the
        # [git-archive] metadata-repo configuration option.
        self.metadata_repo = GitInfo(
            config.get("git-archive", "metadata-repo"),
        )

    def generate(self) -> Iterable[SpecOutput]:
        """Build pkgname.json and pkgbase.json from the package database.

        :return: The accumulated `self.outputs` list of SpecOutputs
        """
        # Base query used by the RPC.
        base_query = (
            db.query(Package)
            .join(PackageBase)
            .join(User, PackageBase.MaintainerUID == User.ID, isouter=True)
        )

        # Create an instance of RPC, use it to get entities from
        # our query and perform a metadata subquery for all packages.
        rpc = RPC(version=5, type="info")
        print("performing package database query")
        packages = rpc.entities(base_query).all()
        print("performing package database subqueries")
        rpc.subquery({pkg.ID for pkg in packages})

        pkgbases, pkgnames = dict(), dict()
        for package in packages:
            # Produce RPC type=info data for `package`.
            data = rpc.get_info_json_data(package)

            # Split pkgbase-level fields out of the per-package data;
            # `pop` removes them from the pkgname.json record.
            pkgbase_name = data.get("PackageBase")
            pkgbase_data = {
                "ID": data.pop("PackageBaseID"),
                "URLPath": data.pop("URLPath"),
                "FirstSubmitted": data.pop("FirstSubmitted"),
                "LastModified": data.pop("LastModified"),
                "OutOfDate": data.pop("OutOfDate"),
                "Maintainer": data.pop("Maintainer"),
                "Keywords": data.pop("Keywords"),
                "NumVotes": data.pop("NumVotes"),
                "Popularity": data.pop("Popularity"),
                "PopularityUpdated": package.PopularityUpdated.timestamp(),
            }

            # Store the data in `pkgbases` dict. We do this so we only
            # end up processing a single `pkgbase` if repeated after
            # this loop.
            pkgbases[pkgbase_name] = pkgbase_data

            # Disabled: removing Popularity and NumVotes from package data.
            # These fields change quite often which causes git data
            # modification to explode, but the pops are currently
            # commented out, so both fields remain in pkgname.json.
            # data.pop("NumVotes")
            # data.pop("Popularity")

            # Remove the ID key from package json.
            data.pop("ID")

            # Add the `package`.Name entry to the pkgnames mapping.
            name = data.get("Name")
            pkgnames[name] = data

        # Add metadata outputs, serialized with sorted keys and 2-space
        # indentation for stable, diff-friendly git archives.
        self.add_output(
            "pkgname.json",
            self.metadata_repo,
            orjson.dumps(pkgnames, option=ORJSON_OPTS),
        )
        self.add_output(
            "pkgbase.json",
            self.metadata_repo,
            orjson.dumps(pkgbases, option=ORJSON_OPTS),
        )

        return self.outputs
|
26
aurweb/archives/spec/pkgbases.py
Normal file
26
aurweb/archives/spec/pkgbases.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
from typing import Iterable
|
||||
|
||||
import orjson
|
||||
|
||||
from aurweb import config, db
|
||||
from aurweb.models import PackageBase
|
||||
|
||||
from .base import GitInfo, SpecBase, SpecOutput
|
||||
|
||||
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
|
||||
|
||||
|
||||
class Spec(SpecBase):
    """Git-archive spec producing pkgbase.json: a sorted list of all
    PackageBase names."""

    def __init__(self) -> None:
        # Fixed: __init__ was annotated `-> "Spec"`; an initializer
        # always returns None.
        self.pkgbases_repo = GitInfo(config.get("git-archive", "pkgbases-repo"))

    def generate(self) -> Iterable[SpecOutput]:
        """Queue a pkgbase.json output containing every PackageBase name,
        sorted ascending.

        :return: This spec's queued outputs
        """
        query = db.query(PackageBase.Name).order_by(PackageBase.Name.asc()).all()
        pkgbases = [pkgbase.Name for pkgbase in query]

        self.add_output(
            "pkgbase.json",
            self.pkgbases_repo,
            orjson.dumps(pkgbases, option=ORJSON_OPTS),
        )
        return self.outputs
|
31
aurweb/archives/spec/pkgnames.py
Normal file
31
aurweb/archives/spec/pkgnames.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
from typing import Iterable
|
||||
|
||||
import orjson
|
||||
|
||||
from aurweb import config, db
|
||||
from aurweb.models import Package, PackageBase
|
||||
|
||||
from .base import GitInfo, SpecBase, SpecOutput
|
||||
|
||||
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
|
||||
|
||||
|
||||
class Spec(SpecBase):
    """Git-archive spec producing pkgname.json: a sorted list of all
    Package names."""

    def __init__(self) -> None:
        # Fixed: __init__ was annotated `-> "Spec"`; an initializer
        # always returns None.
        self.pkgnames_repo = GitInfo(config.get("git-archive", "pkgnames-repo"))

    def generate(self) -> Iterable[SpecOutput]:
        """Queue a pkgname.json output containing every Package name,
        sorted ascending.

        :return: This spec's queued outputs
        """
        query = (
            db.query(Package.Name)
            .join(PackageBase, PackageBase.ID == Package.PackageBaseID)
            .order_by(Package.Name.asc())
            .all()
        )
        pkgnames = [pkg.Name for pkg in query]

        self.add_output(
            "pkgname.json",
            self.pkgnames_repo,
            orjson.dumps(pkgnames, option=ORJSON_OPTS),
        )
        return self.outputs
|
26
aurweb/archives/spec/users.py
Normal file
26
aurweb/archives/spec/users.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
from typing import Iterable
|
||||
|
||||
import orjson
|
||||
|
||||
from aurweb import config, db
|
||||
from aurweb.models import User
|
||||
|
||||
from .base import GitInfo, SpecBase, SpecOutput
|
||||
|
||||
ORJSON_OPTS = orjson.OPT_SORT_KEYS | orjson.OPT_INDENT_2
|
||||
|
||||
|
||||
class Spec(SpecBase):
    """Git-archive spec producing users.json: a sorted list of all
    usernames."""

    def __init__(self) -> None:
        # Fixed: __init__ was annotated `-> "Spec"`; an initializer
        # always returns None.
        self.users_repo = GitInfo(config.get("git-archive", "users-repo"))

    def generate(self) -> Iterable[SpecOutput]:
        """Queue a users.json output containing every username, sorted
        ascending.

        :return: This spec's queued outputs
        """
        query = db.query(User.Username).order_by(User.Username.asc()).all()
        users = [user.Username for user in query]

        self.add_output(
            "users.json",
            self.users_repo,
            orjson.dumps(users, option=ORJSON_OPTS),
        )
        return self.outputs
|
339
aurweb/asgi.py
Normal file
339
aurweb/asgi.py
Normal file
|
@ -0,0 +1,339 @@
|
|||
import hashlib
|
||||
import http
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
import typing
|
||||
from contextlib import asynccontextmanager
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
import requests
|
||||
from fastapi import FastAPI, HTTPException, Request, Response
|
||||
from fastapi.responses import RedirectResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from jinja2 import TemplateNotFound
|
||||
from opentelemetry import trace
|
||||
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
|
||||
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
|
||||
from opentelemetry.sdk.resources import Resource
|
||||
from opentelemetry.sdk.trace import TracerProvider
|
||||
from opentelemetry.sdk.trace.export import BatchSpanProcessor
|
||||
from sqlalchemy import and_
|
||||
from starlette.exceptions import HTTPException as StarletteHTTPException
|
||||
from starlette.middleware.authentication import AuthenticationMiddleware
|
||||
from starlette.middleware.sessions import SessionMiddleware
|
||||
|
||||
import aurweb.captcha # noqa: F401
|
||||
import aurweb.config
|
||||
import aurweb.filters # noqa: F401
|
||||
from aurweb import aur_logging, prometheus, util
|
||||
from aurweb.aur_redis import redis_connection
|
||||
from aurweb.auth import BasicAuthBackend
|
||||
from aurweb.db import get_engine, query
|
||||
from aurweb.models import AcceptedTerm, Term
|
||||
from aurweb.packages.util import get_pkg_or_base
|
||||
from aurweb.prometheus import instrumentator
|
||||
from aurweb.routers import APP_ROUTES
|
||||
from aurweb.templates import make_context, render_template
|
||||
|
||||
logger = aur_logging.get_logger(__name__)
# Secret used to sign session cookies; validated non-empty in app_startup().
session_secret = aurweb.config.get("fastapi", "session_secret")
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    # FastAPI lifespan hook: run one-time startup work (recursion limit,
    # config validation, static mount, routers, DB engine) before the app
    # serves requests. No shutdown work is needed, so nothing follows the
    # yield.
    await app_startup()
    yield
|
||||
|
||||
|
||||
# Setup the FastAPI app.
app = FastAPI(lifespan=lifespan)


# Instrument routes with the prometheus-fastapi-instrumentator
# library with custom collectors and expose /metrics.
instrumentator().add(prometheus.http_api_requests_total())
instrumentator().add(prometheus.http_requests_total())
instrumentator().instrument(app)


# Instrument FastAPI for tracing
FastAPIInstrumentor.instrument_app(app)

# OpenTelemetry setup: batch spans and export them over OTLP/HTTP to the
# collector endpoint named in the [tracing] configuration section.
resource = Resource(attributes={"service.name": "aurweb"})
otlp_endpoint = aurweb.config.get("tracing", "otlp_endpoint")
otlp_exporter = OTLPSpanExporter(endpoint=otlp_endpoint)
span_processor = BatchSpanProcessor(otlp_exporter)
trace.set_tracer_provider(TracerProvider(resource=resource))
trace.get_tracer_provider().add_span_processor(span_processor)
|
||||
|
||||
|
||||
async def app_startup():
    """One-time application startup: raise the recursion limit, validate
    configuration, mount static files, register routers and initialize
    the database engine."""
    # https://stackoverflow.com/questions/67054759/about-the-maximum-recursion-error-in-fastapi
    # Test failures have been observed by internal starlette code when
    # using starlette.testclient.TestClient. Looking around in regards
    # to the recursion error has really not recommended a course of action
    # other than increasing the recursion limit. For now, that is how
    # we handle the issue: an optional TEST_RECURSION_LIMIT env var
    # provided by the user. Docker uses .env's TEST_RECURSION_LIMIT
    # when running test suites.
    # TODO: Find a proper fix to this issue.
    recursion_limit = int(
        os.environ.get("TEST_RECURSION_LIMIT", sys.getrecursionlimit() + 1000)
    )
    sys.setrecursionlimit(recursion_limit)

    # Refuse to start on a database backend we have no driver for.
    backend = aurweb.config.get("database", "backend")
    if backend not in aurweb.db.DRIVERS:
        raise ValueError(
            f"The configured database backend ({backend}) is unsupported. "
            f"Supported backends: {str(aurweb.db.DRIVERS.keys())}"
        )

    if not session_secret:
        raise Exception("[fastapi] session_secret must not be empty")

    # Warn (but continue) when prometheus' multiprocess directory is
    # unset; /metrics cannot serve data without it.
    if not os.environ.get("PROMETHEUS_MULTIPROC_DIR", None):
        logger.warning(
            "$PROMETHEUS_MULTIPROC_DIR is not set, the /metrics "
            "endpoint is disabled."
        )

    app.mount("/static", StaticFiles(directory="static"), name="static_files")

    # Add application routes.
    def add_router(module):
        app.include_router(module.router)

    util.apply_all(APP_ROUTES, add_router)

    # Initialize the database engine and ORM.
    get_engine()
|
||||
|
||||
|
||||
async def internal_server_error(request: Request, exc: Exception) -> Response:
    """
    Catch all uncaught Exceptions thrown in a route.

    Renders the traceback into the 500 page, stores a deduplication key
    in redis, and (when configured) files a confidential GitLab issue
    for previously unseen tracebacks.

    :param request: FastAPI Request
    :return: Rendered 500.html template with status_code 500
    """
    repo = aurweb.config.get("notifications", "gitlab-instance")
    project = aurweb.config.get("notifications", "error-project")
    token = aurweb.config.get("notifications", "error-token")

    context = make_context(request, "Internal Server Error")

    # Print out the exception via `traceback` and store the value
    # into the `traceback` context variable.
    tb_io = io.StringIO()
    traceback.print_exc(file=tb_io)
    tb = tb_io.getvalue()
    context["traceback"] = tb

    # Produce a SHA1 hash of the traceback string.
    tb_hash = hashlib.sha1(tb.encode()).hexdigest()
    tb_id = tb_hash[:7]

    # The redis key doubles as a deduplication marker: only the first
    # occurrence of a given traceback triggers a report below.
    redis = redis_connection()
    key = f"tb:{tb_hash}"
    retval = redis.get(key)
    if not retval:
        # Expire in one day; this is just done to make sure we
        # don't infinitely store these values, but reduce the number
        # of automated reports (notification below). At this time of
        # writing, unexpected exceptions are not common, thus this
        # will not produce a large memory footprint in redis.
        pipe = redis.pipeline()
        pipe.set(key, tb)
        pipe.expire(key, 86400)  # One day.
        pipe.execute()

        # Send out notification about it.
        if "set-me" not in (project, token):
            proj = quote_plus(project)
            endp = f"{repo}/api/v4/projects/{proj}/issues"

            base = f"{request.url.scheme}://{request.url.netloc}"
            title = f"Traceback [{tb_id}]: {base}{request.url.path}"
            desc = [
                "DISCLAIMER",
                "----------",
                "**This issue is confidential** and should be sanitized "
                "before sharing with users or developers. Please ensure "
                "you've completed the following tasks:",
                "- [ ] I have removed any sensitive data and "
                "the description history.",
                "",
                "Exception Details",
                "-----------------",
                f"- Route: `{request.url.path}`",
                f"- User: `{request.user.Username}`",
                f"- Email: `{request.user.Email}`",
            ]

            # Add method-specific information to the description.
            if request.method.lower() == "get":
                # get
                if request.url.query:
                    desc = desc + [f"- Query: `{request.url.query}`"]
                desc += ["", f"```{tb}```"]
            else:
                # post
                form_data = str(dict(request.state.form_data))
                desc = desc + [f"- Data: `{form_data}`"] + ["", f"```{tb}```"]

            headers = {"Authorization": f"Bearer {token}"}
            data = {
                "title": title,
                "description": "\n".join(desc),
                "labels": ["triage"],
                "confidential": True,
            }
            logger.info(endp)
            resp = requests.post(endp, json=data, headers=headers)
            if resp.status_code != http.HTTPStatus.CREATED:
                logger.error(f"Unable to report exception to {repo}: {resp.text}")
        else:
            # Reporting is unconfigured ("set-me" placeholder still present).
            logger.warning(
                "Unable to report an exception found due to "
                "unset notifications.error-{{project,token}}"
            )

        # Log details about the exception traceback.
        logger.error(f"FATAL[{tb_id}]: An unexpected exception has occurred.")
        logger.error(tb)
    else:
        retval = retval.decode()

    return render_template(
        request,
        "errors/500.html",
        context,
        status_code=http.HTTPStatus.INTERNAL_SERVER_ERROR,
    )
|
||||
|
||||
|
||||
@app.exception_handler(StarletteHTTPException)
async def http_exception_handler(request: Request, exc: HTTPException) -> Response:
    """Handle an HTTPException thrown in a route."""
    phrase = http.HTTPStatus(exc.status_code).phrase
    context = make_context(request, phrase)
    context["exc"] = exc
    context["phrase"] = phrase

    # Additional context for some exceptions.
    if exc.status_code == http.HTTPStatus.NOT_FOUND:
        # A 404 on a single path component that looks like a package name
        # (optionally suffixed with .git) gets git-clone hints on the page.
        tokens = request.url.path.split("/")
        matches = re.match("^([a-z0-9][a-z0-9.+_-]*?)(\\.git)?$", tokens[1])
        if matches and len(tokens) == 2:
            try:
                pkgbase = get_pkg_or_base(matches.group(1))
                context["pkgbase"] = pkgbase
                context["git_clone_uri_anon"] = aurweb.config.get(
                    "options", "git_clone_uri_anon"
                )
                context["git_clone_uri_priv"] = aurweb.config.get(
                    "options", "git_clone_uri_priv"
                )
            except HTTPException:
                # Not a known package/pkgbase; fall through to a plain 404.
                pass

    try:
        # Prefer a status-specific template (e.g. errors/404.html).
        return render_template(
            request, f"errors/{exc.status_code}.html", context, exc.status_code
        )
    except TemplateNotFound:
        return render_template(request, "errors/detail.html", context, exc.status_code)
|
||||
|
||||
|
||||
@app.middleware("http")
async def add_security_headers(request: Request, call_next: typing.Callable):
    """This middleware adds the CSP, XCTO, XFO and RP security
    headers to the HTTP response associated with request.

    CSP: Content-Security-Policy
    XCTO: X-Content-Type-Options
    RP: Referrer-Policy
    XFO: X-Frame-Options
    """
    try:
        response = await util.error_or_result(call_next, request)
    except Exception as exc:
        # Any exception escaping the route chain becomes a rendered 500.
        return await internal_server_error(request, exc)

    # Add CSP header.
    nonce = request.user.nonce
    csp = "default-src 'self'; "

    # swagger-ui needs access to cdn.jsdelivr.net javascript
    script_hosts = ["cdn.jsdelivr.net"]
    csp += f"script-src 'self' 'unsafe-inline' 'nonce-{nonce}' " + " ".join(
        script_hosts
    )

    # swagger-ui needs access to cdn.jsdelivr.net css
    css_hosts = ["cdn.jsdelivr.net"]
    csp += "; style-src 'self' 'unsafe-inline' " + " ".join(css_hosts)
    response.headers["Content-Security-Policy"] = csp

    # Add XCTO header.
    xcto = "nosniff"
    response.headers["X-Content-Type-Options"] = xcto

    # Add Referrer Policy header.
    rp = "same-origin"
    response.headers["Referrer-Policy"] = rp

    # Add X-Frame-Options header.
    xfo = "SAMEORIGIN"
    response.headers["X-Frame-Options"] = xfo

    return response
|
||||
|
||||
|
||||
@app.middleware("http")
async def check_terms_of_service(request: Request, call_next: typing.Callable):
    """Redirect authenticated users to /tos while any Terms remain
    that they have not yet accepted at the current revision."""
    user = request.user
    if not user.is_authenticated() or request.url.path == "/tos":
        # Guests, and the /tos page itself, pass straight through.
        return await util.error_or_result(call_next, request)

    # Terms this user has accepted at (or beyond) the current revision.
    accepted = (
        query(Term)
        .join(AcceptedTerm)
        .filter(
            and_(
                AcceptedTerm.UsersID == user.ID,
                AcceptedTerm.TermsID == Term.ID,
                AcceptedTerm.Revision >= Term.Revision,
            ),
        )
    )
    outstanding = query(Term).count() - accepted.count()
    if outstanding > 0:
        return RedirectResponse("/tos", status_code=int(http.HTTPStatus.SEE_OTHER))

    return await util.error_or_result(call_next, request)
|
||||
|
||||
|
||||
@app.middleware("http")
async def id_redirect_middleware(request: Request, call_next: typing.Callable):
    """Rewrite legacy `?id=<value>` query parameters into path-style
    URLs, preserving every other query parameter."""
    id = request.query_params.get("id")

    if id is not None:
        # Rebuild the query string without the `id` parameter.
        params = [
            f"{key}={quote_plus(str(value))}"
            for key, value in request.query_params.items()
            if key != "id"
        ]
        qs = "?" + "&".join(params) if params else str()

        path = request.url.path.rstrip("/")
        return RedirectResponse(f"{path}/{id}{qs}")

    return await util.error_or_result(call_next, request)
|
||||
|
||||
|
||||
# Add application middlewares.
# Session cookies are established by SessionMiddleware and consumed by
# BasicAuthBackend to resolve request.user.
app.add_middleware(AuthenticationMiddleware, backend=BasicAuthBackend())
app.add_middleware(SessionMiddleware, secret_key=session_secret)
|
26
aurweb/aur_logging.py
Normal file
26
aurweb/aur_logging.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
import logging
|
||||
import logging.config
|
||||
import os
|
||||
|
||||
import aurweb.config
|
||||
|
||||
# For testing, users should set LOG_CONFIG=logging.test.conf
# We test against various debug log output.
aurwebdir = aurweb.config.get("options", "aurwebdir")
log_config = os.environ.get("LOG_CONFIG", "logging.conf")
config_path = os.path.join(aurwebdir, log_config)

# Apply the file-based configuration once at import time; keep loggers
# created before this point alive instead of disabling them.
logging.config.fileConfig(config_path, disable_existing_loggers=False)
logging.getLogger("root").addHandler(logging.NullHandler())
|
||||
|
||||
|
||||
def get_logger(name: str) -> logging.Logger:
    """Return the logging.Logger registered under *name*.

    Thin wrapper around logging.getLogger; importing this function
    guarantees the logging.conf initialization above has already run
    wherever loggers are used.

    :param name: Logger name; typically `__name__`
    :returns: name's logging.Logger
    """
    return logging.getLogger(name)
|
58
aurweb/aur_redis.py
Normal file
58
aurweb/aur_redis.py
Normal file
|
@ -0,0 +1,58 @@
|
|||
import fakeredis
|
||||
from opentelemetry.instrumentation.redis import RedisInstrumentor
|
||||
from redis import ConnectionPool, Redis
|
||||
|
||||
import aurweb.config
|
||||
from aurweb import aur_logging
|
||||
|
||||
logger = aur_logging.get_logger(__name__)
# Module-level connection pool; lazily created by redis_connection().
pool = None

# Trace redis commands via OpenTelemetry.
RedisInstrumentor().instrument()
|
||||
|
||||
|
||||
class FakeConnectionPool:
    """Mimics a redis ConnectionPool around a single fakeredis handle.

    We normally deal with Redis by keeping its ConnectionPool globally
    referenced so we can persist connection state through different calls
    to redis_connection(), and since FakeRedis does not offer a ConnectionPool,
    we craft one up here to hang onto the same handle instance as long as the
    same instance is alive; this allows us to use a similar flow from the
    redis_connection() user's perspective.
    """

    def __init__(self):
        # One shared in-memory handle for the pool's lifetime.
        self.handle = fakeredis.FakeStrictRedis()

    def disconnect(self):
        # Nothing to tear down for an in-memory fake.
        pass
|
||||
|
||||
|
||||
def redis_connection():  # pragma: no cover
    """Return a Redis handle backed by the module-level pool.

    When options.cache is not "redis", a shared fakeredis handle is
    returned instead, so callers work without a real Redis server.
    """
    global pool

    disabled = aurweb.config.get("options", "cache") != "redis"

    # If we haven't initialized redis yet, construct a pool.
    if disabled:
        if pool is None:
            logger.debug("Initializing fake Redis instance.")
            pool = FakeConnectionPool()
        return pool.handle
    else:
        if pool is None:
            logger.debug("Initializing real Redis instance.")
            redis_addr = aurweb.config.get("options", "redis_address")
            pool = ConnectionPool.from_url(redis_addr)

        # Create a connection to the pool.
        return Redis(connection_pool=pool)
|
||||
|
||||
|
||||
def kill_redis():
    """Disconnect and discard the module-level Redis connection pool."""
    global pool
    if not pool:
        return
    pool.disconnect()
    pool = None
|
227
aurweb/auth/__init__.py
Normal file
227
aurweb/auth/__init__.py
Normal file
|
@ -0,0 +1,227 @@
|
|||
import functools
|
||||
from http import HTTPStatus
|
||||
from typing import Callable
|
||||
|
||||
import fastapi
|
||||
from fastapi import HTTPException
|
||||
from fastapi.responses import RedirectResponse
|
||||
from starlette.authentication import AuthCredentials, AuthenticationBackend
|
||||
from starlette.requests import HTTPConnection
|
||||
|
||||
import aurweb.config
|
||||
from aurweb import db, filters, l10n, time, util
|
||||
from aurweb.models import Session, User
|
||||
from aurweb.models.account_type import ACCOUNT_TYPE_ID
|
||||
|
||||
|
||||
class StubQuery:
    """Stand-in for an orm.Query, used to fake empty result sets for
    an AnonymousUser."""

    def filter(self, *args):
        # Chaining always yields another empty stub.
        return type(self)()

    def scalar(self):
        # An empty result set aggregates to zero.
        return 0
|
||||
|
||||
|
||||
class AnonymousUser:
    """A stubbed User class used when an unauthenticated User
    makes a request against FastAPI."""

    # Stub attributes used to mimic a real user.
    ID = 0
    Username = "N/A"
    Email = "N/A"

    class AccountType:
        """A stubbed AccountType static class. In here, we use an ID
        and AccountType which do not exist in our constant records.
        All records primary keys (AccountType.ID) should be non-zero,
        so using a zero here means that we'll never match against a
        real AccountType."""

        ID = 0
        AccountType = "Anonymous"

    # AccountTypeID == AccountType.ID; assign a stubbed column.
    AccountTypeID = AccountType.ID

    # Fall back to the site-wide defaults for localization settings.
    LangPreference = aurweb.config.get("options", "default_lang")
    Timezone = aurweb.config.get("options", "default_timezone")

    # Stubbed suspension/inactivity columns.
    Suspended = 0
    InactivityTS = 0

    # A stub ssh_pub_key relationship.
    ssh_pub_key = None

    # Add stubbed relationship backrefs.
    notifications = StubQuery()
    package_votes = StubQuery()

    # A nonce attribute, needed for all browser sessions; set in __init__.
    nonce = None

    def __init__(self):
        self.nonce = util.make_nonce()

    @staticmethod
    def is_authenticated():
        # An anonymous user is, by definition, never authenticated.
        return False

    @staticmethod
    def is_package_maintainer():
        return False

    @staticmethod
    def is_developer():
        return False

    @staticmethod
    def is_elevated():
        return False

    @staticmethod
    def has_credential(credential, **kwargs):
        # An anonymous user holds no credentials.
        return False

    @staticmethod
    def voted_for(package):
        return False

    @staticmethod
    def notified(package):
        return False
|
||||
|
||||
|
||||
class BasicAuthBackend(AuthenticationBackend):
    """Starlette authentication backend resolving the AURSID session
    cookie to a User; falls back to AnonymousUser."""

    @db.async_retry_deadlock
    async def authenticate(self, conn: HTTPConnection):
        # Tuple returned for any request we cannot (or should not)
        # authenticate: no credentials, anonymous user.
        unauthenticated = (None, AnonymousUser())
        sid = conn.cookies.get("AURSID")
        if not sid:
            return unauthenticated

        # "Remember me" sessions get the longer persistent timeout.
        timeout = aurweb.config.getint("options", "login_timeout")
        remembered = conn.cookies.get("AURREMEMBER") == "True"
        if remembered:
            timeout = aurweb.config.getint("options", "persistent_cookie_timeout")

        # If no session with sid and a LastUpdateTS now or later exists.
        now_ts = time.utcnow()
        record = db.query(Session).filter(Session.SessionID == sid).first()
        if not record:
            return unauthenticated
        elif record.LastUpdateTS < (now_ts - timeout):
            # Session expired: purge the stale record and treat the
            # request as unauthenticated.
            with db.begin():
                db.delete_all([record])
            return unauthenticated

        # At this point, we cannot have an invalid user if the record
        # exists, due to ForeignKey constraints in the schema upheld
        # by mysqlclient.
        user = db.query(User).filter(User.ID == record.UsersID).first()
        user.nonce = util.make_nonce()
        user.authenticated = True

        return AuthCredentials(["authenticated"]), user
|
||||
|
||||
|
||||
def _auth_required(auth_goal: bool = True):
    """
    Enforce a user's authentication status, bringing them to the login page
    or homepage if their authentication status does not match the goal.

    NOTE: This function should not need to be used in downstream code.
    See `requires_auth` and `requires_guest` for decorators meant to be
    used on routes (they're a bit more implicitly understandable).

    :param auth_goal: Whether authentication is required or entirely disallowed
                      for a user to perform this request.
    :return: Return the FastAPI function this decorator wraps.
    """

    def decorator(func):
        @functools.wraps(func)
        async def wrapper(request, *args, **kwargs):
            # Status matches the goal: run the wrapped route untouched.
            if request.user.is_authenticated() == auth_goal:
                return await func(request, *args, **kwargs)

            url = "/"
            if auth_goal is False:
                # Authenticated user hit a guest-only route: homepage.
                return RedirectResponse(url, status_code=int(HTTPStatus.SEE_OTHER))

            # Use the request path when the user can visit a page directly but
            # is not authenticated and use the Referer header if visiting the
            # page itself is not directly possible (e.g. submitting a form).
            if request.method in ("GET", "HEAD"):
                url = request.url.path
            elif referer := request.headers.get("Referer"):
                # Refuse redirect targets outside this AUR instance.
                aur = aurweb.config.get("options", "aur_location") + "/"
                if not referer.startswith(aur):
                    _ = l10n.get_translator_for_request(request)
                    raise HTTPException(
                        status_code=HTTPStatus.BAD_REQUEST,
                        detail=_("Bad Referer header."),
                    )
                url = referer[len(aur) - 1 :]
            url = "/login?" + filters.urlencode({"next": url})
            return RedirectResponse(url, status_code=int(HTTPStatus.SEE_OTHER))

        return wrapper

    return decorator
|
||||
|
||||
|
||||
def requires_auth(func: Callable) -> Callable:
    """Require an authenticated session for a particular route."""

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        guarded = _auth_required(True)(func)
        return await guarded(*args, **kwargs)

    return wrapper
|
||||
|
||||
|
||||
def requires_guest(func: Callable) -> Callable:
    """Require a guest (unauthenticated) session for a particular route."""

    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        guarded = _auth_required(False)(func)
        return await guarded(*args, **kwargs)

    return wrapper
|
||||
|
||||
|
||||
def account_type_required(one_of: set):
    """A decorator that can be used on FastAPI routes to dictate
    that a user belongs to one of the types defined in one_of.

    This decorator should be run after an @auth_required(True) is
    dictated.

    - Example code:

    @router.get('/some_route')
    @auth_required(True)
    @account_type_required({"Package Maintainer", "Package Maintainer & Developer"})
    async def some_route(request: fastapi.Request):
        return Response()

    :param one_of: A set of account type strings (or integer account
                   type IDs) to match against AccountType.
    :return: Return the FastAPI function this decorator wraps.
    """
    # Convert any account type string constants to their integer IDs,
    # passing integer IDs through unchanged. (Previously, non-string
    # entries were silently dropped by the conversion.)
    one_of = {
        ACCOUNT_TYPE_ID[atype] if isinstance(atype, str) else atype
        for atype in one_of
    }

    def decorator(func):
        @functools.wraps(func)
        async def wrapper(request: fastapi.Request, *args, **kwargs):
            # Users outside the allowed set are bounced to the homepage.
            if request.user.AccountTypeID not in one_of:
                return RedirectResponse("/", status_code=int(HTTPStatus.SEE_OTHER))
            return await func(request, *args, **kwargs)

        return wrapper

    return decorator
|
82
aurweb/auth/creds.py
Normal file
82
aurweb/auth/creds.py
Normal file
|
@ -0,0 +1,82 @@
|
|||
from aurweb.models.account_type import (
|
||||
DEVELOPER_ID,
|
||||
PACKAGE_MAINTAINER_AND_DEV_ID,
|
||||
PACKAGE_MAINTAINER_ID,
|
||||
USER_ID,
|
||||
)
|
||||
from aurweb.models.user import User
|
||||
|
||||
# Credential constants: each integer identifies a distinct privileged
# action, mapped to permitted account types in cred_filters below.

# Account management credentials.
ACCOUNT_CHANGE_TYPE = 1
ACCOUNT_EDIT = 2
ACCOUNT_EDIT_DEV = 3
ACCOUNT_LAST_LOGIN = 4
ACCOUNT_SEARCH = 5
ACCOUNT_LIST_COMMENTS = 28
# Comment moderation credentials.
COMMENT_DELETE = 6
COMMENT_UNDELETE = 27
COMMENT_VIEW_DELETED = 22
COMMENT_EDIT = 25
COMMENT_PIN = 26
# Package base credentials.
PKGBASE_ADOPT = 7
PKGBASE_SET_KEYWORDS = 8
PKGBASE_DELETE = 9
PKGBASE_DISOWN = 10
PKGBASE_EDIT_COMAINTAINERS = 24
PKGBASE_FLAG = 11
PKGBASE_LIST_VOTERS = 12
PKGBASE_NOTIFY = 13
PKGBASE_UNFLAG = 15
PKGBASE_VOTE = 16
# Package request credentials.
PKGREQ_FILE = 23
PKGREQ_CLOSE = 17
PKGREQ_LIST = 18
# Package maintainer voting credentials.
PM_ADD_VOTE = 19
PM_LIST_VOTES = 20
PM_VOTE = 21
PKGBASE_MERGE = 29
|
||||
|
||||
# Account-type groupings used by cred_filters. Set literals replace the
# redundant set([...]) constructor calls (flake8-comprehensions C405).
user_developer_or_package_maintainer = {
    USER_ID,
    PACKAGE_MAINTAINER_ID,
    DEVELOPER_ID,
    PACKAGE_MAINTAINER_AND_DEV_ID,
}
package_maintainer_or_dev = {
    PACKAGE_MAINTAINER_ID,
    DEVELOPER_ID,
    PACKAGE_MAINTAINER_AND_DEV_ID,
}
developer = {DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID}
package_maintainer = {PACKAGE_MAINTAINER_ID, PACKAGE_MAINTAINER_AND_DEV_ID}
|
||||
|
||||
# Maps each credential to the set of account type IDs permitted to
# exercise it; consulted by has_credential() below.
cred_filters = {
    PKGBASE_FLAG: user_developer_or_package_maintainer,
    PKGBASE_NOTIFY: user_developer_or_package_maintainer,
    PKGBASE_VOTE: user_developer_or_package_maintainer,
    PKGREQ_FILE: user_developer_or_package_maintainer,
    ACCOUNT_CHANGE_TYPE: package_maintainer_or_dev,
    ACCOUNT_EDIT: package_maintainer_or_dev,
    ACCOUNT_LAST_LOGIN: package_maintainer_or_dev,
    ACCOUNT_LIST_COMMENTS: package_maintainer_or_dev,
    ACCOUNT_SEARCH: package_maintainer_or_dev,
    COMMENT_DELETE: package_maintainer_or_dev,
    COMMENT_UNDELETE: package_maintainer_or_dev,
    COMMENT_VIEW_DELETED: package_maintainer_or_dev,
    COMMENT_EDIT: package_maintainer_or_dev,
    COMMENT_PIN: package_maintainer_or_dev,
    PKGBASE_ADOPT: package_maintainer_or_dev,
    PKGBASE_SET_KEYWORDS: package_maintainer_or_dev,
    PKGBASE_DELETE: package_maintainer_or_dev,
    PKGBASE_EDIT_COMAINTAINERS: package_maintainer_or_dev,
    PKGBASE_DISOWN: package_maintainer_or_dev,
    PKGBASE_LIST_VOTERS: package_maintainer_or_dev,
    PKGBASE_UNFLAG: package_maintainer_or_dev,
    PKGREQ_CLOSE: package_maintainer_or_dev,
    PKGREQ_LIST: package_maintainer_or_dev,
    PM_ADD_VOTE: package_maintainer,
    PM_LIST_VOTES: package_maintainer_or_dev,
    PM_VOTE: package_maintainer,
    ACCOUNT_EDIT_DEV: developer,
    PKGBASE_MERGE: package_maintainer_or_dev,
}
|
||||
|
||||
|
||||
def has_credential(user: User, credential: int, approved: list = tuple()):
|
||||
if user in approved:
|
||||
return True
|
||||
return user.AccountTypeID in cred_filters[credential]
|
21
aurweb/benchmark.py
Normal file
21
aurweb/benchmark.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
from datetime import UTC, datetime
|
||||
|
||||
|
||||
class Benchmark:
|
||||
def __init__(self):
|
||||
self.start()
|
||||
|
||||
def _timestamp(self) -> float:
|
||||
"""Generate a timestamp."""
|
||||
return float(datetime.now(UTC).timestamp())
|
||||
|
||||
def start(self) -> int:
|
||||
"""Start a benchmark."""
|
||||
self.current = self._timestamp()
|
||||
return self.current
|
||||
|
||||
def end(self):
|
||||
"""Return the diff between now - start()."""
|
||||
n = self._timestamp() - self.current
|
||||
self.current = float(0)
|
||||
return n
|
64
aurweb/cache.py
Normal file
64
aurweb/cache.py
Normal file
|
@ -0,0 +1,64 @@
|
|||
import pickle
|
||||
from typing import Any, Callable
|
||||
|
||||
from sqlalchemy import orm
|
||||
|
||||
from aurweb import config
|
||||
from aurweb.aur_redis import redis_connection
|
||||
from aurweb.prometheus import SEARCH_REQUESTS
|
||||
|
||||
_redis = redis_connection()
|
||||
|
||||
|
||||
def lambda_cache(key: str, value: Callable[[], Any], expire: int = None) -> list:
    """Store and retrieve lambda results via redis cache.

    :param key: Redis key
    :param value: Lambda callable returning the value
    :param expire: Optional expiration in seconds
    :return: result of callable or cache
    """
    cached = _redis.get(key)
    if cached is None:
        # Cache miss: compute, store (optionally with a TTL) and return.
        fresh = value()
        _redis.set(key, pickle.dumps(fresh), ex=expire)
        return fresh
    return pickle.loads(cached)
|
||||
|
||||
|
||||
def db_count_cache(key: str, query: orm.Query, expire: int = None) -> int:
    """Store and retrieve a query.count() via redis cache.

    :param key: Redis key
    :param query: SQLAlchemy ORM query
    :param expire: Optional expiration in seconds
    :return: query.count()
    """
    cached = _redis.get(key)
    if cached is None:
        # Cache miss: run the count, store it, and optionally set a TTL.
        cached = int(query.count())
        _redis.set(key, cached)
        if expire:
            _redis.expire(key, expire)
    return int(cached)
|
||||
|
||||
|
||||
def db_query_cache(key: str, query: orm.Query, expire: int = None) -> list:
|
||||
"""Store and retrieve query results via redis cache.
|
||||
|
||||
:param key: Redis key
|
||||
:param query: SQLAlchemy ORM query
|
||||
:param expire: Optional expiration in seconds
|
||||
:return: query.all()
|
||||
"""
|
||||
result = _redis.get(key)
|
||||
if result is None:
|
||||
SEARCH_REQUESTS.labels(cache="miss").inc()
|
||||
if _redis.dbsize() > config.getint("cache", "max_search_entries", 50000):
|
||||
return query.all()
|
||||
_redis.set(key, (result := pickle.dumps(query.all())))
|
||||
if expire:
|
||||
_redis.expire(key, expire)
|
||||
else:
|
||||
SEARCH_REQUESTS.labels(cache="hit").inc()
|
||||
|
||||
return pickle.loads(result)
|
62
aurweb/captcha.py
Normal file
62
aurweb/captcha.py
Normal file
|
@ -0,0 +1,62 @@
|
|||
""" This module consists of aurweb's CAPTCHA utility functions and filters. """
|
||||
|
||||
import hashlib
|
||||
|
||||
from jinja2 import pass_context
|
||||
from sqlalchemy import func
|
||||
|
||||
from aurweb.db import query
|
||||
from aurweb.models import User
|
||||
from aurweb.templates import register_filter
|
||||
|
||||
|
||||
def get_captcha_salts():
|
||||
"""Produce salts based on the current user count."""
|
||||
count = query(func.count(User.ID)).scalar()
|
||||
|
||||
salts = []
|
||||
for i in range(0, 6):
|
||||
salts.append(f"aurweb-{count - i}")
|
||||
return salts
|
||||
|
||||
|
||||
def get_captcha_token(salt):
|
||||
"""Produce a token for the CAPTCHA salt."""
|
||||
return hashlib.md5(salt.encode()).hexdigest()[:3]
|
||||
|
||||
|
||||
def get_captcha_challenge(salt):
|
||||
"""Get a CAPTCHA challenge string (shell command) for a salt."""
|
||||
token = get_captcha_token(salt)
|
||||
return f"LC_ALL=C pacman -V|sed -r 's#[0-9]+#{token}#g'|md5sum|cut -c1-6"
|
||||
|
||||
|
||||
def get_captcha_answer(token):
|
||||
"""Compute the answer via md5 of the real template text, return the
|
||||
first six digits of the hexadecimal hash."""
|
||||
text = r"""
|
||||
.--. Pacman v%s.%s.%s - libalpm v%s.%s.%s
|
||||
/ _.-' .-. .-. .-. Copyright (C) %s-%s Pacman Development Team
|
||||
\ '-. '-' '-' '-' Copyright (C) %s-%s Judd Vinet
|
||||
'--'
|
||||
This program may be freely redistributed under
|
||||
the terms of the GNU General Public License.
|
||||
""" % tuple(
|
||||
[token] * 10
|
||||
)
|
||||
return hashlib.md5((text + "\n").encode()).hexdigest()[:6]
|
||||
|
||||
|
||||
@register_filter("captcha_salt")
|
||||
@pass_context
|
||||
def captcha_salt_filter(context):
|
||||
"""Returns the most recent CAPTCHA salt in the list of salts."""
|
||||
salts = get_captcha_salts()
|
||||
return salts[0]
|
||||
|
||||
|
||||
@register_filter("captcha_cmdline")
|
||||
@pass_context
|
||||
def captcha_cmdline_filter(context, salt):
|
||||
"""Returns a CAPTCHA challenge for a given salt."""
|
||||
return get_captcha_challenge(salt)
|
|
@ -1,5 +1,8 @@
|
|||
import configparser
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
import tomlkit
|
||||
|
||||
_parser = None
|
||||
|
||||
|
@ -8,10 +11,11 @@ def _get_parser():
|
|||
global _parser
|
||||
|
||||
if not _parser:
|
||||
path = os.environ.get('AUR_CONFIG', '/etc/aurweb/config')
|
||||
defaults = os.environ.get('AUR_CONFIG_DEFAULTS', path + '.defaults')
|
||||
path = os.environ.get("AUR_CONFIG", "/etc/aurweb/config")
|
||||
defaults = os.environ.get("AUR_CONFIG_DEFAULTS", path + ".defaults")
|
||||
|
||||
_parser = configparser.RawConfigParser()
|
||||
_parser.optionxform = lambda option: option
|
||||
if os.path.isfile(defaults):
|
||||
with open(defaults) as f:
|
||||
_parser.read_file(f)
|
||||
|
@ -20,13 +24,56 @@ def _get_parser():
|
|||
return _parser
|
||||
|
||||
|
||||
def rehash():
|
||||
"""Globally rehash the configuration parser."""
|
||||
global _parser
|
||||
_parser = None
|
||||
_get_parser()
|
||||
|
||||
|
||||
def get_with_fallback(section, option, fallback):
|
||||
return _get_parser().get(section, option, fallback=fallback)
|
||||
|
||||
|
||||
def get(section, option):
|
||||
return _get_parser().get(section, option)
|
||||
|
||||
|
||||
def _get_project_meta():
|
||||
with open(os.path.join(get("options", "aurwebdir"), "pyproject.toml")) as pyproject:
|
||||
file_contents = pyproject.read()
|
||||
|
||||
return tomlkit.parse(file_contents)["tool"]["poetry"]
|
||||
|
||||
|
||||
# Publicly visible version of aurweb. This is used to display
|
||||
# aurweb versioning in the footer and must be maintained.
|
||||
AURWEB_VERSION = str(_get_project_meta()["version"])
|
||||
|
||||
|
||||
def getboolean(section, option):
|
||||
return _get_parser().getboolean(section, option)
|
||||
|
||||
|
||||
def getint(section, option):
|
||||
return _get_parser().getint(section, option)
|
||||
def getint(section, option, fallback=None):
|
||||
return _get_parser().getint(section, option, fallback=fallback)
|
||||
|
||||
|
||||
def get_section(section):
|
||||
if section in _get_parser().sections():
|
||||
return _get_parser()[section]
|
||||
|
||||
|
||||
def unset_option(section: str, option: str) -> None:
|
||||
_get_parser().remove_option(section, option)
|
||||
|
||||
|
||||
def set_option(section: str, option: str, value: Any) -> None:
|
||||
_get_parser().set(section, option, value)
|
||||
return value
|
||||
|
||||
|
||||
def save() -> None:
|
||||
aur_config = os.environ.get("AUR_CONFIG", "/etc/aurweb/config")
|
||||
with open(aur_config, "w") as fp:
|
||||
_get_parser().write(fp)
|
||||
|
|
8
aurweb/cookies.py
Normal file
8
aurweb/cookies.py
Normal file
|
@ -0,0 +1,8 @@
|
|||
def samesite() -> str:
|
||||
"""Produce cookie SameSite value.
|
||||
|
||||
Currently this is hard-coded to return "lax"
|
||||
|
||||
:returns "lax"
|
||||
"""
|
||||
return "lax"
|
444
aurweb/db.py
444
aurweb/db.py
|
@ -1,43 +1,379 @@
|
|||
import mysql.connector
|
||||
import sqlite3
|
||||
|
||||
import aurweb.config
|
||||
# Supported database drivers.
|
||||
DRIVERS = {"mysql": "mysql+mysqldb"}
|
||||
|
||||
|
||||
class Connection:
|
||||
def make_random_value(table: str, column: str, length: int):
|
||||
"""Generate a unique, random value for a string column in a table.
|
||||
|
||||
:return: A unique string that is not in the database
|
||||
"""
|
||||
import aurweb.util
|
||||
|
||||
string = aurweb.util.make_random_string(length)
|
||||
while query(table).filter(column == string).first():
|
||||
string = aurweb.util.make_random_string(length)
|
||||
return string
|
||||
|
||||
|
||||
def test_name() -> str:
|
||||
"""
|
||||
Return the unhashed database name.
|
||||
|
||||
The unhashed database name is determined (lower = higher priority) by:
|
||||
-------------------------------------------
|
||||
1. {test_suite} portion of PYTEST_CURRENT_TEST
|
||||
2. aurweb.config.get("database", "name")
|
||||
|
||||
During `pytest` runs, the PYTEST_CURRENT_TEST environment variable
|
||||
is set to the current test in the format `{test_suite}::{test_func}`.
|
||||
|
||||
This allows tests to use a suite-specific database for its runs,
|
||||
which decouples database state from test suites.
|
||||
|
||||
:return: Unhashed database name
|
||||
"""
|
||||
import os
|
||||
|
||||
import aurweb.config
|
||||
|
||||
db = os.environ.get("PYTEST_CURRENT_TEST", aurweb.config.get("database", "name"))
|
||||
return db.split(":")[0]
|
||||
|
||||
|
||||
def name() -> str:
|
||||
"""
|
||||
Return sanitized database name that can be used for tests or production.
|
||||
|
||||
If test_name() starts with "test/", the database name is SHA-1 hashed,
|
||||
prefixed with 'db', and returned. Otherwise, test_name() is passed
|
||||
through and not hashed at all.
|
||||
|
||||
:return: SHA1-hashed database name prefixed with 'db'
|
||||
"""
|
||||
dbname = test_name()
|
||||
if not dbname.startswith("test/"):
|
||||
return dbname
|
||||
|
||||
import hashlib
|
||||
|
||||
sha1 = hashlib.sha1(dbname.encode()).hexdigest()
|
||||
|
||||
return "db" + sha1
|
||||
|
||||
|
||||
# Module-private global memo used to store SQLAlchemy sessions.
|
||||
_sessions = dict()
|
||||
|
||||
|
||||
def get_session(engine=None):
|
||||
"""Return aurweb.db's global session."""
|
||||
dbname = name()
|
||||
|
||||
global _sessions
|
||||
if dbname not in _sessions:
|
||||
from sqlalchemy.orm import scoped_session, sessionmaker
|
||||
|
||||
if not engine: # pragma: no cover
|
||||
engine = get_engine()
|
||||
|
||||
Session = scoped_session(
|
||||
sessionmaker(autocommit=True, autoflush=False, bind=engine)
|
||||
)
|
||||
_sessions[dbname] = Session()
|
||||
|
||||
return _sessions.get(dbname)
|
||||
|
||||
|
||||
def pop_session(dbname: str) -> None:
|
||||
"""
|
||||
Pop a Session out of the private _sessions memo.
|
||||
|
||||
:param dbname: Database name
|
||||
:raises KeyError: When `dbname` does not exist in the memo
|
||||
"""
|
||||
global _sessions
|
||||
_sessions.pop(dbname)
|
||||
|
||||
|
||||
def refresh(model):
|
||||
"""
|
||||
Refresh the session's knowledge of `model`.
|
||||
|
||||
:returns: Passed in `model`
|
||||
"""
|
||||
get_session().refresh(model)
|
||||
return model
|
||||
|
||||
|
||||
def query(Model, *args, **kwargs):
|
||||
"""
|
||||
Perform an ORM query against the database session.
|
||||
|
||||
This method also runs Query.filter on the resulting model
|
||||
query with *args and **kwargs.
|
||||
|
||||
:param Model: Declarative ORM class
|
||||
"""
|
||||
return get_session().query(Model).filter(*args, **kwargs)
|
||||
|
||||
|
||||
def create(Model, *args, **kwargs):
|
||||
"""
|
||||
Create a record and add() it to the database session.
|
||||
|
||||
:param Model: Declarative ORM class
|
||||
:return: Model instance
|
||||
"""
|
||||
instance = Model(*args, **kwargs)
|
||||
return add(instance)
|
||||
|
||||
|
||||
def delete(model) -> None:
|
||||
"""
|
||||
Delete a set of records found by Query.filter(*args, **kwargs).
|
||||
|
||||
:param Model: Declarative ORM class
|
||||
"""
|
||||
get_session().delete(model)
|
||||
|
||||
|
||||
def delete_all(iterable) -> None:
|
||||
"""Delete each instance found in `iterable`."""
|
||||
import aurweb.util
|
||||
|
||||
session_ = get_session()
|
||||
aurweb.util.apply_all(iterable, session_.delete)
|
||||
|
||||
|
||||
def rollback() -> None:
|
||||
"""Rollback the database session."""
|
||||
get_session().rollback()
|
||||
|
||||
|
||||
def add(model):
|
||||
"""Add `model` to the database session."""
|
||||
get_session().add(model)
|
||||
return model
|
||||
|
||||
|
||||
def begin():
|
||||
"""Begin an SQLAlchemy SessionTransaction."""
|
||||
return get_session().begin()
|
||||
|
||||
|
||||
def retry_deadlock(func):
|
||||
from sqlalchemy.exc import OperationalError
|
||||
|
||||
def wrapper(*args, _i: int = 0, **kwargs):
|
||||
# Retry 10 times, then raise the exception
|
||||
# If we fail before the 10th, recurse into `wrapper`
|
||||
# If we fail on the 10th, continue to throw the exception
|
||||
limit = 10
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except OperationalError as exc:
|
||||
if _i < limit and "Deadlock found" in str(exc):
|
||||
# Retry on deadlock by recursing into `wrapper`
|
||||
return wrapper(*args, _i=_i + 1, **kwargs)
|
||||
# Otherwise, just raise the exception
|
||||
raise exc
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def async_retry_deadlock(func):
|
||||
from sqlalchemy.exc import OperationalError
|
||||
|
||||
async def wrapper(*args, _i: int = 0, **kwargs):
|
||||
# Retry 10 times, then raise the exception
|
||||
# If we fail before the 10th, recurse into `wrapper`
|
||||
# If we fail on the 10th, continue to throw the exception
|
||||
limit = 10
|
||||
try:
|
||||
return await func(*args, **kwargs)
|
||||
except OperationalError as exc:
|
||||
if _i < limit and "Deadlock found" in str(exc):
|
||||
# Retry on deadlock by recursing into `wrapper`
|
||||
return await wrapper(*args, _i=_i + 1, **kwargs)
|
||||
# Otherwise, just raise the exception
|
||||
raise exc
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def get_sqlalchemy_url():
|
||||
"""
|
||||
Build an SQLAlchemy URL for use with create_engine.
|
||||
|
||||
:return: sqlalchemy.engine.url.URL
|
||||
"""
|
||||
import sqlalchemy
|
||||
from sqlalchemy.engine.url import URL
|
||||
|
||||
import aurweb.config
|
||||
|
||||
constructor = URL
|
||||
|
||||
parts = sqlalchemy.__version__.split(".")
|
||||
major = int(parts[0])
|
||||
minor = int(parts[1])
|
||||
if major == 1 and minor >= 4: # pragma: no cover
|
||||
constructor = URL.create
|
||||
|
||||
aur_db_backend = aurweb.config.get("database", "backend")
|
||||
if aur_db_backend == "mysql":
|
||||
param_query = {}
|
||||
port = aurweb.config.get_with_fallback("database", "port", None)
|
||||
if not port:
|
||||
param_query["unix_socket"] = aurweb.config.get("database", "socket")
|
||||
|
||||
return constructor(
|
||||
DRIVERS.get(aur_db_backend),
|
||||
username=aurweb.config.get("database", "user"),
|
||||
password=aurweb.config.get_with_fallback(
|
||||
"database", "password", fallback=None
|
||||
),
|
||||
host=aurweb.config.get("database", "host"),
|
||||
database=name(),
|
||||
port=port,
|
||||
query=param_query,
|
||||
)
|
||||
elif aur_db_backend == "sqlite":
|
||||
return constructor(
|
||||
"sqlite",
|
||||
database=aurweb.config.get("database", "name"),
|
||||
)
|
||||
else:
|
||||
raise ValueError("unsupported database backend")
|
||||
|
||||
|
||||
def sqlite_regexp(regex, item) -> bool: # pragma: no cover
|
||||
"""Method which mimics SQL's REGEXP for SQLite."""
|
||||
import re
|
||||
|
||||
return bool(re.search(regex, str(item)))
|
||||
|
||||
|
||||
def setup_sqlite(engine) -> None: # pragma: no cover
|
||||
"""Perform setup for an SQLite engine."""
|
||||
from sqlalchemy import event
|
||||
|
||||
@event.listens_for(engine, "connect")
|
||||
def do_begin(conn, record):
|
||||
import functools
|
||||
|
||||
create_deterministic_function = functools.partial(
|
||||
conn.create_function, deterministic=True
|
||||
)
|
||||
create_deterministic_function("REGEXP", 2, sqlite_regexp)
|
||||
|
||||
|
||||
# Module-private global memo used to store SQLAlchemy engines.
|
||||
_engines = dict()
|
||||
|
||||
|
||||
def get_engine(dbname: str = None, echo: bool = False):
|
||||
"""
|
||||
Return the SQLAlchemy engine for `dbname`.
|
||||
|
||||
The engine is created on the first call to get_engine and then stored in the
|
||||
`engine` global variable for the next calls.
|
||||
|
||||
:param dbname: Database name (default: aurweb.db.name())
|
||||
:param echo: Flag passed through to sqlalchemy.create_engine
|
||||
:return: SQLAlchemy Engine instance
|
||||
"""
|
||||
import aurweb.config
|
||||
|
||||
if not dbname:
|
||||
dbname = name()
|
||||
|
||||
global _engines
|
||||
if dbname not in _engines:
|
||||
db_backend = aurweb.config.get("database", "backend")
|
||||
connect_args = dict()
|
||||
|
||||
is_sqlite = bool(db_backend == "sqlite")
|
||||
if is_sqlite: # pragma: no cover
|
||||
connect_args["check_same_thread"] = False
|
||||
|
||||
kwargs = {"echo": echo, "connect_args": connect_args}
|
||||
from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor
|
||||
from sqlalchemy import create_engine
|
||||
|
||||
engine = create_engine(get_sqlalchemy_url(), **kwargs)
|
||||
SQLAlchemyInstrumentor().instrument(engine=engine)
|
||||
_engines[dbname] = engine
|
||||
|
||||
if is_sqlite: # pragma: no cover
|
||||
setup_sqlite(_engines.get(dbname))
|
||||
|
||||
return _engines.get(dbname)
|
||||
|
||||
|
||||
def pop_engine(dbname: str) -> None:
|
||||
"""
|
||||
Pop an Engine out of the private _engines memo.
|
||||
|
||||
:param dbname: Database name
|
||||
:raises KeyError: When `dbname` does not exist in the memo
|
||||
"""
|
||||
global _engines
|
||||
_engines.pop(dbname)
|
||||
|
||||
|
||||
def kill_engine() -> None:
|
||||
"""Close the current session and dispose of the engine."""
|
||||
dbname = name()
|
||||
|
||||
session = get_session()
|
||||
session.close()
|
||||
pop_session(dbname)
|
||||
|
||||
engine = get_engine()
|
||||
engine.dispose()
|
||||
pop_engine(dbname)
|
||||
|
||||
|
||||
def connect():
|
||||
"""
|
||||
Return an SQLAlchemy connection. Connections are usually pooled. See
|
||||
<https://docs.sqlalchemy.org/en/13/core/connections.html>.
|
||||
|
||||
Since SQLAlchemy connections are context managers too, you should use it
|
||||
with Python’s `with` operator, or with FastAPI’s dependency injection.
|
||||
"""
|
||||
return get_engine().connect()
|
||||
|
||||
|
||||
class ConnectionExecutor:
|
||||
_conn = None
|
||||
_paramstyle = None
|
||||
|
||||
def __init__(self):
|
||||
aur_db_backend = aurweb.config.get('database', 'backend')
|
||||
def __init__(self, conn, backend=None):
|
||||
import aurweb.config
|
||||
|
||||
backend = backend or aurweb.config.get("database", "backend")
|
||||
self._conn = conn
|
||||
if backend == "mysql":
|
||||
self._paramstyle = "format"
|
||||
elif backend == "sqlite":
|
||||
import sqlite3
|
||||
|
||||
if aur_db_backend == 'mysql':
|
||||
aur_db_host = aurweb.config.get('database', 'host')
|
||||
aur_db_name = aurweb.config.get('database', 'name')
|
||||
aur_db_user = aurweb.config.get('database', 'user')
|
||||
aur_db_pass = aurweb.config.get('database', 'password')
|
||||
aur_db_socket = aurweb.config.get('database', 'socket')
|
||||
self._conn = mysql.connector.connect(host=aur_db_host,
|
||||
user=aur_db_user,
|
||||
passwd=aur_db_pass,
|
||||
db=aur_db_name,
|
||||
unix_socket=aur_db_socket,
|
||||
buffered=True)
|
||||
self._paramstyle = mysql.connector.paramstyle
|
||||
elif aur_db_backend == 'sqlite':
|
||||
aur_db_name = aurweb.config.get('database', 'name')
|
||||
self._conn = sqlite3.connect(aur_db_name)
|
||||
self._paramstyle = sqlite3.paramstyle
|
||||
else:
|
||||
raise ValueError('unsupported database backend')
|
||||
|
||||
def execute(self, query, params=()):
|
||||
if self._paramstyle in ('format', 'pyformat'):
|
||||
query = query.replace('%', '%%').replace('?', '%s')
|
||||
elif self._paramstyle == 'qmark':
|
||||
def paramstyle(self):
|
||||
return self._paramstyle
|
||||
|
||||
def execute(self, query, params=()): # pragma: no cover
|
||||
# TODO: SQLite support has been removed in FastAPI. It remains
|
||||
# here to fund its support for the Sharness testsuite.
|
||||
if self._paramstyle in ("format", "pyformat"):
|
||||
query = query.replace("%", "%%").replace("?", "%s")
|
||||
elif self._paramstyle == "qmark":
|
||||
pass
|
||||
else:
|
||||
raise ValueError('unsupported paramstyle')
|
||||
raise ValueError("unsupported paramstyle")
|
||||
|
||||
cur = self._conn.cursor()
|
||||
cur.execute(query, params)
|
||||
|
@ -49,3 +385,51 @@ class Connection:
|
|||
|
||||
def close(self):
|
||||
self._conn.close()
|
||||
|
||||
|
||||
class Connection:
|
||||
_executor = None
|
||||
_conn = None
|
||||
|
||||
def __init__(self):
|
||||
import aurweb.config
|
||||
|
||||
aur_db_backend = aurweb.config.get("database", "backend")
|
||||
|
||||
if aur_db_backend == "mysql":
|
||||
import MySQLdb
|
||||
|
||||
aur_db_host = aurweb.config.get("database", "host")
|
||||
aur_db_name = name()
|
||||
aur_db_user = aurweb.config.get("database", "user")
|
||||
aur_db_pass = aurweb.config.get_with_fallback("database", "password", str())
|
||||
aur_db_socket = aurweb.config.get("database", "socket")
|
||||
self._conn = MySQLdb.connect(
|
||||
host=aur_db_host,
|
||||
user=aur_db_user,
|
||||
passwd=aur_db_pass,
|
||||
db=aur_db_name,
|
||||
unix_socket=aur_db_socket,
|
||||
)
|
||||
elif aur_db_backend == "sqlite": # pragma: no cover
|
||||
# TODO: SQLite support has been removed in FastAPI. It remains
|
||||
# here to fund its support for Sharness testsuite.
|
||||
import math
|
||||
import sqlite3
|
||||
|
||||
aur_db_name = aurweb.config.get("database", "name")
|
||||
self._conn = sqlite3.connect(aur_db_name)
|
||||
self._conn.create_function("POWER", 2, math.pow)
|
||||
else:
|
||||
raise ValueError("unsupported database backend")
|
||||
|
||||
self._conn = ConnectionExecutor(self._conn, aur_db_backend)
|
||||
|
||||
def execute(self, query, params=()):
|
||||
return self._conn.execute(query, params)
|
||||
|
||||
def commit(self):
|
||||
self._conn.commit()
|
||||
|
||||
def close(self):
|
||||
self._conn.close()
|
||||
|
|
24
aurweb/defaults.py
Normal file
24
aurweb/defaults.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
""" Constant default values centralized in one place. """
|
||||
|
||||
# Default [O]ffset
|
||||
O = 0
|
||||
|
||||
# Default [P]er [P]age
|
||||
PP = 50
|
||||
|
||||
# Default Comments Per Page
|
||||
COMMENTS_PER_PAGE = 10
|
||||
|
||||
# A whitelist of valid PP values
|
||||
PP_WHITELIST = {50, 100, 250}
|
||||
|
||||
# Default `by` parameter for RPC search.
|
||||
RPC_SEARCH_BY = "name-desc"
|
||||
|
||||
|
||||
def fallback_pp(per_page: int) -> int:
|
||||
"""If `per_page` is a valid value in PP_WHITELIST, return it.
|
||||
Otherwise, return defaults.PP."""
|
||||
if per_page not in PP_WHITELIST:
|
||||
return PP
|
||||
return per_page
|
|
@ -1,3 +1,9 @@
|
|||
import functools
|
||||
from typing import Any, Callable
|
||||
|
||||
import fastapi
|
||||
|
||||
|
||||
class AurwebException(Exception):
|
||||
pass
|
||||
|
||||
|
@ -12,58 +18,95 @@ class BannedException(AurwebException):
|
|||
|
||||
class PermissionDeniedException(AurwebException):
|
||||
def __init__(self, user):
|
||||
msg = 'permission denied: {:s}'.format(user)
|
||||
msg = "permission denied: {:s}".format(user)
|
||||
super(PermissionDeniedException, self).__init__(msg)
|
||||
|
||||
|
||||
class BrokenUpdateHookException(AurwebException):
|
||||
def __init__(self, cmd):
|
||||
msg = "broken update hook: {:s}".format(cmd)
|
||||
super(BrokenUpdateHookException, self).__init__(msg)
|
||||
|
||||
|
||||
class InvalidUserException(AurwebException):
|
||||
def __init__(self, user):
|
||||
msg = 'unknown user: {:s}'.format(user)
|
||||
msg = "unknown user: {:s}".format(user)
|
||||
super(InvalidUserException, self).__init__(msg)
|
||||
|
||||
|
||||
class InvalidPackageBaseException(AurwebException):
|
||||
def __init__(self, pkgbase):
|
||||
msg = 'package base not found: {:s}'.format(pkgbase)
|
||||
msg = "package base not found: {:s}".format(pkgbase)
|
||||
super(InvalidPackageBaseException, self).__init__(msg)
|
||||
|
||||
|
||||
class InvalidRepositoryNameException(AurwebException):
|
||||
def __init__(self, pkgbase):
|
||||
msg = 'invalid repository name: {:s}'.format(pkgbase)
|
||||
msg = "invalid repository name: {:s}".format(pkgbase)
|
||||
super(InvalidRepositoryNameException, self).__init__(msg)
|
||||
|
||||
|
||||
class PackageBaseExistsException(AurwebException):
|
||||
def __init__(self, pkgbase):
|
||||
msg = 'package base already exists: {:s}'.format(pkgbase)
|
||||
msg = "package base already exists: {:s}".format(pkgbase)
|
||||
super(PackageBaseExistsException, self).__init__(msg)
|
||||
|
||||
|
||||
class InvalidReasonException(AurwebException):
|
||||
def __init__(self, reason):
|
||||
msg = 'invalid reason: {:s}'.format(reason)
|
||||
msg = "invalid reason: {:s}".format(reason)
|
||||
super(InvalidReasonException, self).__init__(msg)
|
||||
|
||||
|
||||
class InvalidCommentException(AurwebException):
|
||||
def __init__(self, comment):
|
||||
msg = 'comment is too short: {:s}'.format(comment)
|
||||
msg = "comment is too short: {:s}".format(comment)
|
||||
super(InvalidCommentException, self).__init__(msg)
|
||||
|
||||
|
||||
class AlreadyVotedException(AurwebException):
|
||||
def __init__(self, comment):
|
||||
msg = 'already voted for package base: {:s}'.format(comment)
|
||||
msg = "already voted for package base: {:s}".format(comment)
|
||||
super(AlreadyVotedException, self).__init__(msg)
|
||||
|
||||
|
||||
class NotVotedException(AurwebException):
|
||||
def __init__(self, comment):
|
||||
msg = 'missing vote for package base: {:s}'.format(comment)
|
||||
msg = "missing vote for package base: {:s}".format(comment)
|
||||
super(NotVotedException, self).__init__(msg)
|
||||
|
||||
|
||||
class InvalidArgumentsException(AurwebException):
|
||||
def __init__(self, msg):
|
||||
super(InvalidArgumentsException, self).__init__(msg)
|
||||
|
||||
|
||||
class RPCError(AurwebException):
|
||||
pass
|
||||
|
||||
|
||||
class ValidationError(AurwebException):
|
||||
def __init__(self, data: Any, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.data = data
|
||||
|
||||
|
||||
class InvariantError(AurwebException):
|
||||
pass
|
||||
|
||||
|
||||
def handle_form_exceptions(route: Callable) -> fastapi.Response:
|
||||
"""
|
||||
A decorator required when fastapi POST routes are defined.
|
||||
|
||||
This decorator populates fastapi's `request.state` with a `form_data`
|
||||
attribute, which is then used to report form data when exceptions
|
||||
are caught and reported.
|
||||
"""
|
||||
|
||||
@functools.wraps(route)
|
||||
async def wrapper(request: fastapi.Request, *args, **kwargs):
|
||||
request.state.form_data = await request.form()
|
||||
return await route(request, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
|
181
aurweb/filters.py
Normal file
181
aurweb/filters.py
Normal file
|
@ -0,0 +1,181 @@
|
|||
import copy
|
||||
import math
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any, Union
|
||||
from urllib.parse import quote_plus, urlencode
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
import fastapi
|
||||
import paginate
|
||||
from jinja2 import pass_context
|
||||
from jinja2.filters import do_format
|
||||
|
||||
import aurweb.models
|
||||
from aurweb import config, l10n
|
||||
from aurweb.templates import register_filter, register_function
|
||||
|
||||
|
||||
@register_filter("pager_nav")
|
||||
@pass_context
|
||||
def pager_nav(context: dict[str, Any], page: int, total: int, prefix: str) -> str:
|
||||
page = int(page) # Make sure this is an int.
|
||||
|
||||
pp = context.get("PP", 50)
|
||||
|
||||
# Setup a local query string dict, optionally passed by caller.
|
||||
q = context.get("q", dict())
|
||||
|
||||
search_by = context.get("SeB", None)
|
||||
if search_by:
|
||||
q["SeB"] = search_by
|
||||
|
||||
sort_by = context.get("SB", None)
|
||||
if sort_by:
|
||||
q["SB"] = sort_by
|
||||
|
||||
def create_url(page: int):
|
||||
nonlocal q
|
||||
offset = max(page * pp - pp, 0)
|
||||
qs = to_qs(extend_query(q, ["O", offset]))
|
||||
return f"{prefix}?{qs}"
|
||||
|
||||
# Use the paginate module to produce our linkage.
|
||||
pager = paginate.Page(
|
||||
[], page=page + 1, items_per_page=pp, item_count=total, url_maker=create_url
|
||||
)
|
||||
|
||||
return pager.pager(
|
||||
link_attr={"class": "page"},
|
||||
curpage_attr={"class": "page"},
|
||||
separator=" ",
|
||||
format="$link_first $link_previous ~5~ $link_next $link_last",
|
||||
symbol_first="« First",
|
||||
symbol_previous="‹ Previous",
|
||||
symbol_next="Next ›",
|
||||
symbol_last="Last »",
|
||||
)
|
||||
|
||||
|
||||
@register_function("config_getint")
|
||||
def config_getint(section: str, key: str) -> int:
|
||||
return config.getint(section, key)
|
||||
|
||||
|
||||
@register_function("round")
|
||||
def do_round(f: float) -> int:
|
||||
return round(f)
|
||||
|
||||
|
||||
@register_filter("tr")
|
||||
@pass_context
|
||||
def tr(context: dict[str, Any], value: str):
|
||||
"""A translation filter; example: {{ "Hello" | tr("de") }}."""
|
||||
_ = l10n.get_translator_for_request(context.get("request"))
|
||||
return _(value)
|
||||
|
||||
|
||||
@register_filter("tn")
|
||||
@pass_context
|
||||
def tn(context: dict[str, Any], count: int, singular: str, plural: str) -> str:
|
||||
"""A singular and plural translation filter.
|
||||
|
||||
Example:
|
||||
{{ some_integer | tn("singular %d", "plural %d") }}
|
||||
|
||||
:param context: Response context
|
||||
:param count: The number used to decide singular or plural state
|
||||
:param singular: The singular translation
|
||||
:param plural: The plural translation
|
||||
:return: Translated string
|
||||
"""
|
||||
gettext = l10n.get_raw_translator_for_request(context.get("request"))
|
||||
return gettext.ngettext(singular, plural, count)
|
||||
|
||||
|
||||
@register_filter("dt")
|
||||
def timestamp_to_datetime(timestamp: int):
|
||||
return datetime.fromtimestamp(timestamp, UTC)
|
||||
|
||||
|
||||
@register_filter("as_timezone")
|
||||
def as_timezone(dt: datetime, timezone: str):
|
||||
return dt.astimezone(tz=ZoneInfo(timezone))
|
||||
|
||||
|
||||
@register_filter("extend_query")
|
||||
def extend_query(query: dict[str, Any], *additions) -> dict[str, Any]:
|
||||
"""Add additional key value pairs to query."""
|
||||
q = copy.copy(query)
|
||||
for k, v in list(additions):
|
||||
q[k] = v
|
||||
return q
|
||||
|
||||
|
||||
@register_filter("urlencode")
|
||||
def to_qs(query: dict[str, Any]) -> str:
|
||||
return urlencode(query, doseq=True)
|
||||
|
||||
|
||||
@register_filter("get_vote")
|
||||
def get_vote(voteinfo, request: fastapi.Request):
|
||||
from aurweb.models import Vote
|
||||
|
||||
return voteinfo.votes.filter(Vote.User == request.user).first()
|
||||
|
||||
|
||||
@register_filter("number_format")
|
||||
def number_format(value: float, places: int):
|
||||
"""A converter function similar to PHP's number_format."""
|
||||
return f"{value:.{places}f}"
|
||||
|
||||
|
||||
@register_filter("account_url")
|
||||
@pass_context
|
||||
def account_url(context: dict[str, Any], user: "aurweb.models.user.User") -> str:
|
||||
base = aurweb.config.get("options", "aur_location")
|
||||
return f"{base}/account/{user.Username}"
|
||||
|
||||
|
||||
@register_filter("quote_plus")
|
||||
def _quote_plus(*args, **kwargs) -> str:
|
||||
return quote_plus(*args, **kwargs)
|
||||
|
||||
|
||||
@register_filter("ceil")
|
||||
def ceil(*args, **kwargs) -> int:
|
||||
return math.ceil(*args, **kwargs)
|
||||
|
||||
|
||||
@register_function("date_strftime")
|
||||
@pass_context
|
||||
def date_strftime(context: dict[str, Any], dt: Union[int, datetime], fmt: str) -> str:
|
||||
if isinstance(dt, int):
|
||||
dt = timestamp_to_datetime(dt)
|
||||
tz = context.get("timezone")
|
||||
return as_timezone(dt, tz).strftime(fmt)
|
||||
|
||||
|
||||
@register_function("date_display")
|
||||
@pass_context
|
||||
def date_display(context: dict[str, Any], dt: Union[int, datetime]) -> str:
|
||||
return date_strftime(context, dt, "%Y-%m-%d (%Z)")
|
||||
|
||||
|
||||
@register_function("datetime_display")
|
||||
@pass_context
|
||||
def datetime_display(context: dict[str, Any], dt: Union[int, datetime]) -> str:
|
||||
return date_strftime(context, dt, "%Y-%m-%d %H:%M (%Z)")
|
||||
|
||||
|
||||
@register_filter("format")
|
||||
def safe_format(value: str, *args: Any, **kwargs: Any) -> str:
|
||||
"""Wrapper for jinja2 format function to perform additional checks."""
|
||||
|
||||
# If we don't have anything to be formatted, just return the value.
|
||||
# We have some translations that do not contain placeholders for replacement.
|
||||
# In these cases the jinja2 function is throwing an error:
|
||||
# "TypeError: not all arguments converted during string formatting"
|
||||
if "%" not in value:
|
||||
return value
|
||||
|
||||
return do_format(value, *args, **kwargs)
|
|
@ -1,8 +1,7 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import shlex
|
||||
import re
|
||||
import shlex
|
||||
import sys
|
||||
|
||||
import aurweb.config
|
||||
|
@ -10,12 +9,12 @@ import aurweb.db
|
|||
|
||||
|
||||
def format_command(env_vars, command, ssh_opts, ssh_key):
|
||||
environment = ''
|
||||
environment = ""
|
||||
for key, var in env_vars.items():
|
||||
environment += '{}={} '.format(key, shlex.quote(var))
|
||||
environment += "{}={} ".format(key, shlex.quote(var))
|
||||
|
||||
command = shlex.quote(command)
|
||||
command = '{}{}'.format(environment, command)
|
||||
command = "{}{}".format(environment, command)
|
||||
|
||||
# The command is being substituted into an authorized_keys line below,
|
||||
# so we need to escape the double quotes.
|
||||
|
@ -25,10 +24,10 @@ def format_command(env_vars, command, ssh_opts, ssh_key):
|
|||
|
||||
|
||||
def main():
|
||||
valid_keytypes = aurweb.config.get('auth', 'valid-keytypes').split()
|
||||
username_regex = aurweb.config.get('auth', 'username-regex')
|
||||
git_serve_cmd = aurweb.config.get('auth', 'git-serve-cmd')
|
||||
ssh_opts = aurweb.config.get('auth', 'ssh-options')
|
||||
valid_keytypes = aurweb.config.get("auth", "valid-keytypes").split()
|
||||
username_regex = aurweb.config.get("auth", "username-regex")
|
||||
git_serve_cmd = aurweb.config.get("auth", "git-serve-cmd")
|
||||
ssh_opts = aurweb.config.get("auth", "ssh-options")
|
||||
|
||||
keytype = sys.argv[1]
|
||||
keytext = sys.argv[2]
|
||||
|
@ -37,11 +36,13 @@ def main():
|
|||
|
||||
conn = aurweb.db.Connection()
|
||||
|
||||
cur = conn.execute("SELECT Users.Username, Users.AccountTypeID FROM Users "
|
||||
"INNER JOIN SSHPubKeys ON SSHPubKeys.UserID = Users.ID "
|
||||
"WHERE SSHPubKeys.PubKey = ? AND Users.Suspended = 0 "
|
||||
"AND NOT Users.Passwd = ''",
|
||||
(keytype + " " + keytext,))
|
||||
cur = conn.execute(
|
||||
"SELECT Users.Username, Users.AccountTypeID FROM Users "
|
||||
"INNER JOIN SSHPubKeys ON SSHPubKeys.UserID = Users.ID "
|
||||
"WHERE SSHPubKeys.PubKey = ? AND Users.Suspended = 0 "
|
||||
"AND NOT Users.Passwd = ''",
|
||||
(keytype + " " + keytext,),
|
||||
)
|
||||
|
||||
row = cur.fetchone()
|
||||
if not row or cur.fetchone():
|
||||
|
@ -52,13 +53,13 @@ def main():
|
|||
exit(1)
|
||||
|
||||
env_vars = {
|
||||
'AUR_USER': user,
|
||||
'AUR_PRIVILEGED': '1' if account_type > 1 else '0',
|
||||
"AUR_USER": user,
|
||||
"AUR_PRIVILEGED": "1" if account_type > 1 else "0",
|
||||
}
|
||||
key = keytype + ' ' + keytext
|
||||
key = keytype + " " + keytext
|
||||
|
||||
print(format_command(env_vars, git_serve_cmd, ssh_opts, key))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
@ -11,16 +11,16 @@ import aurweb.config
|
|||
import aurweb.db
|
||||
import aurweb.exceptions
|
||||
|
||||
notify_cmd = aurweb.config.get('notifications', 'notify-cmd')
|
||||
notify_cmd = aurweb.config.get("notifications", "notify-cmd")
|
||||
|
||||
repo_path = aurweb.config.get('serve', 'repo-path')
|
||||
repo_regex = aurweb.config.get('serve', 'repo-regex')
|
||||
git_shell_cmd = aurweb.config.get('serve', 'git-shell-cmd')
|
||||
git_update_cmd = aurweb.config.get('serve', 'git-update-cmd')
|
||||
ssh_cmdline = aurweb.config.get('serve', 'ssh-cmdline')
|
||||
repo_path = aurweb.config.get("serve", "repo-path")
|
||||
repo_regex = aurweb.config.get("serve", "repo-regex")
|
||||
git_shell_cmd = aurweb.config.get("serve", "git-shell-cmd")
|
||||
git_update_cmd = aurweb.config.get("serve", "git-update-cmd")
|
||||
ssh_cmdline = aurweb.config.get("serve", "ssh-cmdline")
|
||||
|
||||
enable_maintenance = aurweb.config.getboolean('options', 'enable-maintenance')
|
||||
maintenance_exc = aurweb.config.get('options', 'maintenance-exceptions').split()
|
||||
enable_maintenance = aurweb.config.getboolean("options", "enable-maintenance")
|
||||
maintenance_exc = aurweb.config.get("options", "maintenance-exceptions").split()
|
||||
|
||||
|
||||
def pkgbase_from_name(pkgbase):
|
||||
|
@ -43,14 +43,16 @@ def list_repos(user):
|
|||
if userid == 0:
|
||||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
||||
cur = conn.execute("SELECT Name, PackagerUID FROM PackageBases " +
|
||||
"WHERE MaintainerUID = ?", [userid])
|
||||
cur = conn.execute(
|
||||
"SELECT Name, PackagerUID FROM PackageBases " + "WHERE MaintainerUID = ?",
|
||||
[userid],
|
||||
)
|
||||
for row in cur:
|
||||
print((' ' if row[1] else '*') + row[0])
|
||||
print((" " if row[1] else "*") + row[0])
|
||||
conn.close()
|
||||
|
||||
|
||||
def create_pkgbase(pkgbase, user):
|
||||
def validate_pkgbase(pkgbase, user):
|
||||
if not re.match(repo_regex, pkgbase):
|
||||
raise aurweb.exceptions.InvalidRepositoryNameException(pkgbase)
|
||||
if pkgbase_exists(pkgbase):
|
||||
|
@ -60,23 +62,12 @@ def create_pkgbase(pkgbase, user):
|
|||
|
||||
cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
|
||||
userid = cur.fetchone()[0]
|
||||
|
||||
conn.close()
|
||||
|
||||
if userid == 0:
|
||||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
||||
now = int(time.time())
|
||||
cur = conn.execute("INSERT INTO PackageBases (Name, SubmittedTS, " +
|
||||
"ModifiedTS, SubmitterUID, MaintainerUID, " +
|
||||
"FlaggerComment) VALUES (?, ?, ?, ?, ?, '')",
|
||||
[pkgbase, now, now, userid, userid])
|
||||
pkgbase_id = cur.lastrowid
|
||||
|
||||
cur = conn.execute("INSERT INTO PackageNotifications " +
|
||||
"(PackageBaseID, UserID) VALUES (?, ?)",
|
||||
[pkgbase_id, userid])
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
|
||||
def pkgbase_adopt(pkgbase, user, privileged):
|
||||
pkgbase_id = pkgbase_from_name(pkgbase)
|
||||
|
@ -85,8 +76,10 @@ def pkgbase_adopt(pkgbase, user, privileged):
|
|||
|
||||
conn = aurweb.db.Connection()
|
||||
|
||||
cur = conn.execute("SELECT ID FROM PackageBases WHERE ID = ? AND " +
|
||||
"MaintainerUID IS NULL", [pkgbase_id])
|
||||
cur = conn.execute(
|
||||
"SELECT ID FROM PackageBases WHERE ID = ? AND " + "MaintainerUID IS NULL",
|
||||
[pkgbase_id],
|
||||
)
|
||||
if not privileged and not cur.fetchone():
|
||||
raise aurweb.exceptions.PermissionDeniedException(user)
|
||||
|
||||
|
@ -95,19 +88,25 @@ def pkgbase_adopt(pkgbase, user, privileged):
|
|||
if userid == 0:
|
||||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
||||
cur = conn.execute("UPDATE PackageBases SET MaintainerUID = ? " +
|
||||
"WHERE ID = ?", [userid, pkgbase_id])
|
||||
cur = conn.execute(
|
||||
"UPDATE PackageBases SET MaintainerUID = ? " + "WHERE ID = ?",
|
||||
[userid, pkgbase_id],
|
||||
)
|
||||
|
||||
cur = conn.execute("SELECT COUNT(*) FROM PackageNotifications WHERE " +
|
||||
"PackageBaseID = ? AND UserID = ?",
|
||||
[pkgbase_id, userid])
|
||||
cur = conn.execute(
|
||||
"SELECT COUNT(*) FROM PackageNotifications WHERE "
|
||||
+ "PackageBaseID = ? AND UserID = ?",
|
||||
[pkgbase_id, userid],
|
||||
)
|
||||
if cur.fetchone()[0] == 0:
|
||||
cur = conn.execute("INSERT INTO PackageNotifications " +
|
||||
"(PackageBaseID, UserID) VALUES (?, ?)",
|
||||
[pkgbase_id, userid])
|
||||
cur = conn.execute(
|
||||
"INSERT INTO PackageNotifications "
|
||||
+ "(PackageBaseID, UserID) VALUES (?, ?)",
|
||||
[pkgbase_id, userid],
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
subprocess.Popen((notify_cmd, 'adopt', str(userid), str(pkgbase_id)))
|
||||
subprocess.Popen((notify_cmd, "adopt", str(userid), str(pkgbase_id)))
|
||||
|
||||
conn.close()
|
||||
|
||||
|
@ -115,13 +114,16 @@ def pkgbase_adopt(pkgbase, user, privileged):
|
|||
def pkgbase_get_comaintainers(pkgbase):
|
||||
conn = aurweb.db.Connection()
|
||||
|
||||
cur = conn.execute("SELECT UserName FROM PackageComaintainers " +
|
||||
"INNER JOIN Users " +
|
||||
"ON Users.ID = PackageComaintainers.UsersID " +
|
||||
"INNER JOIN PackageBases " +
|
||||
"ON PackageBases.ID = PackageComaintainers.PackageBaseID " +
|
||||
"WHERE PackageBases.Name = ? " +
|
||||
"ORDER BY Priority ASC", [pkgbase])
|
||||
cur = conn.execute(
|
||||
"SELECT UserName FROM PackageComaintainers "
|
||||
+ "INNER JOIN Users "
|
||||
+ "ON Users.ID = PackageComaintainers.UsersID "
|
||||
+ "INNER JOIN PackageBases "
|
||||
+ "ON PackageBases.ID = PackageComaintainers.PackageBaseID "
|
||||
+ "WHERE PackageBases.Name = ? "
|
||||
+ "ORDER BY Priority ASC",
|
||||
[pkgbase],
|
||||
)
|
||||
|
||||
return [row[0] for row in cur.fetchall()]
|
||||
|
||||
|
@ -140,8 +142,7 @@ def pkgbase_set_comaintainers(pkgbase, userlist, user, privileged):
|
|||
|
||||
uids_old = set()
|
||||
for olduser in userlist_old:
|
||||
cur = conn.execute("SELECT ID FROM Users WHERE Username = ?",
|
||||
[olduser])
|
||||
cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [olduser])
|
||||
userid = cur.fetchone()[0]
|
||||
if userid == 0:
|
||||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
@ -149,8 +150,7 @@ def pkgbase_set_comaintainers(pkgbase, userlist, user, privileged):
|
|||
|
||||
uids_new = set()
|
||||
for newuser in userlist:
|
||||
cur = conn.execute("SELECT ID FROM Users WHERE Username = ?",
|
||||
[newuser])
|
||||
cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [newuser])
|
||||
userid = cur.fetchone()[0]
|
||||
if userid == 0:
|
||||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
@ -162,24 +162,33 @@ def pkgbase_set_comaintainers(pkgbase, userlist, user, privileged):
|
|||
i = 1
|
||||
for userid in uids_new:
|
||||
if userid in uids_add:
|
||||
cur = conn.execute("INSERT INTO PackageComaintainers " +
|
||||
"(PackageBaseID, UsersID, Priority) " +
|
||||
"VALUES (?, ?, ?)", [pkgbase_id, userid, i])
|
||||
subprocess.Popen((notify_cmd, 'comaintainer-add', str(userid),
|
||||
str(pkgbase_id)))
|
||||
cur = conn.execute(
|
||||
"INSERT INTO PackageComaintainers "
|
||||
+ "(PackageBaseID, UsersID, Priority) "
|
||||
+ "VALUES (?, ?, ?)",
|
||||
[pkgbase_id, userid, i],
|
||||
)
|
||||
subprocess.Popen(
|
||||
(notify_cmd, "comaintainer-add", str(userid), str(pkgbase_id))
|
||||
)
|
||||
else:
|
||||
cur = conn.execute("UPDATE PackageComaintainers " +
|
||||
"SET Priority = ? " +
|
||||
"WHERE PackageBaseID = ? AND UsersID = ?",
|
||||
[i, pkgbase_id, userid])
|
||||
cur = conn.execute(
|
||||
"UPDATE PackageComaintainers "
|
||||
+ "SET Priority = ? "
|
||||
+ "WHERE PackageBaseID = ? AND UsersID = ?",
|
||||
[i, pkgbase_id, userid],
|
||||
)
|
||||
i += 1
|
||||
|
||||
for userid in uids_rem:
|
||||
cur = conn.execute("DELETE FROM PackageComaintainers " +
|
||||
"WHERE PackageBaseID = ? AND UsersID = ?",
|
||||
[pkgbase_id, userid])
|
||||
subprocess.Popen((notify_cmd, 'comaintainer-remove',
|
||||
str(userid), str(pkgbase_id)))
|
||||
cur = conn.execute(
|
||||
"DELETE FROM PackageComaintainers "
|
||||
+ "WHERE PackageBaseID = ? AND UsersID = ?",
|
||||
[pkgbase_id, userid],
|
||||
)
|
||||
subprocess.Popen(
|
||||
(notify_cmd, "comaintainer-remove", str(userid), str(pkgbase_id))
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
@ -188,18 +197,21 @@ def pkgbase_set_comaintainers(pkgbase, userlist, user, privileged):
|
|||
def pkgreq_by_pkgbase(pkgbase_id, reqtype):
|
||||
conn = aurweb.db.Connection()
|
||||
|
||||
cur = conn.execute("SELECT PackageRequests.ID FROM PackageRequests " +
|
||||
"INNER JOIN RequestTypes ON " +
|
||||
"RequestTypes.ID = PackageRequests.ReqTypeID " +
|
||||
"WHERE PackageRequests.Status = 0 " +
|
||||
"AND PackageRequests.PackageBaseID = ? " +
|
||||
"AND RequestTypes.Name = ?", [pkgbase_id, reqtype])
|
||||
cur = conn.execute(
|
||||
"SELECT PackageRequests.ID FROM PackageRequests "
|
||||
+ "INNER JOIN RequestTypes ON "
|
||||
+ "RequestTypes.ID = PackageRequests.ReqTypeID "
|
||||
+ "WHERE PackageRequests.Status = 0 "
|
||||
+ "AND PackageRequests.PackageBaseID = ? "
|
||||
+ "AND RequestTypes.Name = ?",
|
||||
[pkgbase_id, reqtype],
|
||||
)
|
||||
|
||||
return [row[0] for row in cur.fetchall()]
|
||||
|
||||
|
||||
def pkgreq_close(reqid, user, reason, comments, autoclose=False):
|
||||
statusmap = {'accepted': 2, 'rejected': 3}
|
||||
statusmap = {"accepted": 2, "rejected": 3}
|
||||
if reason not in statusmap:
|
||||
raise aurweb.exceptions.InvalidReasonException(reason)
|
||||
status = statusmap[reason]
|
||||
|
@ -207,20 +219,28 @@ def pkgreq_close(reqid, user, reason, comments, autoclose=False):
|
|||
conn = aurweb.db.Connection()
|
||||
|
||||
if autoclose:
|
||||
userid = 0
|
||||
userid = None
|
||||
else:
|
||||
cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
|
||||
userid = cur.fetchone()[0]
|
||||
if userid == 0:
|
||||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
||||
conn.execute("UPDATE PackageRequests SET Status = ?, ClosureComment = ? " +
|
||||
"WHERE ID = ?", [status, comments, reqid])
|
||||
now = int(time.time())
|
||||
conn.execute(
|
||||
"UPDATE PackageRequests SET Status = ?, ClosedTS = ?, "
|
||||
+ "ClosedUID = ?, ClosureComment = ? "
|
||||
+ "WHERE ID = ?",
|
||||
[status, now, userid, comments, reqid],
|
||||
)
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
subprocess.Popen((notify_cmd, 'request-close', str(userid), str(reqid),
|
||||
reason)).wait()
|
||||
if not userid:
|
||||
userid = 0
|
||||
subprocess.Popen(
|
||||
(notify_cmd, "request-close", str(userid), str(reqid), reason)
|
||||
).wait()
|
||||
|
||||
|
||||
def pkgbase_disown(pkgbase, user, privileged):
|
||||
|
@ -235,9 +255,9 @@ def pkgbase_disown(pkgbase, user, privileged):
|
|||
# TODO: Support disowning package bases via package request.
|
||||
|
||||
# Scan through pending orphan requests and close them.
|
||||
comment = 'The user {:s} disowned the package.'.format(user)
|
||||
for reqid in pkgreq_by_pkgbase(pkgbase_id, 'orphan'):
|
||||
pkgreq_close(reqid, user, 'accepted', comment, True)
|
||||
comment = "The user {:s} disowned the package.".format(user)
|
||||
for reqid in pkgreq_by_pkgbase(pkgbase_id, "orphan"):
|
||||
pkgreq_close(reqid, user, "accepted", comment, True)
|
||||
|
||||
comaintainers = []
|
||||
new_maintainer_userid = None
|
||||
|
@ -245,28 +265,31 @@ def pkgbase_disown(pkgbase, user, privileged):
|
|||
conn = aurweb.db.Connection()
|
||||
|
||||
# Make the first co-maintainer the new maintainer, unless the action was
|
||||
# enforced by a Trusted User.
|
||||
# enforced by a Package Maintainer.
|
||||
if initialized_by_owner:
|
||||
comaintainers = pkgbase_get_comaintainers(pkgbase)
|
||||
if len(comaintainers) > 0:
|
||||
new_maintainer = comaintainers[0]
|
||||
cur = conn.execute("SELECT ID FROM Users WHERE Username = ?",
|
||||
[new_maintainer])
|
||||
cur = conn.execute(
|
||||
"SELECT ID FROM Users WHERE Username = ?", [new_maintainer]
|
||||
)
|
||||
new_maintainer_userid = cur.fetchone()[0]
|
||||
comaintainers.remove(new_maintainer)
|
||||
|
||||
pkgbase_set_comaintainers(pkgbase, comaintainers, user, privileged)
|
||||
cur = conn.execute("UPDATE PackageBases SET MaintainerUID = ? " +
|
||||
"WHERE ID = ?", [new_maintainer_userid, pkgbase_id])
|
||||
cur = conn.execute(
|
||||
"UPDATE PackageBases SET MaintainerUID = ? " + "WHERE ID = ?",
|
||||
[new_maintainer_userid, pkgbase_id],
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
|
||||
cur = conn.execute("SELECT ID FROM Users WHERE Username = ?", [user])
|
||||
userid = cur.fetchone()[0]
|
||||
if userid == 0:
|
||||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
||||
subprocess.Popen((notify_cmd, 'disown', str(userid), str(pkgbase_id)))
|
||||
subprocess.Popen((notify_cmd, "disown", str(userid), str(pkgbase_id)))
|
||||
|
||||
conn.close()
|
||||
|
||||
|
@ -286,14 +309,16 @@ def pkgbase_flag(pkgbase, user, comment):
|
|||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
||||
now = int(time.time())
|
||||
conn.execute("UPDATE PackageBases SET " +
|
||||
"OutOfDateTS = ?, FlaggerUID = ?, FlaggerComment = ? " +
|
||||
"WHERE ID = ? AND OutOfDateTS IS NULL",
|
||||
[now, userid, comment, pkgbase_id])
|
||||
conn.execute(
|
||||
"UPDATE PackageBases SET "
|
||||
+ "OutOfDateTS = ?, FlaggerUID = ?, FlaggerComment = ? "
|
||||
+ "WHERE ID = ? AND OutOfDateTS IS NULL",
|
||||
[now, userid, comment, pkgbase_id],
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
|
||||
subprocess.Popen((notify_cmd, 'flag', str(userid), str(pkgbase_id)))
|
||||
subprocess.Popen((notify_cmd, "flag", str(userid), str(pkgbase_id)))
|
||||
|
||||
|
||||
def pkgbase_unflag(pkgbase, user):
|
||||
|
@ -309,12 +334,15 @@ def pkgbase_unflag(pkgbase, user):
|
|||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
||||
if user in pkgbase_get_comaintainers(pkgbase):
|
||||
conn.execute("UPDATE PackageBases SET OutOfDateTS = NULL " +
|
||||
"WHERE ID = ?", [pkgbase_id])
|
||||
conn.execute(
|
||||
"UPDATE PackageBases SET OutOfDateTS = NULL " + "WHERE ID = ?", [pkgbase_id]
|
||||
)
|
||||
else:
|
||||
conn.execute("UPDATE PackageBases SET OutOfDateTS = NULL " +
|
||||
"WHERE ID = ? AND (MaintainerUID = ? OR FlaggerUID = ?)",
|
||||
[pkgbase_id, userid, userid])
|
||||
conn.execute(
|
||||
"UPDATE PackageBases SET OutOfDateTS = NULL "
|
||||
+ "WHERE ID = ? AND (MaintainerUID = ? OR FlaggerUID = ?)",
|
||||
[pkgbase_id, userid, userid],
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
|
||||
|
@ -331,17 +359,24 @@ def pkgbase_vote(pkgbase, user):
|
|||
if userid == 0:
|
||||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
||||
cur = conn.execute("SELECT COUNT(*) FROM PackageVotes " +
|
||||
"WHERE UsersID = ? AND PackageBaseID = ?",
|
||||
[userid, pkgbase_id])
|
||||
cur = conn.execute(
|
||||
"SELECT COUNT(*) FROM PackageVotes "
|
||||
+ "WHERE UsersID = ? AND PackageBaseID = ?",
|
||||
[userid, pkgbase_id],
|
||||
)
|
||||
if cur.fetchone()[0] > 0:
|
||||
raise aurweb.exceptions.AlreadyVotedException(pkgbase)
|
||||
|
||||
now = int(time.time())
|
||||
conn.execute("INSERT INTO PackageVotes (UsersID, PackageBaseID, VoteTS) " +
|
||||
"VALUES (?, ?, ?)", [userid, pkgbase_id, now])
|
||||
conn.execute("UPDATE PackageBases SET NumVotes = NumVotes + 1 " +
|
||||
"WHERE ID = ?", [pkgbase_id])
|
||||
conn.execute(
|
||||
"INSERT INTO PackageVotes (UsersID, PackageBaseID, VoteTS) "
|
||||
+ "VALUES (?, ?, ?)",
|
||||
[userid, pkgbase_id, now],
|
||||
)
|
||||
conn.execute(
|
||||
"UPDATE PackageBases SET NumVotes = NumVotes + 1 " + "WHERE ID = ?",
|
||||
[pkgbase_id],
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
@ -357,16 +392,22 @@ def pkgbase_unvote(pkgbase, user):
|
|||
if userid == 0:
|
||||
raise aurweb.exceptions.InvalidUserException(user)
|
||||
|
||||
cur = conn.execute("SELECT COUNT(*) FROM PackageVotes " +
|
||||
"WHERE UsersID = ? AND PackageBaseID = ?",
|
||||
[userid, pkgbase_id])
|
||||
cur = conn.execute(
|
||||
"SELECT COUNT(*) FROM PackageVotes "
|
||||
+ "WHERE UsersID = ? AND PackageBaseID = ?",
|
||||
[userid, pkgbase_id],
|
||||
)
|
||||
if cur.fetchone()[0] == 0:
|
||||
raise aurweb.exceptions.NotVotedException(pkgbase)
|
||||
|
||||
conn.execute("DELETE FROM PackageVotes WHERE UsersID = ? AND " +
|
||||
"PackageBaseID = ?", [userid, pkgbase_id])
|
||||
conn.execute("UPDATE PackageBases SET NumVotes = NumVotes - 1 " +
|
||||
"WHERE ID = ?", [pkgbase_id])
|
||||
conn.execute(
|
||||
"DELETE FROM PackageVotes WHERE UsersID = ? AND " + "PackageBaseID = ?",
|
||||
[userid, pkgbase_id],
|
||||
)
|
||||
conn.execute(
|
||||
"UPDATE PackageBases SET NumVotes = NumVotes - 1 " + "WHERE ID = ?",
|
||||
[pkgbase_id],
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
|
||||
|
@ -377,11 +418,12 @@ def pkgbase_set_keywords(pkgbase, keywords):
|
|||
|
||||
conn = aurweb.db.Connection()
|
||||
|
||||
conn.execute("DELETE FROM PackageKeywords WHERE PackageBaseID = ?",
|
||||
[pkgbase_id])
|
||||
conn.execute("DELETE FROM PackageKeywords WHERE PackageBaseID = ?", [pkgbase_id])
|
||||
for keyword in keywords:
|
||||
conn.execute("INSERT INTO PackageKeywords (PackageBaseID, Keyword) " +
|
||||
"VALUES (?, ?)", [pkgbase_id, keyword])
|
||||
conn.execute(
|
||||
"INSERT INTO PackageKeywords (PackageBaseID, Keyword) " + "VALUES (?, ?)",
|
||||
[pkgbase_id, keyword],
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
@ -390,24 +432,30 @@ def pkgbase_set_keywords(pkgbase, keywords):
|
|||
def pkgbase_has_write_access(pkgbase, user):
|
||||
conn = aurweb.db.Connection()
|
||||
|
||||
cur = conn.execute("SELECT COUNT(*) FROM PackageBases " +
|
||||
"LEFT JOIN PackageComaintainers " +
|
||||
"ON PackageComaintainers.PackageBaseID = PackageBases.ID " +
|
||||
"INNER JOIN Users " +
|
||||
"ON Users.ID = PackageBases.MaintainerUID " +
|
||||
"OR PackageBases.MaintainerUID IS NULL " +
|
||||
"OR Users.ID = PackageComaintainers.UsersID " +
|
||||
"WHERE Name = ? AND Username = ?", [pkgbase, user])
|
||||
cur = conn.execute(
|
||||
"SELECT COUNT(*) FROM PackageBases "
|
||||
+ "LEFT JOIN PackageComaintainers "
|
||||
+ "ON PackageComaintainers.PackageBaseID = PackageBases.ID "
|
||||
+ "INNER JOIN Users "
|
||||
+ "ON Users.ID = PackageBases.MaintainerUID "
|
||||
+ "OR PackageBases.MaintainerUID IS NULL "
|
||||
+ "OR Users.ID = PackageComaintainers.UsersID "
|
||||
+ "WHERE Name = ? AND Username = ?",
|
||||
[pkgbase, user],
|
||||
)
|
||||
return cur.fetchone()[0] > 0
|
||||
|
||||
|
||||
def pkgbase_has_full_access(pkgbase, user):
|
||||
conn = aurweb.db.Connection()
|
||||
|
||||
cur = conn.execute("SELECT COUNT(*) FROM PackageBases " +
|
||||
"INNER JOIN Users " +
|
||||
"ON Users.ID = PackageBases.MaintainerUID " +
|
||||
"WHERE Name = ? AND Username = ?", [pkgbase, user])
|
||||
cur = conn.execute(
|
||||
"SELECT COUNT(*) FROM PackageBases "
|
||||
+ "INNER JOIN Users "
|
||||
+ "ON Users.ID = PackageBases.MaintainerUID "
|
||||
+ "WHERE Name = ? AND Username = ?",
|
||||
[pkgbase, user],
|
||||
)
|
||||
return cur.fetchone()[0] > 0
|
||||
|
||||
|
||||
|
@ -415,9 +463,11 @@ def log_ssh_login(user, remote_addr):
|
|||
conn = aurweb.db.Connection()
|
||||
|
||||
now = int(time.time())
|
||||
conn.execute("UPDATE Users SET LastSSHLogin = ?, " +
|
||||
"LastSSHLoginIPAddress = ? WHERE Username = ?",
|
||||
[now, remote_addr, user])
|
||||
conn.execute(
|
||||
"UPDATE Users SET LastSSHLogin = ?, "
|
||||
+ "LastSSHLoginIPAddress = ? WHERE Username = ?",
|
||||
[now, remote_addr, user],
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
@ -426,8 +476,7 @@ def log_ssh_login(user, remote_addr):
|
|||
def bans_match(remote_addr):
|
||||
conn = aurweb.db.Connection()
|
||||
|
||||
cur = conn.execute("SELECT COUNT(*) FROM Bans WHERE IPAddress = ?",
|
||||
[remote_addr])
|
||||
cur = conn.execute("SELECT COUNT(*) FROM Bans WHERE IPAddress = ?", [remote_addr])
|
||||
return cur.fetchone()[0] > 0
|
||||
|
||||
|
||||
|
@ -454,13 +503,13 @@ def usage(cmds):
|
|||
|
||||
def checkarg_atleast(cmdargv, *argdesc):
|
||||
if len(cmdargv) - 1 < len(argdesc):
|
||||
msg = 'missing {:s}'.format(argdesc[len(cmdargv) - 1])
|
||||
msg = "missing {:s}".format(argdesc[len(cmdargv) - 1])
|
||||
raise aurweb.exceptions.InvalidArgumentsException(msg)
|
||||
|
||||
|
||||
def checkarg_atmost(cmdargv, *argdesc):
|
||||
if len(cmdargv) - 1 > len(argdesc):
|
||||
raise aurweb.exceptions.InvalidArgumentsException('too many arguments')
|
||||
raise aurweb.exceptions.InvalidArgumentsException("too many arguments")
|
||||
|
||||
|
||||
def checkarg(cmdargv, *argdesc):
|
||||
|
@ -468,7 +517,7 @@ def checkarg(cmdargv, *argdesc):
|
|||
checkarg_atmost(cmdargv, *argdesc)
|
||||
|
||||
|
||||
def serve(action, cmdargv, user, privileged, remote_addr):
|
||||
def serve(action, cmdargv, user, privileged, remote_addr): # noqa: C901
|
||||
if enable_maintenance:
|
||||
if remote_addr not in maintenance_exc:
|
||||
raise aurweb.exceptions.MaintenanceException
|
||||
|
@ -476,89 +525,87 @@ def serve(action, cmdargv, user, privileged, remote_addr):
|
|||
raise aurweb.exceptions.BannedException
|
||||
log_ssh_login(user, remote_addr)
|
||||
|
||||
if action == 'git' and cmdargv[1] in ('upload-pack', 'receive-pack'):
|
||||
action = action + '-' + cmdargv[1]
|
||||
if action == "git" and cmdargv[1] in ("upload-pack", "receive-pack"):
|
||||
action = action + "-" + cmdargv[1]
|
||||
del cmdargv[1]
|
||||
|
||||
if action == 'git-upload-pack' or action == 'git-receive-pack':
|
||||
checkarg(cmdargv, 'path')
|
||||
if action == "git-upload-pack" or action == "git-receive-pack":
|
||||
checkarg(cmdargv, "path")
|
||||
|
||||
path = cmdargv[1].rstrip('/')
|
||||
if not path.startswith('/'):
|
||||
path = '/' + path
|
||||
if not path.endswith('.git'):
|
||||
path = path + '.git'
|
||||
path = cmdargv[1].rstrip("/")
|
||||
if not path.startswith("/"):
|
||||
path = "/" + path
|
||||
if not path.endswith(".git"):
|
||||
path = path + ".git"
|
||||
pkgbase = path[1:-4]
|
||||
if not re.match(repo_regex, pkgbase):
|
||||
raise aurweb.exceptions.InvalidRepositoryNameException(pkgbase)
|
||||
|
||||
if action == 'git-receive-pack' and pkgbase_exists(pkgbase):
|
||||
if action == "git-receive-pack" and pkgbase_exists(pkgbase):
|
||||
if not privileged and not pkgbase_has_write_access(pkgbase, user):
|
||||
raise aurweb.exceptions.PermissionDeniedException(user)
|
||||
|
||||
if not os.access(git_update_cmd, os.R_OK | os.X_OK):
|
||||
raise aurweb.exceptions.BrokenUpdateHookException(git_update_cmd)
|
||||
|
||||
os.environ["AUR_USER"] = user
|
||||
os.environ["AUR_PKGBASE"] = pkgbase
|
||||
os.environ["GIT_NAMESPACE"] = pkgbase
|
||||
cmd = action + " '" + repo_path + "'"
|
||||
os.execl(git_shell_cmd, git_shell_cmd, '-c', cmd)
|
||||
elif action == 'set-keywords':
|
||||
checkarg_atleast(cmdargv, 'repository name')
|
||||
os.execl(git_shell_cmd, git_shell_cmd, "-c", cmd)
|
||||
elif action == "set-keywords":
|
||||
checkarg_atleast(cmdargv, "repository name")
|
||||
pkgbase_set_keywords(cmdargv[1], cmdargv[2:])
|
||||
elif action == 'list-repos':
|
||||
elif action == "list-repos":
|
||||
checkarg(cmdargv)
|
||||
list_repos(user)
|
||||
elif action == 'setup-repo':
|
||||
checkarg(cmdargv, 'repository name')
|
||||
warn('{:s} is deprecated. '
|
||||
'Use `git push` to create new repositories.'.format(action))
|
||||
create_pkgbase(cmdargv[1], user)
|
||||
elif action == 'restore':
|
||||
checkarg(cmdargv, 'repository name')
|
||||
elif action == "restore":
|
||||
checkarg(cmdargv, "repository name")
|
||||
|
||||
pkgbase = cmdargv[1]
|
||||
create_pkgbase(pkgbase, user)
|
||||
validate_pkgbase(pkgbase, user)
|
||||
|
||||
os.environ["AUR_USER"] = user
|
||||
os.environ["AUR_PKGBASE"] = pkgbase
|
||||
os.execl(git_update_cmd, git_update_cmd, 'restore')
|
||||
elif action == 'adopt':
|
||||
checkarg(cmdargv, 'repository name')
|
||||
os.execl(git_update_cmd, git_update_cmd, "restore")
|
||||
elif action == "adopt":
|
||||
checkarg(cmdargv, "repository name")
|
||||
|
||||
pkgbase = cmdargv[1]
|
||||
pkgbase_adopt(pkgbase, user, privileged)
|
||||
elif action == 'disown':
|
||||
checkarg(cmdargv, 'repository name')
|
||||
elif action == "disown":
|
||||
checkarg(cmdargv, "repository name")
|
||||
|
||||
pkgbase = cmdargv[1]
|
||||
pkgbase_disown(pkgbase, user, privileged)
|
||||
elif action == 'flag':
|
||||
checkarg(cmdargv, 'repository name', 'comment')
|
||||
elif action == "flag":
|
||||
checkarg(cmdargv, "repository name", "comment")
|
||||
|
||||
pkgbase = cmdargv[1]
|
||||
comment = cmdargv[2]
|
||||
pkgbase_flag(pkgbase, user, comment)
|
||||
elif action == 'unflag':
|
||||
checkarg(cmdargv, 'repository name')
|
||||
elif action == "unflag":
|
||||
checkarg(cmdargv, "repository name")
|
||||
|
||||
pkgbase = cmdargv[1]
|
||||
pkgbase_unflag(pkgbase, user)
|
||||
elif action == 'vote':
|
||||
checkarg(cmdargv, 'repository name')
|
||||
elif action == "vote":
|
||||
checkarg(cmdargv, "repository name")
|
||||
|
||||
pkgbase = cmdargv[1]
|
||||
pkgbase_vote(pkgbase, user)
|
||||
elif action == 'unvote':
|
||||
checkarg(cmdargv, 'repository name')
|
||||
elif action == "unvote":
|
||||
checkarg(cmdargv, "repository name")
|
||||
|
||||
pkgbase = cmdargv[1]
|
||||
pkgbase_unvote(pkgbase, user)
|
||||
elif action == 'set-comaintainers':
|
||||
checkarg_atleast(cmdargv, 'repository name')
|
||||
elif action == "set-comaintainers":
|
||||
checkarg_atleast(cmdargv, "repository name")
|
||||
|
||||
pkgbase = cmdargv[1]
|
||||
userlist = cmdargv[2:]
|
||||
pkgbase_set_comaintainers(pkgbase, userlist, user, privileged)
|
||||
elif action == 'help':
|
||||
elif action == "help":
|
||||
cmds = {
|
||||
"adopt <name>": "Adopt a package base.",
|
||||
"disown <name>": "Disown a package base.",
|
||||
|
@ -568,7 +615,6 @@ def serve(action, cmdargv, user, privileged, remote_addr):
|
|||
"restore <name>": "Restore a deleted package base.",
|
||||
"set-comaintainers <name> [...]": "Set package base co-maintainers.",
|
||||
"set-keywords <name> [...]": "Change package base keywords.",
|
||||
"setup-repo <name>": "Create a repository (deprecated).",
|
||||
"unflag <name>": "Remove out-of-date flag from a package base.",
|
||||
"unvote <name>": "Remove vote from a package base.",
|
||||
"vote <name>": "Vote for a package base.",
|
||||
|
@ -577,21 +623,21 @@ def serve(action, cmdargv, user, privileged, remote_addr):
|
|||
}
|
||||
usage(cmds)
|
||||
else:
|
||||
msg = 'invalid command: {:s}'.format(action)
|
||||
msg = "invalid command: {:s}".format(action)
|
||||
raise aurweb.exceptions.InvalidArgumentsException(msg)
|
||||
|
||||
|
||||
def main():
|
||||
user = os.environ.get('AUR_USER')
|
||||
privileged = (os.environ.get('AUR_PRIVILEGED', '0') == '1')
|
||||
ssh_cmd = os.environ.get('SSH_ORIGINAL_COMMAND')
|
||||
ssh_client = os.environ.get('SSH_CLIENT')
|
||||
user = os.environ.get("AUR_USER")
|
||||
privileged = os.environ.get("AUR_PRIVILEGED", "0") == "1"
|
||||
ssh_cmd = os.environ.get("SSH_ORIGINAL_COMMAND")
|
||||
ssh_client = os.environ.get("SSH_CLIENT")
|
||||
|
||||
if not ssh_cmd:
|
||||
die_with_help("Interactive shell is disabled.")
|
||||
die_with_help(f"Welcome to AUR, {user}! Interactive shell is disabled.")
|
||||
cmdargv = shlex.split(ssh_cmd)
|
||||
action = cmdargv[0]
|
||||
remote_addr = ssh_client.split(' ')[0] if ssh_client else None
|
||||
remote_addr = ssh_client.split(" ")[0] if ssh_client else None
|
||||
|
||||
try:
|
||||
serve(action, cmdargv, user, privileged, remote_addr)
|
||||
|
@ -600,10 +646,10 @@ def main():
|
|||
except aurweb.exceptions.BannedException:
|
||||
die("The SSH interface is disabled for your IP address.")
|
||||
except aurweb.exceptions.InvalidArgumentsException as e:
|
||||
die_with_help('{:s}: {}'.format(action, e))
|
||||
die_with_help("{:s}: {}".format(action, e))
|
||||
except aurweb.exceptions.AurwebException as e:
|
||||
die('{:s}: {}'.format(action, e))
|
||||
die("{:s}: {}".format(action, e))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
@ -1,35 +1,35 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import pygit2
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
import pygit2
|
||||
import srcinfo.parse
|
||||
import srcinfo.utils
|
||||
|
||||
import aurweb.config
|
||||
import aurweb.db
|
||||
|
||||
notify_cmd = aurweb.config.get('notifications', 'notify-cmd')
|
||||
notify_cmd = aurweb.config.get("notifications", "notify-cmd")
|
||||
|
||||
repo_path = aurweb.config.get('serve', 'repo-path')
|
||||
repo_regex = aurweb.config.get('serve', 'repo-regex')
|
||||
repo_path = aurweb.config.get("serve", "repo-path")
|
||||
repo_regex = aurweb.config.get("serve", "repo-regex")
|
||||
|
||||
max_blob_size = aurweb.config.getint('update', 'max-blob-size')
|
||||
max_blob_size = aurweb.config.getint("update", "max-blob-size")
|
||||
|
||||
|
||||
def size_humanize(num):
|
||||
for unit in ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB']:
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB"]:
|
||||
if abs(num) < 2048.0:
|
||||
if isinstance(num, int):
|
||||
return "{}{}".format(num, unit)
|
||||
else:
|
||||
return "{:.2f}{}".format(num, unit)
|
||||
num /= 1024.0
|
||||
return "{:.2f}{}".format(num, 'YiB')
|
||||
return "{:.2f}{}".format(num, "YiB")
|
||||
|
||||
|
||||
def extract_arch_fields(pkginfo, field):
|
||||
|
@ -39,20 +39,20 @@ def extract_arch_fields(pkginfo, field):
|
|||
for val in pkginfo[field]:
|
||||
values.append({"value": val, "arch": None})
|
||||
|
||||
for arch in pkginfo['arch']:
|
||||
if field + '_' + arch in pkginfo:
|
||||
for val in pkginfo[field + '_' + arch]:
|
||||
for arch in pkginfo["arch"]:
|
||||
if field + "_" + arch in pkginfo:
|
||||
for val in pkginfo[field + "_" + arch]:
|
||||
values.append({"value": val, "arch": arch})
|
||||
|
||||
return values
|
||||
|
||||
|
||||
def parse_dep(depstring):
|
||||
dep, _, desc = depstring.partition(': ')
|
||||
depname = re.sub(r'(<|=|>).*', '', dep)
|
||||
depcond = dep[len(depname):]
|
||||
dep, _, desc = depstring.partition(": ")
|
||||
depname = re.sub(r"(<|=|>).*", "", dep)
|
||||
depcond = dep[len(depname) :]
|
||||
|
||||
return (depname, desc, depcond)
|
||||
return depname, desc, depcond
|
||||
|
||||
|
||||
def create_pkgbase(conn, pkgbase, user):
|
||||
|
@ -60,26 +60,30 @@ def create_pkgbase(conn, pkgbase, user):
|
|||
userid = cur.fetchone()[0]
|
||||
|
||||
now = int(time.time())
|
||||
cur = conn.execute("INSERT INTO PackageBases (Name, SubmittedTS, " +
|
||||
"ModifiedTS, SubmitterUID, MaintainerUID, " +
|
||||
"FlaggerComment) VALUES (?, ?, ?, ?, ?, '')",
|
||||
[pkgbase, now, now, userid, userid])
|
||||
cur = conn.execute(
|
||||
"INSERT INTO PackageBases (Name, SubmittedTS, "
|
||||
+ "ModifiedTS, SubmitterUID, MaintainerUID, "
|
||||
+ "FlaggerComment) VALUES (?, ?, ?, ?, ?, '')",
|
||||
[pkgbase, now, now, userid, userid],
|
||||
)
|
||||
pkgbase_id = cur.lastrowid
|
||||
|
||||
cur = conn.execute("INSERT INTO PackageNotifications " +
|
||||
"(PackageBaseID, UserID) VALUES (?, ?)",
|
||||
[pkgbase_id, userid])
|
||||
cur = conn.execute(
|
||||
"INSERT INTO PackageNotifications " + "(PackageBaseID, UserID) VALUES (?, ?)",
|
||||
[pkgbase_id, userid],
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
|
||||
return pkgbase_id
|
||||
|
||||
|
||||
def save_metadata(metadata, conn, user):
|
||||
def save_metadata(metadata, conn, user): # noqa: C901
|
||||
# Obtain package base ID and previous maintainer.
|
||||
pkgbase = metadata['pkgbase']
|
||||
cur = conn.execute("SELECT ID, MaintainerUID FROM PackageBases "
|
||||
"WHERE Name = ?", [pkgbase])
|
||||
pkgbase = metadata["pkgbase"]
|
||||
cur = conn.execute(
|
||||
"SELECT ID, MaintainerUID FROM PackageBases " "WHERE Name = ?", [pkgbase]
|
||||
)
|
||||
(pkgbase_id, maintainer_uid) = cur.fetchone()
|
||||
was_orphan = not maintainer_uid
|
||||
|
||||
|
@ -89,119 +93,142 @@ def save_metadata(metadata, conn, user):
|
|||
|
||||
# Update package base details and delete current packages.
|
||||
now = int(time.time())
|
||||
conn.execute("UPDATE PackageBases SET ModifiedTS = ?, " +
|
||||
"PackagerUID = ?, OutOfDateTS = NULL WHERE ID = ?",
|
||||
[now, user_id, pkgbase_id])
|
||||
conn.execute("UPDATE PackageBases SET MaintainerUID = ? " +
|
||||
"WHERE ID = ? AND MaintainerUID IS NULL",
|
||||
[user_id, pkgbase_id])
|
||||
for table in ('Sources', 'Depends', 'Relations', 'Licenses', 'Groups'):
|
||||
conn.execute("DELETE FROM Package" + table + " WHERE EXISTS (" +
|
||||
"SELECT * FROM Packages " +
|
||||
"WHERE Packages.PackageBaseID = ? AND " +
|
||||
"Package" + table + ".PackageID = Packages.ID)",
|
||||
[pkgbase_id])
|
||||
conn.execute(
|
||||
"UPDATE PackageBases SET ModifiedTS = ?, "
|
||||
+ "PackagerUID = ?, OutOfDateTS = NULL WHERE ID = ?",
|
||||
[now, user_id, pkgbase_id],
|
||||
)
|
||||
conn.execute(
|
||||
"UPDATE PackageBases SET MaintainerUID = ? "
|
||||
+ "WHERE ID = ? AND MaintainerUID IS NULL",
|
||||
[user_id, pkgbase_id],
|
||||
)
|
||||
for table in ("Sources", "Depends", "Relations", "Licenses", "Groups"):
|
||||
conn.execute(
|
||||
"DELETE FROM Package"
|
||||
+ table
|
||||
+ " WHERE EXISTS ("
|
||||
+ "SELECT * FROM Packages "
|
||||
+ "WHERE Packages.PackageBaseID = ? AND "
|
||||
+ "Package"
|
||||
+ table
|
||||
+ ".PackageID = Packages.ID)",
|
||||
[pkgbase_id],
|
||||
)
|
||||
conn.execute("DELETE FROM Packages WHERE PackageBaseID = ?", [pkgbase_id])
|
||||
|
||||
for pkgname in srcinfo.utils.get_package_names(metadata):
|
||||
pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)
|
||||
|
||||
if 'epoch' in pkginfo and int(pkginfo['epoch']) > 0:
|
||||
ver = '{:d}:{:s}-{:s}'.format(int(pkginfo['epoch']),
|
||||
pkginfo['pkgver'],
|
||||
pkginfo['pkgrel'])
|
||||
if "epoch" in pkginfo and int(pkginfo["epoch"]) > 0:
|
||||
ver = "{:d}:{:s}-{:s}".format(
|
||||
int(pkginfo["epoch"]), pkginfo["pkgver"], pkginfo["pkgrel"]
|
||||
)
|
||||
else:
|
||||
ver = '{:s}-{:s}'.format(pkginfo['pkgver'], pkginfo['pkgrel'])
|
||||
ver = "{:s}-{:s}".format(pkginfo["pkgver"], pkginfo["pkgrel"])
|
||||
|
||||
for field in ('pkgdesc', 'url'):
|
||||
for field in ("pkgdesc", "url"):
|
||||
if field not in pkginfo:
|
||||
pkginfo[field] = None
|
||||
|
||||
# Create a new package.
|
||||
cur = conn.execute("INSERT INTO Packages (PackageBaseID, Name, " +
|
||||
"Version, Description, URL) " +
|
||||
"VALUES (?, ?, ?, ?, ?)",
|
||||
[pkgbase_id, pkginfo['pkgname'], ver,
|
||||
pkginfo['pkgdesc'], pkginfo['url']])
|
||||
cur = conn.execute(
|
||||
"INSERT INTO Packages (PackageBaseID, Name, "
|
||||
+ "Version, Description, URL) "
|
||||
+ "VALUES (?, ?, ?, ?, ?)",
|
||||
[pkgbase_id, pkginfo["pkgname"], ver, pkginfo["pkgdesc"], pkginfo["url"]],
|
||||
)
|
||||
conn.commit()
|
||||
pkgid = cur.lastrowid
|
||||
|
||||
# Add package sources.
|
||||
for source_info in extract_arch_fields(pkginfo, 'source'):
|
||||
conn.execute("INSERT INTO PackageSources (PackageID, Source, " +
|
||||
"SourceArch) VALUES (?, ?, ?)",
|
||||
[pkgid, source_info['value'], source_info['arch']])
|
||||
for source_info in extract_arch_fields(pkginfo, "source"):
|
||||
conn.execute(
|
||||
"INSERT INTO PackageSources (PackageID, Source, "
|
||||
+ "SourceArch) VALUES (?, ?, ?)",
|
||||
[pkgid, source_info["value"], source_info["arch"]],
|
||||
)
|
||||
|
||||
# Add package dependencies.
|
||||
for deptype in ('depends', 'makedepends',
|
||||
'checkdepends', 'optdepends'):
|
||||
cur = conn.execute("SELECT ID FROM DependencyTypes WHERE Name = ?",
|
||||
[deptype])
|
||||
for deptype in ("depends", "makedepends", "checkdepends", "optdepends"):
|
||||
cur = conn.execute(
|
||||
"SELECT ID FROM DependencyTypes WHERE Name = ?", [deptype]
|
||||
)
|
||||
deptypeid = cur.fetchone()[0]
|
||||
for dep_info in extract_arch_fields(pkginfo, deptype):
|
||||
depname, depdesc, depcond = parse_dep(dep_info['value'])
|
||||
deparch = dep_info['arch']
|
||||
conn.execute("INSERT INTO PackageDepends (PackageID, " +
|
||||
"DepTypeID, DepName, DepDesc, DepCondition, " +
|
||||
"DepArch) VALUES (?, ?, ?, ?, ?, ?)",
|
||||
[pkgid, deptypeid, depname, depdesc, depcond,
|
||||
deparch])
|
||||
depname, depdesc, depcond = parse_dep(dep_info["value"])
|
||||
deparch = dep_info["arch"]
|
||||
conn.execute(
|
||||
"INSERT INTO PackageDepends (PackageID, "
|
||||
+ "DepTypeID, DepName, DepDesc, DepCondition, "
|
||||
+ "DepArch) VALUES (?, ?, ?, ?, ?, ?)",
|
||||
[pkgid, deptypeid, depname, depdesc, depcond, deparch],
|
||||
)
|
||||
|
||||
# Add package relations (conflicts, provides, replaces).
|
||||
for reltype in ('conflicts', 'provides', 'replaces'):
|
||||
cur = conn.execute("SELECT ID FROM RelationTypes WHERE Name = ?",
|
||||
[reltype])
|
||||
for reltype in ("conflicts", "provides", "replaces"):
|
||||
cur = conn.execute("SELECT ID FROM RelationTypes WHERE Name = ?", [reltype])
|
||||
reltypeid = cur.fetchone()[0]
|
||||
for rel_info in extract_arch_fields(pkginfo, reltype):
|
||||
relname, _, relcond = parse_dep(rel_info['value'])
|
||||
relarch = rel_info['arch']
|
||||
conn.execute("INSERT INTO PackageRelations (PackageID, " +
|
||||
"RelTypeID, RelName, RelCondition, RelArch) " +
|
||||
"VALUES (?, ?, ?, ?, ?)",
|
||||
[pkgid, reltypeid, relname, relcond, relarch])
|
||||
relname, _, relcond = parse_dep(rel_info["value"])
|
||||
relarch = rel_info["arch"]
|
||||
conn.execute(
|
||||
"INSERT INTO PackageRelations (PackageID, "
|
||||
+ "RelTypeID, RelName, RelCondition, RelArch) "
|
||||
+ "VALUES (?, ?, ?, ?, ?)",
|
||||
[pkgid, reltypeid, relname, relcond, relarch],
|
||||
)
|
||||
|
||||
# Add package licenses.
|
||||
if 'license' in pkginfo:
|
||||
for license in pkginfo['license']:
|
||||
cur = conn.execute("SELECT ID FROM Licenses WHERE Name = ?",
|
||||
[license])
|
||||
if "license" in pkginfo:
|
||||
for license in pkginfo["license"]:
|
||||
cur = conn.execute("SELECT ID FROM Licenses WHERE Name = ?", [license])
|
||||
row = cur.fetchone()
|
||||
if row:
|
||||
licenseid = row[0]
|
||||
else:
|
||||
cur = conn.execute("INSERT INTO Licenses (Name) " +
|
||||
"VALUES (?)", [license])
|
||||
cur = conn.execute(
|
||||
"INSERT INTO Licenses (Name) " + "VALUES (?)", [license]
|
||||
)
|
||||
conn.commit()
|
||||
licenseid = cur.lastrowid
|
||||
conn.execute("INSERT INTO PackageLicenses (PackageID, " +
|
||||
"LicenseID) VALUES (?, ?)",
|
||||
[pkgid, licenseid])
|
||||
conn.execute(
|
||||
"INSERT INTO PackageLicenses (PackageID, "
|
||||
+ "LicenseID) VALUES (?, ?)",
|
||||
[pkgid, licenseid],
|
||||
)
|
||||
|
||||
# Add package groups.
|
||||
if 'groups' in pkginfo:
|
||||
for group in pkginfo['groups']:
|
||||
cur = conn.execute("SELECT ID FROM `Groups` WHERE Name = ?",
|
||||
[group])
|
||||
if "groups" in pkginfo:
|
||||
for group in pkginfo["groups"]:
|
||||
cur = conn.execute("SELECT ID FROM `Groups` WHERE Name = ?", [group])
|
||||
row = cur.fetchone()
|
||||
if row:
|
||||
groupid = row[0]
|
||||
else:
|
||||
cur = conn.execute("INSERT INTO `Groups` (Name) VALUES (?)",
|
||||
[group])
|
||||
cur = conn.execute(
|
||||
"INSERT INTO `Groups` (Name) VALUES (?)", [group]
|
||||
)
|
||||
conn.commit()
|
||||
groupid = cur.lastrowid
|
||||
conn.execute("INSERT INTO PackageGroups (PackageID, "
|
||||
"GroupID) VALUES (?, ?)", [pkgid, groupid])
|
||||
conn.execute(
|
||||
"INSERT INTO PackageGroups (PackageID, " "GroupID) VALUES (?, ?)",
|
||||
[pkgid, groupid],
|
||||
)
|
||||
|
||||
# Add user to notification list on adoption.
|
||||
if was_orphan:
|
||||
cur = conn.execute("SELECT COUNT(*) FROM PackageNotifications WHERE " +
|
||||
"PackageBaseID = ? AND UserID = ?",
|
||||
[pkgbase_id, user_id])
|
||||
cur = conn.execute(
|
||||
"SELECT COUNT(*) FROM PackageNotifications WHERE "
|
||||
+ "PackageBaseID = ? AND UserID = ?",
|
||||
[pkgbase_id, user_id],
|
||||
)
|
||||
if cur.fetchone()[0] == 0:
|
||||
conn.execute("INSERT INTO PackageNotifications " +
|
||||
"(PackageBaseID, UserID) VALUES (?, ?)",
|
||||
[pkgbase_id, user_id])
|
||||
conn.execute(
|
||||
"INSERT INTO PackageNotifications "
|
||||
+ "(PackageBaseID, UserID) VALUES (?, ?)",
|
||||
[pkgbase_id, user_id],
|
||||
)
|
||||
|
||||
conn.commit()
|
||||
|
||||
|
@ -212,7 +239,7 @@ def update_notify(conn, user, pkgbase_id):
|
|||
user_id = int(cur.fetchone()[0])
|
||||
|
||||
# Execute the notification script.
|
||||
subprocess.Popen((notify_cmd, 'update', str(user_id), str(pkgbase_id)))
|
||||
subprocess.Popen((notify_cmd, "update", str(user_id), str(pkgbase_id)))
|
||||
|
||||
|
||||
def die(msg):
|
||||
|
@ -225,28 +252,91 @@ def warn(msg):
|
|||
|
||||
|
||||
def die_commit(msg, commit):
|
||||
sys.stderr.write("error: The following error " +
|
||||
"occurred when parsing commit\n")
|
||||
sys.stderr.write("error: The following error " + "occurred when parsing commit\n")
|
||||
sys.stderr.write("error: {:s}:\n".format(commit))
|
||||
sys.stderr.write("error: {:s}\n".format(msg))
|
||||
exit(1)
|
||||
|
||||
|
||||
def main():
|
||||
def validate_metadata(metadata, commit): # noqa: C901
|
||||
try:
|
||||
metadata_pkgbase = metadata["pkgbase"]
|
||||
except KeyError:
|
||||
die_commit(
|
||||
"invalid .SRCINFO, does not contain a pkgbase (is the file empty?)",
|
||||
str(commit.id),
|
||||
)
|
||||
if not re.match(repo_regex, metadata_pkgbase):
|
||||
die_commit("invalid pkgbase: {:s}".format(metadata_pkgbase), str(commit.id))
|
||||
|
||||
if not metadata["packages"]:
|
||||
die_commit("missing pkgname entry", str(commit.id))
|
||||
|
||||
for pkgname in set(metadata["packages"].keys()):
|
||||
pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)
|
||||
|
||||
for field in ("pkgver", "pkgrel", "pkgname"):
|
||||
if field not in pkginfo:
|
||||
die_commit(
|
||||
"missing mandatory field: {:s}".format(field), str(commit.id)
|
||||
)
|
||||
|
||||
if "epoch" in pkginfo and not pkginfo["epoch"].isdigit():
|
||||
die_commit("invalid epoch: {:s}".format(pkginfo["epoch"]), str(commit.id))
|
||||
|
||||
if not re.match(r"[a-z0-9][a-z0-9\.+_-]*$", pkginfo["pkgname"]):
|
||||
die_commit(
|
||||
"invalid package name: {:s}".format(pkginfo["pkgname"]),
|
||||
str(commit.id),
|
||||
)
|
||||
|
||||
max_len = {"pkgname": 255, "pkgdesc": 255, "url": 8000}
|
||||
for field in max_len.keys():
|
||||
if field in pkginfo and len(pkginfo[field]) > max_len[field]:
|
||||
die_commit(
|
||||
"{:s} field too long: {:s}".format(field, pkginfo[field]),
|
||||
str(commit.id),
|
||||
)
|
||||
|
||||
for field in ("install", "changelog"):
|
||||
if field in pkginfo and not pkginfo[field] in commit.tree:
|
||||
die_commit(
|
||||
"missing {:s} file: {:s}".format(field, pkginfo[field]),
|
||||
str(commit.id),
|
||||
)
|
||||
|
||||
for field in extract_arch_fields(pkginfo, "source"):
|
||||
fname = field["value"]
|
||||
if len(fname) > 8000:
|
||||
die_commit("source entry too long: {:s}".format(fname), str(commit.id))
|
||||
if "://" in fname or "lp:" in fname:
|
||||
continue
|
||||
if fname not in commit.tree:
|
||||
die_commit("missing source file: {:s}".format(fname), str(commit.id))
|
||||
|
||||
|
||||
def validate_blob_size(blob: pygit2.Object, commit: pygit2.Commit):
|
||||
if isinstance(blob, pygit2.Blob) and blob.size > max_blob_size:
|
||||
die_commit(
|
||||
"maximum blob size ({:s}) exceeded".format(size_humanize(max_blob_size)),
|
||||
str(commit.id),
|
||||
)
|
||||
|
||||
|
||||
def main(): # noqa: C901
|
||||
repo = pygit2.Repository(repo_path)
|
||||
|
||||
user = os.environ.get("AUR_USER")
|
||||
pkgbase = os.environ.get("AUR_PKGBASE")
|
||||
privileged = (os.environ.get("AUR_PRIVILEGED", '0') == '1')
|
||||
allow_overwrite = (os.environ.get("AUR_OVERWRITE", '0') == '1') and privileged
|
||||
privileged = os.environ.get("AUR_PRIVILEGED", "0") == "1"
|
||||
allow_overwrite = (os.environ.get("AUR_OVERWRITE", "0") == "1") and privileged
|
||||
warn_or_die = warn if privileged else die
|
||||
|
||||
if len(sys.argv) == 2 and sys.argv[1] == "restore":
|
||||
if 'refs/heads/' + pkgbase not in repo.listall_references():
|
||||
die('{:s}: repository not found: {:s}'.format(sys.argv[1],
|
||||
pkgbase))
|
||||
if "refs/heads/" + pkgbase not in repo.listall_references():
|
||||
die("{:s}: repository not found: {:s}".format(sys.argv[1], pkgbase))
|
||||
refname = "refs/heads/master"
|
||||
branchref = 'refs/heads/' + pkgbase
|
||||
branchref = "refs/heads/" + pkgbase
|
||||
sha1_old = sha1_new = repo.lookup_reference(branchref).target
|
||||
elif len(sys.argv) == 4:
|
||||
refname, sha1_old, sha1_new = sys.argv[1:4]
|
||||
|
@ -266,133 +356,115 @@ def main():
|
|||
die("denying non-fast-forward (you should pull first)")
|
||||
|
||||
# Prepare the walker that validates new commits.
|
||||
walker = repo.walk(sha1_new, pygit2.GIT_SORT_TOPOLOGICAL)
|
||||
walker = repo.walk(sha1_new, pygit2.GIT_SORT_REVERSE)
|
||||
if sha1_old != "0" * 40:
|
||||
walker.hide(sha1_old)
|
||||
|
||||
head_commit = repo[sha1_new]
|
||||
if ".SRCINFO" not in head_commit.tree:
|
||||
die_commit("missing .SRCINFO", str(head_commit.id))
|
||||
|
||||
# Read .SRCINFO from the HEAD commit.
|
||||
metadata_raw = repo[head_commit.tree[".SRCINFO"].id].data.decode()
|
||||
(metadata, errors) = srcinfo.parse.parse_srcinfo(metadata_raw)
|
||||
if errors:
|
||||
sys.stderr.write(
|
||||
"error: The following errors occurred " "when parsing .SRCINFO in commit\n"
|
||||
)
|
||||
sys.stderr.write("error: {:s}:\n".format(str(head_commit.id)))
|
||||
for error in errors:
|
||||
for err in error["error"]:
|
||||
sys.stderr.write("error: line {:d}: {:s}\n".format(error["line"], err))
|
||||
exit(1)
|
||||
|
||||
# check if there is a correct .SRCINFO file in the latest revision
|
||||
validate_metadata(metadata, head_commit)
|
||||
|
||||
# Validate all new commits.
|
||||
for commit in walker:
|
||||
for fname in ('.SRCINFO', 'PKGBUILD'):
|
||||
if fname not in commit.tree:
|
||||
die_commit("missing {:s}".format(fname), str(commit.id))
|
||||
if "PKGBUILD" not in commit.tree:
|
||||
die_commit("missing PKGBUILD", str(commit.id))
|
||||
|
||||
# Iterate over files in root dir
|
||||
for treeobj in commit.tree:
|
||||
blob = repo[treeobj.id]
|
||||
# Don't allow any subdirs besides "keys/"
|
||||
if isinstance(treeobj, pygit2.Tree) and treeobj.name != "keys":
|
||||
die_commit(
|
||||
"the repository must not contain subdirectories",
|
||||
str(commit.id),
|
||||
)
|
||||
|
||||
if isinstance(blob, pygit2.Tree):
|
||||
die_commit("the repository must not contain subdirectories",
|
||||
str(commit.id))
|
||||
# Check size of files in root dir
|
||||
validate_blob_size(treeobj, commit)
|
||||
|
||||
if not isinstance(blob, pygit2.Blob):
|
||||
die_commit("not a blob object: {:s}".format(treeobj),
|
||||
str(commit.id))
|
||||
|
||||
if blob.size > max_blob_size:
|
||||
die_commit("maximum blob size ({:s}) exceeded".format(
|
||||
size_humanize(max_blob_size)), str(commit.id))
|
||||
|
||||
metadata_raw = repo[commit.tree['.SRCINFO'].id].data.decode()
|
||||
(metadata, errors) = srcinfo.parse.parse_srcinfo(metadata_raw)
|
||||
if errors:
|
||||
sys.stderr.write("error: The following errors occurred "
|
||||
"when parsing .SRCINFO in commit\n")
|
||||
sys.stderr.write("error: {:s}:\n".format(str(commit.id)))
|
||||
for error in errors:
|
||||
for err in error['error']:
|
||||
sys.stderr.write("error: line {:d}: {:s}\n".format(
|
||||
error['line'], err))
|
||||
exit(1)
|
||||
|
||||
metadata_pkgbase = metadata['pkgbase']
|
||||
if not re.match(repo_regex, metadata_pkgbase):
|
||||
die_commit('invalid pkgbase: {:s}'.format(metadata_pkgbase),
|
||||
str(commit.id))
|
||||
|
||||
if not metadata['packages']:
|
||||
die_commit('missing pkgname entry', str(commit.id))
|
||||
|
||||
for pkgname in set(metadata['packages'].keys()):
|
||||
pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)
|
||||
|
||||
for field in ('pkgver', 'pkgrel', 'pkgname'):
|
||||
if field not in pkginfo:
|
||||
die_commit('missing mandatory field: {:s}'.format(field),
|
||||
str(commit.id))
|
||||
|
||||
if 'epoch' in pkginfo and not pkginfo['epoch'].isdigit():
|
||||
die_commit('invalid epoch: {:s}'.format(pkginfo['epoch']),
|
||||
str(commit.id))
|
||||
|
||||
if not re.match(r'[a-z0-9][a-z0-9\.+_-]*$', pkginfo['pkgname']):
|
||||
die_commit('invalid package name: {:s}'.format(
|
||||
pkginfo['pkgname']), str(commit.id))
|
||||
|
||||
max_len = {'pkgname': 255, 'pkgdesc': 255, 'url': 8000}
|
||||
for field in max_len.keys():
|
||||
if field in pkginfo and len(pkginfo[field]) > max_len[field]:
|
||||
die_commit('{:s} field too long: {:s}'.format(field,
|
||||
pkginfo[field]), str(commit.id))
|
||||
|
||||
for field in ('install', 'changelog'):
|
||||
if field in pkginfo and not pkginfo[field] in commit.tree:
|
||||
die_commit('missing {:s} file: {:s}'.format(field,
|
||||
pkginfo[field]), str(commit.id))
|
||||
|
||||
for field in extract_arch_fields(pkginfo, 'source'):
|
||||
fname = field['value']
|
||||
if len(fname) > 8000:
|
||||
die_commit('source entry too long: {:s}'.format(fname),
|
||||
str(commit.id))
|
||||
if "://" in fname or "lp:" in fname:
|
||||
continue
|
||||
if fname not in commit.tree:
|
||||
die_commit('missing source file: {:s}'.format(fname),
|
||||
str(commit.id))
|
||||
# If we got a subdir keys/,
|
||||
# make sure it only contains a pgp/ subdir with key files
|
||||
if "keys" in commit.tree:
|
||||
# Check for forbidden files/dirs in keys/
|
||||
for keyobj in commit.tree["keys"]:
|
||||
if not isinstance(keyobj, pygit2.Tree) or keyobj.name != "pgp":
|
||||
die_commit(
|
||||
"the keys/ subdir may only contain a pgp/ directory",
|
||||
str(commit.id),
|
||||
)
|
||||
# Check for forbidden files in keys/pgp/
|
||||
if "keys/pgp" in commit.tree:
|
||||
for pgpobj in commit.tree["keys/pgp"]:
|
||||
if not isinstance(pgpobj, pygit2.Blob) or not pgpobj.name.endswith(
|
||||
".asc"
|
||||
):
|
||||
die_commit(
|
||||
"the subdir may only contain .asc (PGP pub key) files",
|
||||
str(commit.id),
|
||||
)
|
||||
# Check file size for pgp key files
|
||||
validate_blob_size(pgpobj, commit)
|
||||
|
||||
# Display a warning if .SRCINFO is unchanged.
|
||||
if sha1_old not in ("0000000000000000000000000000000000000000", sha1_new):
|
||||
srcinfo_id_old = repo[sha1_old].tree['.SRCINFO'].id
|
||||
srcinfo_id_new = repo[sha1_new].tree['.SRCINFO'].id
|
||||
srcinfo_id_old = repo[sha1_old].tree[".SRCINFO"].id
|
||||
srcinfo_id_new = repo[sha1_new].tree[".SRCINFO"].id
|
||||
if srcinfo_id_old == srcinfo_id_new:
|
||||
warn(".SRCINFO unchanged. "
|
||||
"The package database will not be updated!")
|
||||
|
||||
# Read .SRCINFO from the HEAD commit.
|
||||
metadata_raw = repo[repo[sha1_new].tree['.SRCINFO'].id].data.decode()
|
||||
(metadata, errors) = srcinfo.parse.parse_srcinfo(metadata_raw)
|
||||
warn(".SRCINFO unchanged. " "The package database will not be updated!")
|
||||
|
||||
# Ensure that the package base name matches the repository name.
|
||||
metadata_pkgbase = metadata['pkgbase']
|
||||
metadata_pkgbase = metadata["pkgbase"]
|
||||
if metadata_pkgbase != pkgbase:
|
||||
die('invalid pkgbase: {:s}, expected {:s}'.format(metadata_pkgbase,
|
||||
pkgbase))
|
||||
die("invalid pkgbase: {:s}, expected {:s}".format(metadata_pkgbase, pkgbase))
|
||||
|
||||
# Ensure that packages are neither blacklisted nor overwritten.
|
||||
pkgbase = metadata['pkgbase']
|
||||
pkgbase = metadata["pkgbase"]
|
||||
cur = conn.execute("SELECT ID FROM PackageBases WHERE Name = ?", [pkgbase])
|
||||
row = cur.fetchone()
|
||||
pkgbase_id = row[0] if row else 0
|
||||
|
||||
cur = conn.execute("SELECT Name FROM PackageBlacklist")
|
||||
blacklist = [row[0] for row in cur.fetchall()]
|
||||
if pkgbase in blacklist:
|
||||
warn_or_die("pkgbase is blacklisted: {:s}".format(pkgbase))
|
||||
|
||||
cur = conn.execute("SELECT Name, Repo FROM OfficialProviders")
|
||||
providers = dict(cur.fetchall())
|
||||
|
||||
for pkgname in srcinfo.utils.get_package_names(metadata):
|
||||
pkginfo = srcinfo.utils.get_merged_package(pkgname, metadata)
|
||||
pkgname = pkginfo['pkgname']
|
||||
pkgname = pkginfo["pkgname"]
|
||||
|
||||
if pkgname in blacklist:
|
||||
warn_or_die('package is blacklisted: {:s}'.format(pkgname))
|
||||
warn_or_die("package is blacklisted: {:s}".format(pkgname))
|
||||
if pkgname in providers:
|
||||
warn_or_die('package already provided by [{:s}]: {:s}'.format(
|
||||
providers[pkgname], pkgname))
|
||||
warn_or_die(
|
||||
"package already provided by [{:s}]: {:s}".format(
|
||||
providers[pkgname], pkgname
|
||||
)
|
||||
)
|
||||
|
||||
cur = conn.execute("SELECT COUNT(*) FROM Packages WHERE Name = ? " +
|
||||
"AND PackageBaseID <> ?", [pkgname, pkgbase_id])
|
||||
cur = conn.execute(
|
||||
"SELECT COUNT(*) FROM Packages WHERE Name = ? " + "AND PackageBaseID <> ?",
|
||||
[pkgname, pkgbase_id],
|
||||
)
|
||||
if cur.fetchone()[0] > 0:
|
||||
die('cannot overwrite package: {:s}'.format(pkgname))
|
||||
die("cannot overwrite package: {:s}".format(pkgname))
|
||||
|
||||
# Create a new package base if it does not exist yet.
|
||||
if pkgbase_id == 0:
|
||||
|
@ -403,7 +475,7 @@ def main():
|
|||
|
||||
# Create (or update) a branch with the name of the package base for better
|
||||
# accessibility.
|
||||
branchref = 'refs/heads/' + pkgbase
|
||||
branchref = "refs/heads/" + pkgbase
|
||||
repo.create_reference(branchref, sha1_new, True)
|
||||
|
||||
# Work around a Git bug: The HEAD ref is not updated when using
|
||||
|
@ -411,7 +483,7 @@ def main():
|
|||
# mainline. See
|
||||
# http://git.661346.n2.nabble.com/PATCH-receive-pack-Create-a-HEAD-ref-for-ref-namespace-td7632149.html
|
||||
# for details.
|
||||
headref = 'refs/namespaces/' + pkgbase + '/HEAD'
|
||||
headref = "refs/namespaces/" + pkgbase + "/HEAD"
|
||||
repo.create_reference(headref, sha1_new, True)
|
||||
|
||||
# Send package update notifications.
|
||||
|
@ -422,5 +494,5 @@ def main():
|
|||
conn.close()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
83
aurweb/initdb.py
Normal file
83
aurweb/initdb.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
import argparse
|
||||
|
||||
import alembic.command
|
||||
import alembic.config
|
||||
|
||||
import aurweb.aur_logging
|
||||
import aurweb.db
|
||||
import aurweb.schema
|
||||
|
||||
|
||||
def feed_initial_data(conn):
|
||||
conn.execute(
|
||||
aurweb.schema.AccountTypes.insert(),
|
||||
[
|
||||
{"ID": 1, "AccountType": "User"},
|
||||
{"ID": 2, "AccountType": "Package Maintainer"},
|
||||
{"ID": 3, "AccountType": "Developer"},
|
||||
{"ID": 4, "AccountType": "Package Maintainer & Developer"},
|
||||
],
|
||||
)
|
||||
conn.execute(
|
||||
aurweb.schema.DependencyTypes.insert(),
|
||||
[
|
||||
{"ID": 1, "Name": "depends"},
|
||||
{"ID": 2, "Name": "makedepends"},
|
||||
{"ID": 3, "Name": "checkdepends"},
|
||||
{"ID": 4, "Name": "optdepends"},
|
||||
],
|
||||
)
|
||||
conn.execute(
|
||||
aurweb.schema.RelationTypes.insert(),
|
||||
[
|
||||
{"ID": 1, "Name": "conflicts"},
|
||||
{"ID": 2, "Name": "provides"},
|
||||
{"ID": 3, "Name": "replaces"},
|
||||
],
|
||||
)
|
||||
conn.execute(
|
||||
aurweb.schema.RequestTypes.insert(),
|
||||
[
|
||||
{"ID": 1, "Name": "deletion"},
|
||||
{"ID": 2, "Name": "orphan"},
|
||||
{"ID": 3, "Name": "merge"},
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def run(args):
|
||||
aurweb.config.rehash()
|
||||
|
||||
# Ensure Alembic is fine before we do the real work, in order not to fail at
|
||||
# the last step and leave the database in an inconsistent state. The
|
||||
# configuration is loaded lazily, so we query it to force its loading.
|
||||
if args.use_alembic:
|
||||
alembic_config = alembic.config.Config("alembic.ini")
|
||||
alembic_config.get_main_option("script_location")
|
||||
alembic_config.attributes["configure_logger"] = False
|
||||
|
||||
engine = aurweb.db.get_engine(echo=(args.verbose >= 1))
|
||||
aurweb.schema.metadata.create_all(engine)
|
||||
conn = engine.connect()
|
||||
feed_initial_data(conn)
|
||||
conn.close()
|
||||
|
||||
if args.use_alembic:
|
||||
alembic.command.stamp(alembic_config, "head")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="python -m aurweb.initdb", description="Initialize the aurweb database."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v", "--verbose", action="count", default=0, help="increase verbosity"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-alembic",
|
||||
help="disable Alembic migrations support",
|
||||
dest="use_alembic",
|
||||
action="store_false",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
run(args)
|
101
aurweb/l10n.py
101
aurweb/l10n.py
|
@ -1,19 +1,102 @@
|
|||
import gettext
|
||||
from collections import OrderedDict
|
||||
|
||||
from fastapi import Request
|
||||
|
||||
import aurweb.config
|
||||
|
||||
SUPPORTED_LANGUAGES = OrderedDict(
|
||||
{
|
||||
"ar": "العربية",
|
||||
"ast": "Asturianu",
|
||||
"ca": "Català",
|
||||
"cs": "Český",
|
||||
"da": "Dansk",
|
||||
"de": "Deutsch",
|
||||
"el": "Ελληνικά",
|
||||
"en": "English",
|
||||
"es": "Español",
|
||||
"es_419": "Español (Latinoamérica)",
|
||||
"fi": "Suomi",
|
||||
"fr": "Français",
|
||||
"he": "עברית",
|
||||
"hr": "Hrvatski",
|
||||
"hu": "Magyar",
|
||||
"it": "Italiano",
|
||||
"ja": "日本語",
|
||||
"nb": "Norsk",
|
||||
"nl": "Nederlands",
|
||||
"pl": "Polski",
|
||||
"pt_BR": "Português (Brasil)",
|
||||
"pt_PT": "Português (Portugal)",
|
||||
"ro": "Română",
|
||||
"ru": "Русский",
|
||||
"sk": "Slovenčina",
|
||||
"sr": "Srpski",
|
||||
"tr": "Türkçe",
|
||||
"uk": "Українська",
|
||||
"zh_CN": "简体中文",
|
||||
"zh_TW": "正體中文",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
RIGHT_TO_LEFT_LANGUAGES = ("he", "ar")
|
||||
|
||||
|
||||
class Translator:
|
||||
def __init__(self):
|
||||
self._localedir = aurweb.config.get('options', 'localedir')
|
||||
self._localedir = aurweb.config.get("options", "localedir")
|
||||
self._translator = {}
|
||||
|
||||
def translate(self, s, lang):
|
||||
if lang == 'en':
|
||||
return s
|
||||
def get_translator(self, lang: str):
|
||||
if lang not in self._translator:
|
||||
self._translator[lang] = gettext.translation("aurweb",
|
||||
self._localedir,
|
||||
languages=[lang])
|
||||
self._translator[lang].install()
|
||||
return _(s)
|
||||
self._translator[lang] = gettext.translation(
|
||||
"aurweb", self._localedir, languages=[lang], fallback=True
|
||||
)
|
||||
return self._translator.get(lang)
|
||||
|
||||
def translate(self, s: str, lang: str):
|
||||
return self.get_translator(lang).gettext(s)
|
||||
|
||||
|
||||
# Global translator object.
|
||||
translator = Translator()
|
||||
|
||||
|
||||
def get_request_language(request: Request) -> str:
|
||||
"""Get a request's language from either query param, user setting or
|
||||
cookie. We use the configuration's [options] default_lang otherwise.
|
||||
|
||||
@param request FastAPI request
|
||||
"""
|
||||
request_lang = request.query_params.get("language")
|
||||
cookie_lang = request.cookies.get("AURLANG")
|
||||
if request_lang and request_lang in SUPPORTED_LANGUAGES:
|
||||
return request_lang
|
||||
elif (
|
||||
request.user.is_authenticated()
|
||||
and request.user.LangPreference in SUPPORTED_LANGUAGES
|
||||
):
|
||||
return request.user.LangPreference
|
||||
elif cookie_lang and cookie_lang in SUPPORTED_LANGUAGES:
|
||||
return cookie_lang
|
||||
return aurweb.config.get_with_fallback("options", "default_lang", "en")
|
||||
|
||||
|
||||
def get_raw_translator_for_request(request: Request):
|
||||
lang = get_request_language(request)
|
||||
return translator.get_translator(lang)
|
||||
|
||||
|
||||
def get_translator_for_request(request: Request):
|
||||
"""
|
||||
Determine the preferred language from a FastAPI request object and build a
|
||||
translator function for it.
|
||||
"""
|
||||
lang = get_request_language(request)
|
||||
|
||||
def translate(message):
|
||||
return translator.translate(message, lang)
|
||||
|
||||
return translate
|
||||
|
|
32
aurweb/models/__init__.py
Normal file
32
aurweb/models/__init__.py
Normal file
|
@ -0,0 +1,32 @@
|
|||
""" Collection of all aurweb SQLAlchemy declarative models. """
|
||||
|
||||
from .accepted_term import AcceptedTerm # noqa: F401
|
||||
from .account_type import AccountType # noqa: F401
|
||||
from .api_rate_limit import ApiRateLimit # noqa: F401
|
||||
from .ban import Ban # noqa: F401
|
||||
from .dependency_type import DependencyType # noqa: F401
|
||||
from .group import Group # noqa: F401
|
||||
from .license import License # noqa: F401
|
||||
from .official_provider import OfficialProvider # noqa: F401
|
||||
from .package import Package # noqa: F401
|
||||
from .package_base import PackageBase # noqa: F401
|
||||
from .package_blacklist import PackageBlacklist # noqa: F401
|
||||
from .package_comaintainer import PackageComaintainer # noqa: F401
|
||||
from .package_comment import PackageComment # noqa: F401
|
||||
from .package_dependency import PackageDependency # noqa: F401
|
||||
from .package_group import PackageGroup # noqa: F401
|
||||
from .package_keyword import PackageKeyword # noqa: F401
|
||||
from .package_license import PackageLicense # noqa: F401
|
||||
from .package_notification import PackageNotification # noqa: F401
|
||||
from .package_relation import PackageRelation # noqa: F401
|
||||
from .package_request import PackageRequest # noqa: F401
|
||||
from .package_source import PackageSource # noqa: F401
|
||||
from .package_vote import PackageVote # noqa: F401
|
||||
from .relation_type import RelationType # noqa: F401
|
||||
from .request_type import RequestType # noqa: F401
|
||||
from .session import Session # noqa: F401
|
||||
from .ssh_pub_key import SSHPubKey # noqa: F401
|
||||
from .term import Term # noqa: F401
|
||||
from .user import User # noqa: F401
|
||||
from .vote import Vote # noqa: F401
|
||||
from .voteinfo import VoteInfo # noqa: F401
|
42
aurweb/models/accepted_term.py
Normal file
42
aurweb/models/accepted_term.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.term import Term as _Term
|
||||
from aurweb.models.user import User as _User
|
||||
|
||||
|
||||
class AcceptedTerm(Base):
    """Record of a user having accepted a specific Terms revision."""

    __table__ = schema.AcceptedTerms
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.TermsID]}

    # The user who accepted the terms.
    User = relationship(
        _User,
        backref=backref("accepted_terms", lazy="dynamic"),
        foreign_keys=[__table__.c.UsersID],
    )

    # The terms that were accepted.
    Term = relationship(
        _Term,
        backref=backref("accepted_terms", lazy="dynamic"),
        foreign_keys=[__table__.c.TermsID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Emulate NOT NULL foreign key constraints at the ORM level so
        # bad construction fails before a database round trip.
        if not self.User and not self.UsersID:
            raise IntegrityError(
                statement="Foreign key UsersID cannot be null.",
                # Fixed: the column is UsersID, not UserID.
                orig="AcceptedTerms.UsersID",
                # Fixed: a one-element tuple; ("NULL") was just a string.
                params=("NULL",),
            )

        if not self.Term and not self.TermsID:
            raise IntegrityError(
                # Fixed: the column is TermsID, not TermID.
                statement="Foreign key TermsID cannot be null.",
                orig="AcceptedTerms.TermsID",
                params=("NULL",),
            )
|
40
aurweb/models/account_type.py
Normal file
40
aurweb/models/account_type.py
Normal file
|
@ -0,0 +1,40 @@
|
|||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
# Canonical display names for each account type.
USER = "User"
PACKAGE_MAINTAINER = "Package Maintainer"
DEVELOPER = "Developer"
PACKAGE_MAINTAINER_AND_DEV = "Package Maintainer & Developer"

# Fixed database IDs for the account types above, in the same order.
USER_ID = 1
PACKAGE_MAINTAINER_ID = 2
DEVELOPER_ID = 3
PACKAGE_MAINTAINER_AND_DEV_ID = 4

# Name -> ID lookup table.
ACCOUNT_TYPE_ID = dict(
    zip(
        (USER, PACKAGE_MAINTAINER, DEVELOPER, PACKAGE_MAINTAINER_AND_DEV),
        (USER_ID, PACKAGE_MAINTAINER_ID, DEVELOPER_ID, PACKAGE_MAINTAINER_AND_DEV_ID),
    )
)

# ID -> name lookup table (inverse of ACCOUNT_TYPE_ID).
ACCOUNT_TYPE_NAME = {type_id: name for name, type_id in ACCOUNT_TYPE_ID.items()}
|
||||
|
||||
|
||||
class AccountType(Base):
    """An ORM model of a single AccountTypes record."""

    __table__ = schema.AccountTypes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, **kwargs):
        # Only the AccountType column is settable via the constructor;
        # a missing "AccountType" keyword raises KeyError, as before.
        self.AccountType = kwargs.pop("AccountType")

    def __str__(self):
        return str(self.AccountType)

    def __repr__(self):
        return f"<AccountType(ID='{self.ID}', AccountType='{self}')>"
|
27
aurweb/models/api_rate_limit.py
Normal file
27
aurweb/models/api_rate_limit.py
Normal file
|
@ -0,0 +1,27 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
|
||||
class ApiRateLimit(Base):
    """Per-IP request counter used for RPC rate limiting."""

    __table__ = schema.ApiRateLimit
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.IP]}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both counters are NOT NULL columns; mirror that constraint
        # here, reporting the first missing one (Requests first).
        for column in ("Requests", "WindowStart"):
            if getattr(self, column) is None:
                raise IntegrityError(
                    statement=f"Column {column} cannot be null.",
                    orig=f"ApiRateLimit.{column}",
                    params=("NULL"),
                )
|
20
aurweb/models/ban.py
Normal file
20
aurweb/models/ban.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
from fastapi import Request
|
||||
|
||||
from aurweb import db, schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.util import get_client_ip
|
||||
|
||||
|
||||
class Ban(Base):
    """An ORM model of a single Bans record (a banned IP address)."""

    __table__ = schema.Bans
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.IPAddress]}

    def __init__(self, **kwargs):
        # No extra validation needed beyond the base constructor.
        super().__init__(**kwargs)
|
||||
|
||||
|
||||
def is_banned(request: Request):
    """Return whether the requesting client's IP address is banned."""
    client_ip = get_client_ip(request)
    banned = db.query(Ban).filter(Ban.IPAddress == client_ip).exists()
    return db.query(banned).scalar()
|
29
aurweb/models/declarative.py
Normal file
29
aurweb/models/declarative.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
import json
|
||||
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
|
||||
from aurweb import util
|
||||
|
||||
|
||||
def to_dict(model):
    """Map a model's table columns to a {column_name: value} dict."""
    columns = model.__table__.columns
    return dict((column.name, getattr(model, column.name)) for column in columns)
|
||||
|
||||
|
||||
def to_json(model, indent: int = None):
    """Serialize a model's columns to a JSON object string."""
    data = {key: util.jsonify(value) for key, value in to_dict(model).items()}
    return json.dumps(data, indent=indent)
|
||||
|
||||
|
||||
# The shared declarative base for all aurweb models.
Base = declarative_base()

# Table arguments applied to every model's table.
Base.__table_args__ = {"autoload": False, "extend_existing": True}

# Attach serialization helpers so that any declarative model can
# produce a dict (.as_dict()) or a JSON string (.json()) of its
# table columns at any time.
Base.as_dict = to_dict
Base.json = to_json
|
21
aurweb/models/dependency_type.py
Normal file
21
aurweb/models/dependency_type.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
# Dependency type names as they appear in PKGBUILDs.
DEPENDS = "depends"
MAKEDEPENDS = "makedepends"
CHECKDEPENDS = "checkdepends"
OPTDEPENDS = "optdepends"

# Fixed database IDs matching the names above, in the same order.
DEPENDS_ID = 1
MAKEDEPENDS_ID = 2
CHECKDEPENDS_ID = 3
OPTDEPENDS_ID = 4
|
||||
|
||||
|
||||
class DependencyType(Base):
    """An ORM model of a single DependencyTypes record."""

    __table__ = schema.DependencyTypes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, Name: str = None):
        # Only the Name column is settable via the constructor.
        self.Name = Name
|
19
aurweb/models/group.py
Normal file
19
aurweb/models/group.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
|
||||
class Group(Base):
    """An ORM model of a single Groups record (a package group name)."""

    __table__ = schema.Groups
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Name is a NOT NULL column; mirror that constraint here.
        if self.Name is None:
            raise IntegrityError(
                statement="Column Name cannot be null.",
                orig="Groups.Name",
                params=("NULL"),
            )
|
20
aurweb/models/license.py
Normal file
20
aurweb/models/license.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
|
||||
class License(Base):
    """An ORM model of a single Licenses record."""

    __table__ = schema.Licenses
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Name must be a non-empty value; mirror the NOT NULL column.
        if not self.Name:
            raise IntegrityError(
                statement="Column Name cannot be null.",
                orig="Licenses.Name",
                params=("NULL"),
            )
|
39
aurweb/models/official_provider.py
Normal file
39
aurweb/models/official_provider.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
# Base URL of the official Arch Linux package site.
OFFICIAL_BASE = "https://archlinux.org"


class OfficialProvider(Base):
    """An ORM model of a single OfficialProviders record: an official
    repository package providing some name."""

    __table__ = schema.OfficialProviders
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # OfficialProvider instances always represent official packages.
    is_official = True

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Name, Repo and Provides are all required non-empty values;
        # report the first missing one, in the original check order.
        for column in ("Name", "Repo", "Provides"):
            if not getattr(self, column):
                raise IntegrityError(
                    statement=f"Column {column} cannot be null.",
                    orig=f"OfficialProviders.{column}",
                    params=("NULL"),
                )
|
38
aurweb/models/package.py
Normal file
38
aurweb/models/package.py
Normal file
|
@ -0,0 +1,38 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.package_base import PackageBase as _PackageBase
|
||||
|
||||
|
||||
class Package(Base):
    """An ORM model of a single Packages record."""

    __table__ = schema.Packages
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # The pkgbase this package belongs to; deleting the base removes
    # its packages as well (cascade).
    PackageBase = relationship(
        _PackageBase,
        backref=backref("packages", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    # No Package instances are official packages.
    is_official = False

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Emulate the NOT NULL foreign key at the ORM level.
        if not (self.PackageBase or self.PackageBaseID):
            raise IntegrityError(
                statement="Foreign key PackageBaseID cannot be null.",
                orig="Packages.PackageBaseID",
                params=("NULL"),
            )

        if self.Name is None:
            raise IntegrityError(
                statement="Column Name cannot be null.",
                orig="Packages.Name",
                params=("NULL"),
            )
|
76
aurweb/models/package_base.py
Normal file
76
aurweb/models/package_base.py
Normal file
|
@ -0,0 +1,76 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema, time
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.user import User as _User
|
||||
|
||||
|
||||
class PackageBase(Base):
    """An ORM model of a single PackageBases record."""

    __table__ = schema.PackageBases
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # User who flagged the base out of date.
    Flagger = relationship(
        _User,
        backref=backref("flagged_bases", lazy="dynamic"),
        foreign_keys=[__table__.c.FlaggerUID],
    )

    # User who originally submitted the base.
    Submitter = relationship(
        _User,
        backref=backref("submitted_bases", lazy="dynamic"),
        foreign_keys=[__table__.c.SubmitterUID],
    )

    # Current maintainer of the base.
    Maintainer = relationship(
        _User,
        backref=backref("maintained_bases", lazy="dynamic"),
        foreign_keys=[__table__.c.MaintainerUID],
    )

    # User who last packaged the base.
    Packager = relationship(
        _User,
        backref=backref("package_bases", lazy="dynamic"),
        foreign_keys=[__table__.c.PackagerUID],
    )

    # Column names whose values are coerced to float on attribute access.
    TO_FLOAT = {"Popularity"}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        if self.Name is None:
            raise IntegrityError(
                statement="Column Name cannot be null.",
                orig="PackageBases.Name",
                params=("NULL"),
            )

        # Default both timestamps to the current UTC timestamp when
        # the caller did not supply them.
        now = time.utcnow()
        if not self.SubmittedTS:
            self.SubmittedTS = now
        if not self.ModifiedTS:
            self.ModifiedTS = now

        # Normalize a missing flagger comment to the empty string.
        if not self.FlaggerComment:
            self.FlaggerComment = str()

    def __getattribute__(self, key: str):
        # Transparently coerce float-typed columns (see TO_FLOAT) to
        # Python floats when the stored value is some other type.
        value = super().__getattribute__(key)
        if key in PackageBase.TO_FLOAT and not isinstance(value, float):
            value = float(value)
        return value
|
||||
|
||||
|
||||
def popularity_decay(pkgbase: PackageBase, utcnow: int):
    """Return the number of whole days since popularity was last updated."""
    elapsed = utcnow - pkgbase.PopularityUpdated.timestamp()
    return int(elapsed / 86400)
|
||||
|
||||
|
||||
def popularity(pkgbase: PackageBase, utcnow: int):
    """Return the stored popularity decayed by 2% per elapsed day."""
    days = popularity_decay(pkgbase, utcnow)
    return float(pkgbase.Popularity) * (0.98**days)
|
20
aurweb/models/package_blacklist.py
Normal file
20
aurweb/models/package_blacklist.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
|
||||
class PackageBlacklist(Base):
    """An ORM model of a single PackageBlacklist record."""

    __table__ = schema.PackageBlacklist
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Name must be a non-empty value; mirror the NOT NULL column.
        if not self.Name:
            raise IntegrityError(
                statement="Column Name cannot be null.",
                orig="PackageBlacklist.Name",
                params=("NULL"),
            )
|
49
aurweb/models/package_comaintainer.py
Normal file
49
aurweb/models/package_comaintainer.py
Normal file
|
@ -0,0 +1,49 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.package_base import PackageBase as _PackageBase
|
||||
from aurweb.models.user import User as _User
|
||||
|
||||
|
||||
class PackageComaintainer(Base):
    """An ORM model of a single PackageComaintainers record."""

    __table__ = schema.PackageComaintainers
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.UsersID, __table__.c.PackageBaseID]}

    # The comaintaining user.
    User = relationship(
        _User,
        backref=backref("comaintained", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.UsersID],
    )

    # The pkgbase being comaintained.
    PackageBase = relationship(
        _PackageBase,
        backref=backref("comaintainers", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Emulate NOT NULL constraints at the ORM level, checked in
        # the original order: UsersID, PackageBaseID, then Priority.
        if not (self.User or self.UsersID):
            raise IntegrityError(
                statement="Foreign key UsersID cannot be null.",
                orig="PackageComaintainers.UsersID",
                params=("NULL"),
            )

        if not (self.PackageBase or self.PackageBaseID):
            raise IntegrityError(
                statement="Foreign key PackageBaseID cannot be null.",
                orig="PackageComaintainers.PackageBaseID",
                params=("NULL"),
            )

        if not self.Priority:
            raise IntegrityError(
                statement="Column Priority cannot be null.",
                orig="PackageComaintainers.Priority",
                params=("NULL"),
            )
|
73
aurweb/models/package_comment.py
Normal file
73
aurweb/models/package_comment.py
Normal file
|
@ -0,0 +1,73 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.package_base import PackageBase as _PackageBase
|
||||
from aurweb.models.user import User as _User
|
||||
|
||||
|
||||
class PackageComment(Base):
    """An ORM model of a single PackageComments record."""

    __table__ = schema.PackageComments
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # The pkgbase the comment is attached to.
    PackageBase = relationship(
        _PackageBase,
        backref=backref("comments", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    # Author of the comment.
    User = relationship(
        _User,
        backref=backref("package_comments", lazy="dynamic"),
        foreign_keys=[__table__.c.UsersID],
    )

    # User who last edited the comment, if any.
    Editor = relationship(
        _User,
        backref=backref("edited_comments", lazy="dynamic"),
        foreign_keys=[__table__.c.EditedUsersID],
    )

    # User who deleted the comment, if any.
    Deleter = relationship(
        _User,
        backref=backref("deleted_comments", lazy="dynamic"),
        foreign_keys=[__table__.c.DelUsersID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Emulate NOT NULL constraints at the ORM level.
        if not (self.PackageBase or self.PackageBaseID):
            raise IntegrityError(
                statement="Foreign key PackageBaseID cannot be null.",
                orig="PackageComments.PackageBaseID",
                params=("NULL"),
            )

        if not (self.User or self.UsersID):
            raise IntegrityError(
                statement="Foreign key UsersID cannot be null.",
                orig="PackageComments.UsersID",
                params=("NULL"),
            )

        if self.Comments is None:
            raise IntegrityError(
                statement="Column Comments cannot be null.",
                orig="PackageComments.Comments",
                params=("NULL"),
            )

        # Default the rendered HTML to an empty string.
        if self.RenderedComment is None:
            self.RenderedComment = str()

    def maintainers(self):
        """Return the pkgbase's maintainer and comaintainer users,
        omitting any that are unset."""
        users = [self.PackageBase.Maintainer]
        users += [co.User for co in self.PackageBase.comaintainers]
        return [user for user in users if user is not None]
|
100
aurweb/models/package_dependency.py
Normal file
100
aurweb/models/package_dependency.py
Normal file
|
@ -0,0 +1,100 @@
|
|||
from sqlalchemy import and_, literal
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import db, schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.dependency_type import DependencyType as _DependencyType
|
||||
from aurweb.models.official_provider import OfficialProvider as _OfficialProvider
|
||||
from aurweb.models.package import Package as _Package
|
||||
from aurweb.models.package_relation import PackageRelation
|
||||
|
||||
|
||||
class PackageDependency(Base):
    """An ORM model of a single PackageDepends record."""

    __table__ = schema.PackageDepends
    __tablename__ = __table__.name
    __mapper_args__ = {
        "primary_key": [
            __table__.c.PackageID,
            __table__.c.DepTypeID,
            __table__.c.DepName,
        ]
    }

    # The package that declares this dependency.
    Package = relationship(
        _Package,
        backref=backref("package_dependencies", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageID],
    )

    # Kind of dependency (depends, makedepends, ...).
    DependencyType = relationship(
        _DependencyType,
        backref=backref("package_dependencies", lazy="dynamic"),
        foreign_keys=[__table__.c.DepTypeID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Emulate NOT NULL constraints at the ORM level.
        if not (self.Package or self.PackageID):
            raise IntegrityError(
                statement="Foreign key PackageID cannot be null.",
                orig="PackageDependencies.PackageID",
                params=("NULL"),
            )

        if not (self.DependencyType or self.DepTypeID):
            raise IntegrityError(
                statement="Foreign key DepTypeID cannot be null.",
                orig="PackageDependencies.DepTypeID",
                params=("NULL"),
            )

        if self.DepName is None:
            raise IntegrityError(
                statement="Column DepName cannot be null.",
                orig="PackageDependencies.DepName",
                params=("NULL"),
            )

    def is_aur_package(self) -> bool:
        """Return True if DepName names a package in the AUR."""
        aur_pkg = db.query(_Package).filter(_Package.Name == self.DepName)
        return db.query(aur_pkg.exists()).scalar()

    def is_package(self) -> bool:
        """Return True if DepName names an AUR or official package."""
        official = (
            db.query(_OfficialProvider)
            .filter(_OfficialProvider.Name == self.DepName)
            .exists()
        )
        return self.is_aur_package() or db.query(official).scalar()

    def provides(self) -> list[PackageRelation]:
        """Return all AUR and official providers of DepName, each row
        carrying (Name, is_official), AUR results first."""
        from aurweb.models.relation_type import PROVIDES_ID

        aur_providers = (
            db.query(PackageRelation)
            .join(_Package)
            .filter(
                and_(
                    PackageRelation.RelTypeID == PROVIDES_ID,
                    PackageRelation.RelName == self.DepName,
                )
            )
            .with_entities(_Package.Name, literal(False).label("is_official"))
            .order_by(_Package.Name.asc())
        )

        official_providers = (
            db.query(_OfficialProvider)
            .filter(
                and_(
                    _OfficialProvider.Provides == self.DepName,
                    _OfficialProvider.Name != self.DepName,
                )
            )
            .with_entities(_OfficialProvider.Name, literal(True).label("is_official"))
            .order_by(_OfficialProvider.Name.asc())
        )

        return aur_providers.union(official_providers).all()
|
42
aurweb/models/package_group.py
Normal file
42
aurweb/models/package_group.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.group import Group as _Group
|
||||
from aurweb.models.package import Package as _Package
|
||||
|
||||
|
||||
class PackageGroup(Base):
    """An ORM model of a single PackageGroups record."""

    __table__ = schema.PackageGroups
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.PackageID, __table__.c.GroupID]}

    # The package that belongs to the group.
    Package = relationship(
        _Package,
        backref=backref("package_groups", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageID],
    )

    # The group the package belongs to.
    Group = relationship(
        _Group,
        backref=backref("package_groups", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.GroupID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both primary key halves must be present.
        if not (self.Package or self.PackageID):
            raise IntegrityError(
                statement="Primary key PackageID cannot be null.",
                orig="PackageGroups.PackageID",
                params=("NULL"),
            )

        if not (self.Group or self.GroupID):
            raise IntegrityError(
                statement="Primary key GroupID cannot be null.",
                orig="PackageGroups.GroupID",
                params=("NULL"),
            )
|
28
aurweb/models/package_keyword.py
Normal file
28
aurweb/models/package_keyword.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.package_base import PackageBase as _PackageBase
|
||||
|
||||
|
||||
class PackageKeyword(Base):
    """An ORM model of a single PackageKeywords record."""

    __table__ = schema.PackageKeywords
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.PackageBaseID, __table__.c.Keyword]}

    # The pkgbase the keyword is attached to.
    PackageBase = relationship(
        _PackageBase,
        backref=backref("keywords", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # PackageBaseID is half of the primary key and must be present.
        if not (self.PackageBase or self.PackageBaseID):
            raise IntegrityError(
                statement="Primary key PackageBaseID cannot be null.",
                orig="PackageKeywords.PackageBaseID",
                params=("NULL"),
            )
|
42
aurweb/models/package_license.py
Normal file
42
aurweb/models/package_license.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.license import License as _License
|
||||
from aurweb.models.package import Package as _Package
|
||||
|
||||
|
||||
class PackageLicense(Base):
    """An ORM model of a single PackageLicenses record."""

    __table__ = schema.PackageLicenses
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.PackageID, __table__.c.LicenseID]}

    # The package licensed under License.
    Package = relationship(
        _Package,
        backref=backref("package_licenses", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageID],
    )

    # The license the package is distributed under.
    License = relationship(
        _License,
        backref=backref("package_licenses", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.LicenseID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Both primary key halves must be present.
        if not (self.Package or self.PackageID):
            raise IntegrityError(
                statement="Primary key PackageID cannot be null.",
                orig="PackageLicenses.PackageID",
                params=("NULL"),
            )

        if not (self.License or self.LicenseID):
            raise IntegrityError(
                statement="Primary key LicenseID cannot be null.",
                orig="PackageLicenses.LicenseID",
                params=("NULL"),
            )
|
42
aurweb/models/package_notification.py
Normal file
42
aurweb/models/package_notification.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.package_base import PackageBase as _PackageBase
|
||||
from aurweb.models.user import User as _User
|
||||
|
||||
|
||||
class PackageNotification(Base):
    """An ORM model of a single PackageNotifications record."""

    __table__ = schema.PackageNotifications
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.UserID, __table__.c.PackageBaseID]}

    # The user who subscribed to notifications.
    User = relationship(
        _User,
        backref=backref("notifications", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.UserID],
    )

    # The pkgbase being watched.
    PackageBase = relationship(
        _PackageBase,
        backref=backref("notifications", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Emulate NOT NULL foreign keys at the ORM level.
        if not (self.User or self.UserID):
            raise IntegrityError(
                statement="Foreign key UserID cannot be null.",
                orig="PackageNotifications.UserID",
                params=("NULL"),
            )

        if not (self.PackageBase or self.PackageBaseID):
            raise IntegrityError(
                statement="Foreign key PackageBaseID cannot be null.",
                orig="PackageNotifications.PackageBaseID",
                params=("NULL"),
            )
|
55
aurweb/models/package_relation.py
Normal file
55
aurweb/models/package_relation.py
Normal file
|
@ -0,0 +1,55 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.package import Package as _Package
|
||||
from aurweb.models.relation_type import RelationType as _RelationType
|
||||
|
||||
|
||||
class PackageRelation(Base):
    """An ORM model of a single PackageRelations record."""

    __table__ = schema.PackageRelations
    __tablename__ = __table__.name
    __mapper_args__ = {
        "primary_key": [
            __table__.c.PackageID,
            __table__.c.RelTypeID,
            __table__.c.RelName,
        ]
    }

    # The package declaring the relation.
    Package = relationship(
        _Package,
        backref=backref("package_relations", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageID],
    )

    # Kind of relation (conflicts, provides, replaces).
    RelationType = relationship(
        _RelationType,
        backref=backref("package_relations", lazy="dynamic"),
        foreign_keys=[__table__.c.RelTypeID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Emulate NOT NULL constraints at the ORM level.
        if not (self.Package or self.PackageID):
            raise IntegrityError(
                statement="Foreign key PackageID cannot be null.",
                orig="PackageRelations.PackageID",
                params=("NULL"),
            )

        if not (self.RelationType or self.RelTypeID):
            raise IntegrityError(
                statement="Foreign key RelTypeID cannot be null.",
                orig="PackageRelations.RelTypeID",
                params=("NULL"),
            )

        if not self.RelName:
            raise IntegrityError(
                statement="Column RelName cannot be null.",
                orig="PackageRelations.RelName",
                params=("NULL"),
            )
|
121
aurweb/models/package_request.py
Normal file
121
aurweb/models/package_request.py
Normal file
|
@ -0,0 +1,121 @@
|
|||
import base64
|
||||
import hashlib
|
||||
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import config, schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.package_base import PackageBase as _PackageBase
|
||||
from aurweb.models.request_type import RequestType as _RequestType
|
||||
from aurweb.models.user import User as _User
|
||||
|
||||
# Display strings for a package request's status.
PENDING = "Pending"
CLOSED = "Closed"
ACCEPTED = "Accepted"
REJECTED = "Rejected"

# Integer values used for the Status column of PackageRequest,
# matching the display strings above in order.
PENDING_ID = 0
CLOSED_ID = 1
ACCEPTED_ID = 2
REJECTED_ID = 3
|
||||
|
||||
|
||||
class PackageRequest(Base):
    """An ORM model of a single PackageRequests record."""

    __table__ = schema.PackageRequests
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    # Kind of request (orphan, deletion, merge).
    RequestType = relationship(
        _RequestType,
        backref=backref("package_requests", lazy="dynamic"),
        foreign_keys=[__table__.c.ReqTypeID],
    )

    # User who filed the request.
    User = relationship(
        _User,
        backref=backref("package_requests", lazy="dynamic"),
        foreign_keys=[__table__.c.UsersID],
    )

    # The pkgbase the request concerns.
    PackageBase = relationship(
        _PackageBase,
        backref=backref("requests", lazy="dynamic"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    # User who closed the request, if any.
    Closer = relationship(
        _User,
        backref=backref("closed_requests", lazy="dynamic"),
        foreign_keys=[__table__.c.ClosedUID],
    )

    # Status ID -> display string mapping.
    STATUS_DISPLAY = {
        PENDING_ID: PENDING,
        CLOSED_ID: CLOSED,
        ACCEPTED_ID: ACCEPTED,
        REJECTED_ID: REJECTED,
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Emulate NOT NULL constraints at the ORM level, in the
        # original check order.
        if not (self.RequestType or self.ReqTypeID):
            raise IntegrityError(
                statement="Foreign key ReqTypeID cannot be null.",
                orig="PackageRequests.ReqTypeID",
                params=("NULL"),
            )

        if not (self.PackageBase or self.PackageBaseID):
            raise IntegrityError(
                statement="Foreign key PackageBaseID cannot be null.",
                orig="PackageRequests.PackageBaseID",
                params=("NULL"),
            )

        if not self.PackageBaseName:
            raise IntegrityError(
                statement="Column PackageBaseName cannot be null.",
                orig="PackageRequests.PackageBaseName",
                params=("NULL"),
            )

        if not (self.User or self.UsersID):
            raise IntegrityError(
                statement="Foreign key UsersID cannot be null.",
                orig="PackageRequests.UsersID",
                params=("NULL"),
            )

        if self.Comments is None:
            raise IntegrityError(
                statement="Column Comments cannot be null.",
                orig="PackageRequests.Comments",
                params=("NULL"),
            )

        if self.ClosureComment is None:
            raise IntegrityError(
                statement="Column ClosureComment cannot be null.",
                orig="PackageRequests.ClosureComment",
                params=("NULL"),
            )

    def status_display(self) -> str:
        """Return a display string for the Status column."""
        return self.STATUS_DISPLAY[self.Status]

    def ml_message_id_hash(self) -> str:
        """Return the X-Message-ID-Hash used by the mailing list
        archive: a base32-encoded SHA1 of the request's Message-ID."""
        message_id = f"pkg-request-{str(self.ID)}@aur.archlinux.org"
        digest = hashlib.sha1(message_id.encode()).digest()
        return base64.b32encode(digest).decode()

    def ml_message_url(self) -> str:
        """Return the mailing list archive URL for this request."""
        return config.get("options", "ml_thread_url") % (self.ml_message_id_hash())
|
31
aurweb/models/package_source.py
Normal file
31
aurweb/models/package_source.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.package import Package as _Package
|
||||
|
||||
|
||||
class PackageSource(Base):
    """ORM model of a single PackageSources record."""

    __table__ = schema.PackageSources
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.PackageID, __table__.c.Source]}

    Package = relationship(
        _Package,
        backref=backref("package_sources", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageID],
    )

    def __init__(self, **kwargs):
        """Construct a PackageSource.

        :raises IntegrityError: if neither Package nor PackageID is given
        """
        super().__init__(**kwargs)

        if not self.Package and not self.PackageID:
            raise IntegrityError(
                statement="Foreign key PackageID cannot be null.",
                orig="PackageSources.PackageID",
                # Trailing comma makes this an actual 1-tuple;
                # ("NULL") without it is just the string "NULL".
                params=("NULL",),
            )

        # The Source column may not be empty; fall back to /dev/null,
        # mirroring historical aurweb behavior.
        if not self.Source:
            self.Source = "/dev/null"
|
49
aurweb/models/package_vote.py
Normal file
49
aurweb/models/package_vote.py
Normal file
|
@ -0,0 +1,49 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.package_base import PackageBase as _PackageBase
|
||||
from aurweb.models.user import User as _User
|
||||
|
||||
|
||||
class PackageVote(Base):
    """ORM model of a single PackageVotes record."""

    __table__ = schema.PackageVotes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.UsersID, __table__.c.PackageBaseID]}

    User = relationship(
        _User,
        backref=backref("package_votes", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.UsersID],
    )

    PackageBase = relationship(
        _PackageBase,
        backref=backref("package_votes", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.PackageBaseID],
    )

    def __init__(self, **kwargs):
        """Construct a PackageVote.

        :raises IntegrityError: if User/UsersID, PackageBase/PackageBaseID
            or VoteTS are missing
        """
        super().__init__(**kwargs)

        # Emulate NOT NULL constraints up front. Note: params was
        # previously written ("NULL") which is a plain string; the
        # trailing comma below produces the intended 1-tuple.
        if not self.User and not self.UsersID:
            raise IntegrityError(
                statement="Foreign key UsersID cannot be null.",
                orig="PackageVotes.UsersID",
                params=("NULL",),
            )

        if not self.PackageBase and not self.PackageBaseID:
            raise IntegrityError(
                statement="Foreign key PackageBaseID cannot be null.",
                orig="PackageVotes.PackageBaseID",
                params=("NULL",),
            )

        if not self.VoteTS:
            raise IntegrityError(
                statement="Column VoteTS cannot be null.",
                orig="PackageVotes.VoteTS",
                params=("NULL",),
            )
|
19
aurweb/models/relation_type.py
Normal file
19
aurweb/models/relation_type.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
CONFLICTS = "conflicts"
|
||||
PROVIDES = "provides"
|
||||
REPLACES = "replaces"
|
||||
|
||||
CONFLICTS_ID = 1
|
||||
PROVIDES_ID = 2
|
||||
REPLACES_ID = 3
|
||||
|
||||
|
||||
class RelationType(Base):
    """ORM model of a single RelationTypes record.

    Relation type names and IDs are exposed as module-level constants
    (CONFLICTS/PROVIDES/REPLACES and their *_ID counterparts).
    """

    __table__ = schema.RelationTypes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, Name: str = None):
        # Only the Name column is settable through this constructor.
        # NOTE(review): unlike sibling models, super().__init__() is not
        # called here -- confirm this is intentional.
        self.Name = Name
|
20
aurweb/models/request_type.py
Normal file
20
aurweb/models/request_type.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
DELETION = "deletion"
|
||||
ORPHAN = "orphan"
|
||||
MERGE = "merge"
|
||||
|
||||
DELETION_ID = 1
|
||||
ORPHAN_ID = 2
|
||||
MERGE_ID = 3
|
||||
|
||||
|
||||
class RequestType(Base):
    """ORM model of a single RequestTypes record.

    Request type names and IDs are exposed as module-level constants
    (DELETION/ORPHAN/MERGE and their *_ID counterparts).
    """

    __table__ = schema.RequestTypes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def name_display(self) -> str:
        """Return the Name column in title case.

        Note: str.title() capitalizes the first letter of *every* word,
        not just the first character of the string.
        """
        return self.Name.title()
|
44
aurweb/models/session.py
Normal file
44
aurweb/models/session.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import db, schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.user import User as _User
|
||||
|
||||
|
||||
class Session(Base):
    """ORM model of a single Sessions record.

    Each User owns at most one Session (uselist=False backref).
    """

    __table__ = schema.Sessions
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.UsersID]}

    User = relationship(
        _User,
        backref=backref("session", cascade="all, delete", uselist=False),
        foreign_keys=[__table__.c.UsersID],
    )

    def __init__(self, **kwargs):
        """Construct a Session.

        :raises IntegrityError: if no valid user owns this session
        """
        super().__init__(**kwargs)

        # We'll try to either use UsersID or User.ID if we can.
        # If neither exist, an AttributeError is raised, in which case
        # we set the uid to 0, which triggers IntegrityError below.
        try:
            uid = self.UsersID or self.User.ID
        except AttributeError:
            uid = 0

        # Unlike sibling models, a Session's owner must actually exist in
        # the Users table; run an EXISTS subquery to verify that.
        user_exists = db.query(_User).filter(_User.ID == uid).exists()
        if not db.query(user_exists).scalar():
            raise IntegrityError(
                statement=(
                    "Foreign key UsersID cannot be null and "
                    "must be a valid user's ID."
                ),
                orig="Sessions.UsersID",
                params=("NULL"),
            )
|
||||
|
||||
|
||||
def generate_unique_sid():
    """Return a random 32-character SessionID unique across the Sessions table."""
    return db.make_random_value(Session, Session.SessionID, 32)
|
29
aurweb/models/ssh_pub_key.py
Normal file
29
aurweb/models/ssh_pub_key.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
from subprocess import PIPE, Popen
|
||||
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
|
||||
class SSHPubKey(Base):
    """ORM model of a single SSHPubKeys record."""

    __table__ = schema.SSHPubKeys
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.Fingerprint]}

    # String-based target ("User") avoids a circular import with the
    # user model at module load time.
    User = relationship(
        "User",
        backref=backref("ssh_pub_keys", lazy="dynamic", cascade="all, delete"),
        foreign_keys=[__table__.c.UserID],
    )

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
|
||||
|
||||
|
||||
def get_fingerprint(pubkey: str) -> str:
    """Return the fingerprint of an SSH public key.

    Invokes `ssh-keygen -l -f -` with the key supplied on stdin.

    :param pubkey: SSH public key text
    :raises ValueError: if ssh-keygen exits non-zero (invalid key)
    :return: fingerprint with the leading hash-type prefix stripped
    """
    proc = Popen(["ssh-keygen", "-l", "-f", "-"], stdin=PIPE, stdout=PIPE, stderr=PIPE)
    out, _ = proc.communicate(pubkey.encode())
    if proc.returncode:
        raise ValueError("The SSH public key is invalid.")
    # ssh-keygen output looks like "<bits> <type>:<fingerprint> <comment>";
    # take the second field and drop everything up to the first colon.
    return out.decode().split()[1].split(":", 1)[1]
|
27
aurweb/models/term.py
Normal file
27
aurweb/models/term.py
Normal file
|
@ -0,0 +1,27 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
|
||||
class Term(Base):
    """ORM model of a single Terms record."""

    __table__ = schema.Terms
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    def __init__(self, **kwargs):
        """Construct a Term.

        :raises IntegrityError: if Description or URL are missing
        """
        super().__init__(**kwargs)

        # Emulate NOT NULL constraints up front. Note: params was
        # previously ("NULL"), a plain string; the trailing comma
        # below produces the intended 1-tuple.
        if not self.Description:
            raise IntegrityError(
                statement="Column Description cannot be null.",
                orig="Terms.Description",
                params=("NULL",),
            )

        if not self.URL:
            raise IntegrityError(
                statement="Column URL cannot be null.",
                orig="Terms.URL",
                params=("NULL",),
            )
|
272
aurweb/models/user.py
Normal file
272
aurweb/models/user.py
Normal file
|
@ -0,0 +1,272 @@
|
|||
import hashlib
|
||||
from typing import Set
|
||||
|
||||
import bcrypt
|
||||
from fastapi import Request
|
||||
from sqlalchemy import or_
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
import aurweb.config
|
||||
import aurweb.models.account_type
|
||||
import aurweb.schema
|
||||
from aurweb import aur_logging, db, schema, time, util
|
||||
from aurweb.models.account_type import AccountType as _AccountType
|
||||
from aurweb.models.ban import is_banned
|
||||
from aurweb.models.declarative import Base
|
||||
|
||||
logger = aur_logging.get_logger(__name__)
|
||||
|
||||
SALT_ROUNDS_DEFAULT = 12
|
||||
|
||||
|
||||
class User(Base):
    """An ORM model of a single Users record."""

    __table__ = schema.Users
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    AccountType = relationship(
        _AccountType,
        backref=backref("users", lazy="dynamic"),
        foreign_keys=[__table__.c.AccountTypeID],
        uselist=False,
    )

    # High-level variables used to track authentication (not in DB).
    authenticated = False
    nonce = None

    # Make this static to the class just in case SQLAlchemy ever
    # does something to bypass our constructor.
    salt_rounds = aurweb.config.getint("options", "salt_rounds", SALT_ROUNDS_DEFAULT)

    def __init__(self, Passwd: str = str(), **kwargs):
        # Pass an empty Passwd to the base constructor so the raw
        # password never lands in the column; it is hashed below.
        super().__init__(**kwargs, Passwd=str())

        # Run this again in the constructor in case we rehashed config.
        self.salt_rounds = aurweb.config.getint(
            "options", "salt_rounds", SALT_ROUNDS_DEFAULT
        )
        if Passwd:
            self.update_password(Passwd)

    def update_password(self, password):
        """Hash `password` with bcrypt (using salt_rounds) into Passwd."""
        self.Passwd = bcrypt.hashpw(
            password.encode(), bcrypt.gensalt(rounds=self.salt_rounds)
        ).decode()

    @staticmethod
    def minimum_passwd_length():
        """Return the configured minimum password length."""
        return aurweb.config.getint("options", "passwd_min_len")

    def is_authenticated(self):
        """Return internal authenticated state."""
        return self.authenticated

    def valid_password(self, password: str):
        """Check authentication against a given password."""
        if password is None:
            return False

        password_is_valid = False

        try:
            password_is_valid = bcrypt.checkpw(password.encode(), self.Passwd.encode())
        except ValueError:
            # bcrypt raises ValueError on a malformed stored hash;
            # treat as a failed modern check and try legacy below.
            pass

        # If our Salt column is not empty, we're using a legacy password.
        if not password_is_valid and self.Salt != str():
            # Try to login with legacy method (salted MD5).
            password_is_valid = (
                hashlib.md5(f"{self.Salt}{password}".encode()).hexdigest()
                == self.Passwd
            )

            # We got here, we passed the legacy authentication.
            # Update the password to our modern hash style.
            if password_is_valid:
                self.update_password(password)

        return password_is_valid

    def _login_approved(self, request: Request):
        """Return True when the request is not banned and the account
        is not suspended."""
        return not is_banned(request) and not self.Suspended

    def login(self, request: Request, password: str) -> str:
        """Login and authenticate a request.

        :param request: Incoming FastAPI request
        :param password: Plaintext password to check
        :return: new/refreshed SessionID, or None when login is denied
        """

        # Imported here to avoid a circular import at module load time.
        from aurweb import db
        from aurweb.models.session import Session, generate_unique_sid

        if not self._login_approved(request):
            return None

        self.authenticated = self.valid_password(password)
        if not self.authenticated:
            return None

        # Maximum number of iterations where we attempt to generate
        # a unique SID. In cases where the Session table has
        # exhausted all possible values, this will catch exceptions
        # instead of raising them and include details about failing
        # generation in an HTTPException.
        tries = 36

        exc = None
        for i in range(tries):
            exc = None
            now_ts = time.utcnow()
            try:
                with db.begin():
                    self.LastLogin = now_ts
                    self.LastLoginIPAddress = util.get_client_ip(request)
                    if not self.session:
                        sid = generate_unique_sid()
                        self.session = db.create(
                            Session, User=self, SessionID=sid, LastUpdateTS=now_ts
                        )
                    else:
                        # Only rotate the SID if the session is stale.
                        last_updated = self.session.LastUpdateTS
                        if last_updated and last_updated < now_ts:
                            self.session.SessionID = generate_unique_sid()
                        self.session.LastUpdateTS = now_ts

                    # Unset InactivityTS, we've logged in!
                    self.InactivityTS = 0

                break
            except IntegrityError as exc_:
                # SID collision (or similar); retry with a fresh one.
                exc = exc_

        if exc:
            raise exc

        return self.session.SessionID

    # NOTE(review): `approved` uses a mutable default argument; it is
    # only passed through unmodified here, but consider `approved=None`.
    def has_credential(self, credential: Set[int], approved: list["User"] = list()):
        from aurweb.auth.creds import has_credential

        return has_credential(self, credential, approved)

    def logout(self, request: Request) -> None:
        """Clear authentication state and delete this user's session."""
        self.authenticated = False
        if self.session:
            with db.begin():
                db.delete(self.session)

    def is_package_maintainer(self):
        """Return True if this account is a Package Maintainer (or PM & Dev)."""
        return self.AccountType.ID in {
            aurweb.models.account_type.PACKAGE_MAINTAINER_ID,
            aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID,
        }

    def is_developer(self):
        """Return True if this account is a Developer (or PM & Dev)."""
        return self.AccountType.ID in {
            aurweb.models.account_type.DEVELOPER_ID,
            aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID,
        }

    def is_elevated(self):
        """A User is 'elevated' when they have either a
        Package Maintainer or Developer AccountType."""
        return self.AccountType.ID in {
            aurweb.models.account_type.PACKAGE_MAINTAINER_ID,
            aurweb.models.account_type.DEVELOPER_ID,
            aurweb.models.account_type.PACKAGE_MAINTAINER_AND_DEV_ID,
        }

    def can_edit_user(self, target: "User") -> bool:
        """
        Whether this User instance can edit `target`.

        This User can edit user `target` if we both: have credentials and
        self.AccountTypeID is greater or equal to `target`.AccountTypeID.

        In short, a user must at least have credentials and be at least
        the same account type as the target.

        User < Package Maintainer < Developer < Package Maintainer & Developer

        :param target: Target User to be edited
        :return: Boolean indicating whether `self` can edit `target`
        """
        from aurweb.auth import creds

        has_cred = self.has_credential(creds.ACCOUNT_EDIT, approved=[target])
        return has_cred and self.AccountTypeID >= target.AccountTypeID

    def voted_for(self, package) -> bool:
        """Has this User voted for package?"""
        from aurweb.models.package_vote import PackageVote

        return bool(
            package.PackageBase.package_votes.filter(
                PackageVote.UsersID == self.ID
            ).scalar()
        )

    def notified(self, package) -> bool:
        """Is this User being notified about package (or package base)?

        :param package: Package or PackageBase instance
        :return: Boolean indicating state of package notification
                 in relation to this User
        """
        from aurweb.models.package import Package
        from aurweb.models.package_base import PackageBase
        from aurweb.models.package_notification import PackageNotification

        query = None
        if isinstance(package, Package):
            query = package.PackageBase.notifications
        elif isinstance(package, PackageBase):
            query = package.notifications

        # Run an exists() query where a pkgbase-related
        # PackageNotification exists for self (a user).
        return bool(
            db.query(
                query.filter(PackageNotification.UserID == self.ID).exists()
            ).scalar()
        )

    def packages(self):
        """Returns an ORM query to Package objects owned by this user.

        This should really be replaced with an internal ORM join
        configured for the User model. This has not been done yet
        due to issues I've been encountering in the process, so
        sticking with this function until we can properly implement it.

        :return: ORM query of User-packaged or maintained Package objects
        """
        from aurweb.models.package import Package
        from aurweb.models.package_base import PackageBase

        return (
            db.query(Package)
            .join(PackageBase)
            .filter(
                or_(
                    PackageBase.PackagerUID == self.ID,
                    PackageBase.MaintainerUID == self.ID,
                )
            )
        )

    def __repr__(self):
        return "<User(ID='%s', AccountType='%s', Username='%s')>" % (
            self.ID,
            str(self.AccountType),
            self.Username,
        )

    def __str__(self) -> str:
        return self.Username
|
||||
|
||||
|
||||
def generate_resetkey():
    """Return a random 32-character password-reset key."""
    return util.make_random_string(32)
|
42
aurweb/models/vote.py
Normal file
42
aurweb/models/vote.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.user import User as _User
|
||||
from aurweb.models.voteinfo import VoteInfo as _VoteInfo
|
||||
|
||||
|
||||
class Vote(Base):
    """ORM model of a single Votes record."""

    __table__ = schema.Votes
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.VoteID, __table__.c.UserID]}

    VoteInfo = relationship(
        _VoteInfo,
        backref=backref("votes", lazy="dynamic"),
        foreign_keys=[__table__.c.VoteID],
    )

    User = relationship(
        _User,
        backref=backref("votes", lazy="dynamic"),
        foreign_keys=[__table__.c.UserID],
    )

    def __init__(self, **kwargs):
        """Construct a Vote.

        :raises IntegrityError: if VoteInfo/VoteID or User/UserID are missing
        """
        super().__init__(**kwargs)

        # Emulate NOT NULL constraints up front. Note: params was
        # previously ("NULL"), a plain string; the trailing comma
        # below produces the intended 1-tuple.
        if not self.VoteInfo and not self.VoteID:
            raise IntegrityError(
                statement="Foreign key VoteID cannot be null.",
                orig="Votes.VoteID",
                params=("NULL",),
            )

        if not self.User and not self.UserID:
            raise IntegrityError(
                statement="Foreign key UserID cannot be null.",
                orig="Votes.UserID",
                params=("NULL",),
            )
|
82
aurweb/models/voteinfo.py
Normal file
82
aurweb/models/voteinfo.py
Normal file
|
@ -0,0 +1,82 @@
|
|||
import typing
|
||||
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import backref, relationship
|
||||
|
||||
from aurweb import schema, time
|
||||
from aurweb.models.declarative import Base
|
||||
from aurweb.models.user import User as _User
|
||||
|
||||
|
||||
class VoteInfo(Base):
    """ORM model of a single VoteInfo record."""

    __table__ = schema.VoteInfo
    __tablename__ = __table__.name
    __mapper_args__ = {"primary_key": [__table__.c.ID]}

    Submitter = relationship(
        _User,
        backref=backref("voteinfo_set", lazy="dynamic"),
        foreign_keys=[__table__.c.SubmitterID],
    )

    def __init__(self, **kwargs):
        """Construct a VoteInfo.

        :raises IntegrityError: if Agenda, User, Submitted, End or
            Submitter are missing
        """
        # Default Quorum, Yes, No and Abstain columns to 0.
        for col in ("Quorum", "Yes", "No", "Abstain"):
            if col not in kwargs:
                kwargs.update({col: 0})

        super().__init__(**kwargs)

        if self.Agenda is None:
            raise IntegrityError(
                statement="Column Agenda cannot be null.",
                orig="VoteInfo.Agenda",
                params=("NULL"),
            )

        # NOTE(review): `User` here is presumably a plain column on the
        # VoteInfo table (the username the vote concerns), distinct from
        # the Submitter relationship -- confirm against the schema.
        if self.User is None:
            raise IntegrityError(
                statement="Column User cannot be null.",
                orig="VoteInfo.User",
                params=("NULL"),
            )

        if self.Submitted is None:
            raise IntegrityError(
                statement="Column Submitted cannot be null.",
                orig="VoteInfo.Submitted",
                params=("NULL"),
            )

        if self.End is None:
            raise IntegrityError(
                statement="Column End cannot be null.",
                orig="VoteInfo.End",
                params=("NULL"),
            )

        if not self.Submitter:
            raise IntegrityError(
                statement="Foreign key SubmitterID cannot be null.",
                orig="VoteInfo.SubmitterID",
                params=("NULL"),
            )

    def __setattr__(self, key: str, value: typing.Any):
        """Customize setattr to stringify any Quorum keys given."""
        # Quorum is stored as a string in the DB; coerce on write.
        if key == "Quorum":
            value = str(value)
        return super().__setattr__(key, value)

    def __getattribute__(self, key: str):
        """Customize getattr to floatify any fetched Quorum values."""
        attr = super().__getattribute__(key)
        # Mirror of __setattr__: present Quorum as a float on read.
        if key == "Quorum":
            return float(attr)
        return attr

    def is_running(self):
        """Return True while the vote's End timestamp lies in the future."""
        return self.End > time.utcnow()

    def total_votes(self):
        """Return the sum of Yes, No and Abstain tallies."""
        return self.Yes + self.No + self.Abstain
|
0
aurweb/packages/__init__.py
Normal file
0
aurweb/packages/__init__.py
Normal file
269
aurweb/packages/requests.py
Normal file
269
aurweb/packages/requests.py
Normal file
|
@ -0,0 +1,269 @@
|
|||
from typing import Optional, Set
|
||||
|
||||
from fastapi import Request
|
||||
from sqlalchemy import and_, orm
|
||||
|
||||
from aurweb import config, db, l10n, time, util
|
||||
from aurweb.exceptions import InvariantError
|
||||
from aurweb.models import PackageBase, PackageRequest, User
|
||||
from aurweb.models.package_request import ACCEPTED_ID, PENDING_ID, REJECTED_ID
|
||||
from aurweb.models.request_type import (
|
||||
DELETION,
|
||||
DELETION_ID,
|
||||
MERGE,
|
||||
MERGE_ID,
|
||||
ORPHAN,
|
||||
ORPHAN_ID,
|
||||
)
|
||||
from aurweb.scripts import notify
|
||||
|
||||
|
||||
class ClosureFactory:
    """A factory class used to autogenerate closure comments."""

    # Maps RequestType.ID -> lowercase request type name; used to build
    # handler method names in get_closure().
    REQTYPE_NAMES = {DELETION_ID: DELETION, MERGE_ID: MERGE, ORPHAN_ID: ORPHAN}

    def _deletion_closure(
        self, requester: User, pkgbase: PackageBase, target: PackageBase = None
    ):
        # Closure for an accepted deletion request.
        return f"[Autogenerated] Accepted deletion for {pkgbase.Name}."

    def _merge_closure(
        self, requester: User, pkgbase: PackageBase, target: PackageBase = None
    ):
        # Closure for an accepted merge request; `target` must be set.
        return (
            f"[Autogenerated] Accepted merge for {pkgbase.Name} " f"into {target.Name}."
        )

    def _orphan_closure(
        self, requester: User, pkgbase: PackageBase, target: PackageBase = None
    ):
        # Closure for an accepted orphan request.
        return f"[Autogenerated] Accepted orphan for {pkgbase.Name}."

    def _rejected_merge_closure(
        self, requester: User, pkgbase: PackageBase, target: PackageBase = None
    ):
        # Closure for a merge request rejected because another merge
        # request for the same pkgbase was accepted instead.
        return (
            f"[Autogenerated] Another request to merge {pkgbase.Name} "
            f"into {target.Name} has rendered this request invalid."
        )

    def get_closure(
        self,
        reqtype_id: int,
        requester: User,
        pkgbase: PackageBase,
        target: PackageBase = None,
        status: int = ACCEPTED_ID,
    ) -> str:
        """
        Return a closure comment handled by this class.

        :param reqtype_id: RequestType.ID
        :param requester: User who is closing a request
        :param pkgbase: PackageBase instance related to the request
        :param target: Merge request target PackageBase instance
        :param status: PackageRequest.Status
        :raises NotImplementedError: when no handler exists for the
            (status, request type) combination
        """
        reqtype = ClosureFactory.REQTYPE_NAMES.get(reqtype_id)

        # Dispatch by composed method name: "_{reqtype}_closure" for
        # accepted requests, "_rejected_{reqtype}_closure" for rejected.
        partial = str()
        if status == REJECTED_ID:
            partial = "_rejected"

        try:
            handler = getattr(self, f"{partial}_{reqtype}_closure")
        except AttributeError:
            raise NotImplementedError("Unsupported 'reqtype_id' value.")
        return handler(requester, pkgbase, target)
|
||||
|
||||
|
||||
def update_closure_comment(
    pkgbase: PackageBase, reqtype_id: int, comments: str, target: PackageBase = None
) -> None:
    """
    Update all pending requests related to `pkgbase` with a closure comment.

    In order to persist closure comments through `handle_request`'s
    algorithm, we must set `PackageRequest.ClosureComment` before calling
    it. This function can be used to update the closure comment of all
    package requests related to `pkgbase` and `reqtype_id`.

    If an empty `comments` string is provided, we no-op out of this.

    :param pkgbase: PackageBase instance
    :param reqtype_id: RequestType.ID
    :param comments: PackageRequest.ClosureComment to update to
    :param target: Merge request target PackageBase instance
    """
    if not comments:
        return

    # Restrict to pending requests of the matching type.
    query = pkgbase.requests.filter(
        and_(
            PackageRequest.ReqTypeID == reqtype_id, PackageRequest.Status == PENDING_ID
        )
    )
    # For merge requests, only requests targeting `target` apply.
    if reqtype_id == MERGE_ID:
        query = query.filter(PackageRequest.MergeBaseName == target.Name)

    for pkgreq in query:
        pkgreq.ClosureComment = comments
|
||||
|
||||
|
||||
def verify_orphan_request(user: User, pkgbase: PackageBase):
    """Verify that a due orphan request exists for `pkgbase`.

    A pending orphan request is considered due once it has been open
    longer than the configured `options.request_idle_time`.

    :param user: Requesting User (unused; kept for interface compatibility)
    :param pkgbase: PackageBase instance whose requests are checked
    :return: True when at least one due pending orphan request exists
    """
    # Hoist loop-invariant configuration and clock reads out of the loop;
    # the original re-read config and the clock on every iteration.
    idle_time = config.getint("options", "request_idle_time")
    now = time.utcnow()

    requests = pkgbase.requests.filter(PackageRequest.ReqTypeID == ORPHAN_ID)
    for pkgreq in requests:
        if pkgreq.Status == PENDING_ID and (now - pkgreq.RequestTS) > idle_time:
            # The request is pending and already due: good to go.
            return True

    return False
|
||||
|
||||
|
||||
def close_pkgreq(
    pkgreq: PackageRequest,
    closer: User,
    pkgbase: PackageBase,
    target: Optional[PackageBase],
    status: int,
) -> None:
    """
    Close a package request with `pkgreq`.Status == `status`.

    :param pkgreq: PackageRequest instance
    :param closer: `pkgreq`.Closer User instance to update to
    :param pkgbase: PackageBase instance which `pkgreq` is about
    :param target: Optional PackageBase instance to merge into
    :param status: `pkgreq`.Status value to update to
    """
    now = time.utcnow()
    pkgreq.Status = status
    pkgreq.Closer = closer
    # Keep a caller-provided closure comment if one was set beforehand
    # (see update_closure_comment); otherwise autogenerate one suited
    # to the request type and resulting status.
    pkgreq.ClosureComment = pkgreq.ClosureComment or ClosureFactory().get_closure(
        pkgreq.ReqTypeID, closer, pkgbase, target, status
    )
    pkgreq.ClosedTS = now
|
||||
|
||||
|
||||
@db.retry_deadlock
def handle_request(
    request: Request,
    reqtype_id: int,
    pkgbase: PackageBase,
    target: PackageBase = None,
    comments: str = str(),
) -> list[notify.Notification]:
    """
    Handle package requests before performing an action.

    The actions we're interested in are disown (orphan), delete and
    merge. There is now an automated request generation and closure
    notification when a privileged user performs one of these actions
    without a pre-existing request. They all commit changes to the
    database, and thus before calling, state should be verified to
    avoid leaked database records regarding these requests.

    Otherwise, we accept and reject requests based on their state
    and send out the relevant notifications.

    :param request: Incoming Request; `request.user` needs this
        `pkgbase` request handled
    :param reqtype_id: RequestType.ID
    :param pkgbase: PackageBase which the request is about
    :param target: Optional target to merge into
    :param comments: Closure comment to apply to an autogenerated request
    :return: List of notifications for the caller to send
    """
    notifs: list[notify.Notification] = []

    # If it's an orphan request, perform further verification
    # regarding existing requests.
    if reqtype_id == ORPHAN_ID:
        if not verify_orphan_request(request.user, pkgbase):
            _ = l10n.get_translator_for_request(request)
            raise InvariantError(
                _("No due existing orphan requests to accept for %s.") % pkgbase.Name
            )

    # Produce a base query for requests related to `pkgbase`, based
    # on ReqTypeID matching `reqtype_id`, pending status and a correct
    # PackageBaseName column.
    query: orm.Query = pkgbase.requests.filter(
        and_(
            PackageRequest.ReqTypeID == reqtype_id,
            PackageRequest.Status == PENDING_ID,
            PackageRequest.PackageBaseName == pkgbase.Name,
        )
    )

    # Build a query for records we should accept. For merge requests,
    # this is specific to a matching MergeBaseName. For others, this
    # just ends up becoming `query`.
    accept_query: orm.Query = query
    if target:
        # If a `target` was supplied, filter by MergeBaseName
        accept_query = query.filter(PackageRequest.MergeBaseName == target.Name)

    # Build an accept list out of `accept_query`.
    to_accept: list[PackageRequest] = accept_query.all()
    accepted_ids: Set[int] = set(p.ID for p in to_accept)

    # Build a reject list out of `query` filtered by IDs not found
    # in `to_accept`. That is, unmatched records of the same base
    # query properties.
    to_reject: list[PackageRequest] = query.filter(
        ~PackageRequest.ID.in_(accepted_ids)
    ).all()

    # If we have no requests to accept, create a new one.
    # This is done to increase tracking of actions occurring
    # through the website.
    if not to_accept:
        utcnow = time.utcnow()
        with db.begin():
            pkgreq = db.create(
                PackageRequest,
                ReqTypeID=reqtype_id,
                RequestTS=utcnow,
                User=request.user,
                PackageBase=pkgbase,
                PackageBaseName=pkgbase.Name,
                Comments="Autogenerated by aurweb.",
                ClosureComment=comments,
            )

            # If it's a merge request, set MergeBaseName to `target`.Name.
            if pkgreq.ReqTypeID == MERGE_ID:
                pkgreq.MergeBaseName = target.Name

            # Add the new request to `to_accept` and allow standard
            # flow to continue afterward.
            to_accept.append(pkgreq)

    # Update requests with their new status and closures.
    # Nested so the deadlock retry wraps only the closure transaction.
    @db.retry_deadlock
    def retry_closures():
        with db.begin():
            util.apply_all(
                to_accept,
                lambda p: close_pkgreq(p, request.user, pkgbase, target, ACCEPTED_ID),
            )
            util.apply_all(
                to_reject,
                lambda p: close_pkgreq(p, request.user, pkgbase, target, REJECTED_ID),
            )

    retry_closures()

    # Create RequestCloseNotifications for all requests involved.
    for pkgreq in to_accept + to_reject:
        notif = notify.RequestCloseNotification(
            request.user.ID, pkgreq.ID, pkgreq.status_display()
        )
        notifs.append(notif)

    # Return notifications to the caller for sending.
    return notifs
|
403
aurweb/packages/search.py
Normal file
403
aurweb/packages/search.py
Normal file
|
@ -0,0 +1,403 @@
|
|||
from typing import Set
|
||||
|
||||
from sqlalchemy import and_, case, or_, orm
|
||||
|
||||
from aurweb import db, models
|
||||
from aurweb.models import Group, Package, PackageBase, User
|
||||
from aurweb.models.dependency_type import (
|
||||
CHECKDEPENDS_ID,
|
||||
DEPENDS_ID,
|
||||
MAKEDEPENDS_ID,
|
||||
OPTDEPENDS_ID,
|
||||
)
|
||||
from aurweb.models.package_comaintainer import PackageComaintainer
|
||||
from aurweb.models.package_group import PackageGroup
|
||||
from aurweb.models.package_keyword import PackageKeyword
|
||||
from aurweb.models.package_notification import PackageNotification
|
||||
from aurweb.models.package_vote import PackageVote
|
||||
from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID
|
||||
|
||||
|
||||
class PackageSearch:
    """A Package search query builder.

    Builds up an internal SQLAlchemy query (`self.query`) by chaining
    `search_by` and `sort_by` calls, then exposes it via `results()`
    and `count()`.
    """

    # A constant mapping of short to full name sort orderings.
    FULL_SORT_ORDER = {"d": "desc", "a": "asc"}

    def __init__(self, user: models.User = None):
        """Initialize the base Package/PackageBase query.

        :param user: Optional authenticated user; when given, the user's
            vote and notification records are outer-joined so that the
            "voted" (w) and "notify" (o) sort orders can be computed.
        """
        self.query = db.query(Package).join(PackageBase)

        self.user = user
        if self.user:
            self.query = self.query.join(
                PackageVote,
                and_(
                    PackageVote.PackageBaseID == PackageBase.ID,
                    PackageVote.UsersID == self.user.ID,
                ),
                isouter=True,
            ).join(
                PackageNotification,
                and_(
                    PackageNotification.PackageBaseID == PackageBase.ID,
                    PackageNotification.UserID == self.user.ID,
                ),
                isouter=True,
            )

        self.ordering = "d"

        # Setup SeB (Search By) callbacks.
        self.search_by_cb = {
            "nd": self._search_by_namedesc,
            "n": self._search_by_name,
            "b": self._search_by_pkgbase,
            "N": self._search_by_exact_name,
            "B": self._search_by_exact_pkgbase,
            "k": self._search_by_keywords,
            "m": self._search_by_maintainer,
            "c": self._search_by_comaintainer,
            "M": self._search_by_co_or_maintainer,
            "s": self._search_by_submitter,
        }

        # Setup SB (Sort By) callbacks.
        self.sort_by_cb = {
            "n": self._sort_by_name,
            "v": self._sort_by_votes,
            "p": self._sort_by_popularity,
            "w": self._sort_by_voted,
            "o": self._sort_by_notify,
            "m": self._sort_by_maintainer,
            "l": self._sort_by_last_modified,
        }

        # Join guards: each join is applied at most once even if several
        # search callbacks request it.
        self._joined_user = False
        self._joined_keywords = False
        self._joined_comaint = False

    def _join_user(self, outer: bool = True) -> orm.Query:
        """Centralized joining of a package base's maintainer."""
        if not self._joined_user:
            self.query = self.query.join(
                User, User.ID == PackageBase.MaintainerUID, isouter=outer
            )
            self._joined_user = True
        return self.query

    def _join_keywords(self) -> orm.Query:
        """Join PackageKeyword once so keyword filters can be applied."""
        if not self._joined_keywords:
            self.query = self.query.join(PackageKeyword)
            self._joined_keywords = True
        return self.query

    def _join_comaint(self, isouter: bool = False) -> orm.Query:
        """Join PackageComaintainer once.

        :param isouter: Use an outer join (needed when maintainer-less
            rows must be kept, e.g. the combined "M" search).
        """
        if not self._joined_comaint:
            self.query = self.query.join(
                PackageComaintainer,
                PackageComaintainer.PackageBaseID == PackageBase.ID,
                isouter=isouter,
            )
            self._joined_comaint = True
        return self.query

    # NOTE(review): despite the `-> orm.Query` annotations, the
    # _search_by_* callbacks all return `self` (the builder), which is
    # what `search_by` passes back to callers.

    def _search_by_namedesc(self, keywords: str) -> orm.Query:
        """Substring match against Package.Name or Package.Description."""
        self._join_user()
        self.query = self.query.filter(
            or_(
                Package.Name.like(f"%{keywords}%"),
                Package.Description.like(f"%{keywords}%"),
            )
        )
        return self

    def _search_by_name(self, keywords: str) -> orm.Query:
        """Substring match against Package.Name."""
        self._join_user()
        self.query = self.query.filter(Package.Name.like(f"%{keywords}%"))
        return self

    def _search_by_exact_name(self, keywords: str) -> orm.Query:
        """Exact match against Package.Name."""
        self._join_user()
        self.query = self.query.filter(Package.Name == keywords)
        return self

    def _search_by_pkgbase(self, keywords: str) -> orm.Query:
        """Substring match against PackageBase.Name."""
        self._join_user()
        self.query = self.query.filter(PackageBase.Name.like(f"%{keywords}%"))

        return self

    def _search_by_exact_pkgbase(self, keywords: str) -> orm.Query:
        """Exact match against PackageBase.Name."""
        self._join_user()
        self.query = self.query.filter(PackageBase.Name == keywords)
        return self

    def _search_by_keywords(self, keywords: Set[str]) -> orm.Query:
        """Match any of a set of keywords (case-insensitive).

        Grouped by Package.Name so a package matching several keywords
        appears only once.
        """
        self._join_user()
        self._join_keywords()
        keywords = set(k.lower() for k in keywords)
        self.query = self.query.filter(PackageKeyword.Keyword.in_(keywords)).group_by(
            models.Package.Name
        )

        return self

    def _search_by_maintainer(self, keywords: str) -> orm.Query:
        """Match by maintainer username; empty input selects orphans."""
        self._join_user()
        if keywords:
            self.query = self.query.filter(
                and_(User.Username == keywords, User.ID == PackageBase.MaintainerUID)
            )
        else:
            # No username given: list packages without a maintainer.
            self.query = self.query.filter(PackageBase.MaintainerUID.is_(None))
        return self

    def _search_by_comaintainer(self, keywords: str) -> orm.Query:
        """Match by comaintainer username (unknown user matches nothing)."""
        self._join_user()
        self._join_comaint()
        user = db.query(User).filter(User.Username == keywords).first()
        # UID 0 never exists, so an unknown username yields an empty result.
        uid = 0 if not user else user.ID
        self.query = self.query.filter(PackageComaintainer.UsersID == uid)
        return self

    def _search_by_co_or_maintainer(self, keywords: str) -> orm.Query:
        """Match by username as either maintainer or comaintainer."""
        self._join_user()
        self._join_comaint(True)
        user = db.query(User).filter(User.Username == keywords).first()
        uid = 0 if not user else user.ID
        self.query = self.query.filter(
            or_(PackageComaintainer.UsersID == uid, User.ID == uid)
        )
        return self

    def _search_by_submitter(self, keywords: str) -> orm.Query:
        """Match by submitter username (unknown user matches nothing)."""
        self._join_user()

        uid = 0
        user = db.query(User).filter(User.Username == keywords).first()
        if user:
            uid = user.ID

        self.query = self.query.filter(PackageBase.SubmitterUID == uid)
        return self

    def search_by(self, search_by: str, keywords: str) -> orm.Query:
        """Apply a search filter.

        :param search_by: SeB parameter key; unknown keys fall back to "nd"
        :param keywords: Search argument passed to the chosen callback
        :return: self (builder), per the callbacks above
        """
        if search_by not in self.search_by_cb:
            search_by = "nd"  # Default: Name, Description
        callback = self.search_by_cb.get(search_by)
        result = callback(keywords)
        return result

    def _sort_by_name(self, order: str):
        """Order by Package.Name."""
        column = getattr(models.Package.Name, order)
        self.query = self.query.order_by(column())
        return self

    def _sort_by_votes(self, order: str):
        """Order by vote count, then name as a tiebreaker."""
        column = getattr(models.PackageBase.NumVotes, order)
        name = getattr(models.PackageBase.Name, order)
        self.query = self.query.order_by(column(), name())
        return self

    def _sort_by_popularity(self, order: str):
        """Order by popularity, then name as a tiebreaker."""
        column = getattr(models.PackageBase.Popularity, order)
        name = getattr(models.PackageBase.Name, order)
        self.query = self.query.order_by(column(), name())
        return self

    def _sort_by_voted(self, order: str):
        """Order voted-for packages first/last (requires self.user).

        NOTE(review): dereferences self.user.ID, so this sort assumes an
        authenticated user was passed to the constructor — confirm callers
        only expose "w" to logged-in users.
        """
        # FIXME: Currently, PHP is destroying this implementation
        # in terms of performance. We should improve this; there's no
        # reason it should take _longer_.
        column = getattr(
            case([(models.PackageVote.UsersID == self.user.ID, 1)], else_=0), order
        )
        name = getattr(models.Package.Name, order)
        self.query = self.query.order_by(column(), name())
        return self

    def _sort_by_notify(self, order: str):
        """Order notified packages first/last (requires self.user).

        NOTE(review): same authenticated-user assumption as _sort_by_voted.
        """
        # FIXME: Currently, PHP is destroying this implementation
        # in terms of performance. We should improve this; there's no
        # reason it should take _longer_.
        column = getattr(
            case([(models.PackageNotification.UserID == self.user.ID, 1)], else_=0),
            order,
        )
        name = getattr(models.Package.Name, order)
        self.query = self.query.order_by(column(), name())
        return self

    def _sort_by_maintainer(self, order: str):
        """Order by maintainer username, then package name."""
        column = getattr(models.User.Username, order)
        name = getattr(models.Package.Name, order)
        self.query = self.query.order_by(column(), name())
        return self

    def _sort_by_last_modified(self, order: str):
        """Order by PackageBase.ModifiedTS, then base name."""
        column = getattr(models.PackageBase.ModifiedTS, order)
        name = getattr(models.PackageBase.Name, order)
        self.query = self.query.order_by(column(), name())
        return self

    def sort_by(self, sort_by: str, ordering: str = "d") -> orm.Query:
        """Apply a sort order.

        :param sort_by: SB parameter key; unknown keys fall back to "p"
        :param ordering: "a" (asc) or "d" (desc); unknown falls back to "d"
        :return: self (builder), per the callbacks above
        """
        if sort_by not in self.sort_by_cb:
            sort_by = "p"  # Default: Popularity
        callback = self.sort_by_cb.get(sort_by)
        if ordering not in self.FULL_SORT_ORDER:
            ordering = "d"  # Default: Descending
        ordering = self.FULL_SORT_ORDER.get(ordering)
        return callback(ordering)

    def count(self) -> int:
        """Return internal query's count."""
        return self.query.count()

    def results(self) -> orm.Query:
        """Return internal query."""
        return self.query
|
||||
|
||||
|
||||
class RPCSearch(PackageSearch):
    """A PackageSearch-derived RPC package search query builder.

    With RPC search, we need a subset of PackageSearch's handlers,
    with a few additional handlers added. So, within the RPCSearch
    constructor, we pop unneeded keys out of inherited self.search_by_cb
    and add a few more keys to it, namely: depends, makedepends,
    optdepends and checkdepends.

    Additionally, some logic within the inherited PackageSearch.search_by
    method is not needed, so it is overridden in this class without
    sanitization done for the PackageSearch `by` argument.
    """

    # Inherited SeB keys that are not valid RPC `by` parameters.
    keys_removed = ("b", "N", "B", "M")

    def __init__(self) -> "RPCSearch":
        super().__init__()

        # Fix-up inherited search_by_cb to reflect RPC-specific by params.
        # We keep: "nd", "n" and "m". We also overlay four new by params
        # on top: "depends", "makedepends", "optdepends" and "checkdepends".
        self.search_by_cb = {
            k: v
            for k, v in self.search_by_cb.items()
            if k not in RPCSearch.keys_removed
        }
        self.search_by_cb.update(
            {
                "depends": self._search_by_depends,
                "makedepends": self._search_by_makedepends,
                "optdepends": self._search_by_optdepends,
                "checkdepends": self._search_by_checkdepends,
                "provides": self._search_by_provides,
                "conflicts": self._search_by_conflicts,
                "replaces": self._search_by_replaces,
                "groups": self._search_by_groups,
            }
        )

        # We always want an optional Maintainer in the RPC.
        self._join_user()

    def _join_depends(self, dep_type_id: int) -> orm.Query:
        """Join Package with PackageDependency and filter results
        based on `dep_type_id`.

        :param dep_type_id: DependencyType ID
        :returns: PackageDependency-joined orm.Query
        """
        self.query = self.query.join(models.PackageDependency).filter(
            models.PackageDependency.DepTypeID == dep_type_id
        )
        return self.query

    def _join_relations(self, rel_type_id: int) -> orm.Query:
        """Join Package with PackageRelation and filter results
        based on `rel_type_id`.

        :param rel_type_id: RelationType ID
        :returns: PackageRelation-joined orm.Query
        """
        self.query = self.query.join(models.PackageRelation).filter(
            models.PackageRelation.RelTypeID == rel_type_id
        )
        return self.query

    def _join_groups(self) -> orm.Query:
        """Join Package with PackageGroup and Group.

        :returns: PackageGroup/Group-joined orm.Query
        """
        self.query = self.query.join(PackageGroup).join(Group)
        return self.query

    def _search_by_depends(self, keywords: str) -> "RPCSearch":
        """Exact match against depends entries."""
        self.query = self._join_depends(DEPENDS_ID).filter(
            models.PackageDependency.DepName == keywords
        )
        return self

    def _search_by_makedepends(self, keywords: str) -> "RPCSearch":
        """Exact match against makedepends entries."""
        self.query = self._join_depends(MAKEDEPENDS_ID).filter(
            models.PackageDependency.DepName == keywords
        )
        return self

    def _search_by_optdepends(self, keywords: str) -> "RPCSearch":
        """Exact match against optdepends entries."""
        self.query = self._join_depends(OPTDEPENDS_ID).filter(
            models.PackageDependency.DepName == keywords
        )
        return self

    def _search_by_checkdepends(self, keywords: str) -> "RPCSearch":
        """Exact match against checkdepends entries."""
        self.query = self._join_depends(CHECKDEPENDS_ID).filter(
            models.PackageDependency.DepName == keywords
        )
        return self

    def _search_by_provides(self, keywords: str) -> "RPCSearch":
        """Exact match against provides relations."""
        self.query = self._join_relations(PROVIDES_ID).filter(
            models.PackageRelation.RelName == keywords
        )
        return self

    def _search_by_conflicts(self, keywords: str) -> "RPCSearch":
        """Exact match against conflicts relations."""
        self.query = self._join_relations(CONFLICTS_ID).filter(
            models.PackageRelation.RelName == keywords
        )
        return self

    def _search_by_replaces(self, keywords: str) -> "RPCSearch":
        """Exact match against replaces relations."""
        self.query = self._join_relations(REPLACES_ID).filter(
            models.PackageRelation.RelName == keywords
        )
        return self

    def _search_by_groups(self, keywords: str) -> "RPCSearch":
        """Exact match against a package group name."""
        self._join_groups()
        self.query = self.query.filter(Group.Name == keywords)
        return self

    def _search_by_keywords(self, keywords: str) -> "RPCSearch":
        """Exact (single) keyword match; overrides the set-based parent."""
        self._join_keywords()
        self.query = self.query.filter(PackageKeyword.Keyword == keywords)
        return self

    def search_by(self, by: str, keywords: str) -> "RPCSearch":
        """Override inherited search_by. In this override, we reduce the
        scope of what we handle within this function. We do not set `by`
        to a default of "nd" in the RPC, as the RPC returns an error when
        incorrect `by` fields are specified.

        NOTE(review): an unknown `by` makes `callback` None and raises
        TypeError here — presumably the RPC layer validates `by` before
        calling this; confirm against the caller.

        :param by: RPC `by` argument
        :param keywords: RPC `arg` argument
        :returns: self
        """
        callback = self.search_by_cb.get(by)
        result = callback(keywords)
        return result

    def results(self) -> orm.Query:
        """Return internal query."""
        return self.query
|
253
aurweb/packages/util.py
Normal file
253
aurweb/packages/util.py
Normal file
|
@ -0,0 +1,253 @@
|
|||
from collections import defaultdict
|
||||
from http import HTTPStatus
|
||||
from typing import Tuple, Union
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
import orjson
|
||||
from fastapi import HTTPException
|
||||
from sqlalchemy import orm
|
||||
|
||||
from aurweb import config, db, models
|
||||
from aurweb.aur_redis import redis_connection
|
||||
from aurweb.models import Package
|
||||
from aurweb.models.official_provider import OFFICIAL_BASE, OfficialProvider
|
||||
from aurweb.models.package_dependency import PackageDependency
|
||||
from aurweb.models.package_relation import PackageRelation
|
||||
from aurweb.templates import register_filter
|
||||
|
||||
Providers = list[Union[PackageRelation, OfficialProvider]]
|
||||
|
||||
|
||||
def dep_extra_with_arch(dep: models.PackageDependency, annotation: str) -> str:
    """Wrap `annotation` — plus the dependency's arch, when present —
    in parentheses, e.g. "(make)" or "(make, x86_64)"."""
    parts = [annotation]
    if dep.DepArch:
        parts.append(dep.DepArch)
    joined = ", ".join(parts)
    return f"({joined})"
|
||||
|
||||
|
||||
def dep_depends_extra(dep: models.PackageDependency) -> str:
    """Regular depends entries carry no extra annotation text."""
    return ""
|
||||
|
||||
|
||||
def dep_makedepends_extra(dep: models.PackageDependency) -> str:
    """Extra display text for a makedepends entry, e.g. "(make)"."""
    return dep_extra_with_arch(dep, "make")
|
||||
|
||||
|
||||
def dep_checkdepends_extra(dep: models.PackageDependency) -> str:
    """Extra display text for a checkdepends entry, e.g. "(check)"."""
    return dep_extra_with_arch(dep, "check")
|
||||
|
||||
|
||||
def dep_optdepends_extra(dep: models.PackageDependency) -> str:
    """Extra display text for an optdepends entry, e.g. "(optional)"."""
    return dep_extra_with_arch(dep, "optional")
|
||||
|
||||
|
||||
@register_filter("dep_extra")
def dep_extra(dep: models.PackageDependency) -> str:
    """Some dependency types have extra text added to their
    display. This function provides that output. However, it
    **assumes** that the dep passed is bound to a valid one
    of: depends, makedepends, checkdepends or optdepends."""
    # Dispatch to the module-level dep_<type>_extra helper by name.
    handler = globals().get(f"dep_{dep.DependencyType.Name}_extra")
    return handler(dep)
|
||||
|
||||
|
||||
@register_filter("dep_extra_desc")
def dep_extra_desc(dep: models.PackageDependency) -> str:
    """Return dep_extra(dep), with the dependency's description
    appended after an en dash when one is set."""
    extra = dep_extra(dep)
    if dep.DepDesc:
        return extra + f" – {dep.DepDesc}"
    return extra
|
||||
|
||||
|
||||
@register_filter("pkgname_link")
def pkgname_link(pkgname: str) -> str:
    """Return a link target for `pkgname`: the AUR package page when a
    Package row exists, otherwise the official-repository search URL
    when an OfficialProvider row matches.

    NOTE(review): when neither record exists this falls through and
    implicitly returns None — confirm templates tolerate that.
    """
    record = db.query(Package).filter(Package.Name == pkgname).exists()
    if db.query(record).scalar():
        return f"/packages/{pkgname}"

    official = (
        db.query(OfficialProvider).filter(OfficialProvider.Name == pkgname).exists()
    )
    if db.query(official).scalar():
        base = "/".join([OFFICIAL_BASE, "packages"])
        return f"{base}/?q={pkgname}"
|
||||
|
||||
|
||||
@register_filter("package_link")
def package_link(package: Union[Package, OfficialProvider]) -> str:
    """Return the page URL for `package`: the official-repo search URL
    for official providers, otherwise the AUR package page."""
    if not package.is_official:
        return f"/packages/{package.Name}"
    base = "/".join([OFFICIAL_BASE, "packages"])
    return f"{base}/?q={package.Name}"
|
||||
|
||||
|
||||
@register_filter("provides_markup")
def provides_markup(provides: Providers) -> str:
    """Render `provides` as a comma-separated list of anchor tags,
    marking non-official entries with an AUR superscript."""
    rendered = []
    for pkg in provides:
        suffix = "" if pkg.is_official else "<sup><small>AUR</small></sup>"
        rendered.append(f'<a href="{package_link(pkg)}">{pkg.Name}</a>{suffix}')
    return ", ".join(rendered)
|
||||
|
||||
|
||||
def get_pkg_or_base(
    name: str, cls: Union[models.Package, models.PackageBase] = models.PackageBase
) -> Union[models.Package, models.PackageBase]:
    """Get a PackageBase instance by its name or raise a 404 if
    it can't be found in the database.

    :param name: {Package,PackageBase}.Name
    :param cls: Model class to query (Package or PackageBase;
        defaults to PackageBase)
    :raises HTTPException: With status code 404 if record doesn't exist
    :return: {Package,PackageBase} instance
    """
    instance = db.query(cls).filter(cls.Name == name).first()
    if not instance:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    return instance
|
||||
|
||||
|
||||
def get_pkgbase_comment(pkgbase: models.PackageBase, id: int) -> models.PackageComment:
    """Return the comment with ID `id` on `pkgbase`, refreshed from the
    database, or raise a 404 when no such comment exists."""
    comment = pkgbase.comments.filter(models.PackageComment.ID == id).first()
    if comment is None:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    return db.refresh(comment)
|
||||
|
||||
|
||||
@register_filter("out_of_date")
def out_of_date(packages: orm.Query) -> orm.Query:
    """Narrow `packages` to rows whose base is flagged out-of-date
    (i.e. OutOfDateTS is set)."""
    return packages.filter(models.PackageBase.OutOfDateTS.isnot(None))
|
||||
|
||||
|
||||
def updated_packages(limit: int = 0, cache_ttl: int = 600) -> list[models.Package]:
    """Return a list of dicts describing recently-updated packages,
    ordered by PackageBase.ModifiedTS in descending order, served from
    the Redis cache; the cache is populated on a miss.

    Each dict carries the keys "Name", "Version" and
    "PackageBase": {"ModifiedTS": ...}.

    :param limit: Optional record limit
    :param cache_ttl: Cache expiration time (in seconds)
    :return: A list of package dicts
    """
    redis = redis_connection()
    packages = redis.get("package_updates")
    if packages:
        # If we already have a cache, deserialize it and return.
        return orjson.loads(packages)

    query = (
        db.query(models.Package)
        .join(models.PackageBase)
        .order_by(models.PackageBase.ModifiedTS.desc())
    )

    if limit:
        query = query.limit(limit)

    # For each Package returned by the query, keep a dict containing
    # the columns we're interested in.
    packages = [
        {
            "Name": pkg.Name,
            "Version": pkg.Version,
            "PackageBase": {"ModifiedTS": pkg.PackageBase.ModifiedTS},
        }
        for pkg in query
    ]

    # Store the JSON serialization of the package_updates key into Redis.
    # Set the value and its TTL in one call: the previous set() + expire()
    # pair was not atomic, so a crash in between could leave a
    # never-expiring cache key.
    redis.set("package_updates", orjson.dumps(packages), ex=cache_ttl)

    # Return the deserialized list of packages.
    return packages
|
||||
|
||||
|
||||
def query_voted(query: list[models.Package], user: models.User) -> dict[int, bool]:
    """Produce a dictionary of package base ID keys to boolean values,
    which indicate whether or not the package base has a vote record
    related to user.

    :param query: A collection of Package models
    :param user: The user that is being notified or not
    :return: Vote state dict (PackageBase.ID: int -> bool)
    """
    # Missing keys default to False; only bases with a vote become True.
    vote_state = defaultdict(bool)
    base_ids = {pkg.PackageBaseID for pkg in query}
    votes = (
        db.query(models.PackageVote)
        .join(models.PackageBase, models.PackageBase.ID.in_(base_ids))
        .filter(models.PackageVote.UsersID == user.ID)
    )
    for record in votes:
        vote_state[record.PackageBase.ID] = True
    return vote_state
|
||||
|
||||
|
||||
def query_notified(query: list[models.Package], user: models.User) -> dict[int, bool]:
    """Produce a dictionary of package base ID keys to boolean values,
    which indicate whether or not the package base has a notification
    record related to user.

    :param query: A collection of Package models
    :param user: The user that is being notified or not
    :return: Notification state dict (PackageBase.ID: int -> bool)
    """
    # Missing keys default to False; only bases with a record become True.
    notif_state = defaultdict(bool)
    base_ids = {pkg.PackageBaseID for pkg in query}
    records = (
        db.query(models.PackageNotification)
        .join(models.PackageBase, models.PackageBase.ID.in_(base_ids))
        .filter(models.PackageNotification.UserID == user.ID)
    )
    for record in records:
        notif_state[record.PackageBase.ID] = True
    return notif_state
|
||||
|
||||
|
||||
def pkg_required(pkgname: str, provides: list[str]) -> list[PackageDependency]:
    """
    Get dependencies that match a string in `[pkgname] + provides`.

    :param pkgname: Package.Name
    :param provides: List of PackageRelation.Name
    :return: List of PackageDependency instances
    """
    # A dependency counts as "required by" when its DepName matches the
    # package itself or any name it provides.
    targets = set([pkgname] + provides)
    # NOTE(review): despite the list annotation, the lazy orm.Query is
    # returned as-is; callers iterate it.
    query = (
        db.query(PackageDependency)
        .join(Package)
        .options(orm.contains_eager(PackageDependency.Package))
        .filter(PackageDependency.DepName.in_(targets))
        .order_by(Package.Name.asc())
    )
    return query
|
||||
|
||||
|
||||
@register_filter("source_uri")
def source_uri(pkgsrc: models.PackageSource) -> Tuple[str, str]:
    """
    Produce a (text, uri) tuple out of `pkgsrc`.

    In this filter, we cover various cases:
    1. If "::" is anywhere in the Source column, split the string,
    which should produce a (text, uri), where text is before "::"
    and uri is after "::".
    2. Otherwise, if "://" is anywhere in the Source column, it's just
    some sort of URI, which we'll return verbatim as both text and uri.
    3. Otherwise, we'll return a path to the source file in a uri produced
    out of options.source_file_uri formatted with the source file and
    the package base name.

    :param pkgsrc: PackageSource instance
    :return: (text, uri) tuple
    """
    if "::" in pkgsrc.Source:
        # str.split returns a list; wrap it so every branch returns the
        # annotated Tuple[str, str].
        return tuple(pkgsrc.Source.split("::", 1))
    elif "://" in pkgsrc.Source:
        return pkgsrc.Source, pkgsrc.Source
    path = config.get("options", "source_file_uri")
    pkgbasename = quote_plus(pkgsrc.Package.PackageBase.Name)
    return pkgsrc.Source, path % (pkgsrc.Source, pkgbasename)
|
0
aurweb/pkgbase/__init__.py
Normal file
0
aurweb/pkgbase/__init__.py
Normal file
195
aurweb/pkgbase/actions.py
Normal file
195
aurweb/pkgbase/actions.py
Normal file
|
@ -0,0 +1,195 @@
|
|||
from fastapi import Request
|
||||
|
||||
from aurweb import aur_logging, db, util
|
||||
from aurweb.auth import creds
|
||||
from aurweb.models import PackageBase, User
|
||||
from aurweb.models.package_comaintainer import PackageComaintainer
|
||||
from aurweb.models.package_notification import PackageNotification
|
||||
from aurweb.models.request_type import DELETION_ID, MERGE_ID, ORPHAN_ID
|
||||
from aurweb.packages.requests import handle_request, update_closure_comment
|
||||
from aurweb.pkgbase import util as pkgbaseutil
|
||||
from aurweb.scripts import notify, popupdate
|
||||
|
||||
logger = aur_logging.get_logger(__name__)
|
||||
|
||||
|
||||
@db.retry_deadlock
def _retry_notify(user: User, pkgbase: PackageBase) -> None:
    """Create a PackageNotification for `user` on `pkgbase` inside a
    transaction, retrying on deadlock."""
    with db.begin():
        db.create(PackageNotification, PackageBase=pkgbase, User=user)
|
||||
|
||||
|
||||
def pkgbase_notify_instance(request: Request, pkgbase: PackageBase) -> None:
    """Subscribe the requesting user to `pkgbase` notifications.

    No-op when the user lacks the PKGBASE_NOTIFY credential or is
    already subscribed.
    """
    already_subscribed = db.query(
        pkgbase.notifications.filter(
            PackageNotification.UserID == request.user.ID
        ).exists()
    ).scalar()
    allowed = request.user.has_credential(creds.PKGBASE_NOTIFY)
    if allowed and not already_subscribed:
        _retry_notify(request.user, pkgbase)
|
||||
|
||||
|
||||
@db.retry_deadlock
def _retry_unnotify(notif: PackageNotification, pkgbase: PackageBase) -> None:
    """Delete `notif` inside a transaction, retrying on deadlock.

    NOTE(review): `pkgbase` is unused here — presumably kept for call-site
    symmetry with _retry_notify; confirm before removing.
    """
    with db.begin():
        db.delete(notif)
|
||||
|
||||
|
||||
def pkgbase_unnotify_instance(request: Request, pkgbase: PackageBase) -> None:
    """Unsubscribe the requesting user from `pkgbase` notifications.

    No-op when the user lacks the PKGBASE_NOTIFY credential or has no
    subscription record.
    """
    subscription = pkgbase.notifications.filter(
        PackageNotification.UserID == request.user.ID
    ).first()
    allowed = request.user.has_credential(creds.PKGBASE_NOTIFY)
    if allowed and subscription:
        _retry_unnotify(subscription, pkgbase)
|
||||
|
||||
|
||||
@db.retry_deadlock
def _retry_unflag(pkgbase: PackageBase) -> None:
    """Clear `pkgbase`'s out-of-date state (timestamp, flagger and
    comment) inside a transaction, retrying on deadlock."""
    with db.begin():
        pkgbase.OutOfDateTS = None
        pkgbase.Flagger = None
        pkgbase.FlaggerComment = str()
|
||||
|
||||
|
||||
def pkgbase_unflag_instance(request: Request, pkgbase: PackageBase) -> None:
    """Clear `pkgbase`'s out-of-date flag when the requesting user holds
    the PKGBASE_UNFLAG credential; the flagger, maintainer and all
    comaintainers are implicitly approved."""
    approved_users = [pkgbase.Flagger, pkgbase.Maintainer] + [
        c.User for c in pkgbase.comaintainers
    ]
    allowed = request.user.has_credential(
        creds.PKGBASE_UNFLAG,
        approved=approved_users,
    )
    if allowed:
        _retry_unflag(pkgbase)
|
||||
|
||||
|
||||
@db.retry_deadlock
def _retry_disown(request: Request, pkgbase: PackageBase):
    """Perform the disown mutation for `pkgbase`, retrying on deadlock.

    Three cases, by the requester's relationship to the base:
      * maintainer: promote the highest-priority comaintainer (or orphan
        the base when there is none);
      * comaintainer: remove the requester's comaintainer record;
      * otherwise, with the PKGBASE_DISOWN credential: treat it as an
        orphan request, then fully orphan the base.

    :return: Notifications produced along the way (unsent).
    """
    notifs: list[notify.Notification] = []

    is_maint = request.user == pkgbase.Maintainer

    comaint = pkgbase.comaintainers.filter(
        PackageComaintainer.User == request.user
    ).one_or_none()
    is_comaint = comaint is not None

    if is_maint:
        with db.begin():
            # Comaintainer with the lowest Priority value; next-in-line.
            prio_comaint = pkgbase.comaintainers.order_by(
                PackageComaintainer.Priority.asc()
            ).first()
            if prio_comaint:
                # If there is such a comaintainer, promote them to maint.
                pkgbase.Maintainer = prio_comaint.User
                notifs.append(pkgbaseutil.remove_comaintainer(prio_comaint))
            else:
                # Otherwise, just orphan the package completely.
                pkgbase.Maintainer = None
    elif is_comaint:
        # This disown request is from a Comaintainer
        with db.begin():
            notif = pkgbaseutil.remove_comaintainer(comaint)
            notifs.append(notif)
    elif request.user.has_credential(creds.PKGBASE_DISOWN):
        # Otherwise, the request user performing this disownage is a
        # Package Maintainer and we treat it like a standard orphan request.
        notifs += handle_request(request, ORPHAN_ID, pkgbase)
        with db.begin():
            pkgbase.Maintainer = None
            db.delete_all(pkgbase.comaintainers)

    return notifs
|
||||
|
||||
|
||||
def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None:
    """Disown `pkgbase` on behalf of the requesting user, then send a
    DisownNotification plus whatever notifications the mutation produced."""
    notifs = [notify.DisownNotification(request.user.ID, pkgbase.ID)]
    notifs.extend(_retry_disown(request, pkgbase))
    util.apply_all(notifs, lambda n: n.send())
|
||||
|
||||
|
||||
@db.retry_deadlock
def _retry_adopt(request: Request, pkgbase: PackageBase) -> None:
    """Set the requesting user as `pkgbase`'s maintainer inside a
    transaction, retrying on deadlock."""
    with db.begin():
        pkgbase.Maintainer = request.user
|
||||
|
||||
|
||||
def pkgbase_adopt_instance(request: Request, pkgbase: PackageBase) -> None:
    """Make the requesting user `pkgbase`'s maintainer and send an
    adoption notification."""
    _retry_adopt(request, pkgbase)
    notify.AdoptNotification(request.user.ID, pkgbase.ID).send()
|
||||
|
||||
|
||||
@db.retry_deadlock
def _retry_delete(pkgbase: PackageBase, comments: str) -> None:
    """Record `comments` as the closure comment on any pending deletion
    request, then delete `pkgbase`, inside one transaction (retried on
    deadlock)."""
    with db.begin():
        update_closure_comment(pkgbase, DELETION_ID, comments)
        db.delete(pkgbase)
|
||||
|
||||
|
||||
def pkgbase_delete_instance(
    request: Request, pkgbase: PackageBase, comments: str = str()
) -> list[notify.Notification]:
    """Delete `pkgbase`, resolving any related deletion requests.

    :param request: FastAPI request (provides the acting user)
    :param pkgbase: PackageBase to delete
    :param comments: Closure comment for related requests
    :return: Unsent notifications for the caller to dispatch
    """
    # Build notifications before the base's records disappear.
    deletion_notif = notify.DeleteNotification(request.user.ID, pkgbase.ID)
    notifs = handle_request(request, DELETION_ID, pkgbase, comments=comments)
    notifs.append(deletion_notif)

    _retry_delete(pkgbase, comments)

    return notifs
|
||||
|
||||
|
||||
@db.retry_deadlock
def _retry_merge(pkgbase: PackageBase, target: PackageBase) -> None:
    """Merge `pkgbase` into `target`, retrying on deadlock.

    First transaction migrates comments, plus any notifications and votes
    whose user doesn't already have one on the target, and refreshes the
    target's popularity; a second transaction then deletes the source
    base and its packages.
    """
    # Target votes and notifications sets of user IDs that are
    # looking to be migrated.
    target_votes = set(v.UsersID for v in target.package_votes)
    target_notifs = set(n.UserID for n in target.notifications)

    with db.begin():
        # Merge pkgbase's comments.
        for comment in pkgbase.comments:
            comment.PackageBase = target

        # Merge notifications that don't yet exist in the target.
        for notif in pkgbase.notifications:
            if notif.UserID not in target_notifs:
                notif.PackageBase = target

        # Merge votes that don't yet exist in the target.
        for vote in pkgbase.package_votes:
            if vote.UsersID not in target_votes:
                vote.PackageBase = target

    # Run popupdate.
    popupdate.run_single(target)

    with db.begin():
        # Delete pkgbase and its packages now that everything's merged.
        for pkg in pkgbase.packages:
            db.delete(pkg)
        db.delete(pkgbase)
|
||||
|
||||
|
||||
def pkgbase_merge_instance(
    request: Request,
    pkgbase: PackageBase,
    target: PackageBase,
    comments: str = str(),
) -> None:
    """Merge `pkgbase` into `target`, then log the action and send the
    resulting notifications.

    :param request: FastAPI request (provides the acting user)
    :param pkgbase: Source PackageBase (deleted by the merge)
    :param target: Destination PackageBase
    :param comments: Closure comment for related merge requests
    """
    # Capture the name now; the source base is gone after the merge.
    source_name = str(pkgbase.Name)

    # Create notifications.
    notifs = handle_request(request, MERGE_ID, pkgbase, target, comments)

    _retry_merge(pkgbase, target)

    # Log this out for accountability purposes.
    logger.info(
        f"Package Maintainer '{request.user.Username}' merged "
        f"'{source_name}' into '{target.Name}'."
    )

    # Send notifications.
    util.apply_all(notifs, lambda n: n.send())
|
246
aurweb/pkgbase/util.py
Normal file
246
aurweb/pkgbase/util.py
Normal file
|
@ -0,0 +1,246 @@
|
|||
from typing import Any
|
||||
|
||||
from fastapi import Request
|
||||
from sqlalchemy import and_
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
from aurweb import config, db, defaults, l10n, time, util
|
||||
from aurweb.models import PackageBase, User
|
||||
from aurweb.models.package_base import popularity
|
||||
from aurweb.models.package_comaintainer import PackageComaintainer
|
||||
from aurweb.models.package_comment import PackageComment
|
||||
from aurweb.models.package_request import PENDING_ID, PackageRequest
|
||||
from aurweb.models.package_vote import PackageVote
|
||||
from aurweb.scripts import notify
|
||||
from aurweb.templates import make_context as _make_context
|
||||
|
||||
|
||||
def make_context(
    request: Request, pkgbase: PackageBase, context: dict[str, Any] = None
) -> dict[str, Any]:
    """Make a basic context for package or pkgbase.

    :param request: FastAPI request
    :param pkgbase: PackageBase instance
    :param context: Optional pre-built context to extend; when None a new
                    one is created from the pkgbase name
    :return: A pkgbase context without specific differences
    """
    if not context:
        context = _make_context(request, pkgbase.Name)

    is_authenticated = request.user.is_authenticated()

    # Per page and offset.
    offset, per_page = util.sanitize_params(
        request.query_params.get("O", defaults.O),
        request.query_params.get("PP", defaults.COMMENTS_PER_PAGE),
    )
    context["O"] = offset
    context["PP"] = per_page
    context["git_clone_uri_anon"] = config.get("options", "git_clone_uri_anon")
    context["git_clone_uri_priv"] = config.get("options", "git_clone_uri_priv")
    context["pkgbase"] = pkgbase
    # Comaintainers ordered by ascending priority, with their User rows
    # eagerly loaded to avoid per-row lookups.
    context["comaintainers"] = [
        c.User
        for c in pkgbase.comaintainers.options(joinedload(PackageComaintainer.User))
        .order_by(PackageComaintainer.Priority.asc())
        .all()
    ]
    if is_authenticated:
        # Users allowed to unflag: comaintainers + maintainer + flagger.
        context["unflaggers"] = context["comaintainers"].copy()
        context["unflaggers"].extend([pkgbase.Maintainer, pkgbase.Flagger])
    else:
        context["unflaggers"] = []

    context["packages_count"] = pkgbase.packages.count()
    context["keywords"] = pkgbase.keywords
    context["comments_total"] = pkgbase.comments.order_by(
        PackageComment.CommentTS.desc()
    ).count()
    # Newest comments first, paginated by O/PP above.
    context["comments"] = (
        pkgbase.comments.order_by(PackageComment.CommentTS.desc())
        .limit(per_page)
        .offset(offset)
    )
    context["pinned_comments"] = pkgbase.comments.filter(
        PackageComment.PinnedTS != 0
    ).order_by(PackageComment.CommentTS.desc())

    context["is_maintainer"] = bool(request.user == pkgbase.Maintainer)
    if is_authenticated:
        context["notified"] = request.user.notified(pkgbase)
    else:
        context["notified"] = False

    context["out_of_date"] = bool(pkgbase.OutOfDateTS)

    if is_authenticated:
        # EXISTS query: has the requesting user voted for this pkgbase?
        context["voted"] = db.query(
            request.user.package_votes.filter(
                PackageVote.PackageBaseID == pkgbase.ID
            ).exists()
        ).scalar()
    else:
        context["voted"] = False

    if is_authenticated:
        context["requests"] = pkgbase.requests.filter(
            and_(PackageRequest.Status == PENDING_ID, PackageRequest.ClosedTS.is_(None))
        ).count()
    else:
        # NOTE(review): unauthenticated path uses an empty list where the
        # authenticated path uses an int count — presumably templates only
        # test truthiness here; confirm before unifying the types.
        context["requests"] = []

    context["popularity"] = popularity(pkgbase, time.utcnow())

    return context
|
||||
|
||||
|
||||
def remove_comaintainer(
    comaint: PackageComaintainer,
) -> notify.ComaintainerRemoveNotification:
    """
    Remove a PackageComaintainer.

    This function does *not* begin any database transaction and
    must be used **within** a database transaction, e.g.:

        with db.begin():
            remove_comaintainer(comaint)

    :param comaint: Target PackageComaintainer to be deleted
    :return: ComaintainerRemoveNotification
    """
    base = comaint.PackageBase
    # Build the notification before deleting the record, while the
    # relationship attributes are still accessible.
    notification = notify.ComaintainerRemoveNotification(comaint.User.ID, base.ID)
    db.delete(comaint)
    rotate_comaintainers(base)
    return notification
|
||||
|
||||
|
||||
@db.retry_deadlock
def remove_comaintainers(pkgbase: PackageBase, usernames: list[str]) -> None:
    """
    Remove comaintainers from `pkgbase`.

    Deletion and priority rotation each run in their own transaction;
    notifications are sent only after both transactions complete.

    :param pkgbase: PackageBase instance
    :param usernames: Iterable of username strings
    """
    notifications = []
    with db.begin():
        comaintainers = (
            pkgbase.comaintainers.join(User).filter(User.Username.in_(usernames)).all()
        )
        # Build notifications before the rows are deleted.
        notifications = [
            notify.ComaintainerRemoveNotification(co.User.ID, pkgbase.ID)
            for co in comaintainers
        ]
        db.delete_all(comaintainers)

    # Rotate comaintainer priority values.
    with db.begin():
        rotate_comaintainers(pkgbase)

    # Send out notifications.
    util.apply_all(notifications, lambda n: n.send())
|
||||
|
||||
|
||||
def latest_priority(pkgbase: PackageBase) -> int:
    """
    Return the highest Priority column related to `pkgbase`.

    :param pkgbase: PackageBase instance
    :return: Highest Priority found or 0 if no records exist
    """
    # Take the single comaintainer with the largest Priority, if any.
    top = pkgbase.comaintainers.order_by(PackageComaintainer.Priority.desc()).first()
    if top is None:
        return 0
    return top.Priority
|
||||
|
||||
|
||||
class NoopComaintainerNotification:
    """A do-nothing notification stub used as an error-state return value."""

    def send(self) -> None:
        """Intentionally perform no work."""
        return None
|
||||
|
||||
|
||||
@db.retry_deadlock
def add_comaintainer(
    pkgbase: PackageBase, comaintainer: User
) -> notify.ComaintainerAddNotification:
    """
    Add a new comaintainer to `pkgbase`.

    :param pkgbase: PackageBase instance
    :param comaintainer: User instance used for new comaintainer record
    :return: ComaintainerAddNotification, or a NoopComaintainerNotification
             when `comaintainer` is already the maintainer
    """
    # Skip given `comaintainers` who are already maintainer.
    if pkgbase.Maintainer == comaintainer:
        return NoopComaintainerNotification()

    # Priority for the new comaintainer is +1 more than the highest.
    new_prio = latest_priority(pkgbase) + 1

    with db.begin():
        db.create(
            PackageComaintainer,
            PackageBase=pkgbase,
            User=comaintainer,
            Priority=new_prio,
        )

    return notify.ComaintainerAddNotification(comaintainer.ID, pkgbase.ID)
|
||||
|
||||
|
||||
def add_comaintainers(
    request: Request, pkgbase: PackageBase, usernames: list[str]
) -> "str | None":
    """
    Add comaintainers to `pkgbase`.

    :param request: FastAPI request
    :param pkgbase: PackageBase instance
    :param usernames: Iterable of username strings
    :return: Error string on failure else None
    """
    # For each username in usernames, perform validation of the username
    # and append the User record to `users` if no errors occur.
    users = []
    for username in usernames:
        user = db.query(User).filter(User.Username == username).first()
        if not user:
            # A bad username aborts the whole operation before any
            # comaintainer is added.
            _ = l10n.get_translator_for_request(request)
            return _("Invalid user name: %s") % username
        users.append(user)

    notifications = []

    def add_comaint(user: User):
        nonlocal notifications
        # Populate `notifications` with add_comaintainer's return value,
        # which is a ComaintainerAddNotification.
        notifications.append(add_comaintainer(pkgbase, user))

    # Move along: add all `users` as new `pkgbase` comaintainers.
    util.apply_all(users, add_comaint)

    # Send out notifications.
    util.apply_all(notifications, lambda n: n.send())
|
||||
|
||||
|
||||
def rotate_comaintainers(pkgbase: PackageBase) -> None:
    """
    Rotate `pkgbase` comaintainers.

    Resets the Priority column of every PackageComaintainer related to
    `pkgbase` to sequential 1 .. n values, keeping their existing order.

    :param pkgbase: PackageBase instance
    """
    ordered = pkgbase.comaintainers.order_by(PackageComaintainer.Priority.asc())
    next_prio = 1
    for comaint in ordered:
        comaint.Priority = next_prio
        next_prio += 1
|
55
aurweb/pkgbase/validate.py
Normal file
55
aurweb/pkgbase/validate.py
Normal file
|
@ -0,0 +1,55 @@
|
|||
from http import HTTPStatus
|
||||
from typing import Any
|
||||
|
||||
from fastapi import HTTPException
|
||||
|
||||
from aurweb import config, db
|
||||
from aurweb.exceptions import ValidationError
|
||||
from aurweb.models import PackageBase
|
||||
|
||||
|
||||
def request(
    pkgbase: PackageBase,
    type: str,
    comments: str,
    merge_into: str,
    context: dict[str, Any],
) -> None:
    """Validate a package request submission.

    Always validates `comments`; for merge requests, additionally checks
    that a merge target is given, exists, and is not `pkgbase` itself.

    :param pkgbase: PackageBase the request is filed against
    :param type: Request type string (e.g. "merge")
    :param comments: User-supplied request comments
    :param merge_into: Target pkgbase name for merge requests
    :param context: Template context (currently unused here — TODO confirm)
    :raises ValidationError: When any check fails
    """
    # validate comment
    comment(comments)

    if type == "merge":
        # Perform merge-related checks.
        if not merge_into:
            # TODO: This error needs to be translated.
            raise ValidationError(['The "Merge into" field must not be empty.'])

        target = db.query(PackageBase).filter(PackageBase.Name == merge_into).first()
        if not target:
            # TODO: This error needs to be translated.
            raise ValidationError(
                ["The package base you want to merge into does not exist."]
            )

        db.refresh(target)
        if target.ID == pkgbase.ID:
            # TODO: This error needs to be translated.
            raise ValidationError(["You cannot merge a package base into itself."])
|
||||
|
||||
|
||||
def comment(comment: str):
    """Validate a request/comment body.

    :param comment: Comment text to validate
    :raises ValidationError: If empty or longer than the configured
                             max_chars_comment limit (default 5000)
    """
    if not comment:
        raise ValidationError(["The comment field must not be empty."])

    max_length = config.getint("options", "max_chars_comment", 5000)
    if len(comment) > max_length:
        raise ValidationError(["Maximum number of characters for comment exceeded."])
|
||||
|
||||
|
||||
def comment_raise_http_ex(comments: str):
    """Validate `comments`, converting ValidationError into HTTP 400.

    :param comments: Comment text to validate
    :raises HTTPException: 400 Bad Request carrying the first error message
    """
    try:
        comment(comments)
    except ValidationError as err:
        first_error = err.data[0]
        raise HTTPException(
            status_code=HTTPStatus.BAD_REQUEST,
            detail=first_error,
        )
|
143
aurweb/prometheus.py
Normal file
143
aurweb/prometheus.py
Normal file
|
@ -0,0 +1,143 @@
|
|||
from typing import Any, Callable, Optional
|
||||
|
||||
from prometheus_client import Counter, Gauge
|
||||
from prometheus_fastapi_instrumentator import Instrumentator
|
||||
from prometheus_fastapi_instrumentator.metrics import Info
|
||||
from starlette.routing import Match, Route
|
||||
|
||||
from aurweb import aur_logging
|
||||
|
||||
# Module-level logger and the shared FastAPI instrumentator instance.
logger = aur_logging.get_logger(__name__)
_instrumentator = Instrumentator()


# Custom metrics
# Search requests, labelled by Redis cache "hit"/"miss".
SEARCH_REQUESTS = Counter(
    "aur_search_requests", "Number of search requests by cache hit/miss", ["cache"]
)
# Gauges use multiprocess_mode="livemax" so that, with multiple worker
# processes, the exported value is the max across live processes.
USERS = Gauge(
    "aur_users", "Number of AUR users by type", ["type"], multiprocess_mode="livemax"
)
PACKAGES = Gauge(
    "aur_packages",
    "Number of AUR packages by state",
    ["state"],
    multiprocess_mode="livemax",
)
REQUESTS = Gauge(
    "aur_requests",
    "Number of AUR requests by type and status",
    ["type", "status"],
    multiprocess_mode="livemax",
)
|
||||
|
||||
|
||||
def instrumentator() -> Instrumentator:
    """Return the module-level Instrumentator singleton."""
    return _instrumentator
|
||||
|
||||
|
||||
# FastAPI metrics
|
||||
# Taken from https://github.com/stephenhillier/starlette_exporter
|
||||
# Their license is included in LICENSES/starlette_exporter.
|
||||
# The code has been modified to remove child route checks
|
||||
# (since we don't have any) and to stay within an 80-width limit.
|
||||
# FastAPI metrics
# Taken from https://github.com/stephenhillier/starlette_exporter
# Their license is included in LICENSES/starlette_exporter.
# The code has been modified to remove child route checks
# (since we don't have any) and to stay within an 80-width limit.
def get_matching_route_path(
    scope: dict[Any, Any], routes: list[Route], route_name: Optional[str] = None
) -> str:
    """
    Find a matching route and return its original path string

    Will attempt to enter mounted routes and subrouters.

    Credit to https://github.com/elastic/apm-agent-python

    NOTE(review): when no route matches fully, this falls off the loop and
    implicitly returns None (or the last PARTIAL match's path if one was
    seen) — callers appear to tolerate that; confirm before changing.
    """
    for route in routes:
        match, child_scope = route.matches(scope)
        if match == Match.FULL:
            route_name = route.path

            """
            # This path exists in the original function's code, but we
            # don't need it (currently), so it's been removed to avoid
            # useless test coverage.
            child_scope = {**scope, **child_scope}
            if isinstance(route, Mount) and route.routes:
                child_route_name = get_matching_route_path(child_scope,
                                                           route.routes,
                                                           route_name)
                if child_route_name is None:
                    route_name = None
                else:
                    route_name += child_route_name
            """

            return route_name
        elif match == Match.PARTIAL and route_name is None:
            # Remember the first partial match as a fallback candidate.
            route_name = route.path
|
||||
|
||||
|
||||
def http_requests_total() -> Callable[[Info], None]:
    """Build an instrumentation callback counting HTTP requests.

    The returned closure increments an `http_requests_total` Counter
    labelled by method, matched route path and status class (e.g. "2xx").
    """
    metric = Counter(
        "http_requests_total",
        "Number of HTTP requests.",
        labelnames=("method", "path", "status"),
    )

    def instrumentation(info: Info) -> None:
        # Skip HEAD/OPTIONS traffic entirely.
        if info.request.method.lower() in ("head", "options"):  # pragma: no cover
            return

        scope = info.request.scope

        # Taken from https://github.com/stephenhillier/starlette_exporter
        # Their license is included at LICENSES/starlette_exporter.
        # The code has been slightly modified: we no longer catch
        # exceptions; we expect this collector to always succeed.
        # Failures in this collector shall cause test failures.
        if not (scope.get("endpoint", None) and scope.get("router", None)):
            return None

        root_path = scope.get("root_path", str())
        app = scope.get("app", dict())

        # Strip the application's own root_path prefix, if present.
        if hasattr(app, "root_path"):
            app_root_path = getattr(app, "root_path")
            if root_path.startswith(app_root_path):
                root_path = root_path[len(app_root_path) :]

        # Minimal scope used only for route matching below.
        base_scope = {
            "type": scope.get("type"),
            "path": root_path + scope.get("path"),
            "path_params": scope.get("path_params", {}),
            "method": scope.get("method"),
        }

        method = scope.get("method")
        path = get_matching_route_path(base_scope, scope.get("router").routes)

        if info.response:
            # Collapse the status code into its class, e.g. 404 -> "4xx".
            status = str(int(info.response.status_code))[:1] + "xx"
            metric.labels(method=method, path=path, status=status).inc()

    return instrumentation
|
||||
|
||||
|
||||
def http_api_requests_total() -> Callable[[Info], None]:
    """Build an instrumentation callback counting RPC API requests.

    The returned closure increments an `http_api_requests` Counter,
    labelled by the RPC `type` query parameter and the status class,
    for requests hitting the /rpc endpoint.
    """
    metric = Counter(
        "http_api_requests",
        "Number of times an RPC API type has been requested.",
        labelnames=("type", "status"),
    )

    def instrumentation(info: Info) -> None:
        # Skip HEAD/OPTIONS traffic entirely.
        if info.request.method.lower() in ("head", "options"):  # pragma: no cover
            return

        # Only count the /rpc endpoint (with or without trailing slash).
        if info.request.url.path.rstrip("/") == "/rpc":
            type = info.request.query_params.get("type", "None")
            if info.response:
                # Collapse the status code into its class, e.g. 200 -> "2xx".
                status = str(info.response.status_code)[:1] + "xx"
                metric.labels(type=type, status=status).inc()

    return instrumentation
|
117
aurweb/ratelimit.py
Normal file
117
aurweb/ratelimit.py
Normal file
|
@ -0,0 +1,117 @@
|
|||
from fastapi import Request
|
||||
from redis.client import Pipeline
|
||||
|
||||
from aurweb import aur_logging, config, db, time
|
||||
from aurweb.aur_redis import redis_connection
|
||||
from aurweb.models import ApiRateLimit
|
||||
from aurweb.util import get_client_ip
|
||||
|
||||
logger = aur_logging.get_logger(__name__)
|
||||
|
||||
|
||||
def _update_ratelimit_redis(request: Request, pipeline: Pipeline):
    """Track the client's request count in Redis.

    A per-host window-start key (`ratelimit-ws:<host>`) and request
    counter key (`ratelimit:<host>`) are kept; both expire after the
    configured window_length. When the stored window has aged past the
    window, both keys are reset; otherwise the counter is incremented.

    :param request: FastAPI request
    :param pipeline: redis.client.Pipeline used for all commands
    """
    window_length = config.getint("ratelimit", "window_length")
    now = time.utcnow()
    time_to_delete = now - window_length

    host = get_client_ip(request)
    window_key = f"ratelimit-ws:{host}"
    requests_key = f"ratelimit:{host}"

    pipeline.get(window_key)
    window = pipeline.execute()[0]

    if not window or int(window.decode()) < time_to_delete:
        # Window missing or expired: start a fresh window and counter.
        pipeline.set(window_key, now)
        pipeline.expire(window_key, window_length)

        pipeline.set(requests_key, 1)
        pipeline.expire(requests_key, window_length)

        pipeline.execute()
    else:
        # Window still active: just bump the request counter.
        pipeline.incr(requests_key)
        pipeline.execute()
|
||||
|
||||
|
||||
def _update_ratelimit_db(request: Request):
    """Track the client's request count in the database.

    Expired ApiRateLimit rows (older than the configured window) are
    purged, then the requesting host's record is created or its
    Requests counter incremented. Both writes retry on deadlock.

    :param request: FastAPI request
    :return: The host's ApiRateLimit record
    """
    window_length = config.getint("ratelimit", "window_length")
    now = time.utcnow()
    time_to_delete = now - window_length

    @db.retry_deadlock
    def retry_delete(records: list[ApiRateLimit]) -> None:
        # Purge all expired window records in one transaction.
        with db.begin():
            db.delete_all(records)

    records = db.query(ApiRateLimit).filter(ApiRateLimit.WindowStart < time_to_delete)
    retry_delete(records)

    @db.retry_deadlock
    def retry_create(record: ApiRateLimit, now: int, host: str) -> ApiRateLimit:
        # Create the host's record on first sight, else bump its counter.
        with db.begin():
            if not record:
                record = db.create(ApiRateLimit, WindowStart=now, IP=host, Requests=1)
            else:
                record.Requests += 1
        return record

    host = get_client_ip(request)
    record = db.query(ApiRateLimit, ApiRateLimit.IP == host).first()
    record = retry_create(record, now, host)

    logger.debug(record.Requests)
    return record
|
||||
|
||||
|
||||
def update_ratelimit(request: Request, pipeline: Pipeline):
    """Update the ratelimit stored in Redis or the database depending
    on AUR_CONFIG's [ratelimit] cache setting.

    This Redis-capable function is slightly different than most. If Redis
    is not configured to use a real server, this function instead uses
    the database to persist tracking of a particular host.

    :param request: FastAPI request
    :param pipeline: redis.client.Pipeline
    :returns: ApiRateLimit record when Redis cache is not configured, else None
    """
    use_redis_cache = config.getboolean("ratelimit", "cache")
    if use_redis_cache:
        return _update_ratelimit_redis(request, pipeline)
    return _update_ratelimit_db(request)
|
||||
|
||||
|
||||
def check_ratelimit(request: Request):
    """Increment and check to see if request has exceeded their rate limit.

    :param request: FastAPI request
    :returns: True if the request host has exceeded the rate limit else False
    """
    redis = redis_connection()
    pipeline = redis.pipeline()

    # May return an ApiRateLimit record (DB path) or None (Redis path).
    record = update_ratelimit(request, pipeline)

    # Get cache value, else None.
    host = get_client_ip(request)
    pipeline.get(f"ratelimit:{host}")
    requests = pipeline.execute()[0]

    # Take into account the split paths. When Redis is used, a
    # valid cache value will be returned which must be converted
    # to an int. Otherwise, use the database record returned
    # by update_ratelimit.
    if not config.getboolean("ratelimit", "cache") or requests is None:
        # If we got nothing from pipeline.get, we did not use
        # the Redis path of logic: use the DB record's count.
        requests = record.Requests
    else:
        # Otherwise, just cast Redis results over to an int.
        requests = int(requests.decode())

    limit = config.getint("ratelimit", "request_limit")
    exceeded_ratelimit = requests > limit
    if exceeded_ratelimit:
        logger.debug(f"{host} has exceeded the ratelimit.")

    return exceeded_ratelimit
|
0
aurweb/requests/__init__.py
Normal file
0
aurweb/requests/__init__.py
Normal file
13
aurweb/requests/util.py
Normal file
13
aurweb/requests/util.py
Normal file
|
@ -0,0 +1,13 @@
|
|||
from http import HTTPStatus
|
||||
|
||||
from fastapi import HTTPException
|
||||
|
||||
from aurweb import db
|
||||
from aurweb.models import PackageRequest
|
||||
|
||||
|
||||
def get_pkgreq_by_id(id: int) -> PackageRequest:
    """Fetch a PackageRequest by its ID.

    :param id: PackageRequest.ID to look up
    :raises HTTPException: 404 Not Found when no such request exists
    :return: Refreshed PackageRequest record
    """
    record = db.query(PackageRequest).filter(PackageRequest.ID == id).first()
    if record is None:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)
    return db.refresh(record)
|
36
aurweb/routers/__init__.py
Normal file
36
aurweb/routers/__init__.py
Normal file
|
@ -0,0 +1,36 @@
|
|||
"""
|
||||
API routers for FastAPI.
|
||||
|
||||
See https://fastapi.tiangolo.com/tutorial/bigger-applications/
|
||||
"""
|
||||
|
||||
from . import (
|
||||
accounts,
|
||||
auth,
|
||||
html,
|
||||
package_maintainer,
|
||||
packages,
|
||||
pkgbase,
|
||||
requests,
|
||||
rpc,
|
||||
rss,
|
||||
sso,
|
||||
)
|
||||
|
||||
"""
|
||||
aurweb application routes. This constant can be any iterable
|
||||
and each element must have a .router attribute which points
|
||||
to a fastapi.APIRouter.
|
||||
"""
|
||||
APP_ROUTES = [
|
||||
accounts,
|
||||
auth,
|
||||
html,
|
||||
packages,
|
||||
pkgbase,
|
||||
requests,
|
||||
package_maintainer,
|
||||
rss,
|
||||
rpc,
|
||||
sso,
|
||||
]
|
776
aurweb/routers/accounts.py
Normal file
776
aurweb/routers/accounts.py
Normal file
|
@ -0,0 +1,776 @@
|
|||
import copy
|
||||
import typing
|
||||
from http import HTTPStatus
|
||||
from typing import Any
|
||||
|
||||
from fastapi import APIRouter, Form, HTTPException, Request
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||
from sqlalchemy import and_, or_
|
||||
|
||||
import aurweb.config
|
||||
from aurweb import aur_logging, db, l10n, models, util
|
||||
from aurweb.auth import account_type_required, creds, requires_auth, requires_guest
|
||||
from aurweb.captcha import get_captcha_salts
|
||||
from aurweb.exceptions import ValidationError, handle_form_exceptions
|
||||
from aurweb.l10n import get_translator_for_request
|
||||
from aurweb.models import account_type as at
|
||||
from aurweb.models.ssh_pub_key import get_fingerprint
|
||||
from aurweb.models.user import generate_resetkey
|
||||
from aurweb.scripts.notify import ResetKeyNotification, WelcomeNotification
|
||||
from aurweb.templates import make_context, make_variable_context, render_template
|
||||
from aurweb.users import update, validate
|
||||
from aurweb.users.util import get_user_by_name
|
||||
|
||||
router = APIRouter()
|
||||
logger = aur_logging.get_logger(__name__)
|
||||
|
||||
|
||||
@router.get("/passreset", response_class=HTMLResponse)
|
||||
@requires_guest
|
||||
async def passreset(request: Request):
|
||||
context = await make_variable_context(request, "Password Reset")
|
||||
return render_template(request, "passreset.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/passreset", response_class=HTMLResponse)
@handle_form_exceptions
@requires_guest
async def passreset_post(
    request: Request,
    user: str = Form(...),
    resetkey: str = Form(default=None),
    password: str = Form(default=None),
    confirm: str = Form(default=None),
):
    """Handle the password reset form.

    Without `resetkey`: generate and store a ResetKey for the matched
    user, e-mail it out, and redirect to ?step=confirm. With `resetkey`:
    validate the key and new password, update the password, clear the
    key and any session, and redirect to ?step=complete.
    """
    context = await make_variable_context(request, "Password Reset")

    # The user parameter being required, we can match against
    # either the Username or the Email column with it.
    criteria = or_(models.User.Username == user, models.User.Email == user)
    db_user = db.query(models.User, and_(criteria, models.User.Suspended == 0)).first()
    if db_user is None:
        context["errors"] = ["Invalid e-mail."]
        return render_template(
            request, "passreset.html", context, status_code=HTTPStatus.NOT_FOUND
        )

    db.refresh(db_user)
    if resetkey:
        context["resetkey"] = resetkey

        # A missing or mismatched key gets the same generic error as an
        # unknown user, to avoid leaking which part was wrong.
        if not db_user.ResetKey or resetkey != db_user.ResetKey:
            context["errors"] = ["Invalid e-mail."]
            return render_template(
                request, "passreset.html", context, status_code=HTTPStatus.NOT_FOUND
            )

        if not user or not password:
            context["errors"] = ["Missing a required field."]
            return render_template(
                request, "passreset.html", context, status_code=HTTPStatus.BAD_REQUEST
            )

        if password != confirm:
            # If the provided password does not match the provided confirm.
            context["errors"] = ["Password fields do not match."]
            return render_template(
                request, "passreset.html", context, status_code=HTTPStatus.BAD_REQUEST
            )

        if len(password) < models.User.minimum_passwd_length():
            # Translate the error here, which simplifies error output
            # in the jinja2 template.
            _ = get_translator_for_request(request)
            context["errors"] = [
                _("Your password must be at least %s characters.")
                % (str(models.User.minimum_passwd_length()))
            ]
            return render_template(
                request, "passreset.html", context, status_code=HTTPStatus.BAD_REQUEST
            )

        # We got to this point; everything matched up. Update the password
        # and remove the ResetKey.
        with db.begin():
            db_user.ResetKey = str()
            if db_user.session:
                db.delete(db_user.session)
            db_user.update_password(password)

        # Render ?step=complete.
        return RedirectResponse(
            url="/passreset?step=complete", status_code=HTTPStatus.SEE_OTHER
        )

    # If we got here, we continue with issuing a resetkey for the user.
    resetkey = generate_resetkey()
    with db.begin():
        db_user.ResetKey = resetkey

    ResetKeyNotification(db_user.ID).send()

    # Render ?step=confirm.
    return RedirectResponse(
        url="/passreset?step=confirm", status_code=HTTPStatus.SEE_OTHER
    )
|
||||
|
||||
|
||||
def process_account_form(request: Request, user: models.User, args: dict[str, Any]):
    """Process an account form. All fields are optional and only checks
    requirements in the case they are present.

    ```
    context = await make_variable_context(request, "Accounts")
    ok, errors = process_account_form(request, user, **kwargs)
    if not ok:
        context["errors"] = errors
        return render_template(request, "some_account_template.html", context)
    ```

    :param request: An incoming FastAPI request
    :param user: The user model of the account being processed
    :param args: A dictionary of arguments generated via request.form()
    :return: A (passed processing boolean, list of errors) tuple
    """

    # Get a local translator.
    _ = get_translator_for_request(request)

    # Validators run in order; each raises ValidationError on failure.
    checks = [
        validate.is_banned,
        validate.invalid_user_password,
        validate.invalid_fields,
        validate.invalid_suspend_permission,
        validate.invalid_username,
        validate.invalid_password,
        validate.invalid_email,
        validate.invalid_backup_email,
        validate.invalid_homepage,
        validate.invalid_pgp_key,
        validate.invalid_ssh_pubkey,
        validate.invalid_language,
        validate.invalid_timezone,
        validate.username_in_use,
        validate.email_in_use,
        validate.invalid_account_type,
        validate.invalid_captcha,
    ]

    try:
        for check in checks:
            check(**args, request=request, user=user, _=_)
    except ValidationError as exc:
        # First failing check short-circuits the remaining ones.
        return False, exc.data

    return True, []
|
||||
|
||||
|
||||
def make_account_form_context(
    context: dict, request: Request, user: models.User, args: dict
):
    """Modify a FastAPI context and add attributes for the account form.

    Form values in `args` take precedence; fields fall back to the
    target user's stored values when authenticated, or to blank/default
    values for guests (e.g. registration).

    :param context: FastAPI context
    :param request: FastAPI request
    :param user: Target user
    :param args: Persistent arguments: request.form()
    :return: FastAPI context adjusted for account form
    """
    # Do not modify the original context.
    context = copy.copy(context)

    # Only offer account types at or below the requesting user's own level.
    context["account_types"] = list(
        filter(
            lambda e: request.user.AccountTypeID >= e[0],
            [
                (at.USER_ID, f"Normal {at.USER}"),
                (at.PACKAGE_MAINTAINER_ID, at.PACKAGE_MAINTAINER),
                (at.DEVELOPER_ID, at.DEVELOPER),
                (at.PACKAGE_MAINTAINER_AND_DEV_ID, at.PACKAGE_MAINTAINER_AND_DEV),
            ],
        )
    )

    if request.user.is_authenticated():
        # Pre-fill each field from the form args, falling back to the
        # target user's stored values.
        context["username"] = args.get("U", user.Username)
        context["account_type"] = args.get("T", user.AccountType.ID)
        context["suspended"] = args.get("S", user.Suspended)
        context["email"] = args.get("E", user.Email)
        context["hide_email"] = args.get("H", user.HideEmail)
        context["backup_email"] = args.get("BE", user.BackupEmail)
        context["realname"] = args.get("R", user.RealName)
        context["homepage"] = args.get("HP", user.Homepage or str())
        context["ircnick"] = args.get("I", user.IRCNick)
        context["pgp"] = args.get("K", user.PGPKey or str())
        context["lang"] = args.get("L", user.LangPreference)
        context["tz"] = args.get("TZ", user.Timezone)
        ssh_pks = [pk.PubKey for pk in user.ssh_pub_keys]
        context["ssh_pks"] = args.get("PK", ssh_pks)
        context["cn"] = args.get("CN", user.CommentNotify)
        context["un"] = args.get("UN", user.UpdateNotify)
        context["on"] = args.get("ON", user.OwnershipNotify)
        context["hdc"] = args.get("HDC", user.HideDeletedComments)
        context["inactive"] = args.get("J", user.InactivityTS != 0)
    else:
        # Guest (e.g. registration): blank/default values.
        context["username"] = args.get("U", str())
        context["account_type"] = args.get("T", at.USER_ID)
        context["suspended"] = args.get("S", False)
        context["email"] = args.get("E", str())
        context["hide_email"] = args.get("H", False)
        context["backup_email"] = args.get("BE", str())
        context["realname"] = args.get("R", str())
        context["homepage"] = args.get("HP", str())
        context["ircnick"] = args.get("I", str())
        context["pgp"] = args.get("K", str())
        context["lang"] = args.get("L", context.get("language"))
        context["tz"] = args.get("TZ", context.get("timezone"))
        context["ssh_pks"] = args.get("PK", str())
        context["cn"] = args.get("CN", True)
        context["un"] = args.get("UN", False)
        context["on"] = args.get("ON", True)
        context["hdc"] = args.get("HDC", False)
        context["inactive"] = args.get("J", False)

    context["password"] = args.get("P", str())
    context["confirm"] = args.get("C", str())

    return context
|
||||
|
||||
|
||||
@router.get("/register", response_class=HTMLResponse)
|
||||
@requires_guest
|
||||
async def account_register(
|
||||
request: Request,
|
||||
U: str = Form(default=str()), # Username
|
||||
E: str = Form(default=str()), # Email
|
||||
H: str = Form(default=False), # Hide Email
|
||||
BE: str = Form(default=None), # Backup Email
|
||||
R: str = Form(default=None), # Real Name
|
||||
HP: str = Form(default=None), # Homepage
|
||||
I: str = Form(default=None), # IRC Nick
|
||||
K: str = Form(default=None), # PGP Key FP
|
||||
L: str = Form(default=aurweb.config.get("options", "default_lang")),
|
||||
TZ: str = Form(default=aurweb.config.get("options", "default_timezone")),
|
||||
PK: str = Form(default=None),
|
||||
CN: bool = Form(default=False), # Comment Notify
|
||||
CU: bool = Form(default=False), # Update Notify
|
||||
CO: bool = Form(default=False), # Owner Notify
|
||||
HDC: bool = Form(default=False), # Hide Deleted Comments
|
||||
captcha: str = Form(default=str()),
|
||||
):
|
||||
context = await make_variable_context(request, "Register")
|
||||
context["captcha_salt"] = get_captcha_salts()[0]
|
||||
context = make_account_form_context(context, request, None, dict())
|
||||
return render_template(request, "register.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/register", response_class=HTMLResponse)
@handle_form_exceptions
@requires_guest
async def account_register_post(
    request: Request,
    U: str = Form(default=str()),  # Username
    E: str = Form(default=str()),  # Email
    H: str = Form(default=False),  # Hide Email
    BE: str = Form(default=None),  # Backup Email
    R: str = Form(default=""),  # Real Name
    HP: str = Form(default=None),  # Homepage
    I: str = Form(default=None),  # IRC Nick
    K: str = Form(default=None),  # PGP Key
    L: str = Form(default=aurweb.config.get("options", "default_lang")),
    TZ: str = Form(default=aurweb.config.get("options", "default_timezone")),
    PK: str = Form(default=str()),  # SSH PubKey
    CN: bool = Form(default=False),  # Comment Notify
    UN: bool = Form(default=False),  # Update Notify
    ON: bool = Form(default=False),  # Ownership Notify
    HDC: bool = Form(default=False),  # Hide Deleted Comments
    captcha: str = Form(default=None),
    captcha_salt: str = Form(...),
):
    """Process a registration submission.

    Validates the form via process_account_form, then creates the User
    (passwordless, carrying a fresh ResetKey) plus any submitted SSH
    public keys inside one transaction, and finally sends a
    WelcomeNotification. Returns the register template, either with
    errors (HTTP 400) or with context["complete"] set.
    """
    context = await make_variable_context(request, "Register")
    args = dict(await request.form())
    # Strip spaces out of the PGP fingerprint before validation.
    args["K"] = args.get("K", str()).replace(" ", "")
    K = args.get("K")

    # Force "H" into a boolean.
    args["H"] = H = args.get("H", str()) == "on"

    context = make_account_form_context(context, request, None, args)
    ok, errors = process_account_form(request, request.user, args)
    if not ok:
        # If the field values given do not meet the requirements,
        # return HTTP 400 with an error.
        context["errors"] = errors
        return render_template(
            request, "register.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    # Only presence of the captcha is checked here; the answer itself is
    # validated as part of process_account_form above.
    if not captcha:
        context["errors"] = ["The CAPTCHA is missing."]
        return render_template(
            request, "register.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    # Create a user with no password with a resetkey, then send
    # an email off about it.
    resetkey = generate_resetkey()

    # By default, we grab the User account type to associate with.
    atype = db.query(
        models.AccountType, models.AccountType.AccountType == "User"
    ).first()

    # Create a user given all parameters available.
    with db.begin():
        user = db.create(
            models.User,
            Username=U,
            Email=E,
            HideEmail=H,
            BackupEmail=BE,
            RealName=R,
            Homepage=HP,
            IRCNick=I,
            PGPKey=K,
            LangPreference=L,
            Timezone=TZ,
            CommentNotify=CN,
            UpdateNotify=UN,
            OwnershipNotify=ON,
            HideDeletedComments=HDC,
            ResetKey=resetkey,
            AccountType=atype,
        )

        # If a PK was given and either one does not exist or the given
        # PK mismatches the existing user's SSHPubKey.PubKey.
        if PK:
            # Get the second element in the PK, which is the actual key.
            keys = util.parse_ssh_keys(PK.strip())
            for k in keys:
                pk = " ".join(k)
                fprint = get_fingerprint(pk)
                db.create(models.SSHPubKey, User=user, PubKey=pk, Fingerprint=fprint)

    # Send a reset key notification to the new user.
    WelcomeNotification(user.ID).send()

    context["complete"] = True
    context["user"] = user
    return render_template(request, "register.html", context)
|
||||
|
||||
|
||||
def cannot_edit(
    request: Request, user: models.User
) -> typing.Optional[RedirectResponse]:
    """
    Decide if `request.user` cannot edit `user`.

    If the request user can edit the target user, None is returned.
    Otherwise, a redirect is returned to /account/{user.Username}.

    :param request: FastAPI request
    :param user: Target user to be edited
    :return: RedirectResponse if approval != granted else None
    """
    # raise 404 if user does not exist
    if not user:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)

    # Past the guard above, `user` is always truthy, so a denial
    # always redirects to the target user's account page.
    if request.user.can_edit_user(user):
        return None
    return RedirectResponse(
        f"/account/{user.Username}", status_code=HTTPStatus.SEE_OTHER
    )
|
||||
|
||||
|
||||
@router.get("/account/{username}/edit", response_class=HTMLResponse)
@requires_auth
async def account_edit(request: Request, username: str):
    """Render the account edit form for `username` (404s via cannot_edit)."""
    target = db.query(models.User, models.User.Username == username).first()

    # cannot_edit raises 404 for a missing user and returns a redirect
    # when the request user lacks permission.
    denied = cannot_edit(request, target)
    if denied:
        return denied

    ctx = await make_variable_context(request, "Accounts")
    ctx["user"] = db.refresh(target)
    ctx = make_account_form_context(ctx, request, target, dict())
    return render_template(request, "account/edit.html", ctx)
|
||||
|
||||
|
||||
@router.post("/account/{username}/edit", response_class=HTMLResponse)
@handle_form_exceptions
@requires_auth
async def account_edit_post(
    request: Request,
    username: str,
    U: str = Form(default=str()),  # Username
    J: bool = Form(default=False),  # Inactive flag (see "J" in form context)
    E: str = Form(default=str()),  # Email
    H: str = Form(default=False),  # Hide Email
    BE: str = Form(default=None),  # Backup Email
    R: str = Form(default=None),  # Real Name
    HP: str = Form(default=None),  # Homepage
    I: str = Form(default=None),  # IRC Nick
    K: str = Form(default=None),  # PGP Key
    L: str = Form(aurweb.config.get("options", "default_lang")),
    TZ: str = Form(aurweb.config.get("options", "default_timezone")),
    P: str = Form(default=str()),  # New Password
    C: str = Form(default=None),  # Password Confirm
    S: bool = Form(default=False),  # Suspended
    PK: str = Form(default=None),  # PubKey
    CN: bool = Form(default=False),  # Comment Notify
    UN: bool = Form(default=False),  # Update Notify
    ON: bool = Form(default=False),  # Owner Notify
    HDC: bool = Form(default=False),  # Hide Deleted Comments
    T: int = Form(default=None),  # Account Type ID
    passwd: str = Form(default=str()),  # Current password, required to save
):
    """Apply an account edit.

    Permission is checked via cannot_edit, the raw form is validated by
    process_account_form, and on success each updater in `updates` is
    applied in order with the full form args. The current password
    (`passwd`) must be supplied, otherwise HTTP 400 is returned.
    """
    user = db.query(models.User).filter(models.User.Username == username).first()
    response = cannot_edit(request, user)
    if response:
        return response

    context = await make_variable_context(request, "Accounts")
    context["user"] = db.refresh(user)

    # Validate the raw form dict rather than the individual parameters.
    args = dict(await request.form())
    args["K"] = args.get("K", str()).replace(" ", "")

    context = make_account_form_context(context, request, user, args)
    ok, errors = process_account_form(request, user, args)

    # Echo submitted SSH keys back into the form on re-render.
    if PK:
        context["ssh_pks"] = [PK]

    # The current password is required before any validation errors are
    # reported, so a missing password short-circuits first.
    if not passwd:
        context["errors"] = ["Invalid password."]
        return render_template(
            request, "account/edit.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    if not ok:
        context["errors"] = errors
        return render_template(
            request, "account/edit.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    # Updaters applied in a fixed order; each receives the full form
    # args plus request/user/context keywords.
    updates = [
        update.simple,
        update.language,
        update.timezone,
        update.ssh_pubkey,
        update.account_type,
        update.password,
        update.suspend,
    ]

    # These update functions are all guarded by retry_deadlock;
    # there's no need to guard this route itself.
    for f in updates:
        f(**args, request=request, user=user, context=context)

    if not errors:
        context["complete"] = True

    return render_template(request, "account/edit.html", context)
|
||||
|
||||
|
||||
@router.get("/account/{username}")
async def account(request: Request, username: str):
    """Display `username`'s account page.

    Unauthenticated requests get the template rendered with HTTP 401;
    otherwise the page shows the user's details, a display-formatted
    PGP fingerprint and the last session-update timestamp (if any).
    """
    _ = l10n.get_translator_for_request(request)
    context = await make_variable_context(request, _("Account") + " " + username)
    if not request.user.is_authenticated():
        return render_template(
            request, "account/show.html", context, status_code=HTTPStatus.UNAUTHORIZED
        )

    # Get related User record, if possible.
    user = get_user_by_name(username)
    context["user"] = user

    # Format PGPKey for display with a space between each 4 characters.
    k = user.PGPKey or str()
    context["pgp_key"] = " ".join([k[i : i + 4] for i in range(0, len(k), 4)])

    # Surface the last time the user's session was updated, if a
    # session exists. Read LastUpdateTS from the session row we just
    # queried instead of re-reading it through the `user.session`
    # relationship (the original mixed the two, which was redundant).
    login_ts = None
    session = db.query(models.Session).filter(models.Session.UsersID == user.ID).first()
    if session:
        login_ts = session.LastUpdateTS
    context["login_ts"] = login_ts

    # Render the template.
    return render_template(request, "account/show.html", context)
|
||||
|
||||
|
||||
@router.get("/account/{username}/comments")
@requires_auth
async def account_comments(request: Request, username: str):
    """List `username`'s package comments, newest first."""
    target = get_user_by_name(username)
    ctx = make_context(request, "Accounts")
    ctx["username"] = username
    newest_first = models.PackageComment.CommentTS.desc()
    ctx["comments"] = target.package_comments.order_by(newest_first)
    return render_template(request, "account/comments.html", ctx)
|
||||
|
||||
|
||||
@router.get("/accounts")
@requires_auth
@account_type_required(
    {at.PACKAGE_MAINTAINER, at.DEVELOPER, at.PACKAGE_MAINTAINER_AND_DEV}
)
async def accounts(request: Request):
    """Render the account search form (privileged account types only)."""
    return render_template(
        request, "account/search.html", make_context(request, "Accounts")
    )
|
||||
|
||||
|
||||
@router.post("/accounts")
@handle_form_exceptions
@requires_auth
@account_type_required(
    {at.PACKAGE_MAINTAINER, at.DEVELOPER, at.PACKAGE_MAINTAINER_AND_DEV}
)
async def accounts_post(
    request: Request,
    O: int = Form(default=0),  # Offset
    SB: str = Form(default=str()),  # Sort By
    U: str = Form(default=str()),  # Username
    T: str = Form(default=str()),  # Account Type
    S: bool = Form(default=False),  # Suspended
    E: str = Form(default=str()),  # Email
    R: str = Form(default=str()),  # Real Name
    I: str = Form(default=str()),  # IRC Nick
    K: str = Form(default=str()),
):  # PGP Key
    """Search accounts by the submitted criteria and render one page
    (50 rows) of matching users, sorted per `SB`."""
    context = await make_variable_context(request, "Accounts")
    context["pp"] = pp = 50  # Hits per page.

    offset = max(O, 0)  # Minimize offset at 0.
    context["offset"] = offset  # Offset.

    # Preserve the submitted params for pagination links, minus the
    # offset itself.
    context["params"] = dict(await request.form())
    if "O" in context["params"]:
        context["params"].pop("O")

    # Setup order by criteria based on SB.
    order_by_columns = {
        "t": (models.AccountType.ID.asc(), models.User.Username.asc()),
        "r": (models.User.RealName.asc(), models.AccountType.ID.asc()),
        "i": (models.User.IRCNick.asc(), models.AccountType.ID.asc()),
    }
    default_order = (models.User.Username.asc(), models.AccountType.ID.asc())
    order_by = order_by_columns.get(SB, default_order)

    # Convert parameter T to an AccountType ID.
    account_types = {
        "u": at.USER_ID,
        "t": at.PACKAGE_MAINTAINER_ID,
        "d": at.DEVELOPER_ID,
        "td": at.PACKAGE_MAINTAINER_AND_DEV_ID,
    }
    account_type_id = account_types.get(T, None)

    # Get a query handle to users, populate the total user
    # count into a jinja2 context variable.
    query = db.query(models.User).join(models.AccountType)

    # Populate this list with any additional statements to
    # be ANDed together. Each (guard, clause) pair only contributes its
    # clause when the guard is truthy.
    statements = [
        v
        for k, v in [
            (account_type_id is not None, models.AccountType.ID == account_type_id),
            (bool(U), models.User.Username.like(f"%{U}%")),
            (bool(S), models.User.Suspended == S),
            (bool(E), models.User.Email.like(f"%{E}%")),
            (bool(R), models.User.RealName.like(f"%{R}%")),
            (bool(I), models.User.IRCNick.like(f"%{I}%")),
            (bool(K), models.User.PGPKey.like(f"%{K}%")),
        ]
        if k
    ]

    # Filter the query by combining all statements added above
    # into a single AND statement.
    if statements:
        query = query.filter(and_(*statements))

    context["total_users"] = query.count()

    # Finally, order and truncate our users for the current page.
    users = query.order_by(*order_by).limit(pp).offset(offset).all()
    context["users"] = util.apply_all(users, db.refresh)

    return render_template(request, "account/index.html", context)
|
||||
|
||||
|
||||
@router.get("/account/{name}/delete")
@requires_auth
async def account_delete(request: Request, name: str):
    """Show the deletion-confirmation page for account `name`."""
    target = db.query(models.User).filter(models.User.Username == name).first()
    if not target:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)

    # The requester must hold ACCOUNT_EDIT credentials for the target.
    if not request.user.has_credential(creds.ACCOUNT_EDIT, approved=[target]):
        _ = l10n.get_translator_for_request(request)
        raise HTTPException(
            detail=_("You do not have permission to edit this account."),
            status_code=HTTPStatus.UNAUTHORIZED,
        )

    ctx = make_context(request, "Accounts")
    ctx["name"] = name
    return render_template(request, "account/delete.html", ctx)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/account/{name}/delete")
@handle_form_exceptions
@requires_auth
async def account_delete_post(
    request: Request,
    name: str,
    passwd: str = Form(default=str()),
    confirm: bool = Form(default=False),
):
    """Delete account `name` once credentials, the confirmation checkbox
    and the requester's password have all been verified."""
    target = db.query(models.User).filter(models.User.Username == name).first()
    if not target:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)

    if not request.user.has_credential(creds.ACCOUNT_EDIT, approved=[target]):
        _ = l10n.get_translator_for_request(request)
        raise HTTPException(
            detail=_("You do not have permission to edit this account."),
            status_code=HTTPStatus.UNAUTHORIZED,
        )

    ctx = make_context(request, "Accounts")
    ctx["name"] = name

    def bad_request(message: str):
        # Re-render the confirmation page with an error and HTTP 400.
        ctx["errors"] = [message]
        return render_template(
            request,
            "account/delete.html",
            ctx,
            status_code=HTTPStatus.BAD_REQUEST,
        )

    if not util.strtobool(confirm):
        return bad_request(
            "The account has not been deleted, check the confirmation checkbox."
        )

    if not request.user.valid_password(passwd):
        return bad_request("Invalid password.")

    with db.begin():
        db.delete(target)

    return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
def render_terms_of_service(request: Request, context: dict, terms: typing.Iterable):
    """Render the ToS page for `terms`, or redirect home when none are pending."""
    if terms:
        context["unaccepted_terms"] = terms
        return render_template(request, "tos/index.html", context)
    return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@router.get("/tos")
@requires_auth
async def terms_of_service(request: Request):
    """Show any Terms of Service the user still needs to (re-)accept."""
    # Previously-accepted terms whose Revision has since been bumped.
    diffs = (
        db.query(models.Term)
        .join(models.AcceptedTerm)
        .filter(models.AcceptedTerm.Revision < models.Term.Revision)
        .all()
    )

    # Terms that have not yet been accepted at all.
    unaccepted = (
        db.query(models.Term)
        .filter(~models.Term.ID.in_(db.query(models.AcceptedTerm.TermsID)))
        .all()
    )

    for record in diffs + unaccepted:
        db.refresh(record)

    # Translate the 'Terms of Service' part of our page title.
    _ = l10n.get_translator_for_request(request)
    context = await make_variable_context(request, f"AUR {_('Terms of Service')}")

    return render_terms_of_service(request, context, sorted(unaccepted + diffs))
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/tos")
@handle_form_exceptions
@requires_auth
async def terms_of_service_post(request: Request, accept: bool = Form(default=False)):
    """Record acceptance of pending Terms of Service.

    Without `accept`, the pending terms page is re-rendered. With it,
    bumped revisions are updated on the user's existing AcceptedTerm
    rows and brand-new terms get fresh AcceptedTerm rows, all in one
    transaction, then the user is redirected home.
    """
    # Query the database for terms that were previously accepted,
    # but now have a bumped Revision that needs to be accepted.
    diffs = (
        db.query(models.Term)
        .join(models.AcceptedTerm)
        .filter(models.AcceptedTerm.Revision < models.Term.Revision)
        .all()
    )

    # Query the database for any terms that have not yet been accepted.
    # NOTE(review): this subquery spans all users' AcceptedTerm rows,
    # not just request.user's — confirm that matches the intent.
    unaccepted = (
        db.query(models.Term)
        .filter(~models.Term.ID.in_(db.query(models.AcceptedTerm.TermsID)))
        .all()
    )

    if not accept:
        # Translate the 'Terms of Service' part of our page title.
        _ = l10n.get_translator_for_request(request)
        title = f"AUR {_('Terms of Service')}"
        context = await make_variable_context(request, title)

        # We already did the database filters here, so let's just use
        # them instead of reiterating the process in terms_of_service.
        accept_needed = sorted(unaccepted + diffs)
        return render_terms_of_service(
            request, context, util.apply_all(accept_needed, db.refresh)
        )

    with db.begin():
        # For each term we found, query for the matching accepted term
        # and update its Revision to the term's current Revision.
        for term in diffs:
            db.refresh(term)
            accepted_term = request.user.accepted_terms.filter(
                models.AcceptedTerm.TermsID == term.ID
            ).first()
            accepted_term.Revision = term.Revision

        # For each term that was never accepted, accept it!
        for term in unaccepted:
            db.refresh(term)
            db.create(
                models.AcceptedTerm,
                User=request.user,
                Term=term,
                Revision=term.Revision,
            )

    return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
|
122
aurweb/routers/auth.py
Normal file
122
aurweb/routers/auth.py
Normal file
|
@ -0,0 +1,122 @@
|
|||
from http import HTTPStatus
|
||||
|
||||
from fastapi import APIRouter, Form, HTTPException, Request
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||
from sqlalchemy import or_
|
||||
|
||||
import aurweb.config
|
||||
from aurweb import cookies, db
|
||||
from aurweb.auth import requires_auth, requires_guest
|
||||
from aurweb.exceptions import handle_form_exceptions
|
||||
from aurweb.l10n import get_translator_for_request
|
||||
from aurweb.models import User
|
||||
from aurweb.templates import make_variable_context, render_template
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
async def login_template(request: Request, next: str, errors: list = None):
    """Render the login page with login-specific context."""
    url = request.url
    context = await make_variable_context(request, "Login", next)
    context.update(
        {
            "errors": errors,
            "url_base": f"{url.scheme}://{url.netloc}",
        }
    )
    return render_template(request, "login.html", context)
|
||||
|
||||
|
||||
@router.get("/login", response_class=HTMLResponse)
async def login_get(request: Request, next: str = "/"):
    """Serve the login form; `next` is the post-login redirect target."""
    return await login_template(request, next)
|
||||
|
||||
|
||||
@db.retry_deadlock
def _retry_login(request: Request, user: User, passwd: str) -> str:
    """Log `user` in, retrying transparently on a DB deadlock."""
    sid = user.login(request, passwd)
    return sid
|
||||
|
||||
|
||||
@router.post("/login", response_class=HTMLResponse)
@handle_form_exceptions
@requires_guest
async def login_post(
    request: Request,
    next: str = Form(...),
    user: str = Form(default=str()),
    passwd: str = Form(default=str()),
    remember_me: bool = Form(default=False),
):
    """Authenticate a guest and establish the AURSID session cookie.

    The Referer header must point back at our own /login page; the user
    may be identified by username or email. On success, AURSID and
    AURREMEMBER cookies are set and the user is redirected to `next`.
    """
    # TODO: Once the Origin header gets broader adoption, this code can be
    # slightly simplified to use it.
    login_path = aurweb.config.get("options", "aur_location") + "/login"
    referer = request.headers.get("Referer")
    if not referer or not referer.startswith(login_path):
        _ = get_translator_for_request(request)
        raise HTTPException(
            status_code=HTTPStatus.BAD_REQUEST, detail=_("Bad Referer header.")
        )

    # Look the account up by username OR email; `user` is rebound from
    # the form string to the matched User row (or None).
    user = (
        db.query(User)
        .filter(
            or_(
                User.Username == user,
                User.Email == user,
            )
        )
        .first()
    )
    if not user:
        return await login_template(request, next, errors=["Bad username or password."])

    if user.Suspended:
        return await login_template(request, next, errors=["Account Suspended"])

    # If "remember me" was not ticked, we set a session cookie for AURSID,
    # otherwise we make it a persistent cookie
    cookie_timeout = None
    if remember_me:
        cookie_timeout = aurweb.config.getint("options", "persistent_cookie_timeout")

    perma_timeout = aurweb.config.getint("options", "permanent_cookie_timeout")
    sid = _retry_login(request, user, passwd)
    if not sid:
        return await login_template(request, next, errors=["Bad username or password."])

    # NOTE(review): `next` is not validated to be an on-site path here —
    # confirm open-redirect exposure is handled elsewhere.
    response = RedirectResponse(url=next, status_code=HTTPStatus.SEE_OTHER)

    # With HTTP logins disabled, cookies are marked Secure and HttpOnly.
    secure = aurweb.config.getboolean("options", "disable_http_login")
    response.set_cookie(
        "AURSID",
        sid,
        max_age=cookie_timeout,
        secure=secure,
        httponly=secure,
        samesite=cookies.samesite(),
    )
    # NOTE(review): remember_me is a bool here; set_cookie stringifies
    # the value — confirm downstream readers expect "True"/"False".
    response.set_cookie(
        "AURREMEMBER",
        remember_me,
        max_age=perma_timeout,
        secure=secure,
        httponly=secure,
        samesite=cookies.samesite(),
    )
    return response
|
||||
|
||||
|
||||
@db.retry_deadlock
def _retry_logout(request: Request) -> None:
    """Log the request user out, retrying transparently on a DB deadlock."""
    request.user.logout(request)
|
||||
|
||||
|
||||
@router.post("/logout")
@handle_form_exceptions
@requires_auth
async def logout(request: Request, next: str = Form(default="/")):
    """Terminate the session and clear the authentication cookies."""
    if request.user.is_authenticated():
        _retry_logout(request)

    # Use 303 since we may be handling a post request, that'll get it
    # to redirect to a get request.
    response = RedirectResponse(url=next, status_code=HTTPStatus.SEE_OTHER)
    for cookie_name in ("AURSID", "AURREMEMBER"):
        response.delete_cookie(cookie_name)
    return response
|
227
aurweb/routers/html.py
Normal file
227
aurweb/routers/html.py
Normal file
|
@ -0,0 +1,227 @@
|
|||
""" AURWeb's primary routing module. Define all routes via @app.app.{get,post}
|
||||
decorators in some way; more complex routes should be defined in their
|
||||
own modules and imported here. """
|
||||
|
||||
import os
|
||||
from http import HTTPStatus
|
||||
|
||||
from fastapi import APIRouter, Form, HTTPException, Request, Response
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse
|
||||
from prometheus_client import (
|
||||
CONTENT_TYPE_LATEST,
|
||||
CollectorRegistry,
|
||||
generate_latest,
|
||||
multiprocess,
|
||||
)
|
||||
from sqlalchemy import case, or_
|
||||
|
||||
import aurweb.config
|
||||
import aurweb.models.package_request
|
||||
from aurweb import aur_logging, cookies, db, models, statistics, time, util
|
||||
from aurweb.exceptions import handle_form_exceptions
|
||||
from aurweb.models.package_request import PENDING_ID
|
||||
from aurweb.packages.util import query_notified, query_voted, updated_packages
|
||||
from aurweb.templates import make_context, render_template
|
||||
|
||||
logger = aur_logging.get_logger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/favicon.ico")
async def favicon(request: Request):
    """Redirect the conventional /favicon.ico path to our static icon,
    for browsers that probe the site root for a favicon."""
    return RedirectResponse("/static/images/favicon.ico")
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/language", response_class=RedirectResponse)
@handle_form_exceptions
async def language(
    request: Request,
    set_lang: str = Form(...),
    next: str = Form(...),
    q: str = Form(default=None),
):
    """
    A POST route used to set a session's language.

    Return a 303 See Other redirect to {next}?next={next}. If we are
    setting the language on any page, we want to preserve query
    parameters across the redirect.
    """
    # Reject off-site redirect targets. startswith() also handles an
    # empty `next` gracefully, where the previous `next[0]` indexing
    # raised IndexError (HTTP 500) instead of returning 400.
    if not next.startswith("/"):
        return HTMLResponse(b"Invalid 'next' parameter.", status_code=400)

    query_string = "?" + q if q else str()

    response = RedirectResponse(
        url=f"{next}{query_string}", status_code=HTTPStatus.SEE_OTHER
    )

    # If the user is authenticated, update the user's LangPreference.
    # Otherwise set an AURLANG cookie
    if request.user.is_authenticated():
        with db.begin():
            request.user.LangPreference = set_lang
    else:
        secure = aurweb.config.getboolean("options", "disable_http_login")
        perma_timeout = aurweb.config.getint("options", "permanent_cookie_timeout")

        response.set_cookie(
            "AURLANG",
            set_lang,
            secure=secure,
            httponly=secure,
            max_age=perma_timeout,
            samesite=cookies.samesite(),
        )

    return response
|
||||
|
||||
|
||||
@router.get("/", response_class=HTMLResponse)
async def index(request: Request):
    """Homepage route.

    Always renders package statistics, SSH fingerprints and the 15 most
    recently updated packages; authenticated users additionally get
    their dashboard data (flagged/maintained/comaintained packages,
    vote/notification status and recent package requests).
    """
    context = make_context(request, "Home")
    context["ssh_fingerprints"] = util.get_ssh_fingerprints()

    cache_expire = aurweb.config.getint("cache", "expiry_time_statistics", 300)

    # Package statistics.
    counts = statistics.get_homepage_counts()
    for k in counts:
        context[k] = counts[k]

    # Get the 15 most recently updated packages.
    context["package_updates"] = updated_packages(15, cache_expire)

    if request.user.is_authenticated():
        # Authenticated users get a few extra pieces of data for
        # the dashboard display.
        packages = db.query(models.Package).join(models.PackageBase)

        # Packages where request.user is the maintainer OR a
        # comaintainer (outer join, matched via either UID column).
        maintained = (
            packages.join(
                models.PackageComaintainer,
                models.PackageComaintainer.PackageBaseID == models.PackageBase.ID,
                isouter=True,
            )
            .join(
                models.User,
                or_(
                    models.PackageBase.MaintainerUID == models.User.ID,
                    models.PackageComaintainer.UsersID == models.User.ID,
                ),
            )
            .filter(models.User.ID == request.user.ID)
        )

        # Packages maintained by the user that have been flagged.
        context["flagged_packages"] = (
            maintained.filter(models.PackageBase.OutOfDateTS.isnot(None))
            .order_by(models.PackageBase.ModifiedTS.desc(), models.Package.Name.asc())
            .limit(50)
            .all()
        )

        # Flagged packages that request.user has voted for.
        context["flagged_packages_voted"] = query_voted(
            context.get("flagged_packages"), request.user
        )

        # Flagged packages that request.user is being notified about.
        context["flagged_packages_notified"] = query_notified(
            context.get("flagged_packages"), request.user
        )

        archive_time = aurweb.config.getint("options", "request_archive_time")
        start = time.utcnow() - archive_time

        # Package requests created by request.user.
        context["package_requests"] = (
            request.user.package_requests.filter(
                models.PackageRequest.RequestTS >= start
            )
            .order_by(
                # Order primarily by the Status column being PENDING_ID,
                # and secondarily by RequestTS; both in descending order.
                case([(models.PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(),
                models.PackageRequest.RequestTS.desc(),
            )
            .limit(50)
            .all()
        )

        # Packages that the request user maintains or comaintains.
        context["packages"] = (
            maintained.filter(models.User.ID == models.PackageBase.MaintainerUID)
            .order_by(models.PackageBase.ModifiedTS.desc(), models.Package.Name.desc())
            .limit(50)
            .all()
        )

        # Packages that request.user has voted for.
        context["packages_voted"] = query_voted(context.get("packages"), request.user)

        # Packages that request.user is being notified about.
        context["packages_notified"] = query_notified(
            context.get("packages"), request.user
        )

        # Any packages that the request user comaintains.
        context["comaintained"] = (
            packages.join(models.PackageComaintainer)
            .filter(models.PackageComaintainer.UsersID == request.user.ID)
            .order_by(models.PackageBase.ModifiedTS.desc(), models.Package.Name.desc())
            .limit(50)
            .all()
        )

        # Comaintained packages that request.user has voted for.
        context["comaintained_voted"] = query_voted(
            context.get("comaintained"), request.user
        )

        # Comaintained packages that request.user is being notified about.
        context["comaintained_notified"] = query_notified(
            context.get("comaintained"), request.user
        )

    return render_template(request, "index.html", context)
|
||||
|
||||
|
||||
@router.get("/{archive}.sha256")
async def archive_sha256(request: Request, archive: str):
    """Serve the sha256 checksum file for a pre-built archive as text."""
    directory = aurweb.config.get("mkpkglists", "archivedir")
    checksum_path = os.path.join(directory, f"{archive}.sha256")
    if not os.path.exists(checksum_path):
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)

    with open(checksum_path) as fh:
        content = fh.read()
    return Response(content, headers={"Content-Type": "text/plain"})
|
||||
|
||||
|
||||
@router.get("/metrics")
async def metrics(request: Request):
    """Expose Prometheus metrics when multiprocess collection is configured."""
    if not os.environ.get("PROMETHEUS_MULTIPROC_DIR", None):
        return Response(
            "Prometheus metrics are not enabled.",
            status_code=HTTPStatus.SERVICE_UNAVAILABLE,
        )

    # update prometheus gauges for packages and users
    statistics.update_prometheus_metrics()

    registry = CollectorRegistry()
    multiprocess.MultiProcessCollector(registry)
    payload = generate_latest(registry)
    return Response(
        payload,
        headers={
            "Content-Type": CONTENT_TYPE_LATEST,
            "Content-Length": str(len(payload)),
        },
    )
|
||||
|
||||
|
||||
@router.get("/raisefivethree", response_class=HTMLResponse)
async def raise_service_unavailable(request: Request):
    """Deliberately raise HTTP 503 (used to exercise error handling)."""
    raise HTTPException(status_code=HTTPStatus.SERVICE_UNAVAILABLE)
|
394
aurweb/routers/package_maintainer.py
Normal file
394
aurweb/routers/package_maintainer.py
Normal file
|
@ -0,0 +1,394 @@
|
|||
import html
|
||||
import typing
|
||||
from http import HTTPStatus
|
||||
from typing import Any
|
||||
|
||||
from fastapi import APIRouter, Form, HTTPException, Request
|
||||
from fastapi.responses import RedirectResponse, Response
|
||||
from sqlalchemy import and_, func, or_
|
||||
|
||||
from aurweb import aur_logging, db, l10n, models, time
|
||||
from aurweb.auth import creds, requires_auth
|
||||
from aurweb.exceptions import handle_form_exceptions
|
||||
from aurweb.models import User
|
||||
from aurweb.models.account_type import (
|
||||
PACKAGE_MAINTAINER_AND_DEV_ID,
|
||||
PACKAGE_MAINTAINER_ID,
|
||||
)
|
||||
from aurweb.templates import make_context, make_variable_context, render_template
|
||||
|
||||
router = APIRouter()
logger = aur_logging.get_logger(__name__)

# Some PM route specific constants.
ITEMS_PER_PAGE = 10  # Paged table size.
MAX_AGENDA_LENGTH = 75  # Agenda table column length.

ADDVOTE_SPECIFICS = {
    # This dict stores a vote duration and quorum for a proposal.
    # When a proposal is added, duration is added to the current
    # timestamp.
    # "addvote_type": (duration, quorum)
    "add_pm": (7 * 24 * 60 * 60, 0.66),  # 7 days
    "remove_pm": (7 * 24 * 60 * 60, 0.75),  # 7 days
    "remove_inactive_pm": (5 * 24 * 60 * 60, 0.66),  # 5 days
    "bylaws": (7 * 24 * 60 * 60, 0.75),  # 7 days
}
|
||||
|
||||
|
||||
def populate_package_maintainer_counts(context: dict[str, Any]) -> None:
    """Fill `context` with total and active package maintainer counts."""
    maintainers = db.query(User).filter(
        or_(
            User.AccountTypeID == PACKAGE_MAINTAINER_ID,
            User.AccountTypeID == PACKAGE_MAINTAINER_AND_DEV_ID,
        )
    )
    context["package_maintainer_count"] = maintainers.count()

    # Treat a NULL InactivityTS the same as 0 (never marked inactive).
    active_maintainers = maintainers.filter(
        or_(User.InactivityTS.is_(None), User.InactivityTS == 0)
    )
    context["active_package_maintainer_count"] = active_maintainers.count()
|
||||
|
||||
|
||||
@router.get("/package-maintainer")
@requires_auth
async def package_maintainer(
    request: Request,
    coff: int = 0,  # current offset
    cby: str = "desc",  # current by
    poff: int = 0,  # past offset
    pby: str = "desc",
):  # past by
    """Proposal listings.

    Renders the paged tables of currently-running and past proposals
    plus the "last votes by PM" summary.

    :param coff: Offset into the current (running) proposals table
    :param cby: Sort direction ("asc"/"desc") for current proposals
    :param poff: Offset into the past (ended) proposals table
    :param pby: Sort direction ("asc"/"desc") for past proposals
    """

    if not request.user.has_credential(creds.PM_LIST_VOTES):
        return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)

    context = make_context(request, "Package Maintainer")

    current_by, past_by = cby, pby
    current_off, past_off = coff, poff

    context["pp"] = pp = ITEMS_PER_PAGE
    context["prev_len"] = MAX_AGENDA_LENGTH

    ts = time.utcnow()

    if current_by not in {"asc", "desc"}:
        # If a malicious by was given, default to desc.
        current_by = "desc"
    context["current_by"] = current_by

    if past_by not in {"asc", "desc"}:
        # If a malicious by was given, default to desc.
        past_by = "desc"
    context["past_by"] = past_by

    # Proposals still running (End in the future), newest first.
    current_votes = (
        db.query(models.VoteInfo)
        .filter(models.VoteInfo.End > ts)
        .order_by(models.VoteInfo.Submitted.desc())
    )
    context["current_votes_count"] = current_votes.count()
    current_votes = current_votes.limit(pp).offset(current_off)
    # "asc" is produced by reversing the fetched page, not by re-querying.
    context["current_votes"] = (
        reversed(current_votes.all()) if current_by == "asc" else current_votes.all()
    )
    context["current_off"] = current_off

    # Proposals that have ended.
    past_votes = (
        db.query(models.VoteInfo)
        .filter(models.VoteInfo.End <= ts)
        .order_by(models.VoteInfo.Submitted.desc())
    )
    context["past_votes_count"] = past_votes.count()
    past_votes = past_votes.limit(pp).offset(past_off)
    context["past_votes"] = (
        reversed(past_votes.all()) if past_by == "asc" else past_votes.all()
    )
    context["past_off"] = past_off

    # Most recent vote cast by each PM on an ended proposal.
    # NOTE(review): AccountTypeID values 2/4 appear to correspond to the
    # PACKAGE_MAINTAINER_ID constants imported above -- confirm and
    # consider using the named constants here.
    last_vote = func.max(models.Vote.VoteID).label("LastVote")
    last_votes_by_pm = (
        db.query(models.Vote)
        .join(models.User)
        .join(models.VoteInfo, models.VoteInfo.ID == models.Vote.VoteID)
        .filter(
            and_(
                models.Vote.VoteID == models.VoteInfo.ID,
                models.User.ID == models.Vote.UserID,
                models.VoteInfo.End < ts,
                or_(models.User.AccountTypeID == 2, models.User.AccountTypeID == 4),
            )
        )
        .with_entities(models.Vote.UserID, last_vote, models.User.Username)
        .group_by(models.Vote.UserID)
        .order_by(last_vote.desc(), models.User.Username.asc())
    )
    context["last_votes_by_pm"] = last_votes_by_pm.all()

    # Toggle targets for the sort-direction links.
    context["current_by_next"] = "asc" if current_by == "desc" else "desc"
    context["past_by_next"] = "asc" if past_by == "desc" else "desc"

    populate_package_maintainer_counts(context)

    # Query parameters echoed back into pagination links.
    context["q"] = {
        "coff": current_off,
        "cby": current_by,
        "poff": past_off,
        "pby": past_by,
    }

    return render_template(request, "package-maintainer/index.html", context)
|
||||
|
||||
|
||||
def render_proposal(
    request: Request,
    context: dict,
    proposal: int,
    voteinfo: models.VoteInfo,
    voters: typing.Iterable[models.User],
    vote: models.Vote,
    status_code: HTTPStatus = HTTPStatus.OK,
):
    """Render a single PM proposal.

    :param proposal: VoteInfo.ID being displayed
    :param voteinfo: The proposal record itself
    :param voters: Query of users who have voted on this proposal
    :param vote: The requesting user's own vote, or None
    :param status_code: HTTP status for the rendered page
    """
    context["proposal"] = proposal
    context["voteinfo"] = voteinfo
    context["voters"] = voters.all()

    total = voteinfo.total_votes()
    # Guard against division by zero when no users were active.
    participation = (total / voteinfo.ActiveUsers) if voteinfo.ActiveUsers else 0
    context["participation"] = participation

    # Accepted on an absolute majority of active users, or on a simple
    # majority once participation exceeds the quorum.
    accepted = (voteinfo.Yes > voteinfo.ActiveUsers / 2) or (
        participation > voteinfo.Quorum and voteinfo.Yes > voteinfo.No
    )
    context["accepted"] = accepted

    # The user may vote iff they have not voted on this proposal yet.
    can_vote = voters.filter(models.Vote.User == request.user).first() is None
    context["can_vote"] = can_vote

    if not voteinfo.is_running():
        context["error"] = "Voting is closed for this proposal."

    context["vote"] = vote
    context["has_voted"] = vote is not None

    return render_template(
        request, "package-maintainer/show.html", context, status_code=status_code
    )
|
||||
|
||||
|
||||
@router.get("/package-maintainer/{proposal}")
@requires_auth
async def package_maintainer_proposal(request: Request, proposal: int):
    """Display a single proposal along with the user's voting state."""
    if not request.user.has_credential(creds.PM_LIST_VOTES):
        return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)

    context = await make_variable_context(request, "Package Maintainer")
    proposal = int(proposal)

    voteinfo = db.query(models.VoteInfo).filter(models.VoteInfo.ID == proposal).first()
    if not voteinfo:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)

    # All users who have voted on this proposal.
    voters = (
        db.query(models.User)
        .join(models.Vote)
        .filter(models.Vote.VoteID == voteinfo.ID)
    )
    # The requesting user's own vote, or None if they have not voted.
    vote = (
        db.query(models.Vote)
        .filter(
            and_(
                models.Vote.UserID == request.user.ID,
                models.Vote.VoteID == voteinfo.ID,
            )
        )
        .first()
    )
    # Surface any reason the user cannot vote; later checks overwrite
    # earlier ones, leaving the most specific message in the context.
    if not request.user.has_credential(creds.PM_VOTE):
        context["error"] = "Only Package Maintainers are allowed to vote."
    if voteinfo.User == request.user.Username:
        context["error"] = "You cannot vote in an proposal about you."
    elif vote is not None:
        context["error"] = "You've already voted for this proposal."

    context["vote"] = vote
    return render_proposal(request, context, proposal, voteinfo, voters, vote)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/package-maintainer/{proposal}")
@handle_form_exceptions
@requires_auth
async def package_maintainer_proposal_post(
    request: Request, proposal: int, decision: str = Form(...)
):
    """Cast a vote (Yes/No/Abstain) on a proposal.

    :param proposal: VoteInfo.ID being voted on
    :param decision: One of "Yes", "No" or "Abstain"
    """
    if not request.user.has_credential(creds.PM_LIST_VOTES):
        return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)

    context = await make_variable_context(request, "Package Maintainer")
    proposal = int(proposal)  # Make sure it's an int.

    voteinfo = db.query(models.VoteInfo).filter(models.VoteInfo.ID == proposal).first()
    if not voteinfo:
        raise HTTPException(status_code=HTTPStatus.NOT_FOUND)

    # All users who have voted on this proposal.
    voters = (
        db.query(models.User)
        .join(models.Vote)
        .filter(models.Vote.VoteID == voteinfo.ID)
    )
    # The requesting user's own vote, or None if they have not voted.
    vote = (
        db.query(models.Vote)
        .filter(
            and_(
                models.Vote.UserID == request.user.ID,
                models.Vote.VoteID == voteinfo.ID,
            )
        )
        .first()
    )

    # Validate that the user is actually allowed to cast this vote.
    status_code = HTTPStatus.OK
    if not request.user.has_credential(creds.PM_VOTE):
        context["error"] = "Only Package Maintainers are allowed to vote."
        status_code = HTTPStatus.UNAUTHORIZED
    elif voteinfo.User == request.user.Username:
        context["error"] = "You cannot vote in an proposal about you."
        status_code = HTTPStatus.BAD_REQUEST
    elif vote is not None:
        context["error"] = "You've already voted for this proposal."
        status_code = HTTPStatus.BAD_REQUEST

    if status_code != HTTPStatus.OK:
        return render_proposal(
            request, context, proposal, voteinfo, voters, vote, status_code=status_code
        )

    # Record the vote inside a transaction (retried on deadlock by the
    # @db.async_retry_deadlock decorator above).
    with db.begin():
        if decision in {"Yes", "No", "Abstain"}:
            # Increment whichever decision was given to us.
            setattr(voteinfo, decision, getattr(voteinfo, decision) + 1)
        else:
            return Response(
                "Invalid 'decision' value.", status_code=HTTPStatus.BAD_REQUEST
            )

        vote = db.create(models.Vote, User=request.user, VoteInfo=voteinfo)

    # The vote is now recorded; re-render with the "already voted" state.
    context["error"] = "You've already voted for this proposal."
    return render_proposal(request, context, proposal, voteinfo, voters, vote)
|
||||
|
||||
|
||||
@router.get("/addvote")
@requires_auth
async def package_maintainer_addvote(
    request: Request, user: str = str(), type: str = "add_pm", agenda: str = str()
):
    """Display the "add proposal" form, pre-filled from query params."""
    if not request.user.has_credential(creds.PM_ADD_VOTE):
        return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)

    context = await make_variable_context(request, "Add Proposal")

    # Fall back to a sane default when given an unknown proposal type.
    if type not in ADDVOTE_SPECIFICS:
        context["error"] = "Invalid type."
        type = "add_pm"

    context.update({"user": user, "type": type, "agenda": agenda})

    return render_template(request, "addvote.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/addvote")
@handle_form_exceptions
@requires_auth
async def package_maintainer_addvote_post(
    request: Request,
    user: str = Form(default=str()),
    type: str = Form(default=str()),
    agenda: str = Form(default=str()),
):
    """Create a new proposal (VoteInfo record) from the addvote form.

    Validates the target user, proposal type and agenda, then creates
    the proposal and redirects to its page.
    """
    if not request.user.has_credential(creds.PM_ADD_VOTE):
        return RedirectResponse("/package-maintainer", status_code=HTTPStatus.SEE_OTHER)

    # Build a context.
    context = await make_variable_context(request, "Add Proposal")

    context["type"] = type
    context["user"] = user
    context["agenda"] = agenda

    def render_addvote(context, status_code):
        """Simplify render_template a bit for this test."""
        return render_template(request, "addvote.html", context, status_code)

    # Alright, get some database records, if we can.
    # "bylaws" proposals are not about a specific user, so the target
    # user is only validated for the other types.
    if type != "bylaws":
        user_record = db.query(models.User).filter(models.User.Username == user).first()
        if user_record is None:
            context["error"] = "Username does not exist."
            return render_addvote(context, HTTPStatus.NOT_FOUND)

        # Reject a second concurrent proposal about the same user.
        utcnow = time.utcnow()
        voteinfo = (
            db.query(models.VoteInfo)
            .filter(and_(models.VoteInfo.User == user, models.VoteInfo.End > utcnow))
            .count()
        )
        if voteinfo:
            _ = l10n.get_translator_for_request(request)
            context["error"] = _("%s already has proposal running for them.") % (
                html.escape(user),
            )
            return render_addvote(context, HTTPStatus.BAD_REQUEST)

    if type not in ADDVOTE_SPECIFICS:
        context["error"] = "Invalid type."
        context["type"] = type = "add_pm"  # Default for rendering.
        return render_addvote(context, HTTPStatus.BAD_REQUEST)

    if not agenda:
        context["error"] = "Proposal cannot be empty."
        return render_addvote(context, HTTPStatus.BAD_REQUEST)

    # Gather some mapped constants and the current timestamp.
    duration, quorum = ADDVOTE_SPECIFICS.get(type)
    timestamp = time.utcnow()

    # Active PM types we filter for.
    types = {PACKAGE_MAINTAINER_ID, PACKAGE_MAINTAINER_AND_DEV_ID}

    # Create a new VoteInfo (proposal)!
    with db.begin():
        # NOTE(review): `InactivityTS.isnot(None)` looks inverted for an
        # "active" user count (one would expect `is_(None)` as in
        # populate_package_maintainer_counts) -- confirm intent.
        active_pms = (
            db.query(User)
            .filter(
                and_(
                    User.Suspended == 0,
                    User.InactivityTS.isnot(None),
                    User.AccountTypeID.in_(types),
                )
            )
            .count()
        )
        voteinfo = db.create(
            models.VoteInfo,
            User=user,
            Agenda=html.escape(agenda),
            Submitted=timestamp,
            End=(timestamp + duration),
            Quorum=quorum,
            ActiveUsers=active_pms,
            Submitter=request.user,
        )

    # Redirect to the new proposal.
    endpoint = f"/package-maintainer/{voteinfo.ID}"
    return RedirectResponse(endpoint, status_code=HTTPStatus.SEE_OTHER)
|
518
aurweb/routers/packages.py
Normal file
518
aurweb/routers/packages.py
Normal file
|
@ -0,0 +1,518 @@
|
|||
from collections import defaultdict
|
||||
from http import HTTPStatus
|
||||
from typing import Any
|
||||
|
||||
from fastapi import APIRouter, Form, Query, Request, Response
|
||||
|
||||
import aurweb.filters # noqa: F401
|
||||
from aurweb import aur_logging, config, db, defaults, models, util
|
||||
from aurweb.auth import creds, requires_auth
|
||||
from aurweb.cache import db_count_cache, db_query_cache
|
||||
from aurweb.exceptions import InvariantError, handle_form_exceptions
|
||||
from aurweb.models.relation_type import CONFLICTS_ID, PROVIDES_ID, REPLACES_ID
|
||||
from aurweb.packages import util as pkgutil
|
||||
from aurweb.packages.search import PackageSearch
|
||||
from aurweb.packages.util import get_pkg_or_base
|
||||
from aurweb.pkgbase import actions as pkgbase_actions, util as pkgbaseutil
|
||||
from aurweb.templates import make_context, make_variable_context, render_template
|
||||
from aurweb.util import hash_query
|
||||
|
||||
# Module-level logger and router for the /packages endpoints.
logger = aur_logging.get_logger(__name__)
router = APIRouter()
|
||||
|
||||
|
||||
async def packages_get(
    request: Request, context: dict[str, Any], status_code: HTTPStatus = HTTPStatus.OK
):
    """Produce the /packages search listing.

    Parses search parameters out of ``request.query_params``, runs the
    (Redis-cached) search query and renders packages/index.html.

    :param context: Template context to populate
    :param status_code: HTTP status for the rendered page
    """
    # Query parameters used in this request.
    context["q"] = dict(request.query_params)

    # Per page and offset.
    offset, per_page = util.sanitize_params(
        request.query_params.get("O", defaults.O),
        request.query_params.get("PP", defaults.PP),
    )
    context["O"] = offset

    # Limit PP to options.max_search_results
    max_search_results = config.getint("options", "max_search_results")
    context["PP"] = per_page = min(per_page, max_search_results)

    # Query search by.
    search_by = context["SeB"] = request.query_params.get("SeB", "nd")

    # Query sort by.
    sort_by = request.query_params.get("SB", None)

    # Query sort order.
    sort_order = request.query_params.get("SO", None)

    # Apply ordering, limit and offset.
    search = PackageSearch(request.user)

    # For each keyword found in K, apply a search_by filter.
    # This means that for any sentences separated by spaces,
    # they are used as if they were ANDed.
    keywords = context["K"] = request.query_params.get("K", str())

    keywords = keywords.split(" ")
    if search_by == "k":
        # If we're searching by keywords, supply a set of keywords.
        search.search_by(search_by, set(keywords))
    else:
        for keyword in keywords:
            search.search_by(search_by, keyword)

    flagged = request.query_params.get("outdated", None)
    if flagged:
        # If outdated was given, set it up in the context.
        context["outdated"] = flagged

        # When outdated is set to "on," we filter records which do have
        # an OutOfDateTS. When it's set to "off," we filter out any which
        # do **not** have OutOfDateTS.
        criteria = None
        if flagged == "on":
            criteria = models.PackageBase.OutOfDateTS.isnot
        else:
            criteria = models.PackageBase.OutOfDateTS.is_

        # Apply the flag criteria to our PackageSearch.query.
        search.query = search.query.filter(criteria(None))

    submit = request.query_params.get("submit", "Go")
    if submit == "Orphans":
        # If the user clicked the "Orphans" button, we only want
        # orphaned packages.
        search.query = search.query.filter(models.PackageBase.MaintainerUID.is_(None))

    # Collect search result count here; we've applied our keywords.
    # Including more query operations below, like ordering, will
    # increase the amount of time required to collect a count.
    # we use redis for caching the results of the query
    cache_expire = config.getint("cache", "expiry_time_search", 600)
    num_packages = db_count_cache(hash_query(search.query), search.query, cache_expire)

    # Apply user-specified sort column and ordering.
    search.sort_by(sort_by, sort_order)

    # Insert search results into the context.
    results = search.results().with_entities(
        models.Package.ID,
        models.Package.Name,
        models.Package.PackageBaseID,
        models.Package.Version,
        models.Package.Description,
        models.PackageBase.Popularity,
        models.PackageBase.NumVotes,
        models.PackageBase.OutOfDateTS,
        models.PackageBase.ModifiedTS,
        models.User.Username.label("Maintainer"),
        models.PackageVote.PackageBaseID.label("Voted"),
        models.PackageNotification.PackageBaseID.label("Notify"),
    )

    # paging
    results = results.limit(per_page).offset(offset)

    # we use redis for caching the results of the query
    packages = db_query_cache(hash_query(results), results, cache_expire)

    context["packages"] = packages
    context["packages_count"] = num_packages

    return render_template(
        request, "packages/index.html", context, status_code=status_code
    )
|
||||
|
||||
|
||||
@router.get("/packages")
async def packages(request: Request) -> Response:
    """Render the package search/listing page."""
    listing_context = await make_variable_context(request, "Packages")
    return await packages_get(request, listing_context)
|
||||
|
||||
|
||||
@router.get("/packages/{name}")
async def package(
    request: Request,
    name: str,
    all_deps: bool = Query(default=False),
    all_reqs: bool = Query(default=False),
) -> Response:
    """
    Get a package by name.

    By default, we limit the number of depends and requires results
    to 20. To bypass this and load all of them, which should be triggered
    via a "Show more" link near the limited listing.

    :param name: Package.Name
    :param all_deps: Boolean indicating whether we should load all depends
    :param all_reqs: Boolean indicating whether we should load all requires
    :return: FastAPI Response
    """

    # Get the Package.
    pkg = get_pkg_or_base(name, models.Package)
    pkgbase = pkg.PackageBase

    # Bucket this package's relations by type:
    # "c" = conflicts, "p" = provides, "r" = replaces.
    rels = pkg.package_relations.order_by(models.PackageRelation.RelName.asc())
    rels_data = defaultdict(list)
    for rel in rels:
        if rel.RelTypeID == CONFLICTS_ID:
            rels_data["c"].append(rel)
        elif rel.RelTypeID == PROVIDES_ID:
            rels_data["p"].append(rel)
        elif rel.RelTypeID == REPLACES_ID:
            rels_data["r"].append(rel)

    # Add our base information.
    context = pkgbaseutil.make_context(request, pkgbase)
    context["q"] = dict(request.query_params)

    context.update({"all_deps": all_deps, "all_reqs": all_reqs})

    context["package"] = pkg

    # Package sources.
    context["sources"] = pkg.package_sources.order_by(
        models.PackageSource.Source.asc()
    ).all()

    # Listing metadata.
    context["max_listing"] = max_listing = 20

    # Package dependencies.
    deps = pkg.package_dependencies.order_by(
        models.PackageDependency.DepTypeID.asc(), models.PackageDependency.DepName.asc()
    )
    context["depends_count"] = deps.count()
    if not all_deps:
        deps = deps.limit(max_listing)
    context["dependencies"] = deps.all()
    # Existing dependencies to avoid multiple lookups
    context["dependencies_names_from_aur"] = [
        item.Name
        for item in db.query(models.Package)
        .filter(
            models.Package.Name.in_(
                pkg.package_dependencies.with_entities(models.PackageDependency.DepName)
            )
        )
        .all()
    ]

    # Package requirements (other packages depend on this one).
    reqs = pkgutil.pkg_required(pkg.Name, [p.RelName for p in rels_data.get("p", [])])
    context["reqs_count"] = reqs.count()
    if not all_reqs:
        reqs = reqs.limit(max_listing)
    context["required_by"] = reqs.all()

    context["licenses"] = pkg.package_licenses

    context["groups"] = pkg.package_groups

    # Relation listings for the template, ordered by relation name.
    conflicts = pkg.package_relations.filter(
        models.PackageRelation.RelTypeID == CONFLICTS_ID
    ).order_by(models.PackageRelation.RelName.asc())
    context["conflicts"] = conflicts

    provides = pkg.package_relations.filter(
        models.PackageRelation.RelTypeID == PROVIDES_ID
    ).order_by(models.PackageRelation.RelName.asc())
    context["provides"] = provides

    replaces = pkg.package_relations.filter(
        models.PackageRelation.RelTypeID == REPLACES_ID
    ).order_by(models.PackageRelation.RelName.asc())
    context["replaces"] = replaces

    return render_template(request, "packages/show.html", context)
|
||||
|
||||
|
||||
async def packages_unflag(request: Request, package_ids: list[int] = [], **kwargs):
    """Unflag every package base behind the selected package IDs.

    :return: (success, messages) tuple
    """
    if not package_ids:
        return False, ["You did not select any packages to unflag."]

    # Resolve the selection to distinct package bases, checking the
    # unflag credential per package as we go.
    unique_ids = set(package_ids)  # O(1) membership for the query below.
    selected = db.query(models.Package).filter(models.Package.ID.in_(unique_ids)).all()

    bases = set()
    for package in selected:
        # Only the original flagger (or privileged users) may unflag.
        allowed = request.user.has_credential(
            creds.PKGBASE_UNFLAG, approved=[package.PackageBase.Flagger]
        )
        if not allowed:
            return False, ["You did not select any packages to unflag."]
        bases.add(package.PackageBase)

    for pkgbase in bases:
        pkgbase_actions.pkgbase_unflag_instance(request, pkgbase)
    return True, ["The selected packages have been unflagged."]
|
||||
|
||||
|
||||
async def packages_notify(request: Request, package_ids: list[int] = [], **kwargs):
    """Enable notifications on the selected packages' bases.

    :return: (success, messages) tuple
    """
    # In cases where we encounter errors with the request, we'll
    # use this error tuple as a return value.
    # TODO: This error does not yet have a translation.
    error_tuple = (False, ["You did not select any packages to be notified about."])
    if not package_ids:
        return error_tuple

    bases = set()
    package_ids = set(package_ids)
    packages = db.query(models.Package).filter(models.Package.ID.in_(package_ids)).all()

    for pkg in packages:
        if pkg.PackageBase not in bases:
            bases.update({pkg.PackageBase})

    # Perform some checks on what the user selected for notify.
    for pkgbase in bases:
        # EXISTS query: does the user already have a notification
        # for this base?
        notif = db.query(
            pkgbase.notifications.filter(
                models.PackageNotification.UserID == request.user.ID
            ).exists()
        ).scalar()
        has_cred = request.user.has_credential(creds.PKGBASE_NOTIFY)

        # If the request user either does not have credentials
        # or the notification already exists:
        if not (has_cred and not notif):
            return error_tuple

    # If we get here, user input is good.
    for pkgbase in bases:
        pkgbase_actions.pkgbase_notify_instance(request, pkgbase)

    # TODO: This message does not yet have a translation.
    return True, ["The selected packages' notifications have been enabled."]
|
||||
|
||||
|
||||
async def packages_unnotify(request: Request, package_ids: list[int] = [], **kwargs):
    """Disable notifications on the selected packages' bases.

    :return: (success, messages) tuple
    """
    if not package_ids:
        # TODO: This error does not yet have a translation.
        return False, ["You did not select any packages for notification removal."]

    # TODO: This error does not yet have a translation.
    error_tuple = (
        False,
        ["A package you selected does not have notifications enabled."],
    )

    bases = set()
    package_ids = set(package_ids)
    packages = db.query(models.Package).filter(models.Package.ID.in_(package_ids)).all()

    for pkg in packages:
        if pkg.PackageBase not in bases:
            bases.update({pkg.PackageBase})

    # Perform some checks on what the user selected for notify.
    for pkgbase in bases:
        # EXISTS query: the user must actually have a notification
        # on every selected base for removal to proceed.
        notif = db.query(
            pkgbase.notifications.filter(
                models.PackageNotification.UserID == request.user.ID
            ).exists()
        ).scalar()
        if not notif:
            return error_tuple

    for pkgbase in bases:
        pkgbase_actions.pkgbase_unnotify_instance(request, pkgbase)

    # TODO: This message does not yet have a translation.
    return True, ["The selected packages' notifications have been removed."]
|
||||
|
||||
|
||||
async def packages_adopt(
    request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs
):
    """Adopt every package base behind the selected package IDs.

    :return: (success, messages) tuple
    """
    if not package_ids:
        return False, ["You did not select any packages to adopt."]

    if not confirm:
        return False, [
            "The selected packages have not been adopted, "
            "check the confirmation checkbox."
        ]

    # Resolve the selection to a distinct set of package bases.
    rows = (
        db.query(models.Package)
        .filter(models.Package.ID.in_(set(package_ids)))
        .all()
    )
    bases = {row.PackageBase for row in rows}

    # Check that the user has credentials for every package they selected:
    # an unprivileged user may only adopt orphaned (maintainer-less) bases.
    for pkgbase in bases:
        privileged = request.user.has_credential(creds.PKGBASE_ADOPT)
        if not privileged and pkgbase.Maintainer:
            # TODO: This error needs to be translated.
            return False, [
                "You are not allowed to adopt one of the packages you selected."
            ]

    # Now, really adopt the bases.
    for pkgbase in bases:
        pkgbase_actions.pkgbase_adopt_instance(request, pkgbase)

    return True, ["The selected packages have been adopted."]
|
||||
|
||||
|
||||
def disown_all(request: Request, pkgbases: list[models.PackageBase]) -> list[str]:
    """Disown each base in `pkgbases`, collecting error messages.

    :return: List of InvariantError messages (empty on full success)
    """
    failures: list[str] = []
    for base in pkgbases:
        try:
            pkgbase_actions.pkgbase_disown_instance(request, base)
        except InvariantError as err:
            failures.append(str(err))
    return failures
|
||||
|
||||
|
||||
async def packages_disown(
    request: Request, package_ids: list[int] = [], confirm: bool = False, **kwargs
):
    """Disown every package base behind the selected package IDs.

    :return: (success, messages) tuple
    """
    if not package_ids:
        return False, ["You did not select any packages to disown."]

    if not confirm:
        return False, [
            "The selected packages have not been disowned, "
            "check the confirmation checkbox."
        ]

    # Resolve the selection to a distinct set of package bases.
    rows = (
        db.query(models.Package)
        .filter(models.Package.ID.in_(set(package_ids)))
        .all()
    )
    bases = {row.PackageBase for row in rows}

    # Check that the user has credentials for every package they selected.
    for pkgbase in bases:
        allowed = request.user.has_credential(
            creds.PKGBASE_DISOWN, approved=[pkgbase.Maintainer]
        )
        if not allowed:
            # TODO: This error needs to be translated.
            return False, [
                "You are not allowed to disown one of the packages you selected."
            ]

    # Now, disown all the bases if we can.
    if errors := disown_all(request, bases):
        return False, errors

    return True, ["The selected packages have been disowned."]
|
||||
|
||||
|
||||
async def packages_delete(
    request: Request,
    package_ids: list[int] = [],
    confirm: bool = False,
    merge_into: str = str(),
    **kwargs,
):
    """Delete every package base behind the selected package IDs.

    :return: (success, messages) tuple
    """
    if not package_ids:
        return False, ["You did not select any packages to delete."]

    if not confirm:
        return (
            False,
            [
                "The selected packages have not been deleted, "
                "check the confirmation checkbox."
            ],
        )

    if not request.user.has_credential(creds.PKGBASE_DELETE):
        return False, ["You do not have permission to delete packages."]

    # set-ify package_ids and query the database for related records.
    package_ids = set(package_ids)
    packages = db.query(models.Package).filter(models.Package.ID.in_(package_ids)).all()

    if len(packages) != len(package_ids):
        # Let the user know there was an issue with their input: they have
        # provided at least one package_id which does not exist in the DB.
        # TODO: This error has not yet been translated.
        return False, ["One of the packages you selected does not exist."]

    # Make a set out of all package bases related to `packages`.
    bases = {pkg.PackageBase for pkg in packages}
    deleted_bases, notifs = [], []
    for pkgbase in bases:
        deleted_bases.append(pkgbase.Name)
        notifs += pkgbase_actions.pkgbase_delete_instance(request, pkgbase)

    # Log out the fact that this happened for accountability.
    logger.info(
        f"Privileged user '{request.user.Username}' deleted the "
        f"following package bases: {str(deleted_bases)}."
    )

    # Send the collected notifications only after all deletions succeeded.
    util.apply_all(notifs, lambda n: n.send())
    return True, ["The selected packages have been deleted."]
|
||||
|
||||
|
||||
# A mapping of action string -> callback functions used within the
# `packages_post` route below. We expect any action callback to
# return a tuple in the format: (succeeded: bool, message: list[str]).
# Callbacks also accept `confirm` and arbitrary **kwargs.
PACKAGE_ACTIONS = {
    "unflag": packages_unflag,
    "notify": packages_notify,
    "unnotify": packages_unnotify,
    "adopt": packages_adopt,
    "disown": packages_disown,
    "delete": packages_delete,
}
|
||||
|
||||
|
||||
@router.post("/packages")
@handle_form_exceptions
@requires_auth
async def packages_post(
    request: Request,
    IDs: list[int] = Form(default=[]),
    action: str = Form(default=str()),
    confirm: bool = Form(default=False),
):
    """Dispatch a bulk package action (unflag/notify/adopt/disown/...).

    Unknown actions re-render the listing with a BAD_REQUEST status.
    """
    context = make_context(request, "Packages")

    # If an invalid action is specified, just render GET /packages
    # with a BAD_REQUEST status_code.
    if action not in PACKAGE_ACTIONS:
        return await packages_get(request, context, HTTPStatus.BAD_REQUEST)

    # Action callbacks deal with `IDs` and `confirm` themselves and
    # report back a (success, messages) tuple.
    handler = PACKAGE_ACTIONS[action]
    outcome = await handler(request, package_ids=IDs, confirm=confirm)
    if outcome:  # If *anything* was returned:
        ok, messages = outcome
        if not ok:
            context["errors"] = messages
            return await packages_get(request, context, HTTPStatus.BAD_REQUEST)
        context["success"] = messages

    return await packages_get(request, context)
|
987
aurweb/routers/pkgbase.py
Normal file
987
aurweb/routers/pkgbase.py
Normal file
|
@ -0,0 +1,987 @@
|
|||
from http import HTTPStatus
|
||||
|
||||
from fastapi import APIRouter, Form, HTTPException, Query, Request, Response
|
||||
from fastapi.responses import JSONResponse, RedirectResponse
|
||||
from sqlalchemy import and_
|
||||
|
||||
from aurweb import aur_logging, config, db, l10n, templates, time, util
|
||||
from aurweb.auth import creds, requires_auth
|
||||
from aurweb.exceptions import InvariantError, ValidationError, handle_form_exceptions
|
||||
from aurweb.models import PackageBase
|
||||
from aurweb.models.package_comment import PackageComment
|
||||
from aurweb.models.package_keyword import PackageKeyword
|
||||
from aurweb.models.package_notification import PackageNotification
|
||||
from aurweb.models.package_request import ACCEPTED_ID, PENDING_ID, PackageRequest
|
||||
from aurweb.models.package_vote import PackageVote
|
||||
from aurweb.models.request_type import DELETION_ID, MERGE_ID, ORPHAN_ID
|
||||
from aurweb.packages.requests import update_closure_comment
|
||||
from aurweb.packages.util import get_pkg_or_base, get_pkgbase_comment
|
||||
from aurweb.pkgbase import actions, util as pkgbaseutil, validate
|
||||
from aurweb.scripts import notify, popupdate
|
||||
from aurweb.scripts.rendercomment import update_comment_render_fastapi
|
||||
from aurweb.templates import make_variable_context, render_template
|
||||
|
||||
logger = aur_logging.get_logger(__name__)
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}")
async def pkgbase(request: Request, name: str) -> Response:
    """
    Single package base view.

    :param request: FastAPI Request
    :param name: PackageBase.Name
    :return: HTMLResponse
    """
    # Get the PackageBase.
    pkgbase = get_pkg_or_base(name, PackageBase)

    # Redirect to /packages/{name} if there's exactly one related Package
    # and its name matches its PackageBase. The index is guarded by the
    # length check: previously `packages[0]` was evaluated unconditionally
    # and raised IndexError for a base with no packages.
    packages = pkgbase.packages.all()
    if len(packages) == 1 and packages[0].Name == pkgbase.Name:
        return RedirectResponse(
            f"/packages/{packages[0].Name}", status_code=int(HTTPStatus.SEE_OTHER)
        )

    # Add our base information.
    context = pkgbaseutil.make_context(request, pkgbase)
    context["packages"] = packages

    return render_template(request, "pkgbase/index.html", context)
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}/voters")
async def pkgbase_voters(request: Request, name: str) -> Response:
    """
    List the users who voted for a package base.

    Requires `request.user` to hold the creds.PKGBASE_LIST_VOTERS
    credential; everyone else is sent back to the package base page.

    :param request: FastAPI Request
    :param name: PackageBase.Name
    :return: HTMLResponse
    """
    base = get_pkg_or_base(name, PackageBase)

    allowed = request.user.has_credential(creds.PKGBASE_LIST_VOTERS)
    if not allowed:
        return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)

    context = templates.make_context(request, "Voters")
    context["pkgbase"] = base
    return render_template(request, "pkgbase/voters.html", context)
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}/flag-comment")
async def pkgbase_flag_comment(request: Request, name: str):
    """Show the comment left when this package base was flagged out-of-date."""
    base = get_pkg_or_base(name, PackageBase)

    # Nothing to show unless the base is currently flagged.
    if base.OutOfDateTS is None:
        return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)

    context = templates.make_context(request, "Flag Comment")
    context["pkgbase"] = base
    return render_template(request, "pkgbase/flag-comment.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/keywords")
@handle_form_exceptions
async def pkgbase_keywords(
    request: Request, name: str, keywords: str = Form(default=str())
):
    # Replace a package base's keyword set with the whitespace-separated
    # keywords supplied in the form field.
    pkgbase = get_pkg_or_base(name, PackageBase)

    # Only the maintainer, a comaintainer, or a user holding the
    # PKGBASE_SET_KEYWORDS credential may modify keywords.
    approved = [pkgbase.Maintainer] + [c.User for c in pkgbase.comaintainers]
    has_cred = creds.has_credential(
        request.user, creds.PKGBASE_SET_KEYWORDS, approved=approved
    )
    if not has_cred:
        return Response(status_code=HTTPStatus.UNAUTHORIZED)

    # Lowercase all keywords. Our database table is case insensitive,
    # and providing CI duplicates of keywords is erroneous.
    keywords = set(k.lower() for k in keywords.split())

    # Delete all keywords which are not supplied by the user.
    with db.begin():
        # Records whose keyword is absent from the submitted set.
        other_keywords = pkgbase.keywords.filter(~PackageKeyword.Keyword.in_(keywords))
        other_keyword_strings = set(kwd.Keyword.lower() for kwd in other_keywords)

        # Keywords the user re-submitted that already exist; these rows
        # are kept untouched so only genuinely new keywords are created.
        existing_keywords = set(
            kwd.Keyword.lower()
            for kwd in pkgbase.keywords.filter(
                ~PackageKeyword.Keyword.in_(other_keyword_strings)
            )
        )

        db.delete_all(other_keywords)
        new_keywords = keywords.difference(existing_keywords)
        for keyword in new_keywords:
            db.create(PackageKeyword, PackageBase=pkgbase, Keyword=keyword)

    return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}/flag")
@requires_auth
async def pkgbase_flag_get(request: Request, name: str):
    """Render the out-of-date flagging form for a package base."""
    base = get_pkg_or_base(name, PackageBase)

    # Users lacking the flag credential, or bases that are already
    # flagged, are redirected back to the package base page.
    allowed = request.user.has_credential(creds.PKGBASE_FLAG)
    if not allowed or base.OutOfDateTS is not None:
        return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)

    context = templates.make_context(request, "Flag Package Out-Of-Date")
    context["pkgbase"] = base
    return render_template(request, "pkgbase/flag.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/flag")
@handle_form_exceptions
@requires_auth
async def pkgbase_flag_post(
    request: Request, name: str, comments: str = Form(default=str())
):
    """Flag a package base out-of-date with a mandatory comment."""
    base = get_pkg_or_base(name, PackageBase)

    # A comment is required; re-render the form with an error otherwise.
    if not comments:
        context = templates.make_context(request, "Flag Package Out-Of-Date")
        context["pkgbase"] = base
        context["errors"] = [
            "The selected packages have not been flagged, please enter a comment."
        ]
        return render_template(
            request, "pkgbase/flag.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    validate.comment_raise_http_ex(comments)

    # Only flag when permitted and not already flagged.
    if request.user.has_credential(creds.PKGBASE_FLAG) and not base.OutOfDateTS:
        now = time.utcnow()
        with db.begin():
            base.OutOfDateTS = now
            base.Flagger = request.user
            base.FlaggerComment = comments

        notify.FlagNotification(request.user.ID, base.ID).send()

    return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments")
@handle_form_exceptions
@requires_auth
async def pkgbase_comments_post(
    request: Request,
    name: str,
    comment: str = Form(default=str()),
    enable_notifications: bool = Form(default=False),
):
    """Add a new comment via POST request."""
    base = get_pkg_or_base(name, PackageBase)

    validate.comment_raise_http_ex(comment)

    now = time.utcnow()
    with db.begin():
        # Persist the comment; `record` avoids shadowing the form field.
        record = db.create(
            PackageComment,
            User=request.user,
            PackageBase=base,
            Comments=comment,
            RenderedComment=str(),
            CommentTS=now,
        )

        # Opt the author into notifications if requested and not set yet.
        if enable_notifications and not request.user.notified(base):
            db.create(PackageNotification, User=request.user, PackageBase=base)
    update_comment_render_fastapi(record)

    notify.CommentNotification(request.user.ID, base.ID, record.ID).send()

    # Redirect to the pkgbase page, anchored at the new comment.
    return RedirectResponse(
        f"/pkgbase/{base.Name}#comment-{record.ID}",
        status_code=HTTPStatus.SEE_OTHER,
    )
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}/comments/{id}/form")
@requires_auth
async def pkgbase_comment_form(
    request: Request, name: str, id: int, next: str = Query(default=None)
):
    """
    Produce a comment form for comment {id}.

    This route is used as a partial HTML endpoint when editing
    package comments via Javascript. This endpoint used to be
    part of the RPC as type=get-comment-form and has been
    relocated here because the form returned cannot be used
    externally and requires a POST request by the user.

    :param request: FastAPI Request
    :param name: PackageBase.Name
    :param id: PackageComment.ID
    :param next: Optional `next` value used for the comment form
    :return: JSONResponse
    """
    base = get_pkg_or_base(name, PackageBase)
    comment = base.comments.filter(PackageComment.ID == id).first()
    if not comment:
        return JSONResponse({}, status_code=HTTPStatus.NOT_FOUND)

    # Only elevated users or the comment's author may fetch the form.
    if not request.user.is_elevated() and request.user != comment.User:
        return JSONResponse({}, status_code=HTTPStatus.UNAUTHORIZED)

    context = pkgbaseutil.make_context(request, base)
    context["comment"] = comment
    context["next"] = next if next else f"/pkgbase/{name}"

    form = templates.render_raw_template(
        request, "partials/packages/comment_form.html", context
    )
    return JSONResponse({"form": form})
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}/comments/{id}/edit")
@requires_auth
async def pkgbase_comment_edit(
    request: Request, name: str, id: int, next: str = Form(default=None)
):
    """
    Render the non-javascript comment edit form.

    :param request: FastAPI Request
    :param name: PackageBase.Name
    :param id: PackageComment.ID
    :param next: Optional `next` parameter used in the POST request
    :return: HTMLResponse
    """
    base = get_pkg_or_base(name, PackageBase)
    comment = get_pkgbase_comment(base, id)

    # Default the redirect target to the package base page.
    target = next or f"/pkgbase/{name}"

    context = await make_variable_context(request, "Edit comment", next=target)
    context["comment"] = comment
    return render_template(request, "pkgbase/comments/edit.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments/{id}")
@handle_form_exceptions
@requires_auth
async def pkgbase_comment_post(
    request: Request,
    name: str,
    id: int,
    comment: str = Form(default=str()),
    enable_notifications: bool = Form(default=False),
    next: str = Form(default=None),
    cancel: bool = Form(default=False),
):
    """Edit an existing comment."""
    # A cancelled edit just returns to the comment without touching it.
    if cancel:
        return RedirectResponse(
            f"/pkgbase/{name}#comment-{id}", status_code=HTTPStatus.SEE_OTHER
        )

    pkgbase = get_pkg_or_base(name, PackageBase)
    db_comment = get_pkgbase_comment(pkgbase, id)

    # Validate the comment text before checking authorship.
    validate.comment_raise_http_ex(comment)

    # Only the comment's author may edit it via this route.
    if request.user.ID != db_comment.UsersID:
        raise HTTPException(status_code=HTTPStatus.UNAUTHORIZED)

    # If the provided comment is different than the record's version,
    # update the db record and stamp the editor/time.
    now = time.utcnow()
    if db_comment.Comments != comment:
        with db.begin():
            db_comment.Comments = comment
            db_comment.Editor = request.user
            db_comment.EditedTS = now

    # Optionally subscribe the editor to notifications for this base
    # (only if no notification record exists yet).
    if enable_notifications:
        with db.begin():
            db_notif = request.user.notifications.filter(
                PackageNotification.PackageBaseID == pkgbase.ID
            ).first()
            if not db_notif:
                db.create(PackageNotification, User=request.user, PackageBase=pkgbase)
    # Re-render the (possibly updated) comment's HTML.
    update_comment_render_fastapi(db_comment)

    if not next:
        next = f"/pkgbase/{pkgbase.Name}"

    # Redirect to the pkgbase page anchored to the updated comment.
    return RedirectResponse(
        f"{next}#comment-{db_comment.ID}", status_code=HTTPStatus.SEE_OTHER
    )
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments/{id}/pin")
@handle_form_exceptions
@requires_auth
async def pkgbase_comment_pin(
    request: Request, name: str, id: int, next: str = Form(default=None)
):
    """
    Pin a comment by setting its PinnedTS.

    :param request: FastAPI Request
    :param name: PackageBase.Name
    :param id: PackageComment.ID
    :param next: Optional `next` parameter used in the POST request
    :return: RedirectResponse to `next`
    """
    base = get_pkg_or_base(name, PackageBase)
    comment = get_pkgbase_comment(base, id)

    allowed = request.user.has_credential(
        creds.COMMENT_PIN, approved=comment.maintainers()
    )
    if not allowed:
        _ = l10n.get_translator_for_request(request)
        raise HTTPException(
            status_code=HTTPStatus.UNAUTHORIZED,
            detail=_("You are not allowed to pin this comment."),
        )

    now = time.utcnow()
    with db.begin():
        comment.PinnedTS = now

    return RedirectResponse(
        next or f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER
    )
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments/{id}/unpin")
@handle_form_exceptions
@requires_auth
async def pkgbase_comment_unpin(
    request: Request, name: str, id: int, next: str = Form(default=None)
):
    """
    Unpin a comment by clearing its PinnedTS (set to 0).

    :param request: FastAPI Request
    :param name: PackageBase.Name
    :param id: PackageComment.ID
    :param next: Optional `next` parameter used in the POST request
    :return: RedirectResponse to `next`
    """
    base = get_pkg_or_base(name, PackageBase)
    comment = get_pkgbase_comment(base, id)

    allowed = request.user.has_credential(
        creds.COMMENT_PIN, approved=comment.maintainers()
    )
    if not allowed:
        _ = l10n.get_translator_for_request(request)
        raise HTTPException(
            status_code=HTTPStatus.UNAUTHORIZED,
            detail=_("You are not allowed to unpin this comment."),
        )

    with db.begin():
        comment.PinnedTS = 0

    return RedirectResponse(
        next or f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER
    )
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments/{id}/delete")
@handle_form_exceptions
@requires_auth
async def pkgbase_comment_delete(
    request: Request, name: str, id: int, next: str = Form(default=None)
):
    """
    Soft-delete a comment.

    This action does **not** remove the comment from the database; it
    sets the comment's DelTS and Deleter, which is used to decide who
    gets to view the comment and what utilities it gets.

    :param request: FastAPI Request
    :param name: PackageBase.Name
    :param id: PackageComment.ID
    :param next: Optional `next` parameter used in the POST request
    :return: RedirectResponse to `next`
    """
    base = get_pkg_or_base(name, PackageBase)
    comment = get_pkgbase_comment(base, id)

    allowed = request.user.has_credential(creds.COMMENT_DELETE, [comment.User])
    if not allowed:
        _ = l10n.get_translator_for_request(request)
        raise HTTPException(
            status_code=HTTPStatus.UNAUTHORIZED,
            detail=_("You are not allowed to delete this comment."),
        )

    now = time.utcnow()
    with db.begin():
        comment.Deleter = request.user
        comment.DelTS = now

    return RedirectResponse(
        next or f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER
    )
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comments/{id}/undelete")
@handle_form_exceptions
@requires_auth
async def pkgbase_comment_undelete(
    request: Request, name: str, id: int, next: str = Form(default=None)
):
    """
    Undelete a soft-deleted comment.

    This action does **not** restore any rows; it clears the comment's
    DelTS and Deleter, which returns the comment to a standard state.

    :param request: FastAPI Request
    :param name: PackageBase.Name
    :param id: PackageComment.ID
    :param next: Optional `next` parameter used in the POST request
    :return: RedirectResponse to `next`
    """
    base = get_pkg_or_base(name, PackageBase)
    comment = get_pkgbase_comment(base, id)

    allowed = request.user.has_credential(
        creds.COMMENT_UNDELETE, approved=[comment.User]
    )
    if not allowed:
        _ = l10n.get_translator_for_request(request)
        raise HTTPException(
            status_code=HTTPStatus.UNAUTHORIZED,
            detail=_("You are not allowed to undelete this comment."),
        )

    with db.begin():
        comment.Deleter = None
        comment.DelTS = None

    return RedirectResponse(
        next or f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER
    )
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/vote")
@handle_form_exceptions
@requires_auth
async def pkgbase_vote(request: Request, name: str):
    """Cast the requesting user's vote for a package base."""
    base = get_pkg_or_base(name, PackageBase)

    existing = base.package_votes.filter(
        PackageVote.UsersID == request.user.ID
    ).first()
    # Create a vote only when permitted and not already voted.
    if request.user.has_credential(creds.PKGBASE_VOTE) and not existing:
        now = time.utcnow()
        with db.begin():
            db.create(PackageVote, User=request.user, PackageBase=base, VoteTS=now)

        # Update NumVotes/Popularity.
        popupdate.run_single(base)

    return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/unvote")
@handle_form_exceptions
@requires_auth
async def pkgbase_unvote(request: Request, name: str):
    """Withdraw the requesting user's vote for a package base."""
    base = get_pkg_or_base(name, PackageBase)

    existing = base.package_votes.filter(
        PackageVote.UsersID == request.user.ID
    ).first()
    # Remove the vote only when permitted and one exists.
    if request.user.has_credential(creds.PKGBASE_VOTE) and existing:
        with db.begin():
            db.delete(existing)

        # Update NumVotes/Popularity.
        popupdate.run_single(base)

    return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/notify")
@handle_form_exceptions
@requires_auth
async def pkgbase_notify(request: Request, name: str):
    """Subscribe the requesting user to notifications for `name`."""
    base = get_pkg_or_base(name, PackageBase)
    actions.pkgbase_notify_instance(request, base)
    return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/unnotify")
@handle_form_exceptions
@requires_auth
async def pkgbase_unnotify(request: Request, name: str):
    """Unsubscribe the requesting user from notifications for `name`."""
    base = get_pkg_or_base(name, PackageBase)
    actions.pkgbase_unnotify_instance(request, base)
    return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/unflag")
@handle_form_exceptions
@requires_auth
async def pkgbase_unflag(request: Request, name: str):
    """Clear the out-of-date flag on a package base."""
    base = get_pkg_or_base(name, PackageBase)
    actions.pkgbase_unflag_instance(request, base)
    return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}/disown")
@requires_auth
async def pkgbase_disown_get(
    request: Request, name: str, next: str = Query(default=str())
):
    """
    Render the disown confirmation page for a package base.

    Only the maintainer, a co-maintainer, or a user holding the
    PKGBASE_DISOWN credential may view this page; everyone else is
    redirected back to the package base.

    :param request: FastAPI Request
    :param name: PackageBase.Name
    :param next: Optional redirect target carried into the POST form
    :return: HTMLResponse
    """
    pkgbase = get_pkg_or_base(name, PackageBase)

    comaints = {c.User for c in pkgbase.comaintainers}
    approved = [pkgbase.Maintainer] + list(comaints)
    has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, approved=approved)
    if not has_cred:
        return RedirectResponse(f"/pkgbase/{name}", HTTPStatus.SEE_OTHER)

    context = templates.make_context(request, "Disown Package")
    context["pkgbase"] = pkgbase
    # BUG FIX: the fallback was the literal string "/pkgbase/{name}" because
    # the f-string prefix was missing; interpolate the package base name.
    context["next"] = next or f"/pkgbase/{name}"
    context["is_maint"] = request.user == pkgbase.Maintainer
    context["is_comaint"] = request.user in comaints
    return render_template(request, "pkgbase/disown.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/disown")
@handle_form_exceptions
@requires_auth
async def pkgbase_disown_post(
    request: Request,
    name: str,
    comments: str = Form(default=str()),
    confirm: bool = Form(default=False),
    next: str = Form(default=str()),
):
    # Disown a package base: drop maintainership after a confirmed form
    # submission from a maintainer, comaintainer, or privileged user.
    pkgbase = get_pkg_or_base(name, PackageBase)

    # Comments are optional here; only validate when one was supplied.
    if comments:
        validate.comment_raise_http_ex(comments)

    comaints = {c.User for c in pkgbase.comaintainers}
    approved = [pkgbase.Maintainer] + list(comaints)
    has_cred = request.user.has_credential(creds.PKGBASE_DISOWN, approved=approved)
    if not has_cred:
        return RedirectResponse(f"/pkgbase/{name}", HTTPStatus.SEE_OTHER)

    context = templates.make_context(request, "Disown Package")
    context["pkgbase"] = pkgbase
    context["is_maint"] = request.user == pkgbase.Maintainer
    context["is_comaint"] = request.user in comaints

    # The confirmation checkbox must be ticked before anything happens.
    if not confirm:
        context["errors"] = [
            (
                "The selected packages have not been disowned, "
                "check the confirmation checkbox."
            )
        ]
        return render_template(
            request, "pkgbase/disown.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    # NOTE(review): only non-(co)maintainers (i.e. privileged disowns)
    # record `comments` as the closure comment on pending orphan
    # requests — presumably a self-disown needs no request closure;
    # confirm against update_closure_comment's contract.
    if request.user != pkgbase.Maintainer and request.user not in comaints:
        with db.begin():
            update_closure_comment(pkgbase, ORPHAN_ID, comments)

    try:
        actions.pkgbase_disown_instance(request, pkgbase)
    except InvariantError as exc:
        # The action refused to disown; surface its message to the user.
        context["errors"] = [str(exc)]
        return render_template(
            request, "pkgbase/disown.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    next = next or f"/pkgbase/{name}"
    return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/adopt")
@handle_form_exceptions
@requires_auth
async def pkgbase_adopt_post(request: Request, name: str):
    """Adopt a package base for the requesting user."""
    base = get_pkg_or_base(name, PackageBase)

    # If the user has credentials, they'll adopt the package regardless
    # of maintainership. Otherwise, we'll promote the user to maintainer
    # if no maintainer currently exists.
    if request.user.has_credential(creds.PKGBASE_ADOPT) or not base.Maintainer:
        actions.pkgbase_adopt_instance(request, base)

    return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}/comaintainers")
@requires_auth
async def pkgbase_comaintainers(request: Request, name: str) -> Response:
    """Render the co-maintainer management page for a package base."""
    base = get_pkg_or_base(name, PackageBase)

    # Unauthorized users (Non-TU/Dev and not the pkgbase maintainer)
    # get redirected to the package base's page.
    allowed = request.user.has_credential(
        creds.PKGBASE_EDIT_COMAINTAINERS, approved=[base.Maintainer]
    )
    if not allowed:
        return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)

    # Add our base information.
    context = templates.make_context(request, "Manage Co-maintainers")
    context["pkgbase"] = base
    context["comaintainers"] = [c.User.Username for c in base.comaintainers]

    return render_template(request, "pkgbase/comaintainers.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/comaintainers")
@handle_form_exceptions
@requires_auth
async def pkgbase_comaintainers_post(
    request: Request, name: str, users: str = Form(default=str())
) -> Response:
    # Replace a package base's comaintainer list with the
    # newline-separated usernames supplied in the form field.
    pkgbase = get_pkg_or_base(name, PackageBase)

    # Unauthorized users (Non-TU/Dev and not the pkgbase maintainer)
    # get redirected to the package base's page.
    has_creds = request.user.has_credential(
        creds.PKGBASE_EDIT_COMAINTAINERS, approved=[pkgbase.Maintainer]
    )
    if not has_creds:
        return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)

    # Normalize the form input to a set of non-empty usernames, and
    # collect the usernames currently on record.
    users = {e.strip() for e in users.split("\n") if bool(e.strip())}
    records = {c.User.Username for c in pkgbase.comaintainers}

    # First remove comaintainers that were dropped from the submission.
    users_to_rm = records.difference(users)
    pkgbaseutil.remove_comaintainers(pkgbase, users_to_rm)
    logger.debug(
        f"{request.user} removed comaintainers from " f"{pkgbase.Name}: {users_to_rm}"
    )

    # Then add the newly submitted usernames; `error` is a user-facing
    # message (e.g. an unknown username) when the addition fails.
    users_to_add = users.difference(records)
    error = pkgbaseutil.add_comaintainers(request, pkgbase, users_to_add)
    if error:
        context = templates.make_context(request, "Manage Co-maintainers")
        context["pkgbase"] = pkgbase
        context["comaintainers"] = [c.User.Username for c in pkgbase.comaintainers]
        context["errors"] = [error]
        return render_template(request, "pkgbase/comaintainers.html", context)

    logger.debug(
        f"{request.user} added comaintainers to " f"{pkgbase.Name}: {users_to_add}"
    )

    return RedirectResponse(
        f"/pkgbase/{pkgbase.Name}", status_code=HTTPStatus.SEE_OTHER
    )
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}/request")
@requires_auth
async def pkgbase_request(
    request: Request, name: str, next: str = Query(default=str())
):
    """Render the request submission form for a package base."""
    base = get_pkg_or_base(name, PackageBase)
    context = await make_variable_context(request, "Submit Request")
    context["pkgbase"] = base
    context["next"] = next if next else f"/pkgbase/{name}"
    return render_template(request, "pkgbase/request.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/request")
@handle_form_exceptions
@requires_auth
async def pkgbase_request_post(
    request: Request,
    name: str,
    type: str = Form(...),
    merge_into: str = Form(default=None),
    comments: str = Form(default=str()),
    next: str = Form(default=str()),
):
    # Create a deletion, merge, or orphan request for a package base,
    # auto-accepting it in two special cases (see below).
    pkgbase = get_pkg_or_base(name, PackageBase)

    # Create our render context.
    context = await make_variable_context(request, "Submit Request")
    context["pkgbase"] = pkgbase

    # Map the form's `type` field to PackageRequest type IDs.
    types = {"deletion": DELETION_ID, "merge": MERGE_ID, "orphan": ORPHAN_ID}

    if type not in types:
        # In the case that someone crafted a POST request with an invalid
        # type, just return them to the request form with BAD_REQUEST status.
        return render_template(
            request, "pkgbase/request.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    # Validate the request (comment required, merge target rules, etc.);
    # a failure re-renders the form with the validator's messages.
    try:
        validate.request(pkgbase, type, comments, merge_into, context)
    except ValidationError as exc:
        logger.error(f"Request Validation Error: {str(exc.data)}")
        context["errors"] = exc.data
        return render_template(request, "pkgbase/request.html", context)

    # All good. Create a new PackageRequest based on the given type.
    now = time.utcnow()
    with db.begin():
        pkgreq = db.create(
            PackageRequest,
            ReqTypeID=types.get(type),
            User=request.user,
            RequestTS=now,
            PackageBase=pkgbase,
            PackageBaseName=pkgbase.Name,
            MergeBaseName=merge_into,
            Comments=comments,
            ClosureComment=str(),
        )

    # Prepare notification object.
    notif = notify.RequestOpenNotification(
        request.user.ID,
        pkgreq.ID,
        type,
        pkgreq.PackageBase.ID,
        merge_into=merge_into or None,
    )

    # Send the notification now that we're out of the DB scope.
    notif.send()

    # Thresholds (in seconds, presumably — confirm config units) for the
    # two auto-accept paths below.
    auto_orphan_age = config.getint("options", "auto_orphan_age")
    auto_delete_age = config.getint("options", "auto_delete_age")

    ood_ts = pkgbase.OutOfDateTS or 0
    # Flagged out-of-date for at least auto_orphan_age.
    flagged = ood_ts and (now - ood_ts) >= auto_orphan_age
    is_maintainer = pkgbase.Maintainer == request.user
    # NOTE(review): despite the name, `outdated` is True when the base was
    # submitted *recently* (within auto_delete_age) — a fresh submission
    # its own maintainer may delete without review.
    outdated = (now - pkgbase.SubmittedTS) <= auto_delete_age

    if type == "orphan" and flagged:
        # This request should be auto-accepted.
        with db.begin():
            pkgbase.Maintainer = None
            pkgreq.Status = ACCEPTED_ID
        notif = notify.RequestCloseNotification(
            request.user.ID, pkgreq.ID, pkgreq.status_display()
        )
        notif.send()
        logger.debug(f"New request #{pkgreq.ID} is marked for auto-orphan.")
    elif type == "deletion" and is_maintainer and outdated:
        # This request should be auto-accepted.
        notifs = actions.pkgbase_delete_instance(request, pkgbase, comments=comments)
        util.apply_all(notifs, lambda n: n.send())
        logger.debug(f"New request #{pkgreq.ID} is marked for auto-deletion.")

    # Redirect the submitting user to /packages.
    return RedirectResponse("/packages", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}/delete")
@requires_auth
async def pkgbase_delete_get(
    request: Request, name: str, next: str = Query(default=str())
):
    """Render the package base deletion confirmation page."""
    # Only users holding the delete credential may view this page.
    if not request.user.has_credential(creds.PKGBASE_DELETE):
        return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)

    context = templates.make_context(request, "Package Deletion")
    context["pkgbase"] = get_pkg_or_base(name, PackageBase)
    context["next"] = next if next else "/packages"
    return render_template(request, "pkgbase/delete.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/delete")
@handle_form_exceptions
@requires_auth
async def pkgbase_delete_post(
    request: Request,
    name: str,
    confirm: bool = Form(default=False),
    comments: str = Form(default=str()),
    next: str = Form(default="/packages"),
):
    """Delete a package base (privileged action).

    Requires the PKGBASE_DELETE credential and a ticked confirmation
    checkbox. An optional comment is validated, copied onto any pending
    deletion requests as their closure comment, and handed to the
    deletion action for notifications.

    :param request: Authenticated FastAPI request
    :param name: Package base name from the URL path
    :param confirm: Must be truthy or the form is re-rendered with a 400
    :param comments: Optional reason recorded on closed requests
    :param next: Redirect target after a successful deletion
    """
    pkgbase = get_pkg_or_base(name, PackageBase)

    # Privileged action: bounce unprivileged users back to the pkgbase page.
    if not request.user.has_credential(creds.PKGBASE_DELETE):
        return RedirectResponse(f"/pkgbase/{name}", status_code=HTTPStatus.SEE_OTHER)

    if not confirm:
        # Confirmation checkbox not ticked: re-render the form with an error.
        context = templates.make_context(request, "Package Deletion")
        context["pkgbase"] = pkgbase
        context["errors"] = [
            (
                "The selected packages have not been deleted, "
                "check the confirmation checkbox."
            )
        ]
        return render_template(
            request, "pkgbase/delete.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    if comments:
        # Raises an HTTP exception if the comment fails validation.
        validate.comment_raise_http_ex(comments)
        # Update any existing deletion requests' ClosureComment.
        with db.begin():
            requests = pkgbase.requests.filter(
                and_(
                    PackageRequest.Status == PENDING_ID,
                    PackageRequest.ReqTypeID == DELETION_ID,
                )
            )
            for pkgreq in requests:
                pkgreq.ClosureComment = comments

    # Perform the deletion, then send the notifications it produced.
    notifs = actions.pkgbase_delete_instance(request, pkgbase, comments=comments)
    util.apply_all(notifs, lambda n: n.send())
    return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
|
||||
@router.get("/pkgbase/{name}/merge")
|
||||
@requires_auth
|
||||
async def pkgbase_merge_get(
|
||||
request: Request,
|
||||
name: str,
|
||||
into: str = Query(default=str()),
|
||||
next: str = Query(default=str()),
|
||||
):
|
||||
pkgbase = get_pkg_or_base(name, PackageBase)
|
||||
|
||||
context = templates.make_context(request, "Package Merging")
|
||||
context.update({"pkgbase": pkgbase, "into": into, "next": next})
|
||||
|
||||
status_code = HTTPStatus.OK
|
||||
# TODO: Lookup errors from credential instead of hardcoding them.
|
||||
# Idea: Something like credential_errors(creds.PKGBASE_MERGE).
|
||||
# Perhaps additionally: bad_credential_status_code(creds.PKGBASE_MERGE).
|
||||
# Don't take these examples verbatim. We should find good naming.
|
||||
if not request.user.has_credential(creds.PKGBASE_MERGE):
|
||||
context["errors"] = [
|
||||
"Only Package Maintainers and Developers can merge packages."
|
||||
]
|
||||
status_code = HTTPStatus.UNAUTHORIZED
|
||||
|
||||
return render_template(
|
||||
request, "pkgbase/merge.html", context, status_code=status_code
|
||||
)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/pkgbase/{name}/merge")
@handle_form_exceptions
@requires_auth
async def pkgbase_merge_post(
    request: Request,
    name: str,
    into: str = Form(default=str()),
    comments: str = Form(default=str()),
    confirm: bool = Form(default=False),
    next: str = Form(default=str()),
):
    """Merge a package base into another one (privileged action).

    Validates credential, confirmation checkbox, existence of the merge
    target, and that a pkgbase is not merged into itself; each failure
    re-renders the form with an error. On success, pending merge
    requests get their closure comment updated and the merge action
    is executed.

    :param request: Authenticated FastAPI request
    :param name: Package base being merged (source)
    :param into: Name of the package base receiving votes/comments
    :param comments: Optional closure comment for merge requests
    :param confirm: Must be truthy to proceed
    :param next: Redirect target; defaults to the target's pkgbase page
    """
    pkgbase = get_pkg_or_base(name, PackageBase)
    context = await make_variable_context(request, "Package Merging")
    context["pkgbase"] = pkgbase

    # TODO: Lookup errors from credential instead of hardcoding them.
    if not request.user.has_credential(creds.PKGBASE_MERGE):
        context["errors"] = [
            "Only Package Maintainers and Developers can merge packages."
        ]
        return render_template(
            request, "pkgbase/merge.html", context, status_code=HTTPStatus.UNAUTHORIZED
        )

    if not confirm:
        # NOTE(review): this message talks about deletion on the merge
        # path — confirm against the translation catalog before changing.
        context["errors"] = [
            "The selected packages have not been deleted, "
            "check the confirmation checkbox."
        ]
        return render_template(
            request, "pkgbase/merge.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    # The merge target must exist; get_pkg_or_base raises on lookup failure.
    try:
        target = get_pkg_or_base(into, PackageBase)
    except HTTPException:
        context["errors"] = ["Cannot find package to merge votes and comments into."]
        return render_template(
            request, "pkgbase/merge.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    if pkgbase == target:
        context["errors"] = ["Cannot merge a package base with itself."]
        return render_template(
            request, "pkgbase/merge.html", context, status_code=HTTPStatus.BAD_REQUEST
        )

    if comments:
        # Raises an HTTP exception if the comment fails validation.
        validate.comment_raise_http_ex(comments)

    # Record the comment on any pending merge requests before merging.
    with db.begin():
        update_closure_comment(pkgbase, MERGE_ID, comments, target=target)

    # Merge pkgbase into target.
    actions.pkgbase_merge_instance(request, pkgbase, target, comments=comments)

    if not next:
        next = f"/pkgbase/{target.Name}"

    # Redirect to the newly merged into package.
    return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER)
|
166
aurweb/routers/requests.py
Normal file
166
aurweb/routers/requests.py
Normal file
|
@ -0,0 +1,166 @@
|
|||
from http import HTTPStatus
|
||||
|
||||
from fastapi import APIRouter, Form, Query, Request
|
||||
from fastapi.responses import RedirectResponse
|
||||
from sqlalchemy import case, orm
|
||||
|
||||
from aurweb import db, defaults, time, util
|
||||
from aurweb.auth import creds, requires_auth
|
||||
from aurweb.exceptions import handle_form_exceptions
|
||||
from aurweb.models import PackageBase, PackageRequest, User
|
||||
from aurweb.models.package_request import (
|
||||
ACCEPTED_ID,
|
||||
CLOSED_ID,
|
||||
PENDING_ID,
|
||||
REJECTED_ID,
|
||||
)
|
||||
from aurweb.requests.util import get_pkgreq_by_id
|
||||
from aurweb.scripts import notify
|
||||
from aurweb.statistics import get_request_counts
|
||||
from aurweb.templates import make_context, render_template
|
||||
|
||||
# Query parameters treated as status filters on the /requests page.
# When none of these is present in the query string, the requests()
# handler enables pending-only filtering by default.
# NOTE(review): this set contains "filter_maintainers_requests" while the
# requests() handler's parameter is "filter_maintainer_requests" (no 's')
# — confirm which spelling the templates actually submit.
FILTER_PARAMS = {
    "filter_pending",
    "filter_closed",
    "filter_accepted",
    "filter_rejected",
    "filter_maintainers_requests",
}

router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/requests")
|
||||
@requires_auth
|
||||
async def requests( # noqa: C901
|
||||
request: Request,
|
||||
O: int = Query(default=defaults.O),
|
||||
PP: int = Query(default=defaults.PP),
|
||||
filter_pending: bool = False,
|
||||
filter_closed: bool = False,
|
||||
filter_accepted: bool = False,
|
||||
filter_rejected: bool = False,
|
||||
filter_maintainer_requests: bool = False,
|
||||
filter_pkg_name: str = None,
|
||||
):
|
||||
context = make_context(request, "Requests")
|
||||
|
||||
context["q"] = dict(request.query_params)
|
||||
|
||||
# Set pending filter by default if no status filter was provided.
|
||||
# In case we got a package name filter, but no status filter,
|
||||
# we enable the other ones too.
|
||||
if not dict(request.query_params).keys() & FILTER_PARAMS:
|
||||
filter_pending = True
|
||||
if filter_pkg_name:
|
||||
filter_closed = True
|
||||
filter_accepted = True
|
||||
filter_rejected = True
|
||||
|
||||
O, PP = util.sanitize_params(str(O), str(PP))
|
||||
context["O"] = O
|
||||
context["PP"] = PP
|
||||
context["filter_pending"] = filter_pending
|
||||
context["filter_closed"] = filter_closed
|
||||
context["filter_accepted"] = filter_accepted
|
||||
context["filter_rejected"] = filter_rejected
|
||||
context["filter_maintainer_requests"] = filter_maintainer_requests
|
||||
context["filter_pkg_name"] = filter_pkg_name
|
||||
|
||||
Maintainer = orm.aliased(User)
|
||||
# A PackageRequest query
|
||||
query = (
|
||||
db.query(PackageRequest)
|
||||
.join(PackageBase)
|
||||
.join(User, PackageRequest.UsersID == User.ID, isouter=True)
|
||||
.join(Maintainer, PackageBase.MaintainerUID == Maintainer.ID, isouter=True)
|
||||
)
|
||||
|
||||
# Requests statistics
|
||||
counts = get_request_counts()
|
||||
for k in counts:
|
||||
context[k] = counts[k]
|
||||
|
||||
# Apply status filters
|
||||
in_filters = []
|
||||
if filter_pending:
|
||||
in_filters.append(PENDING_ID)
|
||||
if filter_closed:
|
||||
in_filters.append(CLOSED_ID)
|
||||
if filter_accepted:
|
||||
in_filters.append(ACCEPTED_ID)
|
||||
if filter_rejected:
|
||||
in_filters.append(REJECTED_ID)
|
||||
filtered = query.filter(PackageRequest.Status.in_(in_filters))
|
||||
|
||||
# Name filter (contains)
|
||||
if filter_pkg_name:
|
||||
filtered = filtered.filter(PackageBase.Name.like(f"%{filter_pkg_name}%"))
|
||||
|
||||
# Additionally filter for requests made from package maintainer
|
||||
if filter_maintainer_requests:
|
||||
filtered = filtered.filter(PackageRequest.UsersID == PackageBase.MaintainerUID)
|
||||
# If the request user is not elevated (TU or Dev), then
|
||||
# filter PackageRequests which are owned by the request user.
|
||||
if not request.user.is_elevated():
|
||||
filtered = filtered.filter(PackageRequest.UsersID == request.user.ID)
|
||||
|
||||
context["total"] = filtered.count()
|
||||
context["results"] = (
|
||||
filtered.order_by(
|
||||
# Order primarily by the Status column being PENDING_ID,
|
||||
# and secondarily by RequestTS; both in descending order.
|
||||
case([(PackageRequest.Status == PENDING_ID, 1)], else_=0).desc(),
|
||||
PackageRequest.RequestTS.desc(),
|
||||
)
|
||||
.limit(PP)
|
||||
.offset(O)
|
||||
.all()
|
||||
)
|
||||
return render_template(request, "requests.html", context)
|
||||
|
||||
|
||||
@router.get("/requests/{id}/close")
|
||||
@requires_auth
|
||||
async def request_close(request: Request, id: int):
|
||||
pkgreq = get_pkgreq_by_id(id)
|
||||
if not request.user.is_elevated() and request.user != pkgreq.User:
|
||||
# Request user doesn't have permission here: redirect to '/'.
|
||||
return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
|
||||
|
||||
context = make_context(request, "Close Request")
|
||||
context["pkgreq"] = pkgreq
|
||||
return render_template(request, "requests/close.html", context)
|
||||
|
||||
|
||||
@db.async_retry_deadlock
@router.post("/requests/{id}/close")
@handle_form_exceptions
@requires_auth
async def request_close_post(
    request: Request, id: int, comments: str = Form(default=str())
):
    """Close a package request, marking it rejected.

    The request's creator may always close their own request; otherwise
    the PKGREQ_CLOSE credential is required. Records closer, closure
    comment and timestamp, then notifies interested parties.
    """
    pkgreq = get_pkgreq_by_id(id)

    # `pkgreq`.User can close their own request.
    if not request.user.has_credential(creds.PKGREQ_CLOSE, approved=[pkgreq.User]):
        # Request user doesn't have permission here: redirect to '/'.
        return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)

    context = make_context(request, "Close Request")
    context["pkgreq"] = pkgreq

    closed_at = time.utcnow()
    with db.begin():
        pkgreq.Closer = request.user
        pkgreq.ClosureComment = comments
        pkgreq.ClosedTS = closed_at
        pkgreq.Status = REJECTED_ID

    # Send the notification now that we're out of the DB scope.
    notification = notify.RequestCloseNotification(
        request.user.ID, pkgreq.ID, pkgreq.status_display()
    )
    notification.send()

    return RedirectResponse("/requests", status_code=HTTPStatus.SEE_OTHER)
|
320
aurweb/routers/rpc.py
Normal file
320
aurweb/routers/rpc.py
Normal file
|
@ -0,0 +1,320 @@
|
|||
"""
|
||||
RPC API routing module
|
||||
|
||||
For legacy route documentation, see https://aur.archlinux.org/rpc
|
||||
|
||||
Legacy Routes:
|
||||
- GET /rpc
|
||||
- POST /rpc
|
||||
|
||||
Legacy example (version 5): /rpc?v=5&type=info&arg=my-package
|
||||
|
||||
For OpenAPI route documentation, see https://aur.archlinux.org/docs
|
||||
|
||||
OpenAPI Routes:
|
||||
- GET /rpc/v{version}/info/{arg}
|
||||
- GET /rpc/v{version}/info
|
||||
- POST /rpc/v{version}/info
|
||||
- GET /rpc/v{version}/search/{arg}
|
||||
- GET /rpc/v{version}/search
|
||||
- POST /rpc/v{version}/search
|
||||
- GET /rpc/v{version}/suggest/{arg}
|
||||
|
||||
OpenAPI example (version 5): /rpc/v5/info/my-package
|
||||
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import re
|
||||
from http import HTTPStatus
|
||||
from typing import Optional
|
||||
from urllib.parse import unquote
|
||||
|
||||
import orjson
|
||||
from fastapi import APIRouter, Form, Query, Request, Response
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from aurweb import defaults
|
||||
from aurweb.exceptions import handle_form_exceptions
|
||||
from aurweb.ratelimit import check_ratelimit
|
||||
from aurweb.rpc import RPC, documentation
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def parse_args(request: Request):
    """Handle legacy logic of 'arg' and 'arg[]' query parameter handling.

    When 'arg' appears as the last argument given to the query string,
    that argument is used by itself as one single argument, regardless
    of any more 'arg' or 'arg[]' parameters supplied before it.

    When 'arg[]' appears as the last argument given to the query string,
    we iterate from last to first and build a list of arguments until
    we hit an 'arg'.

    TODO: This handling should be addressed in v6 of the RPC API. This
    was most likely a bi-product of legacy handling of versions 1-4
    which we no longer support.

    :param request: FastAPI request
    :returns: List of deduced arguments
    """
    # Walk the raw query string from last parameter to first, keeping
    # only 'arg' / 'arg[]' assignments as (key, value) pairs.
    raw = unquote(request.url.query)
    pairs = [
        param.split("=", 1)
        for param in reversed(raw.split("&"))
        if param.startswith(("arg=", "arg[]="))
    ]

    if not pairs:
        return []

    last_key = pairs[0][0]
    if last_key == "arg":
        # A trailing 'arg' wins outright and is the sole argument.
        return [pairs[0][1]]

    # Trailing 'arg[]': collect values (last to first) until a
    # different key shows up.
    collected = []
    for key, value in pairs:
        if key != last_key:
            break
        collected.append(value)
    return collected
|
||||
|
||||
|
||||
# Valid JSONP callback names: ASCII alphanumerics, parentheses,
# underscore and dot, limited to 128 characters.
JSONP_EXPR = re.compile(r"^[a-zA-Z0-9()_.]{1,128}$")
|
||||
|
||||
|
||||
async def rpc_request(
    request: Request,
    v: Optional[int] = None,
    type: Optional[str] = None,
    by: Optional[str] = defaults.RPC_SEARCH_BY,
    arg: Optional[str] = None,
    args: Optional[list[str]] = [],
    callback: Optional[str] = None,
):
    """Shared implementation behind every /rpc route.

    Applies rate limiting, legacy 'arg'/'arg[]' argument parsing,
    optional JSONP wrapping, and ETag-based 304 handling around a
    single RPC.handle() call.

    :param request: FastAPI request
    :param v: RPC protocol version
    :param type: RPC request type (e.g. "info", "search", "suggest")
    :param by: Search-by field, defaulting to defaults.RPC_SEARCH_BY
    :param arg: Single argument, used when no raw query string exists
    :param args: Extra arguments; never mutated here, so the shared
        mutable default list is harmless
    :param callback: Optional JSONP callback name
    :returns: JSON (or JSONP) Response
    """
    # Create a handle to our RPC class.
    rpc = RPC(version=v, type=type)

    # If ratelimit was exceeded, return a 429 Too Many Requests.
    if check_ratelimit(request):
        return JSONResponse(
            rpc.error("Rate limit reached"),
            status_code=int(HTTPStatus.TOO_MANY_REQUESTS),
        )

    # If `callback` was provided, produce a text/javascript response
    # valid for the jsonp callback. Otherwise, by default, return
    # application/json containing `output`.
    content_type = "application/json"
    if callback:
        # Callback names are restricted by JSONP_EXPR.
        if not re.match(JSONP_EXPR, callback):
            return rpc.error("Invalid callback name.")

        content_type = "text/javascript"

    # Prepare list of arguments for input. If 'arg' was given, it'll
    # be a list with one element.
    arguments = []
    if request.url.query:
        # A raw query string takes precedence: re-parse it with the
        # legacy 'arg'/'arg[]' rules.
        arguments = parse_args(request)
    else:
        if arg:
            arguments.append(arg)
        arguments += args

    data = rpc.handle(by=by, args=arguments)

    # Serialize `data` into JSON in a sorted fashion. This way, our
    # ETag header produced below will never end up changed.
    content = orjson.dumps(data, option=orjson.OPT_SORT_KEYS)

    # Produce an md5 hash based on `output`.
    md5 = hashlib.md5()
    md5.update(content)
    etag = md5.hexdigest()

    # The ETag header expects quotes to surround any identifier.
    # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag
    headers = {"Content-Type": content_type, "ETag": f'"{etag}"'}

    # If the client already holds this exact payload, answer 304
    # with no body.
    if_none_match = request.headers.get("If-None-Match", str())
    if if_none_match and if_none_match.strip('\t\n\r" ') == etag:
        return Response(headers=headers, status_code=int(HTTPStatus.NOT_MODIFIED))

    if callback:
        # Wrap the JSON payload in the JSONP callback expression.
        content = f"/**/{callback}({content.decode()})"

    return Response(content, headers=headers)
|
||||
|
||||
|
||||
@router.get("/rpc.php/") # Temporary! Remove on 03/04
|
||||
@router.get("/rpc.php") # Temporary! Remove on 03/04
|
||||
@router.get("/rpc/")
|
||||
@router.get("/rpc")
|
||||
async def rpc(
|
||||
request: Request,
|
||||
v: Optional[int] = Query(default=None),
|
||||
type: Optional[str] = Query(default=None),
|
||||
by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY),
|
||||
arg: Optional[str] = Query(default=None),
|
||||
args: Optional[list[str]] = Query(default=[], alias="arg[]"),
|
||||
callback: Optional[str] = Query(default=None),
|
||||
):
|
||||
if not request.url.query:
|
||||
return documentation()
|
||||
return await rpc_request(request, v, type, by, arg, args, callback)
|
||||
|
||||
|
||||
@router.get("/rpc.php/") # Temporary! Remove on 03/04
|
||||
@router.get("/rpc.php") # Temporary! Remove on 03/04
|
||||
@router.post("/rpc/")
|
||||
@router.post("/rpc")
|
||||
@handle_form_exceptions
|
||||
async def rpc_post(
|
||||
request: Request,
|
||||
v: Optional[int] = Form(default=None),
|
||||
type: Optional[str] = Form(default=None),
|
||||
by: Optional[str] = Form(default=defaults.RPC_SEARCH_BY),
|
||||
arg: Optional[str] = Form(default=None),
|
||||
args: list[str] = Form(default=[], alias="arg[]"),
|
||||
callback: Optional[str] = Form(default=None),
|
||||
):
|
||||
return await rpc_request(request, v, type, by, arg, args, callback)
|
||||
|
||||
|
||||
@router.get("/rpc/v{version}/info/{name}")
|
||||
async def rpc_openapi_info(request: Request, version: int, name: str):
|
||||
return await rpc_request(
|
||||
request,
|
||||
version,
|
||||
"info",
|
||||
defaults.RPC_SEARCH_BY,
|
||||
name,
|
||||
[],
|
||||
)
|
||||
|
||||
|
||||
@router.get("/rpc/v{version}/info")
|
||||
async def rpc_openapi_multiinfo(
|
||||
request: Request,
|
||||
version: int,
|
||||
args: Optional[list[str]] = Query(default=[], alias="arg[]"),
|
||||
):
|
||||
arg = args.pop(0) if args else None
|
||||
return await rpc_request(
|
||||
request,
|
||||
version,
|
||||
"info",
|
||||
defaults.RPC_SEARCH_BY,
|
||||
arg,
|
||||
args,
|
||||
)
|
||||
|
||||
|
||||
@router.post("/rpc/v{version}/info")
|
||||
async def rpc_openapi_multiinfo_post(
|
||||
request: Request,
|
||||
version: int,
|
||||
):
|
||||
data = await request.json()
|
||||
|
||||
args = data.get("arg", [])
|
||||
if not isinstance(args, list):
|
||||
rpc = RPC(version, "info")
|
||||
return JSONResponse(
|
||||
rpc.error("the 'arg' parameter must be of array type"),
|
||||
status_code=HTTPStatus.BAD_REQUEST,
|
||||
)
|
||||
|
||||
arg = args.pop(0) if args else None
|
||||
return await rpc_request(
|
||||
request,
|
||||
version,
|
||||
"info",
|
||||
defaults.RPC_SEARCH_BY,
|
||||
arg,
|
||||
args,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/rpc/v{version}/search/{arg}")
|
||||
async def rpc_openapi_search_arg(
|
||||
request: Request,
|
||||
version: int,
|
||||
arg: str,
|
||||
by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY),
|
||||
):
|
||||
return await rpc_request(
|
||||
request,
|
||||
version,
|
||||
"search",
|
||||
by,
|
||||
arg,
|
||||
[],
|
||||
)
|
||||
|
||||
|
||||
@router.get("/rpc/v{version}/search")
|
||||
async def rpc_openapi_search(
|
||||
request: Request,
|
||||
version: int,
|
||||
arg: Optional[str] = Query(default=str()),
|
||||
by: Optional[str] = Query(default=defaults.RPC_SEARCH_BY),
|
||||
):
|
||||
return await rpc_request(
|
||||
request,
|
||||
version,
|
||||
"search",
|
||||
by,
|
||||
arg,
|
||||
[],
|
||||
)
|
||||
|
||||
|
||||
@router.post("/rpc/v{version}/search")
|
||||
async def rpc_openapi_search_post(
|
||||
request: Request,
|
||||
version: int,
|
||||
):
|
||||
data = await request.json()
|
||||
by = data.get("by", defaults.RPC_SEARCH_BY)
|
||||
if not isinstance(by, str):
|
||||
rpc = RPC(version, "search")
|
||||
return JSONResponse(
|
||||
rpc.error("the 'by' parameter must be of string type"),
|
||||
status_code=HTTPStatus.BAD_REQUEST,
|
||||
)
|
||||
|
||||
arg = data.get("arg", str())
|
||||
if not isinstance(arg, str):
|
||||
rpc = RPC(version, "search")
|
||||
return JSONResponse(
|
||||
rpc.error("the 'arg' parameter must be of string type"),
|
||||
status_code=HTTPStatus.BAD_REQUEST,
|
||||
)
|
||||
|
||||
return await rpc_request(
|
||||
request,
|
||||
version,
|
||||
"search",
|
||||
by,
|
||||
arg,
|
||||
[],
|
||||
)
|
||||
|
||||
|
||||
@router.get("/rpc/v{version}/suggest/{arg}")
|
||||
async def rpc_openapi_suggest(request: Request, version: int, arg: str):
|
||||
return await rpc_request(
|
||||
request,
|
||||
version,
|
||||
"suggest",
|
||||
defaults.RPC_SEARCH_BY,
|
||||
arg,
|
||||
[],
|
||||
)
|
89
aurweb/routers/rss.py
Normal file
89
aurweb/routers/rss.py
Normal file
|
@ -0,0 +1,89 @@
|
|||
from fastapi import APIRouter, Request
|
||||
from fastapi.responses import Response
|
||||
from feedgen.feed import FeedGenerator
|
||||
|
||||
from aurweb import config, db, filters
|
||||
from aurweb.cache import lambda_cache
|
||||
from aurweb.models import Package, PackageBase
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def make_rss_feed(request: Request, packages: list):
    """Create an RSS Feed string for some packages.

    :param request: A FastAPI request
    :param packages: A list of packages to add to the RSS feed
    :return: RSS Feed string
    """
    base = f"{request.url.scheme}://{request.url.netloc}"

    feed = FeedGenerator()
    feed.title("AUR Newest Packages")
    feed.description("The latest and greatest packages in the AUR")
    feed.link(href=base, rel="alternate")
    feed.link(href=f"{base}/rss", rel="self")
    feed.image(
        title="AUR Newest Packages",
        url=f"{base}/static/css/archnavbar/aurlogo.png",
        link=base,
        description="AUR Newest Packages Feed",
    )

    for package in packages:
        item = feed.add_entry(order="append")
        item.title(package.Name)
        item.link(href=f"{base}/packages/{package.Name}", rel="alternate")
        item.description(package.Description or str())
        # Render each entry's timestamp in the requesting user's timezone.
        published = filters.as_timezone(
            filters.timestamp_to_datetime(package.Timestamp), request.user.Timezone
        )
        item.pubDate(published.strftime("%Y-%m-%d %H:%M:%S%z"))
        item.guid(f"{package.Name}-{package.Timestamp}")

    return feed.rss_str()
|
||||
|
||||
|
||||
@router.get("/rss/")
|
||||
async def rss(request: Request):
|
||||
packages = (
|
||||
db.query(Package)
|
||||
.join(PackageBase)
|
||||
.order_by(PackageBase.SubmittedTS.desc())
|
||||
.limit(100)
|
||||
.with_entities(
|
||||
Package.Name,
|
||||
Package.Description,
|
||||
PackageBase.SubmittedTS.label("Timestamp"),
|
||||
)
|
||||
)
|
||||
|
||||
# we use redis for caching the results of the feedgen
|
||||
cache_expire = config.getint("cache", "expiry_time_rss", 300)
|
||||
feed = lambda_cache("rss", lambda: make_rss_feed(request, packages), cache_expire)
|
||||
|
||||
response = Response(feed, media_type="application/rss+xml")
|
||||
return response
|
||||
|
||||
|
||||
@router.get("/rss/modified")
|
||||
async def rss_modified(request: Request):
|
||||
packages = (
|
||||
db.query(Package)
|
||||
.join(PackageBase)
|
||||
.order_by(PackageBase.ModifiedTS.desc())
|
||||
.limit(100)
|
||||
.with_entities(
|
||||
Package.Name,
|
||||
Package.Description,
|
||||
PackageBase.ModifiedTS.label("Timestamp"),
|
||||
)
|
||||
)
|
||||
|
||||
# we use redis for caching the results of the feedgen
|
||||
cache_expire = config.getint("cache", "expiry_time_rss", 300)
|
||||
feed = lambda_cache(
|
||||
"rss_modified", lambda: make_rss_feed(request, packages), cache_expire
|
||||
)
|
||||
|
||||
response = Response(feed, media_type="application/rss+xml")
|
||||
return response
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue