mirror of
https://github.com/Jguer/yay.git
synced 2025-06-25 00:03:32 -04:00
Compare commits
1685 Commits
Author | SHA1 | Date | |
---|---|---|---|
|
6f3bc5cc4c | ||
|
c2caf9c7d7 | ||
|
8615239329 | ||
|
c9a8507654 | ||
|
75e90c3a6d | ||
|
8ab3652846 | ||
|
98be3fec97 | ||
|
c6a2226ce1 | ||
|
3e82496057 | ||
|
a26ac1ba95 | ||
|
b745f87210 | ||
|
b4a41700ee | ||
|
0aa80e521e | ||
|
bce9f2fc72 | ||
|
5a71db2526 | ||
|
535370bca0 | ||
|
9a3be07887 | ||
|
2f205ee96c | ||
|
1982ce0366 | ||
|
2dcf94544c | ||
|
95fc0938fd | ||
|
ff176c0dd2 | ||
|
bf315041b1 | ||
|
559bc06b31 | ||
|
35019f95b6 | ||
|
50cbf70bf4 | ||
|
0b5f5f0ccd | ||
|
a300330b94 | ||
|
670598912e | ||
|
257b230e39 | ||
|
d2c67ae0a4 | ||
|
4432c60246 | ||
|
33ba07fe0d | ||
|
d37e365ac3 | ||
|
6807ecc081 | ||
|
b07d8c1447 | ||
|
3da808847f | ||
|
590a3d3a8c | ||
|
69685d0fb5 | ||
|
5572d1817e | ||
|
2f403a4f28 | ||
|
9322197d0c | ||
|
46f3842e6f | ||
|
76000ae987 | ||
|
e27979d21d | ||
|
c0baacd633 | ||
|
43567b5d85 | ||
|
e18cc87307 | ||
|
81a2a19101 | ||
|
669d7af6d1 | ||
|
65ce4b9f6f | ||
|
5f2b94ce7c | ||
|
3f2f6eae31 | ||
|
da53d3855f | ||
|
f23fe98a66 | ||
|
ec837c831d | ||
|
f100c1d54b | ||
|
3f5d26c4f9 | ||
|
3003f1667c | ||
|
fb168fb176 | ||
|
842067256b | ||
|
a6a6dc0acb | ||
|
2e06552211 | ||
|
138c2dd6cd | ||
|
4872b8b829 | ||
|
1b6ad7b305 | ||
|
d6e961af70 | ||
|
89b32ee9ce | ||
|
f68a57129f | ||
|
bea53a4a09 | ||
|
13df7e79eb | ||
|
0f496c9af9 | ||
|
84d8f1b7b3 | ||
|
3a118b7690 | ||
|
a32f5e7e2c | ||
|
3c881d577f | ||
|
86f5c08ec4 | ||
|
675f0ba3f3 | ||
|
d7d67e3fd3 | ||
|
c28be1d8b0 | ||
|
1b8f823f7c | ||
|
836fc5922a | ||
|
0165486bf4 | ||
|
5149e3714d | ||
|
9ed9b0b4e1 | ||
|
e19700234f | ||
|
965f8956e9 | ||
|
53c9d0ef34 | ||
|
9b41f136d6 | ||
|
d956dd7888 | ||
|
61dd708a4a | ||
|
ff3ad18fa8 | ||
|
803f708106 | ||
|
9b6d40d7f9 | ||
|
779a9f16bd | ||
|
02d3e2e1c0 | ||
|
8de397ed11 | ||
|
de7ad4070f | ||
|
3b18e2197c | ||
|
127b3a5b1a | ||
|
2ff794da32 | ||
|
6c2330528f | ||
|
a1d530cbf4 | ||
|
5d887cbd41 | ||
|
05b76852bd | ||
|
9524cbbaed | ||
|
48d1d3d2d5 | ||
|
741d83c1f0 | ||
|
9c02af429a | ||
|
03e89d660f | ||
|
f7f2169992 | ||
|
e8080f87c2 | ||
|
26aa171b2b | ||
|
92d7cb0faa | ||
|
f3a4fc8987 | ||
|
aa6cad75a3 | ||
|
2078bc936f | ||
|
d778be4f9f | ||
|
aeafe23027 | ||
|
6c31477ccd | ||
|
0a930c9ffc | ||
|
d411524481 | ||
|
15ef062bb5 | ||
|
c2d7d99e43 | ||
|
86207fce64 | ||
|
dc68b1a8fa | ||
|
d02c45e5b6 | ||
|
8d773aa6a3 | ||
|
b81d34d5cc | ||
|
d77dd77141 | ||
|
e34bce003d | ||
|
d23e8925fa | ||
|
643830fccd | ||
|
cb4cd7b451 | ||
|
350ff1c70a | ||
|
26dc74ed67 | ||
|
a00ff6b3cc | ||
|
9e665a98b9 | ||
|
4169f0ee42 | ||
|
5a6b18fe23 | ||
|
c8577bb613 | ||
|
965f41b938 | ||
|
0771ded99b | ||
|
8f98ab3d4b | ||
|
d368f99be0 | ||
|
a1121556be | ||
|
299aa1e123 | ||
|
04c76a404e | ||
|
e60ccdf8b7 | ||
|
b6c72ce7a2 | ||
|
87d1fd1c06 | ||
|
92d50910de | ||
|
a0e6838a5f | ||
|
8916cd174b | ||
|
7483393377 | ||
|
9aefb8440e | ||
|
6c1998f6eb | ||
|
688434b242 | ||
|
5995e55ddb | ||
|
04c82b8112 | ||
|
abd398a787 | ||
|
23b053bccf | ||
|
dadc8c0d98 | ||
|
6dd7933fbe | ||
|
d9029face3 | ||
|
64f5c2b0a9 | ||
|
93afb03738 | ||
|
0dcf911e99 | ||
|
2be57cb312 | ||
|
f070cff9f9 | ||
|
c46f5d31cc | ||
|
74c1cdb254 | ||
|
79b03fdac1 | ||
|
5a3c3ae4d0 | ||
|
710ff0097a | ||
|
ddeaf47a53 | ||
|
4f7b3edefe | ||
|
5b8cc98afa | ||
|
e25d00015a | ||
|
c5a18e5000 | ||
|
adde043514 | ||
|
7dc4fae155 | ||
|
599a5a9073 | ||
|
12282fb28a | ||
|
0607090719 | ||
|
1568e64d55 | ||
|
d08f217b3a | ||
|
29f47a4413 | ||
|
35ee42d343 | ||
|
1335e9b4e0 | ||
|
e28319fece | ||
|
c1aa71bee1 | ||
|
56d1b7ed1c | ||
|
036a53882d | ||
|
330b9ab920 | ||
|
98d5352b78 | ||
|
27f336e68c | ||
|
23937356eb | ||
|
9641d2a608 | ||
|
df3dbfa125 | ||
|
fdcf6ef664 | ||
|
39f0d4e9a1 | ||
|
d33bf8841d | ||
|
bd79057fd9 | ||
|
a0a5e45fe7 | ||
|
ec15a5b363 | ||
|
d568a73ab8 | ||
|
490ebe4f7f | ||
|
4dfee1f82f | ||
|
5cf215a00a | ||
|
f7731d7cf9 | ||
|
966bfb74ee | ||
|
c721fe7f3b | ||
|
b51c10ca3e | ||
|
4832ec59db | ||
|
49267b9cd9 | ||
|
e6344100e6 | ||
|
15400c5fc5 | ||
|
822b11b4d6 | ||
|
71432a447e | ||
|
957292a911 | ||
|
8c69356bd4 | ||
|
64bb346fbe | ||
|
797b7485ab | ||
|
57a3a090f1 | ||
|
f0bfe63ced | ||
|
83214fbc1c | ||
|
161fede450 | ||
|
26c9ab5a87 | ||
|
4a9c736e2a | ||
|
76e5ee1fa6 | ||
|
88008e4eb3 | ||
|
c7a51a1614 | ||
|
6a971df635 | ||
|
511b95769e | ||
|
4e9a865388 | ||
|
9270f00c7e | ||
|
527c3a5058 | ||
|
1ee94f28d3 | ||
|
a64180464b | ||
|
a31ca0d7dc | ||
|
1bc3171abd | ||
|
2d7297ae6d | ||
|
ce0cb35510 | ||
|
d9b57790fa | ||
|
5a3f277574 | ||
|
d1c012085c | ||
|
89f47f8ebe | ||
|
5b5617c7e7 | ||
|
b80ef15add | ||
|
e0fbb4495a | ||
|
5c7f9ba159 | ||
|
0892fc7cdd | ||
|
c78d031b32 | ||
|
6983a9ee7e | ||
|
ada8261bca | ||
|
3f09397816 | ||
|
9532e7b7da | ||
|
956c4cb100 | ||
|
16cce4384b | ||
|
c3888d9881 | ||
|
c63576c36d | ||
|
dd42593ba1 | ||
|
d13bdb0ce1 | ||
|
6390d1c2b0 | ||
|
9028f5d8be | ||
|
5d1c54413c | ||
|
e615f8e07e | ||
|
d75e0a001d | ||
|
2bdbc3e06b | ||
|
01666aef37 | ||
|
68337a58c1 | ||
|
ca12cd7156 | ||
|
bebe80bb84 | ||
|
9137c1e95f | ||
|
bdd888c59d | ||
|
16b9516f96 | ||
|
dfa7ed51c1 | ||
|
7bc4a666e6 | ||
|
210512a5d6 | ||
|
57250fec4b | ||
|
e56b9cd72b | ||
|
794a38fa28 | ||
|
f11f9058c2 | ||
|
7e7764a797 | ||
|
c744058b20 | ||
|
7073939cdc | ||
|
3d5a43c294 | ||
|
46bf36a160 | ||
|
463e60e045 | ||
|
e6ed869df1 | ||
|
8b8d6001a4 | ||
|
0387dfdb59 | ||
|
fa2e726ca6 | ||
|
841395c318 | ||
|
4e0a5c8520 | ||
|
61f1bdf291 | ||
|
3ef4664d99 | ||
|
4780a974d9 | ||
|
7c8f273cdf | ||
|
a3d51a42da | ||
|
096ff7a544 | ||
|
6c870db1f1 | ||
|
f0433cc339 | ||
|
fad26c078d | ||
|
7490836991 | ||
|
8d18f1be18 | ||
|
f1d086df1d | ||
|
2f5fd5cb1c | ||
|
0a8bc1fe2e | ||
|
0bf4c2e502 | ||
|
4f50b799ef | ||
|
4626a0409c | ||
|
1bfbd01f94 | ||
|
04c4b0aa59 | ||
|
9356481d1c | ||
|
2f1ebb9fde | ||
|
5626ed3ff4 | ||
|
e09209bb19 | ||
|
c40e949752 | ||
|
b8debd1ae7 | ||
|
8948278568 | ||
|
86bba8a289 | ||
|
47b1428a25 | ||
|
b3c334e014 | ||
|
b41e67f31e | ||
|
13109992c5 | ||
|
ed95688c1b | ||
|
b8b085599a | ||
|
7a2db4f448 | ||
|
40f058fd19 | ||
|
f771424336 | ||
|
36282a8192 | ||
|
f372494d74 | ||
|
9be51052f7 | ||
|
28d90c981e | ||
|
d3fbfa26ca | ||
|
8c61bc9b45 | ||
|
2bda76e431 | ||
|
9a23b792c4 | ||
|
4e3c664ab3 | ||
|
3eb9eb0d3d | ||
|
0b1ae938a3 | ||
|
f8e7891b0b | ||
|
4a3e365fc5 | ||
|
27cbbf4cb9 | ||
|
7da9f4869d | ||
|
c826456d4d | ||
|
9df087bd3d | ||
|
1b5f0d66fe | ||
|
4f1f539217 | ||
|
7612bb5da5 | ||
|
9f67d10d5c | ||
|
6ad63cae10 | ||
|
63f20599cd | ||
|
0f45a99efa | ||
|
f52db80a9d | ||
|
7f151cd603 | ||
|
2805252365 | ||
|
481c63db91 | ||
|
5e8d9ac846 | ||
|
085e2c8aea | ||
|
1153ee6dbb | ||
|
3f5eb36fe2 | ||
|
2358a7f66e | ||
|
2560cadcc0 | ||
|
c00cd8d88e | ||
|
f042713aaa | ||
|
ae918986f4 | ||
|
3f5dffc188 | ||
|
3f7f55f260 | ||
|
fd46fa0f33 | ||
|
d7851223c6 | ||
|
5aeb0d696c | ||
|
56a46644cc | ||
|
01721c816c | ||
|
cc8c0a2366 | ||
|
6cbf00c5a7 | ||
|
742b6ad79c | ||
|
9b576fbab7 | ||
|
bfb32ea63b | ||
|
a724d1554f | ||
|
3fc5d93243 | ||
|
0b3ca79788 | ||
|
b5bdcfbd1a | ||
|
776fc9686a | ||
|
d3efb59da3 | ||
|
a88fad68d4 | ||
|
849e8f7b60 | ||
|
ba935ccf95 | ||
|
1f49b7e11c | ||
|
f496dbac8b | ||
|
bc4732e9e1 | ||
|
e4fdc9a4d4 | ||
|
647d160182 | ||
|
c86c460816 | ||
|
d646cd6c87 | ||
|
7f9ac3435f | ||
|
ed94152cfe | ||
|
915799755b | ||
|
351e352f64 | ||
|
5bb46ac1de | ||
|
4a73dfb0ca | ||
|
be036c39ff | ||
|
cd4462761a | ||
|
d53505be37 | ||
|
0aa18d61d5 | ||
|
95e7542ade | ||
|
cadeecc4df | ||
|
f7286b25ae | ||
|
650809eba1 | ||
|
0d1e339fda | ||
|
7ebe64871d | ||
|
14cc8d4ef0 | ||
|
1d2d19c323 | ||
|
70695d7de7 | ||
|
a4565b367b | ||
|
cce21ce0b6 | ||
|
714fee0b18 | ||
|
b054828aa8 | ||
|
859b7c703f | ||
|
446dc86d1e | ||
|
b6b1888e89 | ||
|
b882faf48a | ||
|
2a2040c570 | ||
|
458c69e179 | ||
|
6ffa86908a | ||
|
221b86bea4 | ||
|
ae6c2deda0 | ||
|
64a26ca2bc | ||
|
888fb4b1d0 | ||
|
85934bddea | ||
|
c339a605de | ||
|
0b6446f927 | ||
|
e2f61255cb | ||
|
35c91bea50 | ||
|
c0a9b6af4f | ||
|
1a52da5891 | ||
|
4d5131d6c7 | ||
|
2214d99f8e | ||
|
6f60892356 | ||
|
216acead86 | ||
|
bda2165205 | ||
|
e0006ec272 | ||
|
18fbc24e68 | ||
|
0fdfe79943 | ||
|
44a0a243ed | ||
|
70a9765854 | ||
|
5439229c7d | ||
|
edef790532 | ||
|
4ca50e55f0 | ||
|
c83b48d474 | ||
|
ed2f533534 | ||
|
74cf1f7542 | ||
|
bd3e6c5236 | ||
|
94cff87927 | ||
|
85023f7197 | ||
|
0f62b567ef | ||
|
281ec1c926 | ||
|
ed62496025 | ||
|
e773c7662c | ||
|
b8f054f988 | ||
|
e4a1f018ea | ||
|
ae01f8e4a0 | ||
|
dc9bef0115 | ||
|
b52e3cf0d2 | ||
|
83a257b16b | ||
|
f719a6ffca | ||
|
7f5a060324 | ||
|
7f7b69447d | ||
|
ba222c87f6 | ||
|
ddd8e0e1f2 | ||
|
9c2d4ec0d1 | ||
|
2a3ddafce5 | ||
|
0e3affd876 | ||
|
c5bea07f64 | ||
|
712665e531 | ||
|
87415a8d20 | ||
|
58ac44c748 | ||
|
527534af91 | ||
|
8ac1da8115 | ||
|
2de6d35709 | ||
|
165433d1e3 | ||
|
7feac514f7 | ||
|
4df76fd9fe | ||
|
d83c5ca5a1 | ||
|
e9d8894a37 | ||
|
19d42ae3cd | ||
|
09695c694f | ||
|
65ecee9cb6 | ||
|
1c22cc491c | ||
|
d02321bdc9 | ||
|
ac13060bfd | ||
|
0f5df3cbad | ||
|
2552cf1e22 | ||
|
f7dbebcc2c | ||
|
a9702da300 | ||
|
81eeb8c495 | ||
|
bf288822b9 | ||
|
6c09d9bf33 | ||
|
c0507e7f8e | ||
|
7e8d61c017 | ||
|
9ebc98e80f | ||
|
17c20f5cd9 | ||
|
0b2bddf9d0 | ||
|
df6af2e1e4 | ||
|
40561093f1 | ||
|
58bbcb3716 | ||
|
e38d0138a6 | ||
|
53e8ec70f8 | ||
|
22c165a11f | ||
|
1c238dd724 | ||
|
673f077e7f | ||
|
f88bf5c212 | ||
|
02d33f63c0 | ||
|
38b66fee63 | ||
|
15d91e4661 | ||
|
57a9630054 | ||
|
9eb73899fe | ||
|
44cf54795d | ||
|
f925b719e8 | ||
|
005c5c24b4 | ||
|
e4b1cb6e7d | ||
|
49577191c6 | ||
|
d096d15973 | ||
|
6364b4efe8 | ||
|
f3c3e2e4d4 | ||
|
a43fbacc96 | ||
|
1fb9f410a6 | ||
|
34e81d5d7d | ||
|
3fef4ae1e3 | ||
|
12a6d4f5c1 | ||
|
52efaeba52 | ||
|
2218e05981 | ||
|
3a6654b0c5 | ||
|
8238bc9f88 | ||
|
1a0cfd85f2 | ||
|
b2a728151e | ||
|
4122716ea1 | ||
|
84e872ebe3 | ||
|
555d1ead95 | ||
|
ba1e06d367 | ||
|
adb74b9252 | ||
|
ffb17fd2e3 | ||
|
bc1d900fa9 | ||
|
251c456d70 | ||
|
2eafbcc03e | ||
|
c8fcdeae5b | ||
|
e43c712c84 | ||
|
1869bbe291 | ||
|
b7f9a5e677 | ||
|
cedbcfbcda | ||
|
a1941b24bf | ||
|
074798aca5 | ||
|
e14fe638c2 | ||
|
5be99d9bf0 | ||
|
1d903b6c7e | ||
|
aedbcffc80 | ||
|
5d9e424942 | ||
|
401922a6ed | ||
|
c8c3f812f6 | ||
|
0fccb9baaf | ||
|
70e390f939 | ||
|
a05ba32acf | ||
|
d007a54e3a | ||
|
0db92c990f | ||
|
87dc57323b | ||
|
e231b2b02d | ||
|
3de02dbd73 | ||
|
49ef780602 | ||
|
95bc333a92 | ||
|
cb4b57f6d8 | ||
|
08d1305ec5 | ||
|
f42cbde6db | ||
|
5c11c01d85 | ||
|
444151de0d | ||
|
fc20bde10d | ||
|
172ee1f4e8 | ||
|
1a2e5b9529 | ||
|
456d83ef51 | ||
|
186c7a54fe | ||
|
7dda0fa2f8 | ||
|
1c96fd2d9d | ||
|
522247dca2 | ||
|
4a515c4462 | ||
|
ab902e9cc6 | ||
|
b8641256ab | ||
|
65b1c4be69 | ||
|
2ed099a263 | ||
|
98378642bc | ||
|
413ad23abc | ||
|
79b44fd544 | ||
|
50c0ece9a7 | ||
|
827adab4b8 | ||
|
99c3c98264 | ||
|
20d5fd406f | ||
|
d460e7ae5b | ||
|
e4a933da52 | ||
|
4a9319920b | ||
|
61e38ad2fc | ||
|
c9e63fff0d | ||
|
169d5f1f62 | ||
|
d6cb4c50e8 | ||
|
2ac2374b47 | ||
|
040c8fe380 | ||
|
8fd4f78400 | ||
|
93bb2867be | ||
|
a37f4efd73 | ||
|
c7cf7baa48 | ||
|
e20ab8b6a2 | ||
|
545ea772eb | ||
|
6e70e2a59d | ||
|
c922cb3509 | ||
|
059ca9d9b8 | ||
|
418b578e9e | ||
|
a24fb50e6d | ||
|
25efafe25e | ||
|
ba3c66949a | ||
|
4993e4dfbd | ||
|
c153631064 | ||
|
b0d87b7a09 | ||
|
ab0ce0aeb9 | ||
|
8e6d098ff8 | ||
|
662c630a08 | ||
|
faf03436a7 | ||
|
7ade2afd78 | ||
|
767c7975f3 | ||
|
de113b87f3 | ||
|
29f4c43227 | ||
|
f04a469324 | ||
|
64e5b9e93d | ||
|
3efb43af95 | ||
|
5184ef6a04 | ||
|
f3dd3467ec | ||
|
93d3c87624 | ||
|
85fe90c95f | ||
|
d3bc19a003 | ||
|
ca7165ecb6 | ||
|
2da1e154c2 | ||
|
6d4c2472d8 | ||
|
829f944659 | ||
|
d1a6ead81c | ||
|
a33f43f502 | ||
|
d7424b1b28 | ||
|
5da8161328 | ||
|
7a280e0199 | ||
|
ece81fa078 | ||
|
5f2a14d027 | ||
|
3d585877ba | ||
|
bfd5ca6024 | ||
|
4b476a0e78 | ||
|
b7f730a59a | ||
|
348f2de85c | ||
|
c8a74cb4a4 | ||
|
260594f42a | ||
|
eb9ff86aa9 | ||
|
1a8ee2012d | ||
|
df1361aa05 | ||
|
5d2c763fdb | ||
|
34f7dbc7fc | ||
|
5993613c53 | ||
|
b6feb7c308 | ||
|
ca60c15911 | ||
|
05c2fa6087 | ||
|
9fc6d9c128 | ||
|
fd9e2638f9 | ||
|
afc72daba7 | ||
|
ee3c51f6f2 | ||
|
28c985b378 | ||
|
a288dd754f | ||
|
d9b440c929 | ||
|
5bef39b728 | ||
|
b9d3992f13 | ||
|
c221d5ebbd | ||
|
f00630fd0c | ||
|
24edabe5df | ||
|
607d28778e | ||
|
5f9ad882f6 | ||
|
7d849a8dec | ||
|
f7d6ad59cd | ||
|
b864d433df | ||
|
bb50b4079f | ||
|
182f4c7f6c | ||
|
c85bbf54da | ||
|
999392640e | ||
|
35e64deca4 | ||
|
952e801ed1 | ||
|
f24e21105e | ||
|
8d9fed2ad0 | ||
|
8c4fe837d3 | ||
|
13caa7fb3d | ||
|
8393ee4acc | ||
|
de7373dd00 | ||
|
981c336541 | ||
|
6beefb1400 | ||
|
497f94ca9f | ||
|
55a6767b44 | ||
|
81b5feb68a | ||
|
7d080df6ac | ||
|
c4dc9bec6e | ||
|
01e6d80525 | ||
|
1c779da3ed | ||
|
39717f3974 | ||
|
f6cb0bc460 | ||
|
7979279c3f | ||
|
678d10e04e | ||
|
e1b632cf3d | ||
|
3e698f313a | ||
|
a80771c37e | ||
|
1f3020e7d6 | ||
|
d6aa288ab8 | ||
|
8a0a0cf2bc | ||
|
5ae510ad6f | ||
|
5c05811ae5 | ||
|
3b2bb28e81 | ||
|
1db50882e9 | ||
|
842dfb28a4 | ||
|
72f120b0f3 | ||
|
c5af6f8189 | ||
|
c1171d4146 | ||
|
ab3c66ab98 | ||
|
ce79d53eb2 | ||
|
7b70462a7c | ||
|
9ad2862b7d | ||
|
4a761c287b | ||
|
c305e1c196 | ||
|
1ccfd3f0b7 | ||
|
7a3794ae75 | ||
|
388aaf6d7f | ||
|
f3925e2481 | ||
|
6b973c6bc5 | ||
|
0e2a02b512 | ||
|
7bcf2ecb4c | ||
|
94f650f4d3 | ||
|
005b29373d | ||
|
9fd6917bc0 | ||
|
8165174462 | ||
|
ad9bc9ef8f | ||
|
7f4c277ce7 | ||
|
5647aab025 | ||
|
0adda5da4f | ||
|
8074179112 | ||
|
520dd58999 | ||
|
37372c883f | ||
|
2cc72685ad | ||
|
5b7cee1422 | ||
|
991c5b4146 | ||
|
de92f24ce5 | ||
|
c159c06222 | ||
|
7dd5d7e13e | ||
|
cd8fb7143c | ||
|
fa2af37eab | ||
|
45d7da4df6 | ||
|
1177493895 | ||
|
b37a1f7ab4 | ||
|
4ebbe723bd | ||
|
2aee260a9b | ||
|
22acf2ad8f | ||
|
17eb6b6d10 | ||
|
a8ba2a808b | ||
|
f91489a2ee | ||
|
fd7934d9ee | ||
|
42f337f2a5 | ||
|
7ed2cf7b49 | ||
|
054a9ac613 | ||
|
e47c9584c1 | ||
|
a0cff2b622 | ||
|
7a32ffdd37 | ||
|
93719d488c | ||
|
71929cb225 | ||
|
0c8ce4f033 | ||
|
0eb8c25c97 | ||
|
7b3de13e65 | ||
|
2b759fcecd | ||
|
1cbef97ae4 | ||
|
b1a6d154c3 | ||
|
2f88858ce6 | ||
|
d507e024bd | ||
|
56760e1b7c | ||
|
5cc47503be | ||
|
b5e70ae84b | ||
|
36730a41e3 | ||
|
968b61ac57 | ||
|
488d9537db | ||
|
9caecd9aee | ||
|
2dd9395190 | ||
|
5a77dd86e1 | ||
|
0bae6e2f7d | ||
|
9b49f76bbd | ||
|
2dc01d8a3e | ||
|
cd02fa459c | ||
|
fb5b6b74fe | ||
|
cb8a988701 | ||
|
fff9d74764 | ||
|
7efeaaff81 | ||
|
feb2cea9e0 | ||
|
6bfcfc168f | ||
|
b5b4484c5c | ||
|
3549aeca26 | ||
|
acf95aed5b | ||
|
d0450179f9 | ||
|
9d93a1f1bd | ||
|
78d336c031 | ||
|
bfbdd79424 | ||
|
0548d86dea | ||
|
9c3aeef75b | ||
|
67a07aa5db | ||
|
e83a22c0ee | ||
|
fc4b2e57fe | ||
|
72dfc6065e | ||
|
4621e2b57a | ||
|
321834de4c | ||
|
f9fb40a27a | ||
|
c780652d6a | ||
|
744434423f | ||
|
8e10fb7a1e | ||
|
07e15020e1 | ||
|
bd630a63c0 | ||
|
b3fc00cf96 | ||
|
5d4a20da6d | ||
|
9260ea351f | ||
|
79362b749a | ||
|
2f98d689c2 | ||
|
f0aa4e0b5a | ||
|
d4c327c153 | ||
|
1970dfa71b | ||
|
35a7d504fa | ||
|
1b8c9cea49 | ||
|
889a394176 | ||
|
2fa7b6ba3d | ||
|
80216bdc69 | ||
|
1769ba978a | ||
|
d3f9335d2d | ||
|
d003e96e07 | ||
|
5734faf33b | ||
|
1fbf6953a1 | ||
|
4af542e233 | ||
|
0554e46ec4 | ||
|
11db63f1e2 | ||
|
cecba84982 | ||
|
63ccabd33c | ||
|
0c45a390a2 | ||
|
87d0dd64c3 | ||
|
732f1a5412 | ||
|
db3993b83b | ||
|
9fccdcb30f | ||
|
e7b1fe4d53 | ||
|
023c742490 | ||
|
4f35c2c4fe | ||
|
873b2db591 | ||
|
328c561230 | ||
|
ddd1cc35d2 | ||
|
6bd920265c | ||
|
0e20e3cba0 | ||
|
942947fac4 | ||
|
fe091f7578 | ||
|
352667f550 | ||
|
35475c7f4c | ||
|
9795a36692 | ||
|
f4503ade39 | ||
|
2ffde239c0 | ||
|
fd7945b42b | ||
|
d5718b7406 | ||
|
558c17f67c | ||
|
c6a17d02f4 | ||
|
23706a3a71 | ||
|
3a5a6a77b3 | ||
|
1f007f39ab | ||
|
87fcfbf9f7 | ||
|
d63c4b0b2e | ||
|
b85428770e | ||
|
9458e02711 | ||
|
e9930b0c42 | ||
|
4da27c37c8 | ||
|
a34e080bf6 | ||
|
66a09aba8b | ||
|
a0d831180d | ||
|
b242d0ac4b | ||
|
a9b74a0e1e | ||
|
fa3191e6f5 | ||
|
13e3271dd6 | ||
|
bc2d1bf143 | ||
|
03c5e3fb94 | ||
|
70aa7b4689 | ||
|
f9ffa72b03 | ||
|
a689ec887f | ||
|
9e2713e881 | ||
|
990be97d44 | ||
|
6cb5587351 | ||
|
b08e505b34 | ||
|
45da573821 | ||
|
ab956ea3d2 | ||
|
0bd21ea29f | ||
|
f2959922a7 | ||
|
410c7e758f | ||
|
bbc7cbba73 | ||
|
3c996e5235 | ||
|
3fef09e650 | ||
|
12312c8aa0 | ||
|
662ce9994e | ||
|
9420a9724f | ||
|
6dd65a2df2 | ||
|
e36fda0f5a | ||
|
f8e496457c | ||
|
8d37981218 | ||
|
58a9c78033 | ||
|
290d3d2c43 | ||
|
203b49321e | ||
|
82549d0d95 | ||
|
446db893f0 | ||
|
6136e15717 | ||
|
1257a6df08 | ||
|
85fc7c59c7 | ||
|
c0e78c84f2 | ||
|
d50ca0430b | ||
|
e1403861c0 | ||
|
3416f6b266 | ||
|
211b96867c | ||
|
a3849deb4e | ||
|
3baf2e1ff1 | ||
|
1d2855ab76 | ||
|
169d0a07fc | ||
|
155c363001 | ||
|
568dc4a15e | ||
|
6bca3246c8 | ||
|
bbeef9d237 | ||
|
35b2297f1e | ||
|
a3f30beea4 | ||
|
9e773149e9 | ||
|
471b07152e | ||
|
b60a888981 | ||
|
dacd77012c | ||
|
cff358d5d6 | ||
|
f2579f26a3 | ||
|
0856edcf04 | ||
|
543d3afaa7 | ||
|
4d91acc749 | ||
|
8151e47211 | ||
|
91232f2b74 | ||
|
d647d29435 | ||
|
077b1092fd | ||
|
73d87b01b0 | ||
|
21d0f33dae | ||
|
de48604633 | ||
|
7b710b796b | ||
|
96dd1d3ce2 | ||
|
c180fa8d19 | ||
|
29b051134a | ||
|
c44f738ba8 | ||
|
a591b33262 | ||
|
422a5931af | ||
|
f1a6c4afdc | ||
|
923ec9aafd | ||
|
ab2101ab49 | ||
|
b773ec2aa4 | ||
|
3579d3521a | ||
|
5d7ea17b15 | ||
|
81dcc339e0 | ||
|
7ee206f36e | ||
|
6e2a4def99 | ||
|
a7619cce2b | ||
|
ed274ec665 | ||
|
bf89b3383c | ||
|
b01790f752 | ||
|
3d31b52799 | ||
|
9e205eef4b | ||
|
fdcfeba902 | ||
|
769aee728a | ||
|
5fa3557817 | ||
|
c5c2e906d3 | ||
|
5084391e79 | ||
|
6589631c6a | ||
|
569d314cea | ||
|
a04ece5feb | ||
|
963af374b5 | ||
|
e698336873 | ||
|
2214bc0cb2 | ||
|
99d1df6f1d | ||
|
f8339fc568 | ||
|
dfef396d3c | ||
|
5b3ae061cf | ||
|
d0705a6d6b | ||
|
de95a0e253 | ||
|
fd9a62ddb8 | ||
|
d34a84204a | ||
|
1901138e4c | ||
|
0ff6ced19f | ||
|
392865f7a0 | ||
|
c9f7105481 | ||
|
995d788e7c | ||
|
64eadcdaa2 | ||
|
3700c1daa6 | ||
|
f406866a45 | ||
|
d25f2939eb | ||
|
591f38488a | ||
|
125e716547 | ||
|
6cd47dd83c | ||
|
80e66ce479 | ||
|
16fddae8b6 | ||
|
e01af5dfd6 | ||
|
c717b61e69 | ||
|
5ac36ad97e | ||
|
e0a5e24a12 | ||
|
e43e68f6bf | ||
|
13d8c82d42 | ||
|
bcaf8a0eb7 | ||
|
37f3309917 | ||
|
86b73689a3 | ||
|
06da88778e | ||
|
b32a8ad582 | ||
|
48e1d65539 | ||
|
d406966174 | ||
|
6415c23be2 | ||
|
112046853b | ||
|
e1f3437c64 | ||
|
4e34e3a5fe | ||
|
eb468fc878 | ||
|
308cf5c3de | ||
|
af9f9751ee | ||
|
5932b6987d | ||
|
cf12fc6ff2 | ||
|
56da655195 | ||
|
f21a5a7f1a | ||
|
5db33b6bcb | ||
|
dc2186c5f3 | ||
|
9d708d5186 | ||
|
a75506cc34 | ||
|
711e12d7bb | ||
|
01e9cd2d47 | ||
|
6809d7c6d2 | ||
|
8880213fb0 | ||
|
888a289a1b | ||
|
aa82518720 | ||
|
062c6ea6d8 | ||
|
e1bc834f5b | ||
|
877c794bc2 | ||
|
ec5746f09f | ||
|
1447fb9ff0 | ||
|
14ac756298 | ||
|
1771c8f0b3 | ||
|
417cc653ab | ||
|
e18cd9b1e8 | ||
|
0878b485ae | ||
|
c3caa2da94 | ||
|
a33801615c | ||
|
50a99c1e54 | ||
|
808f6332c2 | ||
|
7b8e45d4e2 | ||
|
5e04e693c4 | ||
|
b2ab67c410 | ||
|
504e4e21af | ||
|
aa1e88459f | ||
|
737ed78eb1 | ||
|
8ebc41d4f6 | ||
|
895788e01b | ||
|
a645353c17 | ||
|
4b2279edbc | ||
|
229f2c606b | ||
|
5ce4a49687 | ||
|
dcc323cc06 | ||
|
eab94628ec | ||
|
18e5b47e4a | ||
|
4f01418769 | ||
|
0f57ed0ba8 | ||
|
50a52dba94 | ||
|
6354a4424e | ||
|
4ab7814af1 | ||
|
478e1ee869 | ||
|
b715c11306 | ||
|
ebf48ffa1a | ||
|
5acee2706f | ||
|
3e1f29277b | ||
|
6db8864572 | ||
|
d3a9082f2a | ||
|
14cb1c587f | ||
|
555b166fd3 | ||
|
13e3acaedf | ||
|
cc0036c43b | ||
|
1bb73667c9 | ||
|
fa8d4b0184 | ||
|
cc98fa0670 | ||
|
47373c5631 | ||
|
a05886d9eb | ||
|
6b31a1a626 | ||
|
84022c9425 | ||
|
7644a92c1c | ||
|
258f90edec | ||
|
5bcadc58ac | ||
|
6a95783790 | ||
|
91cf1e27a1 | ||
|
556a76c38d | ||
|
e18c236937 | ||
|
52bf777e67 | ||
|
96a4d9637c | ||
|
57ba60f283 | ||
|
431118b6ac | ||
|
2ed7df4f5a | ||
|
b40b1ef910 | ||
|
c7123e03df | ||
|
e71fb8617a | ||
|
c2aeb4bc4e | ||
|
1f1cee7023 | ||
|
79f7322722 | ||
|
d42f737849 | ||
|
a565544712 | ||
|
145a1393b5 | ||
|
7702a88ebb | ||
|
0bfba1f24b | ||
|
f648697994 | ||
|
5080e400e1 | ||
|
ff4f076d57 | ||
|
a83f8fb9e8 | ||
|
4ff36fbd4b | ||
|
704e8406d1 | ||
|
827dbd21cb | ||
|
8c44da374c | ||
|
0efc31a881 | ||
|
75d01f32b4 | ||
|
5109fc6250 | ||
|
2034575a76 | ||
|
3861aef502 | ||
|
69fd7a9b9e | ||
|
0f4768b4fe | ||
|
d6d76ee430 | ||
|
73deae94cf | ||
|
5fe9160b1b | ||
|
f26f36d459 | ||
|
cbc2a87c73 | ||
|
621b8aac43 | ||
|
e316c979cc | ||
|
f74a4f33b5 | ||
|
a19151504d | ||
|
9ad7c34949 | ||
|
6ee14d44b8 | ||
|
c574cdcf98 | ||
|
ae7400890d | ||
|
7ee2e5d1eb | ||
|
e78070ebbe | ||
|
4cc57dd970 | ||
|
42cba53165 | ||
|
98e6caefef | ||
|
bf0ab3216a | ||
|
ade9eb83eb | ||
|
2b8fa74c22 | ||
|
4fc6a1a711 | ||
|
9dfc445421 | ||
|
9ac4ab6c25 | ||
|
dededfde6a | ||
|
293339263c | ||
|
2826e7d79e | ||
|
13f81c7624 | ||
|
f79b3b90a4 | ||
|
60cf21f445 | ||
|
1f5ff02398 | ||
|
8b325ce704 | ||
|
f495b1a33e | ||
|
b0aaba7848 | ||
|
fa1c76c4e9 | ||
|
e6238d32fa | ||
|
380cc142dc | ||
|
23d45b1e77 | ||
|
4131262f36 | ||
|
586dfc8ce7 | ||
|
07b7be3788 | ||
|
f556389fa8 | ||
|
7f064db676 | ||
|
90d8b15c59 | ||
|
b10b88faf6 | ||
|
47702f4cec | ||
|
7251ffac8a | ||
|
b2f636d93b | ||
|
83d3411549 | ||
|
8248c803a0 | ||
|
81bccfd34e | ||
|
6e0797a8c7 | ||
|
7133a1252e | ||
|
c004364864 | ||
|
d3a2a060b2 | ||
|
0e695468b8 | ||
|
a109ea480c | ||
|
98d27360f7 | ||
|
5c7d6fa3be | ||
|
1beeaaf299 | ||
|
2b6a73041f | ||
|
bbeaf5bc6e | ||
|
00b880baf9 | ||
|
e945f1200a | ||
|
8c1658df0b | ||
|
b757d0abd3 | ||
|
ead1ad5797 | ||
|
103edc2931 | ||
|
0657f7e23f | ||
|
29642d181f | ||
|
ae06ca2380 | ||
|
004595a396 | ||
|
070f103fb9 | ||
|
5996e28e32 | ||
|
127c5e7cb9 | ||
|
0a120132ec | ||
|
5e23a2fc6b | ||
|
92ff640a65 | ||
|
c93b3fca9e | ||
|
73a171f48c | ||
|
d0cb79de6b | ||
|
acced8c440 | ||
|
4c351bdad8 | ||
|
6894ddfd5c | ||
|
d34ce70455 | ||
|
31aca68980 | ||
|
0a136a3932 | ||
|
b39f7494df | ||
|
261070bf9d | ||
|
2f545c7fdc | ||
|
3ab8a92421 | ||
|
0454e8918b | ||
|
23ff1e79df | ||
|
024fefe702 | ||
|
c0f73c4f31 | ||
|
ebc4ef1c33 | ||
|
0f324b37a6 | ||
|
aca65e743a | ||
|
907bf3a30e | ||
|
19bf8e773c | ||
|
03fee7b7d5 | ||
|
e547173d42 | ||
|
8711562923 | ||
|
a8e4f18e6b | ||
|
a1edd09a52 | ||
|
cba56c3f9e | ||
|
a37bbd851e | ||
|
b1ee03a7d8 | ||
|
40776fa184 | ||
|
afee400662 | ||
|
cd21298355 | ||
|
29fdf23f50 | ||
|
43feb12c85 | ||
|
3dc350d3ac | ||
|
0e40d9a3f6 | ||
|
339a014614 | ||
|
febccaef3a | ||
|
2fac9c036f | ||
|
1739a88be2 | ||
|
3f49316211 | ||
|
00f2cffc45 | ||
|
25242398ac | ||
|
8cb800c6d1 | ||
|
86ef1ec077 | ||
|
602f386b03 | ||
|
2de67d1b24 | ||
|
284c87afb6 | ||
|
297bf31cdf | ||
|
161bc1a17a | ||
|
b3e647aee4 | ||
|
ff5ed12181 | ||
|
8c2adaddb3 | ||
|
58240bdc3d | ||
|
6b25e60f8b | ||
|
16d4a2ca88 | ||
|
6b35f9ee2f | ||
|
1aff6cada0 | ||
|
746a541f7b | ||
|
a5ae757803 | ||
|
e4df6a7dac | ||
|
8f3f4b7b98 | ||
|
5ef38c12bd | ||
|
2d6fe95903 | ||
|
f62bd9245b | ||
|
8d7ff22fed | ||
|
6515a9bf04 | ||
|
de59b3911d | ||
|
55ca78e74d | ||
|
08ac9036ad | ||
|
6c5c6ab8f6 | ||
|
db9ff186d6 | ||
|
a658851bbb | ||
|
4a254303c5 | ||
|
1839b473c2 | ||
|
97006ade19 | ||
|
4e67c1e886 | ||
|
d9823e4230 | ||
|
0746d2828b | ||
|
b848514b4b | ||
|
06a45bad59 | ||
|
dfe7738d6e | ||
|
42a74c41c5 | ||
|
714556fd4a | ||
|
95bcea8f40 | ||
|
e9d0b8bee6 | ||
|
345fd552f0 | ||
|
29f3e011ec | ||
|
892d9cc752 | ||
|
5f107ad7ad | ||
|
05587c3a1c | ||
|
f9d8d61524 | ||
|
2a60dd7052 | ||
|
158e44cb1e | ||
|
2f755abfed | ||
|
0503753185 | ||
|
6fe17ad0c1 | ||
|
4841642c21 | ||
|
8e1e935d7e | ||
|
b4c102a17a | ||
|
bd3e1cd3a9 | ||
|
5ce740068e | ||
|
6892ea153f | ||
|
a61eb0d568 | ||
|
d9aa50b1f5 | ||
|
a54b949670 | ||
|
ccbb5e5923 | ||
|
a592d1aa87 | ||
|
f4aa7f7933 | ||
|
3180c66f39 | ||
|
2f975c7157 | ||
|
74f4a44da6 | ||
|
bc6028348b | ||
|
9c882614a3 | ||
|
0b9e10e697 | ||
|
23417df29f | ||
|
e6c2f6f3e3 | ||
|
9670b46d7d | ||
|
c4fec3dad0 | ||
|
96532c0b27 | ||
|
4af671afac | ||
|
f567abe386 | ||
|
7c74dc5dec | ||
|
523c471a45 | ||
|
b3a53d613d | ||
|
c8379db0aa | ||
|
562a21fb4d | ||
|
4063cf9282 | ||
|
0ebaa8a750 | ||
|
c0c018f005 | ||
|
e28f4f3431 | ||
|
a3564ec3a7 | ||
|
a5a9afd681 | ||
|
17c310156c | ||
|
253d162b5b | ||
|
f9972da763 | ||
|
1d463d1e3f | ||
|
ec48698f35 | ||
|
0e5cd51910 | ||
|
1d17940ce9 | ||
|
1bc8d5ef34 | ||
|
b5b6928803 | ||
|
23e0064382 | ||
|
900dfd1851 | ||
|
dd891aba8c | ||
|
d1146de6d5 | ||
|
298afac0e0 | ||
|
1d443c0d40 | ||
|
0196a47ad2 | ||
|
257653047b | ||
|
474cc56f8d | ||
|
8427dc3804 | ||
|
473a2de225 | ||
|
6e41ef882b | ||
|
526cec70ed | ||
|
6313c14382 | ||
|
a576dbc1fd | ||
|
c10716ab1a | ||
|
d4d36397fe | ||
|
ac19616bc3 | ||
|
9602d4bc79 | ||
|
a1b5684a2f | ||
|
46d09f8ab7 | ||
|
686e1baaf4 | ||
|
6b30e17b67 | ||
|
fdcc233cbe | ||
|
f3635cd14d | ||
|
d6b862357d | ||
|
b1fb1b9656 | ||
|
f8e84658c0 | ||
|
fafaa26d7b | ||
|
2437937372 | ||
|
595d362ee6 | ||
|
dcaf1e5595 | ||
|
a06c21d875 | ||
|
8dd0d97ae0 | ||
|
18bbc983c9 | ||
|
d627df7288 | ||
|
1e0ad92dd1 | ||
|
b46b111c07 | ||
|
b46b005ed7 | ||
|
43c52c69d2 | ||
|
a97034fc8b | ||
|
6eded7c4a3 | ||
|
61c7e19a0c | ||
|
fef91ab371 | ||
|
3406575f9d | ||
|
4f1a46eed8 | ||
|
9f9a10e8f4 | ||
|
89b5a1b798 | ||
|
664e9c7808 | ||
|
38f9df54fd | ||
|
a91dc9e557 | ||
|
52f5c8878f | ||
|
38b4e2bec7 | ||
|
3bdb534321 | ||
|
77a5cf5c69 | ||
|
ae24228aed | ||
|
67817e2a19 | ||
|
ea5a94e0f8 | ||
|
da466ba8bf | ||
|
f5a35d4d1d | ||
|
2e8d2b4d2c | ||
|
da0d83ff10 | ||
|
70fd80c1f1 | ||
|
0c46a22125 | ||
|
e5bb23d9c6 | ||
|
6d04c2ebce | ||
|
fbe04c8658 | ||
|
7e74d93866 | ||
|
21a8569fb0 | ||
|
a8d9166113 | ||
|
87daef421e | ||
|
1002a4f5e5 | ||
|
07b3d76a66 | ||
|
1952f88c34 | ||
|
8edaee6136 | ||
|
9ecc604558 | ||
|
2718cd3d23 | ||
|
b76fbc8694 | ||
|
14eaf467a1 | ||
|
4bfa5f6128 | ||
|
42f0508625 | ||
|
d0bc1d70d9 | ||
|
01fa34093f | ||
|
0960f9a135 | ||
|
065efdbcc0 | ||
|
0f89e09b6d | ||
|
ab38507bea | ||
|
09ad692864 | ||
|
fcb8c35046 | ||
|
08af51892b | ||
|
8430c41be9 | ||
|
91d3adc8e1 | ||
|
4d6c49b64a | ||
|
219e50e668 | ||
|
00da26a3ce | ||
|
2b925aefa3 | ||
|
4f8b43cd60 | ||
|
43d2a6601a | ||
|
73bd713217 | ||
|
1b8d9317e4 | ||
|
5775e3c0b7 | ||
|
2c6cff36da | ||
|
c3a94edd20 | ||
|
218a5e6d6e | ||
|
64ab60f4b4 | ||
|
cf6068dbd3 | ||
|
7fdb5c2007 | ||
|
e5d7cce49c | ||
|
0f6effbdbb | ||
|
3aea877ab9 | ||
|
b56afaaee3 | ||
|
ced35af515 | ||
|
ab317e52fe | ||
|
bad0af1a8a | ||
|
6c22d5987c | ||
|
63509365c1 | ||
|
a71784684c | ||
|
dda7a0ca08 | ||
|
68ca822fc7 | ||
|
bf562ee9a7 | ||
|
5f2933271a | ||
|
bd162cc317 | ||
|
942e389d85 | ||
|
3eb89aac7f | ||
|
c4d319268c | ||
|
5b6c4101f8 | ||
|
f20fbd25c1 | ||
|
33f067fe9e | ||
|
14b66043a2 | ||
|
444ccce925 | ||
|
477d3b937e | ||
|
af91c2f1b8 | ||
|
682040319e | ||
|
796a84bd1a | ||
|
0ae8fc2a06 | ||
|
b5715de4fd | ||
|
f186bd1de3 | ||
|
9afd671905 | ||
|
c36bfc1237 | ||
|
3381a2119e | ||
|
7ba552e91f | ||
|
13754722fd | ||
|
943b2f931a | ||
|
53227082a5 | ||
|
7a1e2de6cd | ||
|
e76f978d63 | ||
|
b140e66f6a | ||
|
c464af6b9b | ||
|
33d056e9ac | ||
|
6df8c58e00 | ||
|
6e990e4dc5 | ||
|
d51205194e | ||
|
b76d085418 | ||
|
6423fb55b5 | ||
|
7b558ba47c | ||
|
8c77ad061e | ||
|
671f836ec5 | ||
|
c019a2cc02 | ||
|
2e7a022b7c | ||
|
b585cec123 | ||
|
b2d3eb5c49 | ||
|
d442af9dcc | ||
|
cc2322f492 | ||
|
030f847243 | ||
|
cd4f74a04d | ||
|
e5b9f0aff4 | ||
|
2b5c725a0c | ||
|
6d70716ba2 | ||
|
70e5f43171 | ||
|
3f15788c6a | ||
|
6b7ee97479 | ||
|
20191f5bc4 | ||
|
84810a61af | ||
|
d6ab6ed9ac | ||
|
6be6ffc7d2 | ||
|
9f071a8e9b | ||
|
0c49f0f7cb | ||
|
a2072aa446 | ||
|
7c3ea5b2b3 | ||
|
3dc5238bd2 | ||
|
d15f899809 | ||
|
250d6c61f7 | ||
|
d1602083f2 | ||
|
e7357f1360 | ||
|
4b73ba37f3 | ||
|
9437bf1576 | ||
|
c399996cab | ||
|
d871e2c8de | ||
|
6c49436942 | ||
|
4143aaf6fd | ||
|
1b6d80f122 | ||
|
58d53b1f8b | ||
|
2bf310d37c | ||
|
686d6a3188 | ||
|
4c2986a036 | ||
|
a634c2d167 | ||
|
5e712b221e | ||
|
665d49f8e0 | ||
|
38818757c8 | ||
|
aaf89c0c1c | ||
|
1fdf62f7d0 | ||
|
dc0765918e | ||
|
4eca2be3c1 | ||
|
ef7e8acb13 | ||
|
e6b557f054 | ||
|
c29a80d7f7 | ||
|
58f23bfe9a | ||
|
58283a9799 | ||
|
21b7899083 | ||
|
a10241f7ab | ||
|
6af2b5c6e1 | ||
|
feb3b8b6c7 | ||
|
188df1ac93 | ||
|
2161dace3f | ||
|
976876961b | ||
|
8dd08327b1 | ||
|
f9723f67b7 | ||
|
e6dae03329 | ||
|
5307e97d66 | ||
|
ee75de0135 | ||
|
dc10c4b6dd | ||
|
79bfa9fbc8 | ||
|
9b351b1779 | ||
|
ba4ef4f51c | ||
|
408f3e138f | ||
|
58772c871b | ||
|
37b08eabf7 | ||
|
debe19278a | ||
|
8342ca724f | ||
|
4d83ffb915 | ||
|
d319576303 | ||
|
58745060cf | ||
|
66362aa981 | ||
|
a5fa47ffac | ||
|
39cf61f2fd | ||
|
579cb28361 | ||
|
1c9715dddf | ||
|
c54057931d | ||
|
99c7b5bb44 | ||
|
84569ef6d9 | ||
|
780f656531 | ||
|
2d559b874a | ||
|
55e7ed8ef7 | ||
|
8ccb5de6ac | ||
|
5fb61731a3 | ||
|
440004a80c | ||
|
63471b9ede | ||
|
86bfacd253 | ||
|
72eaf6f582 | ||
|
6118a9f8b6 | ||
|
8305631b1d | ||
|
44ce849cb0 | ||
|
14454875c5 | ||
|
6ab876258b | ||
|
683890c1b8 | ||
|
ef5fda0264 | ||
|
dccfcb6c69 | ||
|
6689198623 | ||
|
a79562677d | ||
|
110a905a0c | ||
|
bab050a354 | ||
|
cfd244f377 | ||
|
0216b116f4 | ||
|
cf47746d20 | ||
|
6d876a738c | ||
|
55fe22c430 | ||
|
694c74f5b4 | ||
|
24f596959d | ||
|
aa649d9b41 | ||
|
0229ec0861 | ||
|
ac571d314a | ||
|
c75bfe4947 | ||
|
06406d7cee | ||
|
191b5ebe47 | ||
|
0aae012eb5 | ||
|
53a31c2437 | ||
|
906748ebde | ||
|
960109d513 | ||
|
60b111545a | ||
|
f1e98e45a6 | ||
|
13ef6f66ab | ||
|
2aaf5f8694 | ||
|
0026701888 | ||
|
bc9439b26e | ||
|
0507bebee9 | ||
|
0c7ae56301 | ||
|
adbc47a4f9 | ||
|
6ad06696d9 | ||
|
a057050e33 | ||
|
6f223786ef | ||
|
f7a636573b | ||
|
38b5680619 | ||
|
1e4c029a2e | ||
|
e05d86b31b | ||
|
46cffa6ba6 | ||
|
9ba9b03396 | ||
|
992c27e82e | ||
|
ec71a5bb87 | ||
|
855c6c841c | ||
|
6eb3dccf06 | ||
|
072e8e6189 | ||
|
922a591b64 | ||
|
a33414b723 | ||
|
b0445028e2 | ||
|
00a1baf509 | ||
|
8556acdd5f | ||
|
794e8dd3c7 | ||
|
faf607c624 | ||
|
b8ef531b76 | ||
|
0ccfd1a19f | ||
|
5bcb51bf00 | ||
|
c8f6145a18 | ||
|
87943743ef | ||
|
6125dd979e | ||
|
4400ebc31e | ||
|
f986895aa2 | ||
|
e5c99778df | ||
|
13ba261977 | ||
|
fe3743c5ba | ||
|
bf73d29ade | ||
|
ed964bf04d | ||
|
5b7daa129a | ||
|
307daf32e7 | ||
|
f979d010ca | ||
|
b268b0a9ce | ||
|
b14073a00f | ||
|
fcf0ee34d1 | ||
|
42b7df44e0 | ||
|
81381cb943 | ||
|
5bf291b3a3 | ||
|
a04dd94282 | ||
|
2e7bb5e7a3 | ||
|
bb9f7a6205 | ||
|
31fecc3ec2 | ||
|
ec066fdfc7 | ||
|
e807cd637f | ||
|
b103a34f3b | ||
|
51f7b14777 | ||
|
21df6b1d57 | ||
|
1b704a869d | ||
|
8a0bade6cd |
26
.devcontainer/Dockerfile
Normal file
26
.devcontainer/Dockerfile
Normal file
@ -0,0 +1,26 @@
|
||||
# Use the jguer/yay-builder image as a parent image with archlinux
|
||||
FROM docker.io/jguer/yay-builder
|
||||
|
||||
# Install extra packages (pacman-contrib and fish)
|
||||
RUN sudo pacman -Syu --noconfirm pacman-contrib fish git-delta openssh bat go
|
||||
|
||||
# Set passwordless sudo for the docker user
|
||||
RUN echo "docker ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/docker
|
||||
|
||||
# Create a non-root user and switch to it
|
||||
USER docker
|
||||
|
||||
# Install xgotext
|
||||
RUN go install github.com/leonelquinteros/gotext/cli/xgotext@latest
|
||||
|
||||
# Add /app/bin to the PATH
|
||||
ENV PATH="/app/bin:$PATH"
|
||||
|
||||
# add /home/docker/go/bin to the PATH
|
||||
ENV PATH="/home/docker/go/bin:$PATH"
|
||||
|
||||
# Set the working directory
|
||||
WORKDIR /workspace
|
||||
|
||||
# Command to run when starting the container
|
||||
CMD ["bash"]
|
14
.devcontainer/devcontainer.json
Normal file
14
.devcontainer/devcontainer.json
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"name": "Existing Dockerfile",
|
||||
"build": {
|
||||
"context": "..",
|
||||
"dockerfile": "../.devcontainer/Dockerfile"
|
||||
},
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"golang.go"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
9
.dockerignore
Normal file
9
.dockerignore
Normal file
@ -0,0 +1,9 @@
|
||||
*
|
||||
!*.go
|
||||
!pkg
|
||||
!go.mod
|
||||
!go.sum
|
||||
!Makefile
|
||||
!po
|
||||
!doc
|
||||
!completions
|
1
.github/CODEOWNERS
vendored
Normal file
1
.github/CODEOWNERS
vendored
Normal file
@ -0,0 +1 @@
|
||||
* @Jguer
|
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
@ -0,0 +1 @@
|
||||
github: [Jguer]
|
44
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
44
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Report a malfunction to help us improve
|
||||
title: ""
|
||||
labels: "Status: Triage, Type: Bug"
|
||||
assignees: ""
|
||||
---
|
||||
|
||||
### Affected Version
|
||||
|
||||
<!-- Please ensure you are using the latest yay-git package
|
||||
Use `yay -V` to get installed version
|
||||
Example: `yay v8.1139.r0.g9ac4ab6 - libalpm v11.0.1` -->
|
||||
|
||||
### Describe the bug
|
||||
|
||||
<!-- A clear and concise description of the bug. -->
|
||||
|
||||
### Reproduction Steps
|
||||
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
### Expected behavior
|
||||
|
||||
<!-- A clear and concise description of what you expected to happen. -->
|
||||
|
||||
### Output
|
||||
|
||||
<!--
|
||||
Include the FULL output of any relevant commands/configs
|
||||
The current yay config can be printed with `yay -Pg`
|
||||
Paste services are only needed for excessive output (>500 lines)
|
||||
Use --debug to add pacman and yay debug logs
|
||||
or add the following key to your ~/.config/yay/config.json to only get yay debug logs
|
||||
{
|
||||
"debug": true
|
||||
}
|
||||
-->
|
||||
|
||||
```sh
|
||||
|
||||
```
|
23
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
23
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@ -0,0 +1,23 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: ""
|
||||
labels: "Type: Feature Request, Status: Discussion Open"
|
||||
assignees: ""
|
||||
---
|
||||
|
||||
### Is your feature request related to a problem? Please describe.
|
||||
|
||||
<!-- A clear and concise description of the problem, e.g. I'm always frustrated when ... -->
|
||||
|
||||
### Describe the solution you'd like
|
||||
|
||||
<!-- A clear and concise description of what you want to happen. -->
|
||||
|
||||
### Describe alternatives you've considered
|
||||
|
||||
<!-- A clear and concise description of any alternative solutions or features you've considered. -->
|
||||
|
||||
### Additional context
|
||||
|
||||
<!-- Add any other context or screenshots about the feature request here. -->
|
15
.github/dependabot.yml
vendored
Normal file
15
.github/dependabot.yml
vendored
Normal file
@ -0,0 +1,15 @@
|
||||
# To get started with Dependabot version updates, you'll need to specify which
|
||||
# package ecosystems to update and where the package manifests are located.
|
||||
# Please see the documentation for all configuration options:
|
||||
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "gomod" # See documentation for possible values
|
||||
directory: "/" # Location of package manifests
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
groups:
|
||||
go-all:
|
||||
patterns:
|
||||
- '*'
|
20
.github/stale.yml
vendored
Normal file
20
.github/stale.yml
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
# Number of days of inactivity before an issue becomes stale
|
||||
daysUntilStale: 120
|
||||
# Number of days of inactivity before a stale issue is closed
|
||||
daysUntilClose: 40
|
||||
# Issues with these labels will never be considered stale
|
||||
exemptLabels:
|
||||
- "Status: In Progress"
|
||||
- "Status: Confirmed"
|
||||
- "Status: Approved"
|
||||
- "Status: Triage"
|
||||
- "Type: Bug"
|
||||
# Label to use when marking an issue as stale
|
||||
staleLabel: stale
|
||||
# Comment to post when marking an issue as stale. Set to `false` to disable
|
||||
markComment: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
recent activity. It will be closed if no further activity occurs. Thank you
|
||||
for your contributions.
|
||||
# Comment to post when closing a stale issue. Set to `false` to disable
|
||||
closeComment: false
|
143
.github/workflows/builder-image.yml
vendored
Normal file
143
.github/workflows/builder-image.yml
vendored
Normal file
@ -0,0 +1,143 @@
|
||||
name: Builder Image
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 3 * * 1" # Every Monday at 3 AM
|
||||
push:
|
||||
paths:
|
||||
- "ci.Dockerfile"
|
||||
- ".github/workflows/builder-image.yml"
|
||||
|
||||
env:
|
||||
REGISTRY_IMAGE: jguer/yay-builder
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux/amd64
|
||||
- linux/arm/v7
|
||||
- linux/arm64
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.REGISTRY_IMAGE }}
|
||||
ghcr.io/${{ env.REGISTRY_IMAGE }}
|
||||
tags: |
|
||||
type=raw,value=latest
|
||||
type=sha,format=long
|
||||
|
||||
- name: Build and push by digest
|
||||
id: build
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ci.Dockerfile
|
||||
platforms: ${{ matrix.platform }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
|
||||
|
||||
- name: Export digest
|
||||
run: |
|
||||
mkdir -p /tmp/digests
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
echo -n "$digest" > "/tmp/digests/$(echo "${{ matrix.platform }}" | tr '/' '_')"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: digest-${{ matrix.platform == 'linux/amd64' && 'amd64' || matrix.platform == 'linux/arm/v7' && 'armv7' || 'arm64' }}
|
||||
path: /tmp/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
merge:
|
||||
needs: [build]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: digest-*
|
||||
merge-multiple: true
|
||||
path: /tmp/digests
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.REGISTRY_IMAGE }}
|
||||
ghcr.io/${{ env.REGISTRY_IMAGE }}
|
||||
tags: |
|
||||
type=raw,value=latest
|
||||
type=sha,format=short
|
||||
|
||||
- name: Create and push manifest list
|
||||
env:
|
||||
DOCKER_CLI_EXPERIMENTAL: enabled
|
||||
run: |
|
||||
# Extract Docker Hub tags
|
||||
DH_TAGS=$(echo '${{ steps.meta.outputs.tags }}' | grep -v "^ghcr.io" | xargs -I {} echo "-t {}")
|
||||
|
||||
# Extract GitHub Container Registry tags
|
||||
GHCR_TAGS=$(echo '${{ steps.meta.outputs.tags }}' | grep "^ghcr.io" | xargs -I {} echo "-t {}")
|
||||
|
||||
# Create a manifest list using the image digests from /tmp/digests/*
|
||||
DIGESTS=$(for file in /tmp/digests/*; do
|
||||
echo -n "${{ env.REGISTRY_IMAGE }}@$(cat $file) "
|
||||
done)
|
||||
|
||||
# Create the manifest list for Docker Hub
|
||||
docker buildx imagetools create $DH_TAGS $DIGESTS
|
||||
|
||||
# Create the manifest list for GitHub Container Registry
|
||||
docker buildx imagetools create $GHCR_TAGS $DIGESTS
|
||||
|
||||
- name: Inspect image
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:latest
|
93
.github/workflows/multiarch-build.yml
vendored
Normal file
93
.github/workflows/multiarch-build.yml
vendored
Normal file
@ -0,0 +1,93 @@
|
||||
name: Build Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
|
||||
jobs:
|
||||
build-releases:
|
||||
strategy:
|
||||
matrix:
|
||||
arch: ["linux/amd64 x86_64", "linux/arm/v7 armv7h", "linux/arm64 aarch64"]
|
||||
name: Build ${{ matrix.arch }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Read info
|
||||
id: tags
|
||||
run: |
|
||||
echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
|
||||
echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||
arch="${{ matrix.arch }}"
|
||||
echo "PLATFORM=${arch%% *}" >> $GITHUB_OUTPUT
|
||||
echo "ARCH=${arch##* }" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build ${{ matrix.arch }} release
|
||||
run: |
|
||||
mkdir artifacts
|
||||
docker buildx build --platform ${{ steps.tags.outputs.platform }} \
|
||||
--build-arg VERSION=${{ steps.tags.outputs.version }} \
|
||||
--build-arg ARCH=${{ steps.tags.outputs.arch }} \
|
||||
--build-arg PREFIX="/usr" \
|
||||
-t yay:${{ steps.tags.outputs.arch }} . --load
|
||||
make docker-release ARCH=${{ steps.tags.outputs.arch }} VERSION=${{ steps.tags.outputs.version }} PREFIX="/usr"
|
||||
mv *.tar.gz artifacts
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: yay_${{ steps.tags.outputs.arch }}
|
||||
path: artifacts
|
||||
|
||||
create_release:
|
||||
name: Create release from this build
|
||||
needs: [build-releases]
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Read info
|
||||
id: tags
|
||||
run: |
|
||||
echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT
|
||||
echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: yay_*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Create Release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
gh release create ${{ steps.tags.outputs.tag }} \
|
||||
--title "${{ steps.tags.outputs.tag }}" \
|
||||
--generate-notes \
|
||||
./yay_${{ steps.tags.outputs.version }}_*.tar.gz
|
||||
|
||||
- name: Release Notary Action
|
||||
uses: docker://aevea/release-notary:latest
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
39
.github/workflows/testing-git.yml
vendored
Normal file
39
.github/workflows/testing-git.yml
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
name: Test against pacman-git
|
||||
on:
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- "doc/**"
|
||||
- "**/*.po"
|
||||
- "README.md"
|
||||
- ".gitignore"
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Lint and test yay (-git)
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ghcr.io/jguer/yay-builder:latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/go/pkg/mod
|
||||
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-go-
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: /home/runner/work/yay/yay/pacman-git
|
||||
key: ${{ runner.os }}-pacman-${{ hashFiles('/home/runner/work/yay/yay/pacman-git/PKGBUILD') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pacman-
|
||||
- name: checkout pacman-git
|
||||
run: |
|
||||
git -C ./pacman-git pull || git clone https://aur.archlinux.org/pacman-git
|
||||
useradd github
|
||||
echo 'github ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers
|
||||
chmod -R 777 pacman-git
|
||||
su github -c 'cd pacman-git; yes | makepkg -i --nocheck'
|
||||
- name: Run Build and Tests with pacman-git
|
||||
run: |
|
||||
make test
|
44
.github/workflows/testing.yml
vendored
Normal file
44
.github/workflows/testing.yml
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
name: Test against pacman
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Lint and test yay
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ghcr.io/jguer/yay-builder:latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/go/pkg/mod
|
||||
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-go-
|
||||
- name: Lint
|
||||
env:
|
||||
GOFLAGS: -buildvcs=false -tags=next
|
||||
run: /app/bin/golangci-lint run -v ./...
|
||||
- name: Run Build and Tests
|
||||
run: make test
|
||||
|
||||
- name: Run Integration Tests
|
||||
continue-on-error: true
|
||||
run: |
|
||||
useradd -m yay &&
|
||||
chown -R yay:yay . &&
|
||||
cp -r ~/go/ /home/yay/go/ &&
|
||||
chown -R yay:yay /home/yay/go/ &&
|
||||
su yay -c "make test-integration"
|
||||
- name: Build yay Artifact
|
||||
env:
|
||||
GOFLAGS: -buildvcs=false -tags=next
|
||||
run: make
|
||||
- name: Upload yay Artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: yay
|
||||
path: ./yay
|
||||
if-no-files-found: error
|
||||
overwrite: true
|
17
.gitignore
vendored
17
.gitignore
vendored
@ -6,10 +6,7 @@
|
||||
# Folders
|
||||
_obj
|
||||
_test
|
||||
|
||||
# Architecture specific extensions/prefixes
|
||||
*.[568vq]
|
||||
[568vq].out
|
||||
.vscode
|
||||
|
||||
*.cgo1.go
|
||||
*.cgo2.c
|
||||
@ -23,3 +20,15 @@ _cgo_export.*
|
||||
yay
|
||||
yay_*/
|
||||
*.tar.gz
|
||||
qemu-*
|
||||
.go
|
||||
|
||||
# Locale
|
||||
*.mo
|
||||
*.pot
|
||||
*.po~
|
||||
*.pprof
|
||||
|
||||
node_modules/
|
||||
xgotext
|
||||
.devcontainer/
|
94
.golangci.yml
Normal file
94
.golangci.yml
Normal file
@ -0,0 +1,94 @@
|
||||
version: "2"
|
||||
run:
|
||||
go: "1.20"
|
||||
linters:
|
||||
default: none
|
||||
enable:
|
||||
- bodyclose
|
||||
- dogsled
|
||||
- dupl
|
||||
- errcheck
|
||||
- errorlint
|
||||
- gochecknoinits
|
||||
- gocritic
|
||||
- goprintffuncname
|
||||
- gosec
|
||||
- govet
|
||||
- ineffassign
|
||||
- lll
|
||||
- misspell
|
||||
- nakedret
|
||||
- noctx
|
||||
- nolintlint
|
||||
- staticcheck
|
||||
- unconvert
|
||||
- unparam
|
||||
- unused
|
||||
- whitespace
|
||||
settings:
|
||||
dupl:
|
||||
threshold: 100
|
||||
funlen:
|
||||
lines: 100
|
||||
statements: 50
|
||||
goconst:
|
||||
min-len: 3
|
||||
min-occurrences: 4
|
||||
gocritic:
|
||||
enabled-tags:
|
||||
- diagnostic
|
||||
- experimental
|
||||
- opinionated
|
||||
- performance
|
||||
- style
|
||||
gocyclo:
|
||||
min-complexity: 15
|
||||
lll:
|
||||
line-length: 140
|
||||
misspell:
|
||||
locale: US
|
||||
nolintlint:
|
||||
require-explanation: false
|
||||
require-specific: false
|
||||
allow-unused: false
|
||||
exclusions:
|
||||
generated: lax
|
||||
presets:
|
||||
- comments
|
||||
- common-false-positives
|
||||
- legacy
|
||||
- std-error-handling
|
||||
rules:
|
||||
- linters:
|
||||
- dupl
|
||||
- errcheck
|
||||
- errorlint
|
||||
- gochecknoinits
|
||||
- gocritic
|
||||
- godot
|
||||
- govet
|
||||
- lll
|
||||
- revive
|
||||
- staticcheck
|
||||
- wsl
|
||||
path: (.+)_test.go
|
||||
- path: (.+)\.go$
|
||||
text: G204
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
formatters:
|
||||
enable:
|
||||
- gofmt
|
||||
- goimports
|
||||
settings:
|
||||
goimports:
|
||||
local-prefixes:
|
||||
- github.com/Jguer/yay/v12
|
||||
exclusions:
|
||||
generated: lax
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
29
.pre-commit-config.yaml
Normal file
29
.pre-commit-config.yaml
Normal file
@ -0,0 +1,29 @@
|
||||
default_stages: [commit]
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/dnephin/pre-commit-golang
|
||||
rev: v0.5.1
|
||||
hooks:
|
||||
- id: go-fmt
|
||||
- id: golangci-lint
|
||||
- id: go-unit-tests
|
||||
- id: go-build
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v4.0.0-alpha.8 # Use the sha or tag you want to point at
|
||||
hooks:
|
||||
- id: prettier
|
||||
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.5.0 # Use the ref you want to point at
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: check-json
|
||||
- id: check-yaml
|
||||
- id: check-added-large-files
|
||||
|
||||
- repo: https://github.com/commitizen-tools/commitizen
|
||||
rev: v3.15.0
|
||||
hooks:
|
||||
- id: commitizen
|
||||
stages: [commit-msg]
|
7
.vscode/settings.json
vendored
Normal file
7
.vscode/settings.json
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
{
|
||||
"go.lintTool": "golangci-lint",
|
||||
"gopls": {
|
||||
"formatting.gofumpt": true,
|
||||
"formatting.local": "github.com/Jguer/yay/v12"
|
||||
}
|
||||
}
|
79
CONTRIBUTING.md
Normal file
79
CONTRIBUTING.md
Normal file
@ -0,0 +1,79 @@
|
||||
# Contributing to yay
|
||||
|
||||
## Translation
|
||||
|
||||
[Transifex](https://www.transifex.com/yay-1/yay/)
|
||||
|
||||
## Quality Assurance
|
||||
|
||||
```sh
|
||||
pacman -S --needed git base-devel
|
||||
git clone https://aur.archlinux.org/yay-git.git
|
||||
cd yay-git
|
||||
makepkg -si
|
||||
```
|
||||
|
||||
Installing `yay-git` and using issues to help determine what's broken is already
|
||||
a very big help.
|
||||
|
||||
## Development
|
||||
|
||||
Contributors are always welcome!
|
||||
|
||||
If you plan to make any large changes or changes that may not be 100% agreed
|
||||
on, we suggest opening an issue detailing your ideas first.
|
||||
|
||||
Otherwise send us a pull request and we will be happy to review it.
|
||||
|
||||
### Vision
|
||||
|
||||
Yay is based on the design of [yaourt](https://github.com/archlinuxfr/yaourt), [apacman](https://github.com/oshazard/apacman) and [pacaur](https://github.com/rmarquis/pacaur). It is developed with these objectives in mind:
|
||||
|
||||
- Provide an interface for pacman
|
||||
- Yaourt-style interactive search/install
|
||||
- Minimal dependencies
|
||||
- Minimize user input
|
||||
|
||||
### Dependencies
|
||||
|
||||
Yay depends on:
|
||||
|
||||
- go (make only)
|
||||
- git
|
||||
- base-devel
|
||||
- pacman
|
||||
|
||||
Note: Yay also depends on a few other projects, these are pulled as go modules.
|
||||
|
||||
### Building
|
||||
|
||||
Run `make` to build Yay. This command will generate a binary called `yay` in
|
||||
the same directory as the Makefile.
|
||||
|
||||
#### Docker Release
|
||||
|
||||
`make docker-release` will build the release packages for `aarch64` and for `x86_64`.
|
||||
|
||||
For `aarch64` to run on a `x86_64` platform `qemu-user-static(-bin)` must be
|
||||
installed.
|
||||
|
||||
```
|
||||
docker run --rm --privileged multiarch/qemu-user-static:register --reset
|
||||
```
|
||||
|
||||
will register QEMU in the build agent. ARM builds tend to crash sometimes but
|
||||
repeated runs tend to succeed.
|
||||
|
||||
### Code Style
|
||||
|
||||
All code should be formatted through `go fmt`. This tool will automatically
|
||||
format code for you. We recommend, however, that you write code in the proper
|
||||
style and use `go fmt` only to catch mistakes.
|
||||
|
||||
Use [pre-commit](https://pre-commit.com/) to validate your commits against the various
|
||||
linters configured for this repository.
|
||||
|
||||
### Testing
|
||||
|
||||
Run `make test` to test Yay. This command will verify that the code is
|
||||
formatted correctly, run the code through `go vet`, and run unit tests.
|
12
Dockerfile
Normal file
12
Dockerfile
Normal file
@ -0,0 +1,12 @@
|
||||
FROM ghcr.io/jguer/yay-builder:latest
|
||||
LABEL maintainer="Jguer,docker@jguer.space"
|
||||
|
||||
ARG VERSION
|
||||
ARG PREFIX
|
||||
ARG ARCH
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN make release VERSION=${VERSION} PREFIX=${PREFIX} ARCH=${ARCH}
|
27
Gopkg.lock
generated
27
Gopkg.lock
generated
@ -1,27 +0,0 @@
|
||||
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
|
||||
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/jguer/go-alpm"
|
||||
packages = ["."]
|
||||
revision = "ec031c9cd5f6050edc3c2f23df2bff3bbb9511cc"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/mikkeloscar/aur"
|
||||
packages = ["."]
|
||||
revision = "9050804dc7d471393053322aaaa40428fbd32de3"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/mikkeloscar/gopkgbuild"
|
||||
packages = ["."]
|
||||
revision = "32274fc52aa8f5eb28711da734179e9aea27b31f"
|
||||
|
||||
[solve-meta]
|
||||
analyzer-name = "dep"
|
||||
analyzer-version = 1
|
||||
inputs-digest = "456465ee334310996a51a2282bf4cfe9f6269db508479c962474d61a4ce0a08c"
|
||||
solver-name = "gps-cdcl"
|
||||
solver-version = 1
|
16
Gopkg.toml
16
Gopkg.toml
@ -1,16 +0,0 @@
|
||||
[prune]
|
||||
non-go = true
|
||||
unused-packages = true
|
||||
go-tests = true
|
||||
|
||||
[[constraint]]
|
||||
branch = "master"
|
||||
name = "github.com/jguer/go-alpm"
|
||||
|
||||
[[constraint]]
|
||||
branch = "master"
|
||||
name = "github.com/mikkeloscar/aur"
|
||||
|
||||
[[constraint]]
|
||||
branch = "master"
|
||||
name = "github.com/mikkeloscar/gopkgbuild"
|
@ -1,12 +0,0 @@
|
||||
#### Affected Version
|
||||
<!-- Please ensure you are using the latest yay-git package -->
|
||||
<!-- Use `yay -V` to get installed version -->
|
||||
<!-- Example: `yay v3.373` -->
|
||||
|
||||
#### Issue
|
||||
|
||||
#### Steps to reproduce
|
||||
<!-- Exact commands are useful -->
|
||||
1.
|
||||
2.
|
||||
3.
|
150
Makefile
150
Makefile
@ -1,62 +1,134 @@
|
||||
.PHONY: all default install uninstall test build release clean
|
||||
export GO111MODULE=on
|
||||
GOPROXY ?= https://proxy.golang.org,direct
|
||||
export GOPROXY
|
||||
|
||||
PREFIX := /usr
|
||||
BUILD_TAG = devel
|
||||
ARCH ?= $(shell uname -m)
|
||||
BIN := yay
|
||||
DESTDIR :=
|
||||
|
||||
ifndef VERSION
|
||||
MAJORVERSION := 5
|
||||
MINORVERSION ?= $(shell git rev-list --count master)
|
||||
endif
|
||||
VERSION := ${MAJORVERSION}.${MINORVERSION}
|
||||
|
||||
LDFLAGS := -ldflags '-s -w -X main.version=${VERSION}'
|
||||
GOFILES := $(shell ls *.go | grep -v /vendor/)
|
||||
ARCH := $(shell uname -m)
|
||||
GO ?= go
|
||||
PKGNAME := yay
|
||||
BINNAME := yay
|
||||
PACKAGE := ${PKGNAME}_${VERSION}_${ARCH}
|
||||
PREFIX := /usr/local
|
||||
|
||||
export GOPATH=$(shell pwd)/.go
|
||||
export GOROOT=/usr/lib/go
|
||||
MAJORVERSION := 12
|
||||
MINORVERSION := 0
|
||||
PATCHVERSION := 0
|
||||
VERSION ?= ${MAJORVERSION}.${MINORVERSION}.${PATCHVERSION}
|
||||
|
||||
LOCALEDIR := po
|
||||
SYSTEMLOCALEPATH := $(PREFIX)/share/locale/
|
||||
|
||||
# ls -1 po | sed -e 's/\.po$//' | paste -sd " "
|
||||
LANGS := ca cs de en es eu fr_FR he id it_IT ja ko pl_PL pt_BR pt ru_RU ru sv tr uk zh_CN zh_TW
|
||||
POTFILE := default.pot
|
||||
POFILES := $(addprefix $(LOCALEDIR)/,$(addsuffix .po,$(LANGS)))
|
||||
MOFILES := $(POFILES:.po=.mo)
|
||||
|
||||
FLAGS ?= -trimpath -mod=readonly -modcacherw
|
||||
EXTRA_FLAGS ?= -buildmode=pie
|
||||
LDFLAGS := -X "main.yayVersion=${VERSION}" -X "main.localePath=${SYSTEMLOCALEPATH}" -linkmode=external -compressdwarf=false
|
||||
|
||||
RELEASE_DIR := ${PKGNAME}_${VERSION}_${ARCH}
|
||||
PACKAGE := $(RELEASE_DIR).tar.gz
|
||||
SOURCES ?= $(shell find . -name "*.go" -type f)
|
||||
|
||||
.PRECIOUS: ${LOCALEDIR}/%.po
|
||||
|
||||
.PHONY: default
|
||||
default: build
|
||||
|
||||
all: | clean package
|
||||
.PHONY: all
|
||||
all: | clean release
|
||||
|
||||
install:
|
||||
install -Dm755 ${BINNAME} $(DESTDIR)$(PREFIX)/bin/${BINNAME}
|
||||
.PHONY: clean
|
||||
clean:
|
||||
$(GO) clean $(FLAGS) -i ./...
|
||||
rm -rf $(BIN) $(PKGNAME)_*
|
||||
|
||||
.PHONY: test_lint
|
||||
test_lint: test lint
|
||||
|
||||
.PHONY: test
|
||||
test:
|
||||
$(GO) test -race -covermode=atomic $(FLAGS) ./...
|
||||
|
||||
.PHONY: test-integration
|
||||
test-integration:
|
||||
$(GO) test -tags=integration $(FLAGS) ./...
|
||||
|
||||
.PHONY: build
|
||||
build: $(BIN)
|
||||
|
||||
.PHONY: release
|
||||
release: $(PACKAGE)
|
||||
|
||||
.PHONY: docker-release-all
|
||||
docker-release-all:
|
||||
make docker-release-armv7h ARCH=armv7h
|
||||
make docker-release-x86_64 ARCH=x86_64
|
||||
make docker-release-aarch64 ARCH=aarch64
|
||||
|
||||
docker-release:
|
||||
docker create --name yay-$(ARCH) yay:${ARCH} /bin/sh
|
||||
docker cp yay-$(ARCH):/app/${PACKAGE} $(PACKAGE)
|
||||
docker container rm yay-$(ARCH)
|
||||
|
||||
.PHONY: docker-build
|
||||
docker-build:
|
||||
docker build -t yay-$(ARCH):${VERSION} .
|
||||
docker run -e="ARCH=$(ARCH)" --name yay-$(ARCH) yay-$(ARCH):${VERSION} make build VERSION=${VERSION} PREFIX=${PREFIX}
|
||||
docker cp yay-$(ARCH):/app/${BIN} $(BIN)
|
||||
docker container rm yay-$(ARCH)
|
||||
|
||||
.PHONY: lint
|
||||
lint:
|
||||
GOFLAGS="$(FLAGS)" golangci-lint run ./...
|
||||
|
||||
.PHONY: fmt
|
||||
fmt:
|
||||
go fmt ./...
|
||||
|
||||
.PHONY: install
|
||||
install: build ${MOFILES}
|
||||
install -Dm755 ${BIN} $(DESTDIR)$(PREFIX)/bin/${BIN}
|
||||
install -Dm644 doc/${PKGNAME}.8 $(DESTDIR)$(PREFIX)/share/man/man8/${PKGNAME}.8
|
||||
install -Dm644 completions/bash $(DESTDIR)$(PREFIX)/share/bash-completion/completions/${PKGNAME}
|
||||
install -Dm644 completions/zsh $(DESTDIR)$(PREFIX)/share/zsh/site-functions/_${PKGNAME}
|
||||
install -Dm644 completions/fish $(DESTDIR)$(PREFIX)/share/fish/vendor_completions.d/${PKGNAME}.fish
|
||||
for lang in ${LANGS}; do \
|
||||
install -Dm644 ${LOCALEDIR}/$${lang}.mo $(DESTDIR)$(PREFIX)/share/locale/$$lang/LC_MESSAGES/${PKGNAME}.mo; \
|
||||
done
|
||||
|
||||
.PHONY: uninstall
|
||||
uninstall:
|
||||
rm -f $(DESTDIR)$(PREFIX)/bin/${BINNAME}
|
||||
rm -f $(DESTDIR)$(PREFIX)/bin/${BIN}
|
||||
rm -f $(DESTDIR)$(PREFIX)/share/man/man8/${PKGNAME}.8
|
||||
rm -f $(DESTDIR)$(PREFIX)/share/bash-completion/completions/${PKGNAME}
|
||||
rm -f $(DESTDIR)$(PREFIX)/share/zsh/site-functions/_${PKGNAME}
|
||||
rm -f $(DESTDIR)$(PREFIX)/share/fish/vendor_completions.d/${PKGNAME}.fish
|
||||
for lang in ${LANGS}; do \
|
||||
rm -f $(DESTDIR)$(PREFIX)/share/locale/$$lang/LC_MESSAGES/${PKGNAME}.mo; \
|
||||
done
|
||||
|
||||
test:
|
||||
gofmt -l *.go
|
||||
@test -z "$$(gofmt -l *.go)" || (echo "Files need to be linted" && false)
|
||||
go vet
|
||||
go test -v
|
||||
$(BIN): $(SOURCES)
|
||||
$(GO) build $(FLAGS) -ldflags '$(LDFLAGS)' $(EXTRA_FLAGS) -o $@
|
||||
|
||||
build:
|
||||
go build -v ${LDFLAGS} -o ${BINNAME}
|
||||
$(RELEASE_DIR):
|
||||
mkdir $(RELEASE_DIR)
|
||||
|
||||
release: | test build
|
||||
mkdir ${PACKAGE}
|
||||
cp ./${BINNAME} ${PACKAGE}/
|
||||
cp ./doc/${PKGNAME}.8 ${PACKAGE}/
|
||||
cp ./completions/zsh ${PACKAGE}/
|
||||
cp ./completions/fish ${PACKAGE}/
|
||||
cp ./completions/bash ${PACKAGE}/
|
||||
$(PACKAGE): $(BIN) $(RELEASE_DIR) ${MOFILES}
|
||||
strip ${BIN}
|
||||
cp -t $(RELEASE_DIR) ${BIN} doc/${PKGNAME}.8 completions/* ${MOFILES}
|
||||
tar -czvf $(PACKAGE) $(RELEASE_DIR)
|
||||
|
||||
package: release
|
||||
tar -czvf ${PACKAGE}.tar.gz ${PACKAGE}
|
||||
clean:
|
||||
rm -rf ${PKGNAME}_*
|
||||
rm -f ${BINNAME}
|
||||
locale:
|
||||
xgotext -in . -out po
|
||||
mv po/default.pot po/en.po
|
||||
for lang in ${LANGS}; do \
|
||||
test -f po/$$lang.po || msginit --no-translator -l po/$$lang.po -i po/${POTFILE} -o po/$$lang.po; \
|
||||
msgmerge -U po/$$lang.po po/${POTFILE}; \
|
||||
touch po/$$lang.po; \
|
||||
done
|
||||
|
||||
${LOCALEDIR}/%.mo: ${LOCALEDIR}/%.po
|
||||
msgfmt $< -o $@
|
||||
|
290
README.md
290
README.md
@ -1,217 +1,189 @@
|
||||
# yay
|
||||
[](https://aur.archlinux.org/packages/yay/)
|
||||
[](https://aur.archlinux.org/packages/yay-bin/)
|
||||
[](https://aur.archlinux.org/packages/yay-git/)
|
||||

|
||||
[](https://github.com/Jguer/yay/blob/master/LICENSE)
|
||||
|
||||
Yet another Yogurt - An AUR Helper written in Go
|
||||
# Yay
|
||||
|
||||
#### Packages
|
||||
Yet Another Yogurt - An AUR Helper Written in Go
|
||||
|
||||
[](https://aur.archlinux.org/packages/yay/) [](https://aur.archlinux.org/packages/yay-bin/) [](https://aur.archlinux.org/packages/yay-git/) [](https://raw.githubusercontent.com/Jguer/yay/master/LICENSE)
|
||||
|
||||
There's a point in everyone's life when you feel the need to write an AUR helper because there are only about 20 of them.
|
||||
So say hi to 20+1.
|
||||
|
||||
Yay was created with a few objectives in mind and based on the design of [yaourt](https://github.com/archlinuxfr/yaourt) and [apacman](https://github.com/oshazard/apacman):
|
||||
|
||||
* Have almost no dependencies.
|
||||
* Provide an interface for pacman.
|
||||
* Have yaourt like search.
|
||||
* Minimize user input
|
||||
* Know when git packages are due for an upgrade.
|
||||
### Help translate yay: [Transifex](https://www.transifex.com/yay-1/yay/)
|
||||
|
||||
## Features
|
||||
|
||||
* AUR Tab completion
|
||||
* Download PKGBUILD from ABS or AUR
|
||||
* Ask all questions first and then start building
|
||||
* Search narrowing (`yay linux header` will first search linux and then narrow on header)
|
||||
* No sourcing of PKGBUILD is done
|
||||
* The binary has no dependencies that pacman doesn't already have.
|
||||
* Sources build dependencies
|
||||
* Removes make dependencies at the end of build process
|
||||
- Advanced dependency solving
|
||||
- PKGBUILD downloading from ABS or AUR
|
||||
- Completions for AUR packages
|
||||
- Query user up-front for all input (prior to starting builds)
|
||||
- Narrow search (`yay linux header` will first search `linux` and then narrow on `header`)
|
||||
- Find matching package providers during search and allow selection
|
||||
- Remove make dependencies at the end of the build process
|
||||
- Build local PKGBUILDs with AUR dependencies
|
||||
- Un/Vote for packages
|
||||
|
||||
#### Frequently Asked Questions
|
||||
[](https://asciinema.org/a/399431)
|
||||
|
||||
* Yay does not display colored output. How do I fix it?
|
||||
Make sure you have the `Color` option in your `/etc/pacman.conf` [#123](https://github.com/Jguer/yay/issues/123)
|
||||
[](https://asciinema.org/a/399433)
|
||||
|
||||
#### Example of Custom Operations
|
||||
## Installation
|
||||
|
||||
* `yay <Search Term>` presents package selection menu
|
||||
* `yay -Ps` prints system statistics
|
||||
* `yay -Pu` prints update list
|
||||
* `yay -Yc` cleans unneeded dependencies
|
||||
* `yay -G` downloads PKGBUILD from ABS or AUR
|
||||
* `yay -Y --gendb` generates development package DB used for devel updates.
|
||||
If you are migrating from another AUR helper, you can simply install Yay with that helper.
|
||||
|
||||
<img src="http://jguer.github.io/yay/yayupgrade.png" width="450">
|
||||
<img src="http://jguer.github.io/yay/yay2.png" width="450">
|
||||
<img src="http://jguer.github.io/yay/yay4.png" width="450">
|
||||
> [!WARNING]
|
||||
> We are using `sudo` in these examples, you can switch that out for a different privilege escalation tool.
|
||||
|
||||
### Changelog
|
||||
### Source
|
||||
|
||||
#### v5.608
|
||||
The initial installation of Yay can be done by cloning the PKGBUILD and
|
||||
building with makepkg:
|
||||
|
||||
* Updated Shell completions
|
||||
* Added `-Qu` to extended pacman options
|
||||
* Provides now supported in `-Si`
|
||||
* Improved build method
|
||||
* Improved conflict checking
|
||||
* PKGBUILDs with unsupported arch can force build now
|
||||
* PGP Key automatic importing
|
||||
* GPG option passing
|
||||
* `db/name` support readded
|
||||
We make sure we have the `base-devel` package group installed.
|
||||
|
||||
#### 4.505
|
||||
```sh
|
||||
sudo pacman -S --needed git base-devel
|
||||
git clone https://aur.archlinux.org/yay.git
|
||||
cd yay
|
||||
makepkg -si
|
||||
```
|
||||
|
||||
* `yay` used to auto save permanent configuration options, now `--save` must be passed to save permanent configuration options
|
||||
* Competions updated
|
||||
* Number menu is now used to edit PKGBuilds and Clean Builds
|
||||
* Devel updates of `-git` packages now uses `git ls-remote` which makes it compatible with other platforms besides github.
|
||||
* Devel update checking is faster as well
|
||||
* Updated man page
|
||||
If you want to do all of this at once, we can chain the commands like so:
|
||||
|
||||
#### 3.440
|
||||
```sh
|
||||
sudo pacman -S --needed git base-devel && git clone https://aur.archlinux.org/yay.git && cd yay && makepkg -si
|
||||
```
|
||||
|
||||
* Closed a lot of issues
|
||||
* Updated bash and zsh completions
|
||||
* New colour scheme
|
||||
* Small parsing fixes
|
||||
* Automatically delete package from transaction if $EDITOR exits with non-zero #140
|
||||
* Added check depends support
|
||||
### Binary
|
||||
|
||||
#### 3.373
|
||||
If you do not want to compile yay yourself you can use the builds generated by
|
||||
GitHub Actions.
|
||||
|
||||
* Version bump to V3 to reflect all of the changes to syntax
|
||||
* `yay -Pd` prints default config
|
||||
* `yay -Pg` prints current config
|
||||
* Fixes #174
|
||||
* Fixes #176
|
||||
* Fixes -G being unable to download split packages
|
||||
* Fixes #171
|
||||
* Fixes -Si failing when given a non existing package on https://github.com/Jguer/yay/pull/155
|
||||
* Fixes other small bugs on 2.350 without adding new features
|
||||
```sh
|
||||
sudo pacman -S --needed git base-devel
|
||||
git clone https://aur.archlinux.org/yay-bin.git
|
||||
cd yay-bin
|
||||
makepkg -si
|
||||
```
|
||||
|
||||
#### 2.350
|
||||
If you want to do all of this at once, we can chain the commands like so:
|
||||
|
||||
* Adds sudo loop (off by default, enable only by editing config file) #147
|
||||
* Adds replace package support #154 #134
|
||||
* Minor display improvements #150 for example
|
||||
* Fixes GenDB
|
||||
* Fixes Double options passing to pacman
|
||||
* Noconfirm works more as expected
|
||||
* Minor fixes and refactoring
|
||||
* Yay filters out the repository name if it's included.
|
||||
* Fixes #122
|
||||
```sh
|
||||
sudo pacman -S --needed git base-devel && git clone https://aur.archlinux.org/yay-bin.git && cd yay-bin && makepkg -si
|
||||
```
|
||||
|
||||
#### 2.298
|
||||
### Other distributions
|
||||
|
||||
* Adds #115
|
||||
If you're using Manjaro or [another distribution that packages `yay`](https://repology.org/project/yay/versions)
|
||||
you can simply install yay using pacman (as root):
|
||||
|
||||
#### 2.296
|
||||
```sh
|
||||
pacman -S --needed git base-devel yay
|
||||
```
|
||||
> [!WARNING]
|
||||
> distributions sometimes lag updating yay on their repositories.
|
||||
|
||||
* New argument parsing @Morganamilo (check manpage or --help for new
|
||||
information)
|
||||
* yay -Qstats changed to yay -Ps or yay -P --stats
|
||||
* yay -Cd changed to yay -Yc or yay -Y --clean
|
||||
* yay -Pu (--upgrades) prints update list
|
||||
* yay -Pn (--numberupgrades) prints number of updates
|
||||
* yay -G also possible through -Yg or -Y --getpkgbuild (yay -G will be
|
||||
discontinued once it's possible to add options to the getpkgbuild operation)
|
||||
* yay now counts from 1 instead of 0 @Morganamilo
|
||||
* Support for ranges when selecting packages @samosaara
|
||||
* Pacaur style ask all questions first and download first @Morganamilo
|
||||
* Updated vendor dependencies (Fixes pacman.conf parsing errors and PKGBUILD
|
||||
parsing errors)
|
||||
* Updated completions
|
||||
## First Use
|
||||
|
||||
#### 2.219
|
||||
#### Development packages upgrade
|
||||
|
||||
* Updated manpage
|
||||
* Updated --help
|
||||
* Fixed AUR update fails with large number of packages #59
|
||||
* Check if package is already in upgrade list and skip it. #60
|
||||
* Add -V and -h for flag parsing @AnthonyLam
|
||||
* Prevent file corruption by truncating the files @maximbaz
|
||||
* Print VCS error details @maximbaz
|
||||
* Using '-' doesn't raise an error @PietroCarrara
|
||||
* use Command.Dir in aur.PkgInstall; Fixes #32 #47 @afg984
|
||||
* Suffix YayConf.BuildDir with uid to avoid permission issues @afg984 (Not included in last changelog)
|
||||
- Use `yay -Y --gendb` to generate a development package database for `*-git`
|
||||
packages that were installed without yay.
|
||||
This command should only be run once.
|
||||
|
||||
#### 2.200
|
||||
- `yay -Syu --devel` will then check for development package updates
|
||||
|
||||
* Development github package support readded
|
||||
- Use `yay -Y --devel --save` to make development package updates permanently
|
||||
enabled (`yay` and `yay -Syu` will then always check dev packages)
|
||||
|
||||
#### 2.196
|
||||
## Examples of Custom Operations
|
||||
|
||||
* XDG_CONFIG_HOME support
|
||||
* XDG_CACHE_HOME support
|
||||
| Command | Description |
|
||||
| --------------------------------- | ---------------------------------------------------------------------------------------------------------- |
|
||||
| `yay` | Alias to `yay -Syu`. |
|
||||
| `yay <Search Term>` | Present package-installation selection menu. |
|
||||
| `yay -Bi <dir>` | Install dependencies and build a local PKGBUILD. |
|
||||
| `yay -G <AUR Package>` | Download PKGBUILD from ABS or AUR. (yay v12.0+) |
|
||||
| `yay -Gp <AUR Package>` | Print to stdout PKGBUILD from ABS or AUR. |
|
||||
| `yay -Ps` | Print system statistics. |
|
||||
| `yay -Syu --devel` | Perform system upgrade, but also check for development package updates. |
|
||||
| `yay -Syu --timeupdate` | Perform system upgrade and use PKGBUILD modification time (not version number) to determine update. |
|
||||
| `yay -Wu <AUR Package>` | Unvote for package (Requires setting `AUR_USERNAME` and `AUR_PASSWORD` environment variables) (yay v11.3+) |
|
||||
| `yay -Wv <AUR Package>` | Vote for package (Requires setting `AUR_USERNAME` and `AUR_PASSWORD` environment variables). (yay v11.3+) |
|
||||
| `yay -Y --combinedupgrade --save` | Make combined upgrade the default mode. |
|
||||
| `yay -Y --gendb` | Generate development package database used for devel update. |
|
||||
| `yay -Yc` | Clean unneeded dependencies. |
|
||||
|
||||
#### 2.165
|
||||
## Frequently Asked Questions
|
||||
|
||||
* Upgrade list now allows skipping upgrade install
|
||||
- **Yay does not display colored output. How do I fix it?**
|
||||
|
||||
#### 2.159
|
||||
Make sure you have the `Color` option in your `/etc/pacman.conf`
|
||||
(see issue [#123](https://github.com/Jguer/yay/issues/123)).
|
||||
|
||||
* Qstats now warns about packages not available in AUR
|
||||
- **Sometimes diffs are printed to the terminal, and other times they are paged via less. How do I fix this?**
|
||||
|
||||
#### 2.152
|
||||
Yay uses `git diff` to display diffs, which by default tells less not to
|
||||
page if the output can fit into one terminal length. This behavior can be
|
||||
overridden by exporting your own flags (`export LESS=SRX`).
|
||||
|
||||
* Fetching backend changed to Mikkel Oscar's [Aur](https://github.com/mikkeloscar/aur)
|
||||
* Added support for development packages from github.
|
||||
* Pacman backend rewritten and simplified
|
||||
* Added config framework.
|
||||
- **Yay is not asking me to edit PKGBUILDS, and I don't like the diff menu! What can I do?**
|
||||
|
||||
#### 1.115
|
||||
`yay --editmenu --diffmenu=false --save`
|
||||
|
||||
* Added AUR completions (updates on first completion every 48h)
|
||||
- **How can I tell Yay to act only on AUR packages, or only on repo packages?**
|
||||
|
||||
#### 1.101
|
||||
`yay -{OPERATION} --aur`
|
||||
`yay -{OPERATION} --repo`
|
||||
|
||||
* Search speed and quality improved [#3](https://github.com/Jguer/yay/issues/3)
|
||||
- **A `Flagged Out Of Date AUR Packages` message is displayed. Why doesn't Yay update them?**
|
||||
|
||||
#### 1.100
|
||||
This message does not mean that updated AUR packages are available. It means
|
||||
the packages have been flagged out of date on the AUR, but
|
||||
their maintainers have not yet updated the `PKGBUILD`s
|
||||
(see [outdated AUR packages](https://wiki.archlinux.org/index.php/Arch_User_Repository#Foo_in_the_AUR_is_outdated.3B_what_should_I_do.3F)).
|
||||
|
||||
* Added manpage
|
||||
* Improved search [#3](https://github.com/Jguer/yay/issues/3)
|
||||
* Added -G to get pkgbuild from the AUR or ABS. [#6](https://github.com/Jguer/yay/issues/6)
|
||||
* Fixed [#8](https://github.com/Jguer/yay/issues/8)
|
||||
* Completed and decluttered zsh completions
|
||||
* If `$EDITOR` or `$VISUAL` is not set yay will prompt you for an editor [#7](https://github.com/Jguer/yay/issues/7)
|
||||
- **Yay doesn't install dependencies added to a PKGBUILD during installation.**
|
||||
|
||||
#### 1.91
|
||||
Yay resolves all dependencies ahead of time. You are free to edit the
|
||||
PKGBUILD in any way, but any problems you cause are your own and should not be
|
||||
reported unless they can be reproduced with the original PKGBUILD.
|
||||
|
||||
* `--downtop` has been replaced with `--bottomup` (as is logical)
|
||||
* `yay -Ssq` and `yay -Sqs` now displays AUR packages with less information
|
||||
* Repository search now uses the same criteria as pacman
|
||||
- **I know my `-git` package has updates but yay doesn't offer to update it**
|
||||
|
||||
#### 1.85
|
||||
Yay uses a hash cache for development packages. Normally it is updated at the end of the package install with the message `Found git repo`.
|
||||
If you transition between aur helpers and did not install the devel package using yay at some point, it is possible it never got added to the cache. `yay -Y --gendb` will fix the current version of every devel package and start checking from there.
|
||||
|
||||
* yay now does -Si for AUR packages
|
||||
* Fixed package install bugs
|
||||
- **I want to help out!**
|
||||
|
||||
#### 1.83
|
||||
Check [CONTRIBUTING.md](./CONTRIBUTING.md) for more information.
|
||||
|
||||
* Added new dependency resolver for future features
|
||||
* Sort package statistics
|
||||
## Support
|
||||
|
||||
#### 1.80
|
||||
All support related to Yay should be requested via GitHub issues. Since Yay is not
|
||||
officially supported by Arch Linux, support should not be sought out on the
|
||||
forums, AUR comments or other official channels.
|
||||
|
||||
* yay now warns when installing orphan packages
|
||||
* Added orphan status to number menu
|
||||
* Qstats now checks if system has orphan packages installed
|
||||
A broken AUR package should be reported as a comment on the package's AUR page.
|
||||
A package may only be considered broken if it fails to build with makepkg.
|
||||
|
||||
#### 1.78
|
||||
Reports should be made using makepkg and include the full output as well as any
|
||||
other relevant information. Never make reports using Yay or any other external
|
||||
tools.
|
||||
|
||||
* Added foreign package statistics to Qstats
|
||||
* Group installing is now possible
|
||||
* Better handling of package dependency installing
|
||||
## Images
|
||||
|
||||
#### 1.76
|
||||
<p align="center">
|
||||
<img src="https://raw.githubusercontent.com/Jguer/jguer.github.io/refs/heads/master/yay/yay.png" width="42%">
|
||||
<img src="https://raw.githubusercontent.com/Jguer/jguer.github.io/refs/heads/master/yay/yay-s.png" width="42%">
|
||||
</p>
|
||||
|
||||
* Fixed critical bug that prevented AUR dependencies from being installed.
|
||||
<p align="center">
|
||||
<img src="https://raw.githubusercontent.com/Jguer/jguer.github.io/refs/heads/master/yay/yay-y.png" width="42%">
|
||||
<img src="https://raw.githubusercontent.com/Jguer/jguer.github.io/refs/heads/master/yay/yay-ps.png" width="42%">
|
||||
</p>
|
||||
|
||||
#### 1.70
|
||||
### Other AUR helpers/tools
|
||||
|
||||
* Stable for everyday use
|
||||
* Bottom up package display
|
||||
* Number menu like yaourt/apacman
|
||||
* System package statistics
|
||||
- [paru](https://github.com/morganamilo/paru)
|
||||
- [aurutils](https://github.com/AladW/aurutils)
|
||||
- [pikaur](https://github.com/actionless/pikaur)
|
||||
|
13
SECURITY.md
Normal file
13
SECURITY.md
Normal file
@ -0,0 +1,13 @@
|
||||
# Security Policy
|
||||
|
||||
Thank you for helping keep yay secure!
|
||||
|
||||
## Supported Versions
|
||||
|
||||
We only provide security updates and support for the latest released version of yay. Please ensure you are using the most up-to-date version before reporting vulnerabilities.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
If you discover a security vulnerability, please email us at [security@jguer.space](mailto:security@jguer.space). We will respond as quickly as possible and coordinate a fix.
|
||||
|
||||
We appreciate responsible disclosure and your help in making this project safe for everyone.
|
86
callbacks.go
86
callbacks.go
@ -1,86 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
alpm "github.com/jguer/go-alpm"
|
||||
"os"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
func questionCallback(question alpm.QuestionAny) {
|
||||
qi, err := question.QuestionInstallIgnorepkg()
|
||||
if err == nil {
|
||||
qi.SetInstall(true)
|
||||
}
|
||||
|
||||
qp, err := question.QuestionSelectProvider()
|
||||
if err == nil {
|
||||
size := 0
|
||||
|
||||
qp.Providers(alpmHandle).ForEach(func(pkg alpm.Package) error {
|
||||
size++
|
||||
return nil
|
||||
})
|
||||
|
||||
fmt.Print(bold(cyan(":: ")))
|
||||
str := bold(fmt.Sprintf(bold("There are %d providers avalable for %s:"), size, qp.Dep()))
|
||||
|
||||
size = 1
|
||||
var db string
|
||||
|
||||
qp.Providers(alpmHandle).ForEach(func(pkg alpm.Package) error {
|
||||
thisDb := pkg.DB().Name()
|
||||
|
||||
if db != thisDb {
|
||||
db = thisDb
|
||||
str += bold(cyan("\n:: ")) + bold("Repository "+db+"\n\t")
|
||||
}
|
||||
str += fmt.Sprintf("%d) %s ", size, pkg.Name())
|
||||
size++
|
||||
return nil
|
||||
})
|
||||
|
||||
fmt.Println(str)
|
||||
|
||||
for {
|
||||
fmt.Print("\nEnter a number (default=1): ")
|
||||
|
||||
if config.NoConfirm {
|
||||
fmt.Println()
|
||||
break
|
||||
}
|
||||
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
numberBuf, overflow, err := reader.ReadLine()
|
||||
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
break
|
||||
}
|
||||
|
||||
if overflow {
|
||||
fmt.Println("Input too long")
|
||||
continue
|
||||
}
|
||||
|
||||
if string(numberBuf) == "" {
|
||||
break
|
||||
}
|
||||
|
||||
num, err := strconv.Atoi(string(numberBuf))
|
||||
if err != nil {
|
||||
fmt.Printf("%s invalid number: %s\n", red("error:"), string(numberBuf))
|
||||
continue
|
||||
}
|
||||
|
||||
if num < 1 || num > size {
|
||||
fmt.Printf("%s invalid value: %d is not between %d and %d\n", red("error:"), num, 1, size)
|
||||
continue
|
||||
}
|
||||
|
||||
qp.SetUseIndex(num - 1)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
15
ci.Dockerfile
Normal file
15
ci.Dockerfile
Normal file
@ -0,0 +1,15 @@
|
||||
FROM docker.io/ljmf00/archlinux:devel
|
||||
LABEL maintainer="Jguer,docker@jguer.space"
|
||||
|
||||
ENV GO111MODULE=on
|
||||
WORKDIR /app
|
||||
|
||||
RUN sed -i '/^\[community\]/,/^\[/ s/^/#/' /etc/pacman.conf
|
||||
|
||||
COPY go.mod .
|
||||
|
||||
RUN pacman-key --init && pacman -Sy && pacman -S --overwrite=* --noconfirm archlinux-keyring && \
|
||||
pacman -Su --overwrite=* --needed --noconfirm pacman doxygen meson asciidoc go git gcc make sudo base-devel && \
|
||||
rm -rfv /var/cache/pacman/* /var/lib/pacman/sync/* && \
|
||||
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s v2.1.5 && \
|
||||
go mod download
|
230
clean.go
230
clean.go
@ -1,53 +1,207 @@
|
||||
package main
|
||||
|
||||
// GetPkgbuild gets the pkgbuild of the package 'pkg' trying the ABS first and then the AUR trying the ABS first and then the AUR.
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
// RemovePackage removes package from VCS information
|
||||
func removeVCSPackage(pkgs []string) {
|
||||
updated := false
|
||||
"github.com/Jguer/aur"
|
||||
mapset "github.com/deckarep/golang-set/v2"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
for _, pkgName := range pkgs {
|
||||
_, ok := savedInfo[pkgName]
|
||||
if ok {
|
||||
delete(savedInfo, pkgName)
|
||||
updated = true
|
||||
}
|
||||
}
|
||||
|
||||
if updated {
|
||||
saveVCSInfo()
|
||||
}
|
||||
}
|
||||
|
||||
// CleanDependencies removes all dangling dependencies in system
|
||||
func cleanDependencies() error {
|
||||
hanging, err := hangingPackages()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
)
|
||||
|
||||
// CleanDependencies removes all dangling dependencies in system.
|
||||
func cleanDependencies(ctx context.Context, cfg *settings.Configuration,
|
||||
cmdBuilder exe.ICmdBuilder, cmdArgs *parser.Arguments, dbExecutor db.Executor,
|
||||
removeOptional bool,
|
||||
) error {
|
||||
hanging := hangingPackages(removeOptional, dbExecutor)
|
||||
if len(hanging) != 0 {
|
||||
if !continueTask("Confirm Removal?", "nN") {
|
||||
return nil
|
||||
}
|
||||
err = cleanRemove(hanging)
|
||||
return cleanRemove(ctx, cfg, cmdBuilder, cmdArgs, hanging)
|
||||
}
|
||||
|
||||
return err
|
||||
return nil
|
||||
}
|
||||
|
||||
// CleanRemove sends a full removal command to pacman with the pkgName slice
|
||||
func cleanRemove(pkgNames []string) (err error) {
|
||||
// CleanRemove sends a full removal command to pacman with the pkgName slice.
|
||||
func cleanRemove(ctx context.Context, cfg *settings.Configuration,
|
||||
cmdBuilder exe.ICmdBuilder, cmdArgs *parser.Arguments, pkgNames []string,
|
||||
) error {
|
||||
if len(pkgNames) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
oldvalue := config.NoConfirm
|
||||
config.NoConfirm = true
|
||||
arguments := makeArguments()
|
||||
arguments.addArg("R")
|
||||
arguments.addTarget(pkgNames...)
|
||||
err = passToPacman(arguments)
|
||||
config.NoConfirm = oldvalue
|
||||
return err
|
||||
arguments := cmdArgs.CopyGlobal()
|
||||
if err := arguments.AddArg("R", "s", "u"); err != nil {
|
||||
return err
|
||||
}
|
||||
arguments.AddTarget(pkgNames...)
|
||||
|
||||
return cmdBuilder.Show(
|
||||
cmdBuilder.BuildPacmanCmd(ctx,
|
||||
arguments, cfg.Mode, settings.NoConfirm))
|
||||
}
|
||||
|
||||
func syncClean(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
|
||||
keepInstalled := false
|
||||
keepCurrent := false
|
||||
|
||||
_, removeAll, _ := cmdArgs.GetArg("c", "clean")
|
||||
|
||||
for _, v := range run.PacmanConf.CleanMethod {
|
||||
switch v {
|
||||
case "KeepInstalled":
|
||||
keepInstalled = true
|
||||
case "KeepCurrent":
|
||||
keepCurrent = true
|
||||
}
|
||||
}
|
||||
|
||||
if run.Cfg.Mode.AtLeastRepo() {
|
||||
if err := run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if !run.Cfg.Mode.AtLeastAUR() {
|
||||
return nil
|
||||
}
|
||||
|
||||
var question string
|
||||
if removeAll {
|
||||
question = gotext.Get("Do you want to remove ALL AUR packages from cache?")
|
||||
} else {
|
||||
question = gotext.Get("Do you want to remove all other AUR packages from cache?")
|
||||
}
|
||||
|
||||
run.Logger.Println(gotext.Get("\nBuild directory:"), run.Cfg.BuildDir)
|
||||
|
||||
if run.Logger.ContinueTask(question, true, settings.NoConfirm) {
|
||||
if err := cleanAUR(ctx, run, keepInstalled, keepCurrent, removeAll, dbExecutor); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if removeAll {
|
||||
return nil
|
||||
}
|
||||
|
||||
if run.Logger.ContinueTask(gotext.Get("Do you want to remove ALL untracked AUR files?"), true, settings.NoConfirm) {
|
||||
return cleanUntracked(ctx, run)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func cleanAUR(ctx context.Context, run *runtime.Runtime,
|
||||
keepInstalled, keepCurrent, removeAll bool, dbExecutor db.Executor,
|
||||
) error {
|
||||
run.Logger.Println(gotext.Get("removing AUR packages from cache..."))
|
||||
|
||||
installedBases := mapset.NewThreadUnsafeSet[string]()
|
||||
inAURBases := mapset.NewThreadUnsafeSet[string]()
|
||||
|
||||
remotePackages := dbExecutor.InstalledRemotePackages()
|
||||
|
||||
files, err := os.ReadDir(run.Cfg.BuildDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
cachedPackages := make([]string, 0, len(files))
|
||||
|
||||
for _, file := range files {
|
||||
if !file.IsDir() {
|
||||
continue
|
||||
}
|
||||
|
||||
cachedPackages = append(cachedPackages, file.Name())
|
||||
}
|
||||
|
||||
// Most people probably don't use keep current and that is the only
|
||||
// case where this is needed.
|
||||
// Querying the AUR is slow and needs internet so don't do it if we
|
||||
// don't need to.
|
||||
if keepCurrent {
|
||||
info, errInfo := run.AURClient.Get(ctx, &aur.Query{
|
||||
Needles: cachedPackages,
|
||||
})
|
||||
if errInfo != nil {
|
||||
return errInfo
|
||||
}
|
||||
|
||||
for i := range info {
|
||||
inAURBases.Add(info[i].PackageBase)
|
||||
}
|
||||
}
|
||||
|
||||
for _, pkg := range remotePackages {
|
||||
if pkg.Base() != "" {
|
||||
installedBases.Add(pkg.Base())
|
||||
} else {
|
||||
installedBases.Add(pkg.Name())
|
||||
}
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
if !file.IsDir() {
|
||||
continue
|
||||
}
|
||||
|
||||
if !removeAll {
|
||||
if keepInstalled && installedBases.Contains(file.Name()) {
|
||||
continue
|
||||
}
|
||||
|
||||
if keepCurrent && inAURBases.Contains(file.Name()) {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
dir := filepath.Join(run.Cfg.BuildDir, file.Name())
|
||||
run.Logger.Debugln("removing", dir)
|
||||
if err = os.RemoveAll(dir); err != nil {
|
||||
run.Logger.Warnln(gotext.Get("Unable to remove %s: %s", dir, err))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func cleanUntracked(ctx context.Context, run *runtime.Runtime) error {
|
||||
run.Logger.Println(gotext.Get("removing untracked AUR files from cache..."))
|
||||
|
||||
files, err := os.ReadDir(run.Cfg.BuildDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
if !file.IsDir() {
|
||||
continue
|
||||
}
|
||||
|
||||
dir := filepath.Join(run.Cfg.BuildDir, file.Name())
|
||||
run.Logger.Debugln("cleaning", dir)
|
||||
if isGitRepository(dir) {
|
||||
if err := run.CmdBuilder.Show(run.CmdBuilder.BuildGitCmd(ctx, dir, "clean", "-fx")); err != nil {
|
||||
run.Logger.Warnln(gotext.Get("Unable to clean:"), dir)
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func isGitRepository(dir string) bool {
|
||||
_, err := os.Stat(filepath.Join(dir, ".git"))
|
||||
return !os.IsNotExist(err)
|
||||
}
|
||||
|
116
clean_test.go
Normal file
116
clean_test.go
Normal file
@ -0,0 +1,116 @@
|
||||
//go:build !integration
|
||||
// +build !integration
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/Jguer/go-alpm/v2"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db/mock"
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
)
|
||||
|
||||
func TestCleanHanging(t *testing.T) {
|
||||
pacmanBin := t.TempDir() + "/pacman"
|
||||
|
||||
t.Parallel()
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
args []string
|
||||
wantShow []string
|
||||
}{
|
||||
{
|
||||
name: "clean",
|
||||
args: []string{"Y", "c"},
|
||||
wantShow: []string{"pacman", "-R", "-s", "-u", "--config", "/etc/pacman.conf", "--", "lsp-plugins"},
|
||||
},
|
||||
{
|
||||
name: "clean double",
|
||||
args: []string{"Y", "c", "c"},
|
||||
wantShow: []string{"pacman", "-R", "-s", "-u", "--config", "/etc/pacman.conf", "--", "lsp-plugins", "linux-headers"},
|
||||
},
|
||||
}
|
||||
|
||||
dbExc := &mock.DBExecutor{
|
||||
PackageOptionalDependsFn: func(i alpm.IPackage) []alpm.Depend {
|
||||
if i.Name() == "linux" {
|
||||
return []alpm.Depend{
|
||||
{
|
||||
Name: "linux-headers",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return []alpm.Depend{}
|
||||
},
|
||||
PackageProvidesFn: func(p alpm.IPackage) []alpm.Depend { return []alpm.Depend{} },
|
||||
PackageDependsFn: func(p alpm.IPackage) []alpm.Depend { return []alpm.Depend{} },
|
||||
LocalPackagesFn: func() []mock.IPackage {
|
||||
return []mock.IPackage{
|
||||
&mock.Package{
|
||||
PReason: alpm.PkgReasonExplicit,
|
||||
PName: "linux",
|
||||
},
|
||||
&mock.Package{
|
||||
PReason: alpm.PkgReasonDepend,
|
||||
PName: "lsp-plugins",
|
||||
},
|
||||
&mock.Package{
|
||||
PReason: alpm.PkgReasonDepend,
|
||||
PName: "linux-headers",
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
mockRunner := &exe.MockRunner{
|
||||
CaptureFn: func(cmd *exec.Cmd) (stdout string, stderr string, err error) {
|
||||
return "", "", nil
|
||||
},
|
||||
ShowFn: func(cmd *exec.Cmd) error { return nil },
|
||||
}
|
||||
cmdBuilder := &exe.CmdBuilder{
|
||||
SudoBin: "su",
|
||||
PacmanBin: pacmanBin,
|
||||
PacmanConfigPath: "/etc/pacman.conf",
|
||||
GitBin: "git",
|
||||
Runner: mockRunner,
|
||||
SudoLoopEnabled: false,
|
||||
}
|
||||
|
||||
run := &runtime.Runtime{CmdBuilder: cmdBuilder, Cfg: &settings.Configuration{}}
|
||||
cmdArgs := parser.MakeArguments()
|
||||
cmdArgs.AddArg(tc.args...)
|
||||
|
||||
err := handleCmd(context.Background(),
|
||||
run, cmdArgs, dbExc,
|
||||
)
|
||||
|
||||
require.NoError(t, err)
|
||||
|
||||
for i, call := range mockRunner.ShowCalls {
|
||||
show := call.Args[0].(*exec.Cmd).String()
|
||||
show = strings.ReplaceAll(show, pacmanBin, "pacman")
|
||||
|
||||
// options are in a different order on different systems and on CI root user is used
|
||||
assert.Subset(t, strings.Split(show, " "),
|
||||
strings.Split(tc.wantShow[i], " "),
|
||||
fmt.Sprintf("%d - %s", i, show))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
821
cmd.go
821
cmd.go
@ -2,19 +2,33 @@ package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strconv"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
alpm "github.com/Jguer/go-alpm/v2"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/completion"
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
"github.com/Jguer/yay/v12/pkg/download"
|
||||
"github.com/Jguer/yay/v12/pkg/intrange"
|
||||
"github.com/Jguer/yay/v12/pkg/news"
|
||||
"github.com/Jguer/yay/v12/pkg/query"
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
"github.com/Jguer/yay/v12/pkg/upgrade"
|
||||
"github.com/Jguer/yay/v12/pkg/vcs"
|
||||
)
|
||||
|
||||
var cmdArgs = makeArguments()
|
||||
|
||||
func usage() {
|
||||
fmt.Println(`Usage:
|
||||
func usage(logger *text.Logger) {
|
||||
logger.Println(`Usage:
|
||||
yay
|
||||
yay <operation> [...]
|
||||
yay <package(s)>
|
||||
|
||||
@ -30,493 +44,424 @@ operations:
|
||||
yay {-U --upgrade} [options] <file(s)>
|
||||
|
||||
New operations:
|
||||
yay {-B --build} [options] [dir]
|
||||
yay {-G --getpkgbuild} [options] [package(s)]
|
||||
yay {-P --show} [options]
|
||||
yay {-W --web} [options] [package(s)]
|
||||
yay {-Y --yay} [options] [package(s)]
|
||||
yay {-P --print} [options]
|
||||
yay {-G --getpkgbuild} [package(s)]
|
||||
|
||||
If no operation is specified 'yay -Syu' will be performed
|
||||
If no operation is specified and targets are provided -Y will be assumed
|
||||
|
||||
New options:
|
||||
-N --repo Assume targets are from the repositories
|
||||
-a --aur Assume targets are from the AUR
|
||||
|
||||
Permanent configuration options:
|
||||
--save Causes the following options to be saved back to the
|
||||
config file when used
|
||||
--save Causes the following options to be saved back to the
|
||||
config file when used
|
||||
|
||||
--builddir <dir> Directory to use for building AUR Packages
|
||||
--editor <file> Editor to use when editing PKGBUILDs
|
||||
--makepkg <file> makepkg command to use
|
||||
--pacman <file> pacman command to use
|
||||
--tar <file> bsdtar command to use
|
||||
--git <file> git command to use
|
||||
--gpg <file> gpg command to use
|
||||
--config <file> pacman.conf file to use
|
||||
--aururl <url> Set an alternative AUR URL
|
||||
--aurrpcurl <url> Set an alternative URL for the AUR /rpc endpoint
|
||||
--builddir <dir> Directory used to download and run PKGBUILDS
|
||||
--editor <file> Editor to use when editing PKGBUILDs
|
||||
--editorflags <flags> Pass arguments to editor
|
||||
--makepkg <file> makepkg command to use
|
||||
--mflags <flags> Pass arguments to makepkg
|
||||
--pacman <file> pacman command to use
|
||||
--git <file> git command to use
|
||||
--gitflags <flags> Pass arguments to git
|
||||
--gpg <file> gpg command to use
|
||||
--gpgflags <flags> Pass arguments to gpg
|
||||
--config <file> pacman.conf file to use
|
||||
--makepkgconf <file> makepkg.conf file to use
|
||||
--nomakepkgconf Use the default makepkg.conf
|
||||
|
||||
--requestsplitn <n> Max amount of packages to query per AUR request
|
||||
--requestsplitn <n> Max amount of packages to query per AUR request
|
||||
--completioninterval <n> Time in days to refresh completion cache
|
||||
--sortby <field> Sort AUR results by a specific field during search
|
||||
--searchby <field> Search for packages using a specified field
|
||||
--answerclean <a> Set a predetermined answer for the clean build menu
|
||||
--answerdiff <a> Set a predetermined answer for the diff menu
|
||||
--answeredit <a> Set a predetermined answer for the edit pkgbuild menu
|
||||
--answerupgrade <a> Set a predetermined answer for the upgrade menu
|
||||
--noanswerclean Unset the answer for the clean build menu
|
||||
--noanswerdiff Unset the answer for the edit diff menu
|
||||
--noansweredit Unset the answer for the edit pkgbuild menu
|
||||
--noanswerupgrade Unset the answer for the upgrade menu
|
||||
--cleanmenu Give the option to clean build PKGBUILDS
|
||||
--diffmenu Give the option to show diffs for build files
|
||||
--editmenu Give the option to edit/view PKGBUILDS
|
||||
--askremovemake Ask to remove makedepends after install
|
||||
--askyesremovemake Ask to remove makedepends after install("Y" as default)
|
||||
--removemake Remove makedepends after install
|
||||
--noremovemake Don't remove makedepends after install
|
||||
|
||||
--topdown Shows repository's packages first and then AUR's
|
||||
--bottomup Shows AUR's packages first and then repository's
|
||||
--devel Check development packages during sysupgrade
|
||||
--nodevel Do not check development packages
|
||||
--afterclean Remove package sources after successful install
|
||||
--noafterclean Do not remove package sources after successful build
|
||||
--timeupdate Check package's AUR page for changes during sysupgrade
|
||||
--notimeupdate Do not checking of AUR page changes
|
||||
--redownload Always download pkgbuilds of targets
|
||||
--redownloadall Always download pkgbuilds of all AUR packages
|
||||
--noredownload Skip pkgbuild download if in cache and up to date
|
||||
--rebuild Always build target packages
|
||||
--rebuildall Always build all AUR packages
|
||||
--rebuildtree Always build all AUR packages even if installed
|
||||
--norebuild Skip package build if in cache and up to date
|
||||
--mflags <flags> Pass arguments to makepkg
|
||||
--gpgflags <flags> Pass arguments to gpg
|
||||
--sudoloop Loop sudo calls in the background to avoid timeout
|
||||
--nosudoloop Do not loop sudo calls in the background
|
||||
--cleanafter Remove package sources after successful install
|
||||
--keepsrc Keep pkg/ and src/ after building packages
|
||||
--bottomup Shows AUR's packages first and then repository's
|
||||
--topdown Shows repository's packages first and then AUR's
|
||||
--singlelineresults List each search result on its own line
|
||||
--doublelineresults List each search result on two lines, like pacman
|
||||
|
||||
Print specific options:
|
||||
-c --complete Used for completions
|
||||
-d --defaultconfig Print default yay configuration
|
||||
-g --config Print current yay configuration
|
||||
-n --numberupgrades Print number of updates
|
||||
-s --stats Display system package statistics
|
||||
-u --upgrades Print update list
|
||||
--devel Check development packages during sysupgrade
|
||||
--rebuild Always build target packages
|
||||
--rebuildall Always build all AUR packages
|
||||
--norebuild Skip package build if in cache and up to date
|
||||
--rebuildtree Always build all AUR packages even if installed
|
||||
--redownload Always download pkgbuilds of targets
|
||||
--noredownload Skip pkgbuild download if in cache and up to date
|
||||
--redownloadall Always download pkgbuilds of all AUR packages
|
||||
--provides Look for matching providers when searching for packages
|
||||
--pgpfetch Prompt to import PGP keys from PKGBUILDs
|
||||
--useask Automatically resolve conflicts using pacman's ask flag
|
||||
|
||||
Yay specific options:
|
||||
-c --clean Remove unneeded dependencies
|
||||
--gendb Generates development package DB used for updating
|
||||
--sudo <file> sudo command to use
|
||||
--sudoflags <flags> Pass arguments to sudo
|
||||
--sudoloop Loop sudo calls in the background to avoid timeout
|
||||
|
||||
If no operation is provided -Y will be assumed`)
|
||||
--timeupdate Check packages' AUR page for changes during sysupgrade
|
||||
|
||||
show specific options:
|
||||
-c --complete Used for completions
|
||||
-d --defaultconfig Print default yay configuration
|
||||
-g --currentconfig Print current yay configuration
|
||||
-s --stats Display system package statistics
|
||||
-w --news Print arch news
|
||||
|
||||
yay specific options:
|
||||
-c --clean Remove unneeded dependencies (-cc to ignore optdepends)
|
||||
--gendb Generates development package DB used for updating
|
||||
|
||||
getpkgbuild specific options:
|
||||
-f --force Force download for existing ABS packages
|
||||
-p --print Print pkgbuild of packages`)
|
||||
}
|
||||
|
||||
func sudoLoopBackground() {
|
||||
updateSudo()
|
||||
go sudoLoop()
|
||||
}
|
||||
|
||||
func sudoLoop() {
|
||||
for {
|
||||
updateSudo()
|
||||
time.Sleep(298 * time.Second)
|
||||
}
|
||||
}
|
||||
|
||||
func updateSudo() {
|
||||
for {
|
||||
cmd := exec.Command("sudo", "-v")
|
||||
cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr
|
||||
err := cmd.Run()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func handleCmd() (err error) {
|
||||
for option, value := range cmdArgs.options {
|
||||
if handleConfig(option, value) {
|
||||
cmdArgs.delArg(option)
|
||||
}
|
||||
func handleCmd(ctx context.Context, run *runtime.Runtime,
|
||||
cmdArgs *parser.Arguments, dbExecutor db.Executor,
|
||||
) error {
|
||||
if cmdArgs.ExistsArg("h", "help") {
|
||||
return handleHelp(ctx, run, cmdArgs)
|
||||
}
|
||||
|
||||
for option, value := range cmdArgs.globals {
|
||||
if handleConfig(option, value) {
|
||||
cmdArgs.delArg(option)
|
||||
}
|
||||
if run.Cfg.SudoLoop && cmdArgs.NeedRoot(run.Cfg.Mode) {
|
||||
run.CmdBuilder.SudoLoop()
|
||||
}
|
||||
|
||||
if shouldSaveConfig {
|
||||
config.saveConfig()
|
||||
}
|
||||
|
||||
if config.SudoLoop && cmdArgs.needRoot() {
|
||||
sudoLoopBackground()
|
||||
}
|
||||
|
||||
switch cmdArgs.op {
|
||||
switch cmdArgs.Op {
|
||||
case "V", "version":
|
||||
handleVersion()
|
||||
handleVersion(run.Logger)
|
||||
return nil
|
||||
case "D", "database":
|
||||
err = passToPacman(cmdArgs)
|
||||
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
|
||||
case "F", "files":
|
||||
err = passToPacman(cmdArgs)
|
||||
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
|
||||
case "Q", "query":
|
||||
err = handleQuery()
|
||||
return handleQuery(ctx, run, cmdArgs, dbExecutor)
|
||||
case "R", "remove":
|
||||
err = handleRemove()
|
||||
return handleRemove(ctx, run, cmdArgs, run.VCSStore)
|
||||
case "S", "sync":
|
||||
err = handleSync()
|
||||
return handleSync(ctx, run, cmdArgs, dbExecutor)
|
||||
case "T", "deptest":
|
||||
err = passToPacman(cmdArgs)
|
||||
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
|
||||
case "U", "upgrade":
|
||||
err = passToPacman(cmdArgs)
|
||||
return handleUpgrade(ctx, run, cmdArgs)
|
||||
case "B", "build":
|
||||
return handleBuild(ctx, run, dbExecutor, cmdArgs)
|
||||
case "G", "getpkgbuild":
|
||||
err = handleGetpkgbuild()
|
||||
case "P", "print":
|
||||
err = handlePrint()
|
||||
case "Y", "--yay":
|
||||
err = handleYay()
|
||||
default:
|
||||
//this means we allowed an op but not implement it
|
||||
//if this happens it an error in the code and not the usage
|
||||
err = fmt.Errorf("unhandled operation")
|
||||
return handleGetpkgbuild(ctx, run, cmdArgs, dbExecutor)
|
||||
case "P", "show":
|
||||
return handlePrint(ctx, run, cmdArgs, dbExecutor)
|
||||
case "Y", "yay":
|
||||
return handleYay(ctx, run, cmdArgs, run.CmdBuilder,
|
||||
dbExecutor, run.QueryBuilder)
|
||||
case "W", "web":
|
||||
return handleWeb(ctx, run, cmdArgs)
|
||||
}
|
||||
|
||||
return
|
||||
return errors.New(gotext.Get("unhandled operation"))
|
||||
}
|
||||
|
||||
func handleQuery() error {
|
||||
var err error
|
||||
// getFilter returns filter function which can keep packages which were only
|
||||
// explicitly installed or ones installed as dependencies for showing available
|
||||
// updates or their count.
|
||||
func getFilter(cmdArgs *parser.Arguments) (upgrade.Filter, error) {
|
||||
deps, explicit := cmdArgs.ExistsArg("d", "deps"), cmdArgs.ExistsArg("e", "explicit")
|
||||
|
||||
if cmdArgs.existsArg("u", "upgrades") {
|
||||
err = printUpdateList(cmdArgs)
|
||||
} else {
|
||||
err = passToPacman(cmdArgs)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
//this function should only set config options
|
||||
//but currently still uses the switch left over from old code
|
||||
//eventually this should be refactored out futher
|
||||
//my current plan is to have yay specific operations in its own operator
|
||||
//e.g. yay -Y --gendb
|
||||
//e.g yay -Yg
|
||||
func handleConfig(option, value string) bool {
|
||||
switch option {
|
||||
case "save":
|
||||
shouldSaveConfig = true
|
||||
case "afterclean":
|
||||
config.CleanAfter = true
|
||||
case "noafterclean":
|
||||
config.CleanAfter = false
|
||||
case "devel":
|
||||
config.Devel = true
|
||||
case "nodevel":
|
||||
config.Devel = false
|
||||
case "timeupdate":
|
||||
config.TimeUpdate = true
|
||||
case "notimeupdate":
|
||||
config.TimeUpdate = false
|
||||
case "topdown":
|
||||
config.SortMode = TopDown
|
||||
case "bottomup":
|
||||
config.SortMode = BottomUp
|
||||
case "noconfirm":
|
||||
config.NoConfirm = true
|
||||
case "redownload":
|
||||
config.ReDownload = "yes"
|
||||
case "redownloadall":
|
||||
config.ReDownload = "all"
|
||||
case "noredownload":
|
||||
config.ReDownload = "no"
|
||||
case "rebuild":
|
||||
config.ReBuild = "yes"
|
||||
case "rebuildall":
|
||||
config.ReBuild = "all"
|
||||
case "rebuildtree":
|
||||
config.ReBuild = "tree"
|
||||
case "norebuild":
|
||||
config.ReBuild = "no"
|
||||
case "gpgflags":
|
||||
config.GpgFlags = value
|
||||
case "mflags":
|
||||
config.MFlags = value
|
||||
case "builddir":
|
||||
config.BuildDir = value
|
||||
case "editor":
|
||||
config.Editor = value
|
||||
case "makepkg":
|
||||
config.MakepkgBin = value
|
||||
case "pacman":
|
||||
config.PacmanBin = value
|
||||
case "tar":
|
||||
config.TarBin = value
|
||||
case "git":
|
||||
config.GitBin = value
|
||||
case "gpg":
|
||||
config.GpgBin = value
|
||||
case "requestsplitn":
|
||||
n, err := strconv.Atoi(value)
|
||||
if err == nil && n > 0 {
|
||||
config.RequestSplitN = n
|
||||
}
|
||||
case "sudoloop":
|
||||
config.SudoLoop = true
|
||||
case "nosudoloop":
|
||||
config.SudoLoop = false
|
||||
default:
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func handleVersion() {
|
||||
fmt.Printf("yay v%s\n", version)
|
||||
}
|
||||
|
||||
func handlePrint() (err error) {
|
||||
switch {
|
||||
case cmdArgs.existsArg("d", "defaultconfig"):
|
||||
var tmpConfig Configuration
|
||||
defaultSettings(&tmpConfig)
|
||||
fmt.Printf("%v", tmpConfig)
|
||||
case cmdArgs.existsArg("g", "config"):
|
||||
fmt.Printf("%v", config)
|
||||
case cmdArgs.existsArg("n", "numberupgrades"):
|
||||
err = printNumberOfUpdates()
|
||||
case cmdArgs.existsArg("u", "upgrades"):
|
||||
err = printUpdateList(cmdArgs)
|
||||
case cmdArgs.existsArg("c", "complete"):
|
||||
switch {
|
||||
case cmdArgs.existsArg("f", "fish"):
|
||||
complete("fish")
|
||||
default:
|
||||
complete("sh")
|
||||
case deps && explicit:
|
||||
return nil, errors.New(gotext.Get("invalid option: '--deps' and '--explicit' may not be used together"))
|
||||
case deps:
|
||||
return func(pkg *upgrade.Upgrade) bool {
|
||||
return pkg.Reason == alpm.PkgReasonDepend
|
||||
}, nil
|
||||
case explicit:
|
||||
return func(pkg *upgrade.Upgrade) bool {
|
||||
return pkg.Reason == alpm.PkgReasonExplicit
|
||||
}, nil
|
||||
}
|
||||
|
||||
return func(pkg *upgrade.Upgrade) bool {
|
||||
return true
|
||||
}, nil
|
||||
}
|
||||
|
||||
func handleQuery(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
|
||||
if cmdArgs.ExistsArg("u", "upgrades") {
|
||||
filter, err := getFilter(cmdArgs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case cmdArgs.existsArg("s", "stats"):
|
||||
err = localStatistics()
|
||||
default:
|
||||
err = nil
|
||||
|
||||
return printUpdateList(ctx, run, cmdArgs, dbExecutor,
|
||||
cmdArgs.ExistsDouble("u", "sysupgrade"), filter)
|
||||
}
|
||||
|
||||
if err := run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm)); err != nil {
|
||||
if str := err.Error(); strings.Contains(str, "exit status") {
|
||||
// yay -Qdt should not output anything in case of error
|
||||
return fmt.Errorf("")
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func handleHelp(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments) error {
|
||||
usage(run.Logger)
|
||||
switch cmdArgs.Op {
|
||||
case "Y", "yay", "G", "getpkgbuild", "P", "show", "W", "web", "B", "build":
|
||||
return nil
|
||||
}
|
||||
|
||||
run.Logger.Println("\npacman operation specific options:")
|
||||
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
|
||||
}
|
||||
|
||||
func handleVersion(logger *text.Logger) {
|
||||
logger.Printf("yay v%s - libalpm v%s\n", yayVersion, alpm.Version())
|
||||
}
|
||||
|
||||
func handlePrint(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
|
||||
switch {
|
||||
case cmdArgs.ExistsArg("d", "defaultconfig"):
|
||||
tmpConfig := settings.DefaultConfig(yayVersion)
|
||||
run.Logger.Printf("%v", tmpConfig)
|
||||
|
||||
return nil
|
||||
case cmdArgs.ExistsArg("g", "currentconfig"):
|
||||
run.Logger.Printf("%v", run.Cfg)
|
||||
|
||||
return nil
|
||||
case cmdArgs.ExistsArg("w", "news"):
|
||||
double := cmdArgs.ExistsDouble("w", "news")
|
||||
quiet := cmdArgs.ExistsArg("q", "quiet")
|
||||
|
||||
return news.PrintNewsFeed(ctx, run.HTTPClient, run.Logger,
|
||||
dbExecutor.LastBuildTime(), run.Cfg.BottomUp, double, quiet)
|
||||
case cmdArgs.ExistsArg("c", "complete"):
|
||||
return completion.Show(ctx, run.HTTPClient, dbExecutor,
|
||||
run.Cfg.AURURL, run.Cfg.CompletionPath, run.Cfg.CompletionInterval, cmdArgs.ExistsDouble("c", "complete"))
|
||||
case cmdArgs.ExistsArg("s", "stats"):
|
||||
return localStatistics(ctx, run, dbExecutor)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func handleYay(ctx context.Context, run *runtime.Runtime,
|
||||
cmdArgs *parser.Arguments, cmdBuilder exe.ICmdBuilder,
|
||||
dbExecutor db.Executor, queryBuilder query.Builder,
|
||||
) error {
|
||||
switch {
|
||||
case cmdArgs.ExistsArg("gendb"):
|
||||
return createDevelDB(ctx, run, dbExecutor)
|
||||
case cmdArgs.ExistsDouble("c"):
|
||||
return cleanDependencies(ctx, run.Cfg, cmdBuilder, cmdArgs, dbExecutor, true)
|
||||
case cmdArgs.ExistsArg("c", "clean"):
|
||||
return cleanDependencies(ctx, run.Cfg, cmdBuilder, cmdArgs, dbExecutor, false)
|
||||
case len(cmdArgs.Targets) > 0:
|
||||
return displayNumberMenu(ctx, run, cmdArgs.Targets, dbExecutor, queryBuilder, cmdArgs)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func handleWeb(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments) error {
|
||||
switch {
|
||||
case cmdArgs.ExistsArg("v", "vote"):
|
||||
return handlePackageVote(ctx, cmdArgs.Targets, run.AURClient, run.Logger,
|
||||
run.VoteClient, true)
|
||||
case cmdArgs.ExistsArg("u", "unvote"):
|
||||
return handlePackageVote(ctx, cmdArgs.Targets, run.AURClient, run.Logger,
|
||||
run.VoteClient, false)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func handleGetpkgbuild(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, dbExecutor download.DBSearcher) error {
|
||||
if cmdArgs.ExistsArg("p", "print") {
|
||||
return printPkgbuilds(dbExecutor, run.AURClient,
|
||||
run.HTTPClient, run.Logger, cmdArgs.Targets, run.Cfg.Mode, run.Cfg.AURURL)
|
||||
}
|
||||
|
||||
return getPkgbuilds(ctx, dbExecutor, run.AURClient, run,
|
||||
cmdArgs.Targets, cmdArgs.ExistsArg("f", "force"))
|
||||
}
|
||||
|
||||
func handleUpgrade(ctx context.Context,
|
||||
run *runtime.Runtime, cmdArgs *parser.Arguments,
|
||||
) error {
|
||||
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
|
||||
}
|
||||
|
||||
// -B* options
|
||||
func handleBuild(ctx context.Context,
|
||||
run *runtime.Runtime, dbExecutor db.Executor, cmdArgs *parser.Arguments,
|
||||
) error {
|
||||
if cmdArgs.ExistsArg("i", "install") {
|
||||
return installLocalPKGBUILD(ctx, run, cmdArgs, dbExecutor)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func handleSync(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
|
||||
targets := cmdArgs.Targets
|
||||
|
||||
switch {
|
||||
case cmdArgs.ExistsArg("s", "search"):
|
||||
return syncSearch(ctx, targets, dbExecutor, run.QueryBuilder, !cmdArgs.ExistsArg("q", "quiet"))
|
||||
case cmdArgs.ExistsArg("p", "print", "print-format"):
|
||||
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
|
||||
case cmdArgs.ExistsArg("c", "clean"):
|
||||
return syncClean(ctx, run, cmdArgs, dbExecutor)
|
||||
case cmdArgs.ExistsArg("l", "list"):
|
||||
return syncList(ctx, run, run.HTTPClient, cmdArgs, dbExecutor)
|
||||
case cmdArgs.ExistsArg("g", "groups"):
|
||||
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
|
||||
case cmdArgs.ExistsArg("i", "info"):
|
||||
return syncInfo(ctx, run, cmdArgs, targets, dbExecutor)
|
||||
case cmdArgs.ExistsArg("u", "sysupgrade") || len(cmdArgs.Targets) > 0:
|
||||
return syncInstall(ctx, run, cmdArgs, dbExecutor)
|
||||
case cmdArgs.ExistsArg("y", "refresh"):
|
||||
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func handleRemove(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, localCache vcs.Store) error {
|
||||
err := run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
|
||||
if err == nil {
|
||||
localCache.RemovePackages(cmdArgs.Targets)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func handleYay() (err error) {
|
||||
//_, options, targets := cmdArgs.formatArgs()
|
||||
if cmdArgs.existsArg("h", "help") {
|
||||
usage()
|
||||
} else if cmdArgs.existsArg("gendb") {
|
||||
err = createDevelDB()
|
||||
} else if cmdArgs.existsArg("c", "clean") {
|
||||
err = cleanDependencies()
|
||||
} else if len(cmdArgs.targets) > 0 {
|
||||
err = handleYogurt()
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func handleGetpkgbuild() (err error) {
|
||||
err = getPkgbuilds(cmdArgs.formatTargets())
|
||||
return
|
||||
}
|
||||
|
||||
func handleYogurt() (err error) {
|
||||
options := cmdArgs.formatArgs()
|
||||
targets := cmdArgs.formatTargets()
|
||||
|
||||
config.SearchMode = NumberMenu
|
||||
err = numberMenu(targets, options)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func handleSync() (err error) {
|
||||
targets := cmdArgs.formatTargets()
|
||||
|
||||
if cmdArgs.existsArg("y", "refresh") {
|
||||
arguments := cmdArgs.copy()
|
||||
cmdArgs.delArg("y", "refresh")
|
||||
arguments.delArg("u", "sysupgrade")
|
||||
arguments.delArg("s", "search")
|
||||
arguments.delArg("i", "info")
|
||||
arguments.delArg("l", "list")
|
||||
arguments.targets = make(stringSet)
|
||||
err = passToPacman(arguments)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if cmdArgs.existsArg("s", "search") {
|
||||
if cmdArgs.existsArg("q", "quiet") {
|
||||
config.SearchMode = Minimal
|
||||
} else {
|
||||
config.SearchMode = Detailed
|
||||
}
|
||||
|
||||
err = syncSearch(targets)
|
||||
} else if cmdArgs.existsArg("l", "list") {
|
||||
err = passToPacman(cmdArgs)
|
||||
} else if cmdArgs.existsArg("c", "clean") {
|
||||
err = passToPacman(cmdArgs)
|
||||
} else if cmdArgs.existsArg("i", "info") {
|
||||
err = syncInfo(targets)
|
||||
} else if cmdArgs.existsArg("u", "sysupgrade") {
|
||||
err = install(cmdArgs)
|
||||
} else if len(cmdArgs.targets) > 0 {
|
||||
err = install(cmdArgs)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func handleRemove() (err error) {
|
||||
removeVCSPackage(cmdArgs.formatTargets())
|
||||
err = passToPacman(cmdArgs)
|
||||
return
|
||||
}
|
||||
|
||||
// NumberMenu presents a CLI for selecting packages to install.
|
||||
func numberMenu(pkgS []string, flags []string) (err error) {
|
||||
aurQ, err := narrowSearch(pkgS, true)
|
||||
if err != nil {
|
||||
fmt.Println("Error during AUR search:", err)
|
||||
}
|
||||
numaq := len(aurQ)
|
||||
repoQ, numpq, err := queryRepo(pkgS)
|
||||
if err != nil {
|
||||
return
|
||||
func displayNumberMenu(ctx context.Context, run *runtime.Runtime, pkgS []string, dbExecutor db.Executor,
|
||||
queryBuilder query.Builder, cmdArgs *parser.Arguments,
|
||||
) error {
|
||||
queryBuilder.Execute(ctx, dbExecutor, pkgS)
|
||||
|
||||
if err := queryBuilder.Results(dbExecutor, query.NumberMenu); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if numpq == 0 && numaq == 0 {
|
||||
return fmt.Errorf("no packages match search")
|
||||
if queryBuilder.Len() == 0 {
|
||||
// no results were found
|
||||
return nil
|
||||
}
|
||||
|
||||
if config.SortMode == BottomUp {
|
||||
aurQ.printSearch(numpq + 1)
|
||||
repoQ.printSearch()
|
||||
} else {
|
||||
repoQ.printSearch()
|
||||
aurQ.printSearch(numpq + 1)
|
||||
}
|
||||
|
||||
fmt.Println(bold(green(arrow + " Packages to install (eg: 1 2 3, 1-3 or ^4)")))
|
||||
fmt.Print(bold(green(arrow + " ")))
|
||||
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
numberBuf, overflow, err := reader.ReadLine()
|
||||
run.Logger.Infoln(gotext.Get("Packages to install (eg: 1 2 3, 1-3 or ^4)"))
|
||||
|
||||
numberBuf, err := run.Logger.GetInput("", false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if overflow {
|
||||
return fmt.Errorf("Input too long")
|
||||
include, exclude, _, otherExclude := intrange.ParseNumberMenu(numberBuf)
|
||||
|
||||
targets, err := queryBuilder.GetTargets(include, exclude, otherExclude)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
include, exclude, _, otherExclude := parseNumberMenu(string(numberBuf))
|
||||
arguments := makeArguments()
|
||||
// modify the arguments to pass for the install
|
||||
cmdArgs.Targets = targets
|
||||
|
||||
isInclude := len(exclude) == 0 && len(otherExclude) == 0
|
||||
|
||||
for i, pkg := range repoQ {
|
||||
target := len(repoQ) - i
|
||||
if config.SortMode == TopDown {
|
||||
target = i + 1
|
||||
}
|
||||
|
||||
if isInclude && include.get(target) {
|
||||
arguments.addTarget(pkg.DB().Name() + "/" + pkg.Name())
|
||||
}
|
||||
if !isInclude && !exclude.get(target) {
|
||||
arguments.addTarget(pkg.DB().Name() + "/" + pkg.Name())
|
||||
}
|
||||
if len(cmdArgs.Targets) == 0 {
|
||||
run.Logger.Println(gotext.Get(" there is nothing to do"))
|
||||
return nil
|
||||
}
|
||||
|
||||
for i, pkg := range aurQ {
|
||||
target := len(aurQ) - i + len(repoQ)
|
||||
if config.SortMode == TopDown {
|
||||
target = i + 1 + len(repoQ)
|
||||
}
|
||||
|
||||
if isInclude && include.get(target) {
|
||||
arguments.addTarget("aur/" + pkg.Name)
|
||||
}
|
||||
if !isInclude && !exclude.get(target) {
|
||||
arguments.addTarget("aur/" + pkg.Name)
|
||||
}
|
||||
}
|
||||
|
||||
if config.SudoLoop {
|
||||
sudoLoopBackground()
|
||||
}
|
||||
|
||||
err = install(arguments)
|
||||
|
||||
return err
|
||||
return syncInstall(ctx, run, cmdArgs, dbExecutor)
|
||||
}
|
||||
|
||||
// passToPacman outsources execution to pacman binary without modifications.
|
||||
func passToPacman(args *arguments) error {
|
||||
var cmd *exec.Cmd
|
||||
argArr := make([]string, 0)
|
||||
func syncList(ctx context.Context, run *runtime.Runtime,
|
||||
httpClient *http.Client, cmdArgs *parser.Arguments, dbExecutor db.Executor,
|
||||
) error {
|
||||
aur := false
|
||||
|
||||
if args.needRoot() {
|
||||
argArr = append(argArr, "sudo")
|
||||
for i := len(cmdArgs.Targets) - 1; i >= 0; i-- {
|
||||
if cmdArgs.Targets[i] == "aur" && run.Cfg.Mode.AtLeastAUR() {
|
||||
cmdArgs.Targets = append(cmdArgs.Targets[:i], cmdArgs.Targets[i+1:]...)
|
||||
aur = true
|
||||
}
|
||||
}
|
||||
|
||||
argArr = append(argArr, config.PacmanBin)
|
||||
argArr = append(argArr, cmdArgs.formatGlobals()...)
|
||||
argArr = append(argArr, args.formatArgs()...)
|
||||
if config.NoConfirm {
|
||||
argArr = append(argArr, "--noconfirm")
|
||||
if run.Cfg.Mode.AtLeastAUR() && (len(cmdArgs.Targets) == 0 || aur) {
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, run.Cfg.AURURL+"/packages.gz", http.NoBody)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
resp, err := httpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
scanner := bufio.NewScanner(resp.Body)
|
||||
|
||||
scanner.Scan()
|
||||
|
||||
for scanner.Scan() {
|
||||
name := scanner.Text()
|
||||
if cmdArgs.ExistsArg("q", "quiet") {
|
||||
run.Logger.Println(name)
|
||||
} else {
|
||||
run.Logger.Printf("%s %s %s", text.Magenta("aur"), text.Bold(name), text.Bold(text.Green(gotext.Get("unknown-version"))))
|
||||
|
||||
if dbExecutor.LocalPackage(name) != nil {
|
||||
run.Logger.Print(text.Bold(text.Blue(gotext.Get(" [Installed]"))))
|
||||
}
|
||||
|
||||
run.Logger.Println()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
argArr = append(argArr, "--")
|
||||
|
||||
argArr = append(argArr, args.formatTargets()...)
|
||||
|
||||
cmd = exec.Command(argArr[0], argArr[1:]...)
|
||||
|
||||
cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr
|
||||
err := cmd.Run()
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("")
|
||||
if run.Cfg.Mode.AtLeastRepo() && (len(cmdArgs.Targets) != 0 || !aur) {
|
||||
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
|
||||
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
//passToPacman but return the output instead of showing the user
|
||||
func passToPacmanCapture(args *arguments) (string, string, error) {
|
||||
var outbuf, errbuf bytes.Buffer
|
||||
var cmd *exec.Cmd
|
||||
argArr := make([]string, 0)
|
||||
|
||||
if args.needRoot() {
|
||||
argArr = append(argArr, "sudo")
|
||||
}
|
||||
|
||||
argArr = append(argArr, config.PacmanBin)
|
||||
argArr = append(argArr, cmdArgs.formatGlobals()...)
|
||||
argArr = append(argArr, args.formatArgs()...)
|
||||
if config.NoConfirm {
|
||||
argArr = append(argArr, "--noconfirm")
|
||||
}
|
||||
|
||||
argArr = append(argArr, "--")
|
||||
|
||||
argArr = append(argArr, args.formatTargets()...)
|
||||
|
||||
cmd = exec.Command(argArr[0], argArr[1:]...)
|
||||
cmd.Stdout = &outbuf
|
||||
cmd.Stderr = &errbuf
|
||||
|
||||
err := cmd.Run()
|
||||
stdout := outbuf.String()
|
||||
stderr := errbuf.String()
|
||||
|
||||
return stdout, stderr, err
|
||||
}
|
||||
|
||||
// passToMakepkg outsources execution to makepkg binary without modifications.
|
||||
func passToMakepkg(dir string, args ...string) (err error) {
|
||||
|
||||
if config.NoConfirm {
|
||||
args = append(args)
|
||||
}
|
||||
|
||||
mflags := strings.Fields(config.MFlags)
|
||||
args = append(args, mflags...)
|
||||
|
||||
cmd := exec.Command(config.MakepkgBin, args...)
|
||||
cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr
|
||||
cmd.Dir = dir
|
||||
err = cmd.Run()
|
||||
if err == nil {
|
||||
_ = saveVCSInfo()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
140
cmd_test.go
Normal file
140
cmd_test.go
Normal file
@ -0,0 +1,140 @@
|
||||
//go:build !integration
|
||||
// +build !integration
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/Jguer/aur"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db/mock"
|
||||
mockaur "github.com/Jguer/yay/v12/pkg/dep/mock"
|
||||
"github.com/Jguer/yay/v12/pkg/query"
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
"github.com/Jguer/yay/v12/pkg/vcs"
|
||||
)
|
||||
|
||||
func TestYogurtMenuAURDB(t *testing.T) {
|
||||
t.Skip("skip until Operation service is an interface")
|
||||
t.Parallel()
|
||||
makepkgBin := t.TempDir() + "/makepkg"
|
||||
pacmanBin := t.TempDir() + "/pacman"
|
||||
gitBin := t.TempDir() + "/git"
|
||||
f, err := os.OpenFile(makepkgBin, os.O_RDONLY|os.O_CREATE, 0o755)
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, f.Close())
|
||||
|
||||
f, err = os.OpenFile(pacmanBin, os.O_RDONLY|os.O_CREATE, 0o755)
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, f.Close())
|
||||
|
||||
f, err = os.OpenFile(gitBin, os.O_RDONLY|os.O_CREATE, 0o755)
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, f.Close())
|
||||
|
||||
captureOverride := func(cmd *exec.Cmd) (stdout string, stderr string, err error) {
|
||||
return "", "", nil
|
||||
}
|
||||
|
||||
showOverride := func(cmd *exec.Cmd) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
mockRunner := &exe.MockRunner{CaptureFn: captureOverride, ShowFn: showOverride}
|
||||
cmdBuilder := &exe.CmdBuilder{
|
||||
MakepkgBin: makepkgBin,
|
||||
SudoBin: "su",
|
||||
PacmanBin: pacmanBin,
|
||||
PacmanConfigPath: "/etc/pacman.conf",
|
||||
GitBin: "git",
|
||||
Runner: mockRunner,
|
||||
SudoLoopEnabled: false,
|
||||
}
|
||||
|
||||
cmdArgs := parser.MakeArguments()
|
||||
cmdArgs.AddArg("Y")
|
||||
cmdArgs.AddTarget("yay")
|
||||
|
||||
db := &mock.DBExecutor{
|
||||
AlpmArchitecturesFn: func() ([]string, error) {
|
||||
return []string{"x86_64"}, nil
|
||||
},
|
||||
RefreshHandleFn: func() error {
|
||||
return nil
|
||||
},
|
||||
ReposFn: func() []string {
|
||||
return []string{"aur"}
|
||||
},
|
||||
SyncPackagesFn: func(s ...string) []mock.IPackage {
|
||||
return []mock.IPackage{
|
||||
&mock.Package{
|
||||
PName: "yay",
|
||||
PBase: "yay",
|
||||
PVersion: "10.0.0",
|
||||
PDB: mock.NewDB("aur"),
|
||||
},
|
||||
}
|
||||
},
|
||||
LocalPackageFn: func(s string) mock.IPackage {
|
||||
return nil
|
||||
},
|
||||
}
|
||||
aurCache := &mockaur.MockAUR{
|
||||
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
|
||||
return []aur.Pkg{
|
||||
{
|
||||
Name: "yay",
|
||||
PackageBase: "yay",
|
||||
Version: "10.0.0",
|
||||
},
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
logger := text.NewLogger(io.Discard, os.Stderr, strings.NewReader("1\n"), true, "test")
|
||||
|
||||
run := &runtime.Runtime{
|
||||
Cfg: &settings.Configuration{
|
||||
RemoveMake: "no",
|
||||
},
|
||||
Logger: logger,
|
||||
CmdBuilder: cmdBuilder,
|
||||
VCSStore: &vcs.Mock{},
|
||||
QueryBuilder: query.NewSourceQueryBuilder(aurCache, logger, "votes", parser.ModeAny, "name",
|
||||
true, false, true),
|
||||
AURClient: aurCache,
|
||||
}
|
||||
err = handleCmd(context.Background(), run, cmdArgs, db)
|
||||
require.NoError(t, err)
|
||||
|
||||
wantCapture := []string{}
|
||||
wantShow := []string{
|
||||
"pacman -S -y --config /etc/pacman.conf --",
|
||||
"pacman -S -y -u --config /etc/pacman.conf --",
|
||||
}
|
||||
|
||||
require.Len(t, mockRunner.ShowCalls, len(wantShow))
|
||||
require.Len(t, mockRunner.CaptureCalls, len(wantCapture))
|
||||
|
||||
for i, call := range mockRunner.ShowCalls {
|
||||
show := call.Args[0].(*exec.Cmd).String()
|
||||
show = strings.ReplaceAll(show, makepkgBin, "makepkg")
|
||||
show = strings.ReplaceAll(show, pacmanBin, "pacman")
|
||||
show = strings.ReplaceAll(show, gitBin, "pacman")
|
||||
|
||||
// options are in a different order on different systems and on CI root user is used
|
||||
assert.Subset(t, strings.Split(show, " "), strings.Split(wantShow[i], " "), fmt.Sprintf("%d - %s", i, show))
|
||||
}
|
||||
}
|
101
completions.go
101
completions.go
@ -1,101 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
alpm "github.com/jguer/go-alpm"
|
||||
)
|
||||
|
||||
//CreateAURList creates a new completion file
|
||||
func createAURList(out *os.File, shell string) (err error) {
|
||||
resp, err := http.Get("https://aur.archlinux.org/packages.gz")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
scanner := bufio.NewScanner(resp.Body)
|
||||
|
||||
scanner.Scan()
|
||||
for scanner.Scan() {
|
||||
fmt.Print(scanner.Text())
|
||||
out.WriteString(scanner.Text())
|
||||
if shell == "fish" {
|
||||
fmt.Print("\tAUR\n")
|
||||
out.WriteString("\tAUR\n")
|
||||
} else {
|
||||
fmt.Print("\n")
|
||||
out.WriteString("\n")
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
//CreatePackageList appends Repo packages to completion cache
|
||||
func createRepoList(out *os.File, shell string) (err error) {
|
||||
dbList, err := alpmHandle.SyncDbs()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
_ = dbList.ForEach(func(db alpm.Db) error {
|
||||
_ = db.PkgCache().ForEach(func(pkg alpm.Package) error {
|
||||
fmt.Print(pkg.Name())
|
||||
out.WriteString(pkg.Name())
|
||||
if shell == "fish" {
|
||||
fmt.Print("\t" + pkg.DB().Name() + "\n")
|
||||
out.WriteString("\t" + pkg.DB().Name() + "\n")
|
||||
} else {
|
||||
fmt.Print("\n")
|
||||
out.WriteString("\n")
|
||||
}
|
||||
return nil
|
||||
})
|
||||
return nil
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
||||
// Complete provides completion info for shells
|
||||
func complete(shell string) error {
|
||||
var path string
|
||||
|
||||
if shell == "fish" {
|
||||
path = completionFile + "fish" + ".cache"
|
||||
} else {
|
||||
path = completionFile + "sh" + ".cache"
|
||||
}
|
||||
info, err := os.Stat(path)
|
||||
|
||||
if os.IsNotExist(err) || time.Since(info.ModTime()).Hours() > 48 {
|
||||
os.MkdirAll(filepath.Dir(completionFile), 0755)
|
||||
out, errf := os.Create(path)
|
||||
if errf != nil {
|
||||
return errf
|
||||
}
|
||||
|
||||
if createAURList(out, shell) != nil {
|
||||
defer os.Remove(path)
|
||||
}
|
||||
erra := createRepoList(out, shell)
|
||||
|
||||
out.Close()
|
||||
return erra
|
||||
}
|
||||
|
||||
in, err := os.OpenFile(path, os.O_RDWR|os.O_CREATE, 0644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer in.Close()
|
||||
|
||||
_, err = io.Copy(os.Stdout, in)
|
||||
return err
|
||||
}
|
132
completions/bash
132
completions/bash
@ -1,13 +1,13 @@
|
||||
# vim:fdm=marker foldlevel=0 tabstop=2 shiftwidth=2 filetype=bash
|
||||
# This file is in the public domain.
|
||||
|
||||
_arch_compgen() {
|
||||
local i r
|
||||
COMPREPLY=($(compgen -W '$*' -- "$cur"))
|
||||
for ((i=1; i < ${#COMP_WORDS[@]}-1; i++)); do
|
||||
for ((i = 1; i < ${#COMP_WORDS[@]} - 1; i++)); do
|
||||
for r in ${!COMPREPLY[@]}; do
|
||||
if [[ ${COMP_WORDS[i]} = ${COMPREPLY[r]} ]]; then
|
||||
unset 'COMPREPLY[r]'; break
|
||||
if [[ ${COMP_WORDS[i]} == ${COMPREPLY[r]} ]]; then
|
||||
unset 'COMPREPLY[r]'
|
||||
break
|
||||
fi
|
||||
done
|
||||
done
|
||||
@ -25,20 +25,8 @@ _arch_ptr2comp() {
|
||||
}
|
||||
|
||||
_arch_incomp() {
|
||||
local r="\s-(-${1#* }\s|\w*${1% *})"; [[ $COMP_LINE =~ $r ]]
|
||||
}
|
||||
|
||||
_pacman_keyids() {
|
||||
\pacman-key --list-keys 2>/dev/null | awk '
|
||||
$1 == "pub" {
|
||||
# key id
|
||||
split($2, a, "/"); print a[2]
|
||||
}
|
||||
$1 == "uid" {
|
||||
# email
|
||||
if (match($NF, /<[^>]+>/))
|
||||
print substr($NF, RSTART + 1, RLENGTH - 2)
|
||||
}'
|
||||
local r="[[:space:]]-(-${1#* }[[:space:]]|[[:alnum:]_]*${1% *})"
|
||||
[[ $COMP_LINE =~ $r ]]
|
||||
}
|
||||
|
||||
_pacman_pkg() {
|
||||
@ -51,64 +39,100 @@ _pacman_pkg() {
|
||||
)"
|
||||
}
|
||||
|
||||
_yay_pkg() {
|
||||
[ -z "$cur" ] && _pacman_pkg Slq && return
|
||||
_arch_compgen "$(yay -Pc)"
|
||||
}
|
||||
|
||||
_pacman_repo_list() {
|
||||
_arch_compgen "$(pacman-conf --repo-list)"
|
||||
}
|
||||
|
||||
_yay() {
|
||||
local common core cur database prev query remove sync upgrade yays print o
|
||||
COMPREPLY=()
|
||||
_get_comp_words_by_ref cur prev
|
||||
compopt -o default
|
||||
local common core cur database files prev query remove sync upgrade o
|
||||
local yays show getpkgbuild web
|
||||
local cur prev words cword
|
||||
|
||||
_init_completion || return
|
||||
database=('asdeps asexplicit')
|
||||
files=('list machinereadable owns search refresh regex' 'l o s x y')
|
||||
query=('changelog check deps explicit file foreign groups info list owns
|
||||
search unrequired upgrades' 'c e g i k l m o p s t u')
|
||||
files=('list machinereadable refresh regex' 'l x y')
|
||||
query=('changelog check deps explicit file foreign groups info list native owns
|
||||
search unrequired upgrades' 'c e g i k l m n o p s t u')
|
||||
remove=('cascade dbonly nodeps assume-installed nosave print recursive unneeded' 'c n p s u')
|
||||
sync=('asdeps asexplicit clean dbonly downloadonly force groups ignore ignoregroup
|
||||
info list needed nodeps assume-installed print refresh recursive search sysupgrade'
|
||||
'c g i l p s u w y')
|
||||
upgrade=('asdeps asexplicit force needed nodeps assume-installed print recursive' 'p')
|
||||
yays=('clean gendb' 'c')
|
||||
print=('complete defaultconfig config numberupgrades stats upgrades' 'c d g n
|
||||
s u')
|
||||
common=('arch cachedir color config confirm dbpath debug gpgdir help hookdir logfile
|
||||
noconfirm noprogressbar noscriptlet quiet save mflags buildir editor
|
||||
makepkg pacman tar git gpg gpgflags config requestsplitn sudoloop nosudoloop
|
||||
redownload noredownload redownloadall rebuild rebuildall rebuildtree norebuild root verbose' 'b d h q r v')
|
||||
sync=('asdeps asexplicit clean dbonly downloadonly overwrite groups ignore ignoregroup
|
||||
info list needed nodeps assume-installed print refresh recursive search sysupgrade aur repo'
|
||||
'c g i l p s u w y a N')
|
||||
upgrade=('asdeps asexplicit overwrite needed nodeps assume-installed print recursive' 'p')
|
||||
core=('database files help query remove sync upgrade version' 'D F Q R S U V h')
|
||||
|
||||
for o in 'D database' 'F files' 'Q query' 'R remove' 'S sync' 'U upgrade' 'Y yays' 'P print'; do
|
||||
##yay stuff
|
||||
common=('arch cachedir color config confirm dbpath debug gpgdir help hookdir logfile
|
||||
noconfirm noprogressbar noscriptlet quiet root verbose
|
||||
makepkg pacman git gpg gpgflags config requestsplitn sudoloop
|
||||
redownload noredownload redownloadall rebuild rebuildall rebuildtree norebuild sortby
|
||||
singlelineresults doublelineresults answerclean answerdiff answeredit answerupgrade noanswerclean noanswerdiff
|
||||
noansweredit noanswerupgrade cleanmenu diffmenu editmenu cleanafter keepsrc
|
||||
provides pgpfetch
|
||||
useask combinedupgrade aur repo makepkgconf
|
||||
nomakepkgconf askremovemake askyesremovemake removemake noremovemake completioninterval aururl aurrpcurl
|
||||
searchby batchinstall'
|
||||
'b d h q r v')
|
||||
yays=('clean gendb' 'c')
|
||||
show=('complete defaultconfig currentconfig stats news' 'c d g s w')
|
||||
getpkgbuild=('force print' 'f p')
|
||||
web=('vote unvote' 'v u')
|
||||
|
||||
for o in 'D database' 'F files' 'Q query' 'R remove' 'S sync' 'U upgrade' 'Y yays' 'P show' 'G getpkgbuild' 'W web'; do
|
||||
_arch_incomp "$o" && break
|
||||
done
|
||||
|
||||
if [[ $? != 0 ]]; then
|
||||
_arch_ptr2comp core
|
||||
elif [[ ! $prev =~ ^-\w*[Vbhr] &&
|
||||
! $prev = --@(cachedir|color|config|dbpath|help|hookdir|gpgdir|logfile|root|version) ]]
|
||||
then
|
||||
[[ $cur = -* ]] && _arch_ptr2comp ${o#* } common ||
|
||||
elif [[ ! $prev =~ ^-[[:alnum:]_]*[Vbhr] && ! $prev == --@(cachedir|color|config|dbpath|help|hookdir|gpgdir|logfile|root|version) ]]; then
|
||||
[[ $cur == -* ]] && _arch_ptr2comp ${o#* } common ||
|
||||
case ${o% *} in
|
||||
D|R)
|
||||
_pacman_pkg Qq;;
|
||||
D | R)
|
||||
_pacman_pkg Qq
|
||||
;;
|
||||
F)
|
||||
_arch_incomp 'l list' && _pacman_pkg Slq;
|
||||
;;
|
||||
{ _arch_incomp 'l list' && _pacman_pkg Slq; } ||
|
||||
_arch_incomp 'o owns' ||
|
||||
compopt +o default
|
||||
;;
|
||||
Q)
|
||||
{ _arch_incomp 'g groups' && _pacman_pkg Qg sort; } ||
|
||||
{ _arch_incomp 'p file' && _pacman_file; } ||
|
||||
_arch_incomp 'o owns' || _arch_incomp 'u upgrades' ||
|
||||
_pacman_pkg Qq;;
|
||||
{ _arch_incomp 'g groups' && _pacman_pkg Qg sort; } ||
|
||||
{ _arch_incomp 'p file' && _pacman_file; } ||
|
||||
{ _arch_incomp 's search' && compopt +o default; } ||
|
||||
{ _arch_incomp 'u upgrades' && compopt +o default; } ||
|
||||
_arch_incomp 'o owns' ||
|
||||
_pacman_pkg Qq
|
||||
;;
|
||||
S)
|
||||
{ _arch_incomp 'g groups' && _pacman_pkg Sg; } ||
|
||||
{ _arch_incomp 'l list' && _arch_compgen "$(yay -Pc | \sort -u)"; } ||
|
||||
_arch_compgen "$(yay -Pc )";;
|
||||
{ _arch_incomp 'g groups' && _pacman_pkg Sg; } ||
|
||||
{ _arch_incomp 'l list' && _pacman_repo_list; } ||
|
||||
{ _arch_incomp 's search' && compopt +o default; } ||
|
||||
_yay_pkg
|
||||
;;
|
||||
U)
|
||||
_pacman_file;;
|
||||
_pacman_file
|
||||
;;
|
||||
G)
|
||||
_yay_pkg
|
||||
;;
|
||||
W)
|
||||
_yay_pkg
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
true
|
||||
}
|
||||
|
||||
_pacman_file() {
|
||||
compopt -o filenames; _filedir 'pkg.tar*'
|
||||
compopt -o filenames
|
||||
_filedir 'pkg.*'
|
||||
}
|
||||
|
||||
complete -F _yay -o default yay
|
||||
complete -F _yay yay
|
||||
|
||||
# ex:et ts=2 sw=2 ft=sh
|
||||
|
354
completions/fish
354
completions/fish
@ -4,190 +4,242 @@
|
||||
|
||||
set -l progname yay
|
||||
|
||||
# Yay constants
|
||||
set -l listall "(yay -Pc)"
|
||||
set -l listpacman "(__fish_print_packages)"
|
||||
set -l yayspecific '__fish_contains_opt -s Y yay'
|
||||
set -l webspecific '__fish_contains_opt -s W web'
|
||||
set -l show '__fish_contains_opt -s P show'
|
||||
set -l getpkgbuild '__fish_contains_opt -s G getpkgbuild'
|
||||
|
||||
# Pacman constants
|
||||
set -l listinstalled "(pacman -Q | string replace ' ' \t)"
|
||||
# This might be an issue if another package manager is also installed (e.g. for containers)
|
||||
set -l listall "(yay -Pcf)"
|
||||
set -l listrepos "(__fish_print_pacman_repos)"
|
||||
set -l listgroups "(pacman -Sg)\t'Package Group'"
|
||||
set -l listpacman "(__fish_print_packages)"
|
||||
set -l noopt 'not __fish_contains_opt -s S -s D -s Q -s R -s U -s T -s F database query sync remove upgrade deptest files'
|
||||
|
||||
set -l noopt 'not __fish_contains_opt -s S -s D -s Q -s R -s U -s T -s F -s Y -s W -s P -s G database query sync remove upgrade deptest files show getpkgbuild web yay'
|
||||
set -l database '__fish_contains_opt -s D database'
|
||||
set -l getpkgbuild '__fish_contains_opt -s G getpkgbuild'
|
||||
set -l print '__fish_contains_opt -s P print'
|
||||
set -l query '__fish_contains_opt -s Q query'
|
||||
set -l remove '__fish_contains_opt -s R remove'
|
||||
set -l sync '__fish_contains_opt -s S sync'
|
||||
set -l upgrade '__fish_contains_opt -s U upgrade'
|
||||
set -l files '__fish_contains_opt -s F files'
|
||||
set -l yayspecific '__fish_contains_opt -s Y yay'
|
||||
|
||||
complete -c pacman -e
|
||||
complete -c pacman -f
|
||||
|
||||
|
||||
complete -c $progname -e
|
||||
complete -c $progname -f
|
||||
# HACK: We only need these two to coerce fish to stop file completion and complete options
|
||||
complete -c $progname -n $noopt -a "-D" -d "Modify the package database"
|
||||
complete -c $progname -n $noopt -a "-Q" -d "Query the package database"
|
||||
# Primary operations
|
||||
complete -c $progname -s D -f -l database -n $noopt -d 'Modify the package database'
|
||||
complete -c $progname -s Q -f -l query -n $noopt -d 'Query the package database'
|
||||
complete -c $progname -s G -f -l getpkgbuild -n $noopt -d 'Get PKGBUILD from ABS or AUR'
|
||||
complete -c $progname -s R -f -l remove -n $noopt -d 'Remove packages from the system'
|
||||
complete -c $progname -s S -f -l sync -n $noopt -d 'Synchronize packages'
|
||||
complete -c $progname -s T -f -l deptest -n $noopt -d 'Check if dependencies are installed'
|
||||
complete -c $progname -s U -f -l upgrade -n $noopt -d 'Upgrade or add a local package'
|
||||
complete -c $progname -s F -f -l files -n $noopt -d 'Query the files database'
|
||||
complete -c $progname -s G -f -l getpkgbuild -n $noopt -d 'Get PKGBUILD from ABS or AUR'
|
||||
complete -c $progname -s P -f -l print -n $noopt -d 'Print information'
|
||||
complete -c $progname -s Y -f -l yay -n $noopt -d 'Yay specific operations'
|
||||
complete -c $progname -n "$noopt" -a "-D" -d "Modify the package database"
|
||||
complete -c $progname -n "$noopt" -a "-Q" -d "Query the package database"
|
||||
|
||||
# Primary operations
|
||||
complete -c $progname -s D -f -l database -n "$noopt" -d 'Modify the package database'
|
||||
complete -c $progname -s Q -f -l query -n "$noopt" -d 'Query the package database'
|
||||
complete -c $progname -s R -f -l remove -n "$noopt" -d 'Remove packages from the system'
|
||||
complete -c $progname -s S -f -l sync -n "$noopt" -d 'Synchronize packages'
|
||||
complete -c $progname -s T -f -l deptest -n "$noopt" -d 'Check dependencies'
|
||||
complete -c $progname -s U -l upgrade -n "$noopt" -d 'Upgrade or add a local package'
|
||||
complete -c $progname -s F -f -l files -n "$noopt" -d 'Query the files database'
|
||||
complete -c $progname -s V -f -l version -d 'Display version and exit'
|
||||
complete -c $progname -s h -f -l help -d 'Display help'
|
||||
|
||||
# General options
|
||||
# Only offer these once a command has been given so they get prominent display
|
||||
complete -c $progname -n "not $noopt" -s b -l dbpath -d 'Alternative database location' -xa '(__fish_complete_directories)'
|
||||
complete -c $progname -n "not $noopt" -s r -l root -d 'Alternative installation root'
|
||||
complete -c $progname -n "not $noopt" -s v -l verbose -d 'Output more status messages'
|
||||
complete -c $progname -n "not $noopt" -s h -l help -d 'Display syntax for the given operation'
|
||||
complete -c $progname -n "not $noopt" -s b -l dbpath -d 'Alternate database location' -xa "(__fish_complete_directories)"
|
||||
complete -c $progname -n "not $noopt" -s r -l root -d 'Alternate installation root' -xa "(__fish_complete_directories)"
|
||||
complete -c $progname -n "not $noopt" -s v -l verbose -d 'Output more status messages' -f
|
||||
complete -c $progname -n "not $noopt" -l arch -d 'Alternate architecture' -f
|
||||
complete -c $progname -n "not $noopt" -l cachedir -d 'Alternative package cache location'
|
||||
complete -c $progname -n "not $noopt" -l color -d 'Colorize the output'
|
||||
complete -c $progname -n "not $noopt" -l config -d 'Alternate config file'
|
||||
complete -c $progname -n "not $noopt" -l cachedir -d 'Alternate package cache location' -xa "(__fish_complete_directories)"
|
||||
complete -c $progname -n "not $noopt" -l color -d 'Colorize the output' -fa '{auto,always,never}'
|
||||
complete -c $progname -n "not $noopt" -l config -d 'Alternate config file' -rF
|
||||
complete -c $progname -n "not $noopt" -l confirm -d 'Always ask for confirmation' -f
|
||||
complete -c $progname -n "not $noopt" -l debug -d 'Display debug messages' -f
|
||||
complete -c $progname -n "not $noopt" -l gpgdir -d 'GPG directory to verify signatures'
|
||||
complete -c $progname -n "not $noopt" -l hookdir -d 'Hook file directory'
|
||||
complete -c $progname -n "not $noopt" -l logfile -d 'Specify alternative log file'
|
||||
complete -c $progname -n "not $noopt" -l disable-download-timeout -d 'Use relaxed timeouts for download' -f
|
||||
complete -c $progname -n "not $noopt" -l gpgdir -d 'Alternate home directory for GnuPG' -xa "(__fish_complete_directories)"
|
||||
complete -c $progname -n "not $noopt" -l hookdir -d 'Alternate hook location' -xa "(__fish_complete_directories)"
|
||||
complete -c $progname -n "not $noopt" -l logfile -d 'Alternate log file'
|
||||
complete -c $progname -n "not $noopt" -l noconfirm -d 'Bypass any confirmation' -f
|
||||
complete -c $progname -n "not $noopt" -l sysroot -d 'Operate on a mounted guest system (root-only)' -xa "(__fish_complete_directories)"
|
||||
|
||||
complete -c $progname -n "not $noopt" -l noconfirm -d 'Bypass any question' -f
|
||||
complete -c $progname -n "not $noopt" -l topdown -d 'Shows repository packages first and then aur' -f
|
||||
complete -c $progname -n "not $noopt" -l bottomup -d 'Shows aur packages first and then repository' -f
|
||||
complete -c $progname -n "not $noopt" -l devel -d 'Check -git/-svn/-hg development version' -f
|
||||
complete -c $progname -n "not $noopt" -l nodevel -d 'Disable development version checking' -f
|
||||
complete -c $progname -n "not $noopt" -l afterclean -d 'Clean package sources after successful build' -f
|
||||
complete -c $progname -n "not $noopt" -l noafterclean -d 'Disable package sources cleaning' -f
|
||||
complete -c $progname -n "not $noopt" -l timeupdate -d 'Check package modification date and version' -f
|
||||
complete -c $progname -n "not $noopt" -l notimeupdate -d 'Check only package version change' -f
|
||||
# File, query, sync options (files, query, sync)
|
||||
for condition in files query sync
|
||||
complete -c $progname -n "$$condition" -s q -l quiet -d 'Show less information' -f
|
||||
end
|
||||
|
||||
# Transaction options (sync, remove, upgrade)
|
||||
for condition in sync remove upgrade
|
||||
complete -c $progname -n "$$condition" -s d -l nodeps -d 'Skip [all] dependency checks' -f
|
||||
complete -c $progname -n "$$condition" -s p -l print -d 'Dry run, only print targets' -f
|
||||
complete -c $progname -n "$$condition" -l assume-installed -d 'Add a virtual package to satisfy dependencies' -f
|
||||
complete -c $progname -n "$$condition" -l dbonly -d 'Modify database entry only' -f
|
||||
complete -c $progname -n "$$condition" -l noprogressbar -d 'Do not display progress bar' -f
|
||||
complete -c $progname -n "$$condition" -l noscriptlet -d 'Do not execute install script' -f
|
||||
complete -c $progname -n "$$condition" -l print-format -d 'Specify printf-like format' -x
|
||||
end
|
||||
|
||||
# File and query options (files, query)
|
||||
for condition in files query
|
||||
complete -c $progname -n "$$condition" -s l -l list -d 'List the files owned by PACKAGE' -f
|
||||
end
|
||||
|
||||
# File and sync options (files, sync)
|
||||
for condition in files sync
|
||||
complete -c $progname -n "$$condition" -s y -l refresh -d 'Download fresh package databases [force]' -f
|
||||
end
|
||||
|
||||
# Query and sync options (query, sync)
|
||||
for condition in query sync
|
||||
complete -c $progname -n "$$condition" -s g -l groups -d 'Display members of [all] package GROUP' -xa "$listgroups"
|
||||
end
|
||||
|
||||
# Sync and upgrade options (sync, upgrade)
|
||||
for condition in sync upgrade
|
||||
complete -c $progname -n "$$condition" -l asdeps -d 'Install packages as non-explicitly installed' -f
|
||||
complete -c $progname -n "$$condition" -l asexplicit -d 'Install packages as explicitly installed' -f
|
||||
complete -c $progname -n "$$condition" -l ignore -d 'Ignore a package upgrade (can be used more than once)' -xa "$listall"
|
||||
complete -c $progname -n "$$condition" -l ignoregroup -d 'Ignore a group upgrade (can be used more than once)' -xa "$listgroups"
|
||||
complete -c $progname -n "$$condition" -l needed -d 'Do not reinstall up to date packages' -f
|
||||
complete -c $progname -n "$$condition" -l overwrite -d 'Overwrite conflicting files (can be used more than once)' -rF
|
||||
end
|
||||
|
||||
# Database options
|
||||
set -l has_db_opt '__fish_contains_opt asdeps asexplicit check -s k'
|
||||
complete -c $progname -n "$database; and not $has_db_opt" -s k -l check -d 'Check database validity'
|
||||
complete -c $progname -n "$database" -s q -l quite -d 'Suppress output of success messages' -f
|
||||
complete -c $progname -n "$database; and not $has_db_opt" -l asdeps -d 'Mark PACKAGE as dependency' -x
|
||||
complete -c $progname -n "$database; and not $has_db_opt" -l asexplicit -d 'Mark PACKAGE as explicitly installed' -x
|
||||
complete -c $progname -n "$has_db_opt; and $database" -xa "$listinstalled"
|
||||
|
||||
# File options - since pacman 5
|
||||
complete -c $progname -n "$files" -s x -l regex -d 'Interpret each query as a regular expression' -f
|
||||
complete -c $progname -n "$files" -l machinereadable -d 'Print each match in a machine readable output format' -f
|
||||
complete -c $progname -n "$files" -d Package -xa "$listpacman"
|
||||
|
||||
# Query options
|
||||
complete -c $progname -n "$query" -s c -l changelog -d 'View the change log of PACKAGE' -f
|
||||
complete -c $progname -n "$query" -s d -l deps -d 'List only non-explicit packages (dependencies)' -f
|
||||
complete -c $progname -n "$query" -s e -l explicit -d 'List only explicitly installed packages' -f
|
||||
complete -c $progname -n "$query" -s i -l info -d 'View PACKAGE [backup files] information' -f
|
||||
complete -c $progname -n "$query" -s k -l check -d 'Check that PACKAGE files exist' -f
|
||||
complete -c $progname -n "$query" -s m -l foreign -d 'List installed packages not found in sync database' -f
|
||||
complete -c $progname -n "$query" -s n -l native -d 'list installed packages only found in sync database' -f
|
||||
complete -c $progname -n "$query" -s o -l owns -d 'Query the package that owns FILE' -rF
|
||||
complete -c $progname -n "$query" -s p -l file -d 'Query a package file instead of the database' -rF
|
||||
complete -c $progname -n "$query" -s s -l search -d 'Search locally-installed packages for regexp' -f
|
||||
complete -c $progname -n "$query" -s t -l unrequired -d 'List only unrequired packages [and optdepends]' -f
|
||||
complete -c $progname -n "$query" -s u -l upgrades -d 'List only out-of-date packages' -f
|
||||
complete -c $progname -n "$query" -d 'Installed package' -xa "$listinstalled"
|
||||
|
||||
# Remove options
|
||||
complete -c $progname -n "$remove" -s c -l cascade -d 'Also remove packages depending on PACKAGE' -f
|
||||
complete -c $progname -n "$remove" -s n -l nosave -d 'Ignore file backup designations' -f
|
||||
complete -c $progname -n "$remove" -s s -l recursive -d 'Also remove dependencies of PACKAGE' -f
|
||||
complete -c $progname -n "$remove" -s u -l unneeded -d 'Only remove targets not required by PACKAGE' -f
|
||||
complete -c $progname -n "$remove" -d 'Installed package' -xa "$listinstalled"
|
||||
|
||||
# Sync options
|
||||
complete -c $progname -n "$sync" -s c -l clean -d 'Remove [all] packages from cache' -f
|
||||
complete -c $progname -n "$sync" -s i -l info -d 'View PACKAGE [extended] information' -f
|
||||
complete -c $progname -n "$sync" -s l -l list -d 'List all packages in REPOSITORY' -xa "$listrepos"
|
||||
complete -c $progname -n "$sync" -s s -l search -d 'Search remote repositories for regexp' -f
|
||||
complete -c $progname -n "$sync" -s u -l sysupgrade -d 'Upgrade all packages that are out of date'
|
||||
complete -c $progname -n "$sync" -s w -l downloadonly -d 'Only download the target packages'
|
||||
complete -c $progname -n "$sync" -xa "$listall $listgroups"
|
||||
|
||||
# Upgrade options
|
||||
# Theoretically, pacman reads packages in all formats that libarchive supports
|
||||
# In practice, it's going to be tar.xz, tar.gz, tar.zst, or just pkg.tar (uncompressed pkg)
|
||||
complete -c $progname -n "$upgrade" -xa '(__fish_complete_suffix pkg.tar.zst; __fish_complete_suffix pkg.tar.xz; __fish_complete_suffix pkg.tar.gz; __fish_complete_suffix pkg.tar;)' -d 'Package file'
|
||||
|
||||
|
||||
# Yay operations (top-level modes, only offered before any operation is chosen)
complete -c $progname -s Y -f -l yay -n "$noopt" -d 'Yay specific operations'
complete -c $progname -s P -f -l show -n "$noopt" -d 'Print information'
complete -c $progname -s G -f -l getpkgbuild -n "$noopt" -d 'Get PKGBUILD from ABS or AUR'
complete -c $progname -s W -f -l web -n "$noopt" -d 'Web operations'

# Web options
complete -c $progname -n "$webspecific" -s v -l vote -d 'Vote for AUR packages' -f
complete -c $progname -n "$webspecific" -s u -l unvote -d 'Unvote for AUR packages' -f
complete -c $progname -n "$webspecific" -xa "$listall"

# New options (apply to every operation)
complete -c $progname -n "not $noopt" -s a -l aur -d 'Assume targets are from the AUR' -f
complete -c $progname -n "not $noopt" -s N -l repo -d 'Assume targets are from the repositories' -f

# Yay options
complete -c $progname -n "$yayspecific" -s c -l clean -d 'Remove unneeded dependencies' -f
complete -c $progname -n "$yayspecific" -l gendb -d 'Generate development package DB' -f

# Show options
complete -c $progname -n "$show" -s c -l complete -d 'Print a list of all AUR and repo packages' -f
#complete -c $progname -n "$show" -s f -l fish -d 'During complete adjust the output for the fish shell' -f
complete -c $progname -n "$show" -s d -l defaultconfig -d 'Print default yay configuration' -f
complete -c $progname -n "$show" -s g -l currentconfig -d 'Print current yay configuration' -f
complete -c $progname -n "$show" -s s -l stats -d 'Display system package statistics' -f
complete -c $progname -n "$show" -s w -l news -d 'Print arch news' -f
complete -c $progname -n "$show" -s q -l quiet -d 'Do not print news description' -f

# Getpkgbuild options
complete -c $progname -n "$getpkgbuild" -s f -l force -d 'Force download for existing ABS packages' -f
complete -c $progname -n "$getpkgbuild" -xa "$listall"
complete -c $progname -n "$getpkgbuild" -s p -l print -d 'Print pkgbuild of packages' -f
||||
|
||||
# Permanent configuration settings (valid with every operation; --save
# persists them to yay's config file).
complete -c $progname -n "not $noopt" -l save -d 'Save current arguments to yay permanent configuration' -f
complete -c $progname -n "not $noopt" -l mflags -d 'Pass the following options to makepkg' -f
complete -c $progname -n "not $noopt" -l gpgflags -d 'Pass the following options to gpg' -f
# NOTE(review): --buildir looks like a typo of --builddir (offered below);
# confirm against yay's argument parser before removing it.
complete -c $progname -n "not $noopt" -l buildir -d 'Specify the build directory' -f
complete -c $progname -n "not $noopt" -l aururl -d 'Set an alternative AUR URL' -f
complete -c $progname -n "not $noopt" -l aurrpcurl -d 'Set an alternative URL for the AUR /rpc endpoint' -f
complete -c $progname -n "not $noopt" -l builddir -d 'Directory to use for Building AUR Packages' -r
complete -c $progname -n "not $noopt" -l editor -d 'Editor to use' -f
complete -c $progname -n "not $noopt" -l editorflags -d 'Editor flags to use' -f
complete -c $progname -n "not $noopt" -l makepkg -d 'Makepkg command to use' -f
complete -c $progname -n "not $noopt" -l pacman -d 'Pacman command to use' -f
complete -c $progname -n "not $noopt" -l tar -d 'Tar command to use' -f
complete -c $progname -n "not $noopt" -l git -d 'Git command to use' -f
complete -c $progname -n "not $noopt" -l gpg -d 'Gpg command to use' -f
complete -c $progname -n "not $noopt" -l config -d 'The pacman config file to use' -r
complete -c $progname -n "not $noopt" -l makepkgconf -d 'Use custom makepkg.conf location' -r
complete -c $progname -n "not $noopt" -l nomakepkgconf -d 'Use default makepkg.conf' -f
complete -c $progname -n "not $noopt" -l requestsplitn -d 'Max amount of packages to query per AUR request' -f
# Fixed typo: "backgroud" -> "background" (matches the --sudoloop description
# used later in this file).
complete -c $progname -n "not $noopt" -l sudoloop -d 'Loop sudo calls in the background to avoid timeout' -f
complete -c $progname -n "not $noopt" -l nosudoloop -d 'Do not loop sudo calls in the background' -f
complete -c $progname -n "not $noopt" -l completioninterval -d 'Refresh interval for completion cache' -f
complete -c $progname -n "not $noopt" -l sortby -d 'Sort AUR results by a specific field during search' -xa "{votes,popularity,id,baseid,name,base,submitted,modified}"
complete -c $progname -n "not $noopt" -l searchby -d 'Search for AUR packages by querying the specified field' -xa "{name,name-desc,maintainer,depends,checkdepends,makedepends,optdepends}"
complete -c $progname -n "not $noopt" -l answerclean -d 'Set a predetermined answer for the clean build menu' -xa "{All,None,Installed,NotInstalled}"
complete -c $progname -n "not $noopt" -l answerdiff -d 'Set a predetermined answer for the edit diff menu' -xa "{All,None,Installed,NotInstalled}"
complete -c $progname -n "not $noopt" -l answeredit -d 'Set a predetermined answer for the edit pkgbuild menu' -xa "{All,None,Installed,NotInstalled}"
complete -c $progname -n "not $noopt" -l answerupgrade -d 'Set a predetermined answer for the upgrade menu' -f
complete -c $progname -n "not $noopt" -l noanswerclean -d 'Unset the answer for the clean build menu' -f
complete -c $progname -n "not $noopt" -l noanswerdiff -d 'Unset the answer for the diff menu' -f
complete -c $progname -n "not $noopt" -l noansweredit -d 'Unset the answer for the edit pkgbuild menu' -f
complete -c $progname -n "not $noopt" -l noanswerupgrade -d 'Unset the answer for the upgrade menu' -f
complete -c $progname -n "not $noopt" -l cleanmenu -d 'Give the option to clean build PKGBUILDS' -f
complete -c $progname -n "not $noopt" -l diffmenu -d 'Give the option to show diffs for build files' -f
complete -c $progname -n "not $noopt" -l editmenu -d 'Give the option to edit/view PKGBUILDS' -f
complete -c $progname -n "not $noopt" -l askremovemake -d 'Ask to remove make deps after install' -f
complete -c $progname -n "not $noopt" -l askyesremovemake -d 'Ask to remove make deps after install(with "Y" as default)' -f
complete -c $progname -n "not $noopt" -l removemake -d 'Remove make deps after install' -f
complete -c $progname -n "not $noopt" -l noremovemake -d 'Do not remove make deps after install' -f
complete -c $progname -n "not $noopt" -l topdown -d 'Shows repository packages first and then aur' -f
complete -c $progname -n "not $noopt" -l bottomup -d 'Shows aur packages first and then repository' -f
complete -c $progname -n "not $noopt" -l singlelineresults -d 'List each search result on its own line' -f
complete -c $progname -n "not $noopt" -l doublelineresults -d 'List each search result on two lines, like pacman' -f
complete -c $progname -n "not $noopt" -l devel -d 'Check -git/-svn/-hg development version' -f
complete -c $progname -n "not $noopt" -l cleanafter -d 'Clean package sources after successful build' -f
complete -c $progname -n "not $noopt" -l keepsrc -d 'Keep pkg/ and src/ after building packages' -f
complete -c $progname -n "not $noopt" -l timeupdate -d 'Check package modification date and version' -f
complete -c $progname -n "not $noopt" -l redownload -d 'Redownload PKGBUILD of package even if up-to-date' -f
complete -c $progname -n "not $noopt" -l noredownload -d 'Do not redownload up-to-date PKGBUILDs' -f
# Removed a duplicate --noredownload registration that followed this line.
complete -c $progname -n "not $noopt" -l redownloadall -d 'Redownload PKGBUILD of package and deps even if up-to-date' -f
complete -c $progname -n "not $noopt" -l provides -d 'Look for matching providers when searching for packages' -f
complete -c $progname -n "not $noopt" -l pgpfetch -d 'Prompt to import PGP keys from PKGBUILDs' -f
complete -c $progname -n "not $noopt" -l useask -d 'Automatically resolve conflicts using pacmans ask flag' -f
complete -c $progname -n "not $noopt" -l combinedupgrade -d 'Refresh then perform the repo and AUR upgrade together' -f
complete -c $progname -n "not $noopt" -l batchinstall -d 'Build multiple AUR packages then install them together' -f
complete -c $progname -n "not $noopt" -l rebuild -d 'Always build target packages' -f
complete -c $progname -n "not $noopt" -l rebuildall -d 'Always build all AUR packages' -f
complete -c $progname -n "not $noopt" -l rebuildtree -d 'Always build all AUR packages even if installed' -f
complete -c $progname -n "not $noopt" -l norebuild -d 'Skip package build if in cache and up to date' -f
||||
|
||||
# Yay options
complete -c $progname -n $yayspecific -s c -l clean -d 'Remove unneeded dependencies' -f
complete -c $progname -n $yayspecific -s g -l getpkgbuild -d 'Download PKGBUILD from ABS or AUR' -xa "$listall" -f
# NOTE(review): the --gendb description below looks copy-pasted from --stats;
# earlier in this file --gendb is described as 'Generate development package DB'.
complete -c $progname -n $yayspecific -l gendb -d 'Display system package statistics' -f

# Print options
# NOTE(review): -d/--defaultconfig says 'current' here but 'default' in the
# Show options section above — confirm which wording is intended.
complete -c $progname -n $print -s d -l defaultconfig -d 'Print current yay configuration' -f
complete -c $progname -n $print -s n -l numberupgrades -d 'Print number of updates' -f
complete -c $progname -n $print -s s -l stats -d 'Display system package statistics' -f
complete -c $progname -n $print -s u -l upgrades -d 'Print update list' -f
||||
# Transaction options (sync, remove, upgrade)
# $$condition is fish variable indirection: it expands to the value of the
# variable whose name is stored in $condition (i.e. $sync, $remove, ...),
# each of which holds the operation-detection condition string.
for condition in sync remove upgrade
    complete -c $progname -n $$condition -s d -l nodeps -d 'Skip [all] dependency checks' -f
    complete -c $progname -n $$condition -l dbonly -d 'Modify database entry only' -f
    complete -c $progname -n $$condition -l noprogressbar -d 'Do not display progress bar' -f
    complete -c $progname -n $$condition -l noscriptlet -d 'Do not execute install script' -f
    complete -c $progname -n $$condition -s p -l print -d 'Dry run, only print targets' -f
    complete -c $progname -n $$condition -l print-format -x -d 'Specify printf-like format' -f
end

# Database and upgrade options (database, sync, upgrade)
for condition in database sync upgrade
    complete -c $progname -n $$condition -l asdeps -d 'Mark PACKAGE as dependency' -f
    complete -c $progname -n $$condition -l asexplicit -d 'Mark PACKAGE as explicitly installed' -f
end

# Upgrade options (sync, upgrade)
for condition in sync upgrade
    complete -c $progname -n $$condition -l force -d 'Bypass file conflict checks' -f
    complete -c $progname -n $$condition -l ignore -d 'Ignore upgrade of PACKAGE' -xa "$listinstalled" -f
    complete -c $progname -n $$condition -l ignoregroup -d 'Ignore upgrade of GROUP' -xa "$listgroups" -f
    complete -c $progname -n $$condition -l needed -d 'Do not reinstall up-to-date targets' -f
    complete -c $progname -n $$condition -l recursive -d 'Recursively reinstall all dependencies' -f
end

# Query and sync options
for condition in query sync
    complete -c $progname -n $$condition -s g -l groups -d 'Display all packages in GROUP' -xa "$listgroups" -f
    complete -c $progname -n $$condition -s i -l info -d 'Display information on PACKAGE' -f
    complete -c $progname -n $$condition -s q -l quiet -d 'Show less information' -f
    complete -c $progname -n $$condition -s s -l search -r -d 'Search packages for regexp' -f
end
|
||||
|
||||
# Get PKGBUILD options: complete targets from the full package list.
complete -c $progname -n "$getpkgbuild" -xa "$listall"

# Query options
complete -c $progname -n $query -s c -l changelog -d 'View the change log of PACKAGE' -f
complete -c $progname -n $query -s d -l deps -d 'List only non-explicit packages (dependencies)' -f
complete -c $progname -n $query -s e -l explicit -d 'List only explicitly installed packages' -f
complete -c $progname -n $query -s k -l check -d 'Check if all files owned by PACKAGE are present' -f
complete -c $progname -n $query -s l -l list -d 'List all files owned by PACKAGE' -f
complete -c $progname -n $query -s m -l foreign -d 'List all packages not in the database' -f
# -xa '' disables package-name suggestions so file-path completion is used.
complete -c $progname -n $query -s o -l owns -r -d 'Search for the package that owns FILE' -xa '' -f
complete -c $progname -n $query -s p -l file -d 'Apply the query to a package file, not package' -xa '' -f
complete -c $progname -n $query -s t -l unrequired -d 'List only unrequired packages' -f
complete -c $progname -n $query -s u -l upgrades -d 'List only out-of-date packages' -f
complete -c $progname -n "$query" -d 'Installed package' -xa $listinstalled -f

# Remove options
complete -c $progname -n $remove -s c -l cascade -d 'Also remove packages depending on PACKAGE' -f
complete -c $progname -n $remove -s n -l nosave -d 'Ignore file backup designations' -f
complete -c $progname -n $remove -s s -l recursive -d 'Also remove dependencies of PACKAGE' -f
complete -c $progname -n $remove -s u -l unneeded -d 'Only remove targets not required by PACKAGE' -f
complete -c $progname -n "$remove" -d 'Installed package' -xa $listinstalled -f
|
||||
# Sync options
complete -c $progname -n $sync -s c -l clean -d 'Remove [all] packages from cache'
complete -c $progname -n $sync -s l -l list -xa "$listrepos" -d 'List all packages in REPOSITORY'
# The second -u (after one --sysupgrade is already present) also downgrades.
complete -c $progname -n "$sync; and not __fish_contains_opt -s u sysupgrade" -s u -l sysupgrade -d 'Upgrade all packages that are out of date'
complete -c $progname -n "$sync; and __fish_contains_opt -s u sysupgrade" -s u -l sysupgrade -d 'Also downgrade packages'
complete -c $progname -n $sync -s w -l downloadonly -d 'Only download the target packages'
complete -c $progname -n $sync -s y -l refresh -d 'Download fresh copy of the package list'
complete -c $progname -n "$sync" -xa "$listall $listgroups"

# Database options
set -l has_db_opt '__fish_contains_opt asdeps asexplicit'
complete -c $progname -n "$database; and not $has_db_opt" -xa --asdeps -d 'Mark PACKAGE as dependency'
complete -c $progname -n "$database; and not $has_db_opt" -xa --asexplicit -d 'Mark PACKAGE as explicitly installed'
complete -c $progname -n "$database; and not $has_db_opt" -s k -l check -d 'Check database validity'
# Once --asdeps/--asexplicit is given, complete installed package names.
complete -c $progname -n "$has_db_opt; and $database" -xa "$listinstalled"
|
||||
# File options - since pacman 5
set -l has_file_opt '__fish_contains_opt list search -s l -s s'
complete -c $progname -n "$files; and not $has_file_opt" -xa --list -d 'List files owned by given packages'
# NOTE(review): "-xa -l" / "-xa -s" below offer the literal short flags as
# arguments; confirm this is intended rather than "-s l" / "-s s".
complete -c $progname -n "$files; and not $has_file_opt" -xa -l -d 'List files owned by given packages'
complete -c $progname -n "$files; and not $has_file_opt" -xa --search -d 'Search packages for matching files'
complete -c $progname -n "$files; and not $has_file_opt" -xa -s -d 'Search packages for matching files'
complete -c $progname -n "$files" -s y -l refresh -d 'Refresh the files database' -f
complete -c $progname -n "$files" -s l -l list -d 'List files owned by given packages' -xa $listpacman
complete -c $progname -n "$files" -s s -l search -d 'Search packages for matching files'
complete -c $progname -n "$files" -s o -l owns -d 'Search for packages that include the given files'
complete -c $progname -n "$files" -s q -l quiet -d 'Show less information' -f
complete -c $progname -n "$files" -l machinereadable -d 'Show in machine readable format: repo\0pkgname\0pkgver\0path\n' -f
|
||||
|
||||
# Upgrade options
|
||||
# Theoretically, pacman reads packages in all formats that libarchive supports
|
||||
# In practice, it's going to be tar.xz or tar.gz
|
||||
# Using "pkg.tar.*" here would change __fish_complete_suffix's descriptions to "unknown"
|
||||
complete -c $progname -n "$upgrade" -xa '(__fish_complete_suffix pkg.tar.xz)' -d 'Package file'
|
||||
complete -c $progname -n "$upgrade" -xa '(__fish_complete_suffix pkg.tar.gz)' -d 'Package file'
|
||||
complete -c $progname -n "not $noopt" -l mflags -d 'Pass the following options to makepkg' -f
|
||||
complete -c $progname -n "not $noopt" -l gpgflags -d 'Pass the following options to gpg' -f
|
||||
complete -c $progname -n "not $noopt" -l sudoloop -d 'Loop sudo calls in the background to avoid timeout' -f
|
||||
|
910
completions/zsh
910
completions/zsh
File diff suppressed because it is too large
Load Diff
234
config.go
234
config.go
@ -1,234 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
|
||||
alpm "github.com/jguer/go-alpm"
|
||||
)
|
||||
|
||||
// Verbosity settings for search output.
const (
	NumberMenu = iota // interactive numbered selection menu
	Detailed          // detailed listing
	Minimal           // minimal listing
)

// Describes the sorting direction for the number-menu display.
const (
	BottomUp = iota // best matches printed last (closest to the prompt)
	TopDown         // best matches printed first
)
|
||||
|
||||
// Configuration stores yay's config.
// Fields tagged `json:"-"` (SearchMode, NoConfirm) are runtime-only and are
// never persisted to the JSON config file.
type Configuration struct {
	BuildDir      string `json:"buildDir"`   // directory AUR packages are built in
	Editor        string `json:"editor"`     // editor override; empty falls back to $EDITOR/$VISUAL
	MakepkgBin    string `json:"makepkgbin"` // makepkg command to invoke
	PacmanBin     string `json:"pacmanbin"`  // pacman command to invoke
	PacmanConf    string `json:"pacmanconf"` // path to pacman.conf
	TarBin        string `json:"tarbin"`
	ReDownload    string `json:"redownload"` // "no" by default (see defaultSettings)
	ReBuild       string `json:"rebuild"`    // "no" by default (see defaultSettings)
	GitBin        string `json:"gitbin"`
	GpgBin        string `json:"gpgbin"`
	GpgFlags      string `json:"gpgflags"` // extra flags passed to gpg
	MFlags        string `json:"mflags"`   // extra flags passed to makepkg
	RequestSplitN int    `json:"requestsplitn"` // max packages per AUR RPC request
	SearchMode    int    `json:"-"`             // NumberMenu/Detailed/Minimal
	SortMode      int    `json:"sortmode"`      // BottomUp or TopDown
	SudoLoop      bool   `json:"sudoloop"`
	TimeUpdate    bool   `json:"timeupdate"`
	NoConfirm     bool   `json:"-"`
	Devel         bool   `json:"devel"`
	CleanAfter    bool   `json:"cleanAfter"`
}
|
||||
|
||||
// version is yay's version string.
var version = "3.373"

// configFileName holds the name of the config file.
const configFileName string = "config.json"

// vcsFileName holds the name of the vcs file.
const vcsFileName string = "vcs.json"

// completionFilePrefix holds the prefix used for storing shell completion files.
const completionFilePrefix string = "aur_"

// baseURL gives the AUR default address.
const baseURL string = "https://aur.archlinux.org"

// useColor enables/disables colored printing.
var useColor bool

// configHome holds the config directory home.
var configHome string

// cacheHome holds the cache directory home.
var cacheHome string

// savedInfo holds the current vcs info.
var savedInfo vcsInfo

// configFile holds yay's config file path.
var configFile string

// vcsFile holds yay's vcs info file path.
var vcsFile string

// completionFile holds the shell completion cache file path.
var completionFile string

// shouldSaveConfig holds whether or not the config should be saved.
var shouldSaveConfig bool

// config holds the current config values for yay.
var config Configuration

// alpmConf holds the current config values for pacman.
var alpmConf alpm.PacmanConfig

// alpmHandle is the alpm handle used by yay.
var alpmHandle *alpm.Handle
|
||||
|
||||
func readAlpmConfig(pacmanconf string) (conf alpm.PacmanConfig, err error) {
|
||||
file, err := os.Open(pacmanconf)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
conf, err = alpm.ParseConfig(file)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// SaveConfig writes yay config to file.
|
||||
func (config *Configuration) saveConfig() error {
|
||||
marshalledinfo, _ := json.MarshalIndent(config, "", "\t")
|
||||
in, err := os.OpenFile(configFile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer in.Close()
|
||||
_, err = in.Write(marshalledinfo)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = in.Sync()
|
||||
return err
|
||||
}
|
||||
|
||||
// defaultSettings fills config with yay's built-in defaults. SearchMode is
// the only field left untouched (it is runtime-only).
func defaultSettings(config *Configuration) {
	// Build inside the cache directory by default.
	config.BuildDir = cacheHome + "/"
	config.CleanAfter = false
	config.Editor = "" // empty means: fall back to $EDITOR/$VISUAL (see editor())
	config.Devel = false
	config.MakepkgBin = "makepkg"
	config.NoConfirm = false
	config.PacmanBin = "pacman"
	config.PacmanConf = "/etc/pacman.conf"
	config.GpgFlags = ""
	config.MFlags = ""
	config.SortMode = BottomUp
	config.SudoLoop = false
	config.TarBin = "bsdtar"
	config.GitBin = "git"
	config.GpgBin = "gpg"
	config.TimeUpdate = false
	config.RequestSplitN = 150
	config.ReDownload = "no"
	config.ReBuild = "no"
}
|
||||
|
||||
// editor returns the preferred system editor's resolved path.
// Resolution order: config.Editor, then $EDITOR, then $VISUAL; each
// candidate must resolve via exec.LookPath, otherwise the switch falls
// through to the next one. If none resolves, the user is prompted
// (repeatedly, via the goto loop) for an editor name.
func editor() string {
	switch {
	case config.Editor != "":
		editor, err := exec.LookPath(config.Editor)
		if err != nil {
			fmt.Println(err)
		} else {
			return editor
		}
		fallthrough
	case os.Getenv("EDITOR") != "":
		editor, err := exec.LookPath(os.Getenv("EDITOR"))
		if err != nil {
			fmt.Println(err)
		} else {
			return editor
		}
		fallthrough
	case os.Getenv("VISUAL") != "":
		editor, err := exec.LookPath(os.Getenv("VISUAL"))
		if err != nil {
			fmt.Println(err)
		} else {
			return editor
		}
		fallthrough
	default:
		fmt.Println(bold(red("Warning:")),
			bold(magenta("$EDITOR")), "is not set")
		fmt.Println("Please add $EDITOR or to your environment variables.")

		// Keep prompting until the user names an editor found on $PATH.
	editorLoop:
		fmt.Print(green("Edit PKGBUILD with:"))
		var editorInput string
		_, err := fmt.Scanln(&editorInput)
		if err != nil {
			fmt.Println(err)
			goto editorLoop
		}

		editor, err := exec.LookPath(editorInput)
		if err != nil {
			fmt.Println(err)
			goto editorLoop
		}
		return editor
	}
}
|
||||
|
||||
// continueTask prompts whether the user wants to continue the task,
// returning true to continue. If NoConfirm is set the task continues
// without user input. def holds the two characters that abort: "nN"
// renders a [Y/n] prompt, anything else (in practice "yY") a [y/N] prompt.
func continueTask(s string, def string) (cont bool) {
	if config.NoConfirm {
		return true
	}
	var postFix string

	if def == "nN" {
		postFix = " [Y/n] "
	} else {
		postFix = " [y/N] "
	}

	var response string
	fmt.Print(bold(green(arrow+" "+s+" ")), bold(postFix))

	// An empty line or a read error counts as accepting (continue).
	n, err := fmt.Scanln(&response)
	if err != nil || n == 0 {
		return true
	}

	// Only an exact single-character match against either abort letter
	// stops the task; any other input continues.
	if response == string(def[0]) || response == string(def[1]) {
		return false
	}

	return true
}
|
||||
|
||||
func (config Configuration) String() string {
|
||||
var buf bytes.Buffer
|
||||
enc := json.NewEncoder(&buf)
|
||||
enc.SetIndent("", "\t")
|
||||
if err := enc.Encode(config); err != nil {
|
||||
fmt.Println(err)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
353
conflicts.go
353
conflicts.go
@ -1,353 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
alpm "github.com/jguer/go-alpm"
|
||||
gopkg "github.com/mikkeloscar/gopkgbuild"
|
||||
)
|
||||
|
||||
// checkInnerConflict checks a single conflict expression belonging to the
// package `name` against every other to-be-installed package's name and
// provides (both the AUR and repo halves of dc). Matches are recorded into
// conflicts as name -> set of conflicting package names.
func checkInnerConflict(name string, conflict string, conflicts map[string]stringSet, dc *depCatagories) {
	deps, err := gopkg.ParseDeps([]string{conflict})
	if err != nil {
		return
	}
	// Exactly one string was parsed, so only deps[0] matters.
	dep := deps[0]

	for _, pkg := range dc.Aur {
		// A package can not conflict with itself.
		if name == pkg.Name {
			continue
		}

		version, err := gopkg.NewCompleteVersion(pkg.Version)
		if err != nil {
			return
		}
		if dep.Name == pkg.Name && version.Satisfies(dep) {
			addMapStringSet(conflicts, name, pkg.Name)
			continue
		}

		for _, provide := range pkg.Provides {
			// Provides are not versioned unless explicitly defined as
			// such. If a conflict is versioned but a provide is
			// not it can not conflict.
			if (dep.MaxVer != nil || dep.MinVer != nil) && !strings.ContainsAny(provide, "><=") {
				continue
			}

			var version *gopkg.CompleteVersion
			var err error

			pname, pversion := splitNameFromDep(provide)

			if dep.Name != pname {
				continue
			}

			if pversion != "" {
				version, err = gopkg.NewCompleteVersion(provide)
				if err != nil {
					return
				}
			}

			if version != nil && version.Satisfies(dep) {
				addMapStringSet(conflicts, name, pkg.Name)
				break
			}

		}
	}

	for _, pkg := range dc.Repo {
		if name == pkg.Name() {
			continue
		}

		version, err := gopkg.NewCompleteVersion(pkg.Version())
		if err != nil {
			return
		}

		if dep.Name == pkg.Name() && version.Satisfies(dep) {
			addMapStringSet(conflicts, name, pkg.Name())
			continue
		}

		pkg.Provides().ForEach(func(provide alpm.Depend) error {
			// Provides are not versioned unless explicitly defined as
			// such. If a conflict is versioned but a provide is
			// not it can not conflict.
			if (dep.MaxVer != nil || dep.MinVer != nil) && provide.Mod == alpm.DepModAny {
				return nil
			}

			// NOTE(review): this compares dep.Name against pkg.Name()
			// rather than provide.Name, unlike the AUR branch above
			// which matches the provide's own name — confirm intended.
			if dep.Name != pkg.Name() {
				return nil
			}

			if provide.Mod == alpm.DepModAny {
				addMapStringSet(conflicts, name, pkg.Name())
				// Non-nil error only short-circuits the ForEach loop.
				return fmt.Errorf("")
			}

			version, err := gopkg.NewCompleteVersion(provide.Version)
			if err != nil {
				return nil
			}

			if version.Satisfies(dep) {
				addMapStringSet(conflicts, name, pkg.Name())
				return fmt.Errorf("")
			}

			return nil
		})
	}
}
|
||||
|
||||
// Checks every to be installed package's conflicts against every other to be
|
||||
// installed package and its provides.
|
||||
func checkForInnerConflicts(dc *depCatagories) map[string]stringSet {
|
||||
conflicts := make(map[string]stringSet)
|
||||
|
||||
for _, pkg := range dc.Aur {
|
||||
for _, cpkg := range pkg.Conflicts {
|
||||
checkInnerConflict(pkg.Name, cpkg, conflicts, dc)
|
||||
}
|
||||
}
|
||||
|
||||
for _, pkg := range dc.Repo {
|
||||
pkg.Conflicts().ForEach(func(conflict alpm.Depend) error {
|
||||
checkInnerConflict(pkg.Name(), conflict.String(), conflicts, dc)
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
return conflicts
|
||||
}
|
||||
|
||||
// checkReverseConflict checks a provide (or the package name itself) of a
// to-be-installed package against every already-installed package's
// conflicts. Matches are recorded into conflicts as
// name -> set of "installedPkg (provide)" strings.
func checkReverseConflict(name string, provide string, conflicts map[string]stringSet) error {
	var version *gopkg.CompleteVersion
	var err error

	localDb, err := alpmHandle.LocalDb()
	if err != nil {
		return err
	}

	pname, pversion := splitNameFromDep(provide)
	if pversion != "" {
		version, err = gopkg.NewCompleteVersion(pversion)
		if err != nil {
			// Unparsable provide version: treat as non-conflicting.
			return nil
		}
	}

	localDb.PkgCache().ForEach(func(pkg alpm.Package) error {
		// A package can not conflict with itself.
		if name == pkg.Name() {
			return nil
		}

		pkg.Conflicts().ForEach(func(conflict alpm.Depend) error {
			deps, err := gopkg.ParseDeps([]string{conflict.String()})
			if err != nil {
				return nil
			}

			dep := deps[0]
			// Provides are not versioned unless explicitly defined as
			// such. If a conflict is versioned but a provide is
			// not it can not conflict.
			if (dep.MaxVer != nil || dep.MinVer != nil) && version == nil {
				return nil
			}

			if dep.Name != pname {
				return nil
			}

			if version == nil || version.Satisfies(dep) {
				// Todo
				addMapStringSet(conflicts, name, pkg.Name()+" ("+provide+")")
				// Non-nil error only short-circuits the ForEach loop.
				return fmt.Errorf("")
			}

			return nil
		})

		return nil
	})

	return nil
}
|
||||
|
||||
// checkConflict checks one conflict expression of a to-be-installed package
// against the package name and provides of every installed package,
// recording matches into conflicts as name -> set of
// "installedPkg" or "installedPkg (provideName)" strings.
func checkConflict(name string, conflict string, conflicts map[string]stringSet) error {
	localDb, err := alpmHandle.LocalDb()
	if err != nil {
		return err
	}

	deps, err := gopkg.ParseDeps([]string{conflict})
	if err != nil {
		// Unparsable conflict expression: treat as non-conflicting.
		return nil
	}

	dep := deps[0]

	localDb.PkgCache().ForEach(func(pkg alpm.Package) error {
		// A package can not conflict with itself.
		if name == pkg.Name() {
			return nil
		}

		version, err := gopkg.NewCompleteVersion(pkg.Version())
		if err != nil {
			return nil
		}

		if dep.Name == pkg.Name() && version.Satisfies(dep) {
			addMapStringSet(conflicts, name, pkg.Name())
			return nil
		}

		pkg.Provides().ForEach(func(provide alpm.Depend) error {
			if dep.Name != provide.Name {
				return nil
			}

			// Provides are not versioned unless explicitly defined as
			// such. If a conflict is versioned but a provide is
			// not it can not conflict.
			if (dep.MaxVer != nil || dep.MinVer != nil) && provide.Mod == alpm.DepModAny {
				return nil
			}

			if provide.Mod == alpm.DepModAny {
				addMapStringSet(conflicts, name, pkg.Name()+" ("+provide.Name+")")
				// Non-nil error only short-circuits the ForEach loop.
				return fmt.Errorf("")
			}

			version, err := gopkg.NewCompleteVersion(provide.Version)
			if err != nil {
				return nil
			}

			if version.Satisfies(dep) {
				addMapStringSet(conflicts, name, pkg.Name()+" ("+provide.Name+")")
				return fmt.Errorf("")
			}

			return nil
		})

		return nil
	})

	return nil
}
|
||||
|
||||
// checkForConflicts checks every to-be-installed package's conflicts against
// the names and provides of every already-installed package, and then every
// to-be-installed package's name and provides against every installed
// package's conflicts (the reverse direction).
func checkForConflicts(dc *depCatagories) (map[string]stringSet, error) {
	conflicts := make(map[string]stringSet)

	// Forward direction: our conflicts vs installed names/provides.
	for _, pkg := range dc.Aur {
		for _, cpkg := range pkg.Conflicts {
			checkConflict(pkg.Name, cpkg, conflicts)
		}
	}

	for _, pkg := range dc.Repo {
		pkg.Conflicts().ForEach(func(conflict alpm.Depend) error {
			checkConflict(pkg.Name(), conflict.String(), conflicts)
			return nil
		})
	}

	// Reverse direction: our names/provides vs installed conflicts.
	for _, pkg := range dc.Aur {
		checkReverseConflict(pkg.Name, pkg.Name, conflicts)
		for _, ppkg := range pkg.Provides {
			checkReverseConflict(pkg.Name, ppkg, conflicts)
		}
	}

	for _, pkg := range dc.Repo {
		checkReverseConflict(pkg.Name(), pkg.Name(), conflicts)
		pkg.Provides().ForEach(func(provide alpm.Depend) error {
			checkReverseConflict(pkg.Name(), provide.String(), conflicts)
			return nil
		})
	}

	return conflicts, nil
}
|
||||
|
||||
// checkForAllConflicts combines checkForConflicts() and
// checkForInnerConflicts() in parallel and prints the results. Inner
// conflicts abort with an error; conflicts with installed packages are
// only reported.
func checkForAllConflicts(dc *depCatagories) error {
	var err error
	var conflicts map[string]stringSet
	var innerConflicts map[string]stringSet
	var wg sync.WaitGroup
	wg.Add(2)

	// The two checks write disjoint variables and are only read after
	// wg.Wait(), so no further synchronization is needed.
	fmt.Println(bold(cyan("::") + " Checking for conflicts..."))
	go func() {
		conflicts, err = checkForConflicts(dc)
		wg.Done()
	}()

	fmt.Println(bold(cyan("::") + " Checking for inner conflicts..."))
	go func() {
		innerConflicts = checkForInnerConflicts(dc)
		wg.Done()
	}()

	wg.Wait()

	if err != nil {
		return err
	}

	if len(innerConflicts) != 0 {
		fmt.Println(
			red("\nInner conflicts found:"))
		for name, pkgs := range innerConflicts {
			str := "\t" + name + ":"
			for pkg := range pkgs {
				str += " " + magenta(pkg)
			}

			fmt.Println(str)
		}

		return fmt.Errorf("Aborting")
	}

	if len(conflicts) != 0 {
		fmt.Println(
			red("\nPackage conflicts found:"))
		for name, pkgs := range conflicts {
			str := "\tInstalling " + magenta(name) + " will remove:"
			for pkg := range pkgs {
				str += " " + magenta(pkg)
			}

			fmt.Println(str)
		}

		fmt.Println()
	}

	return nil
}
|
551
dependencies.go
551
dependencies.go
@ -1,551 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
alpm "github.com/jguer/go-alpm"
|
||||
rpc "github.com/mikkeloscar/aur"
|
||||
gopkg "github.com/mikkeloscar/gopkgbuild"
|
||||
)
|
||||
|
||||
// depTree holds the raw results of step one of dependency resolving
// (getDepTree): every package gathered, not yet ordered for install.
type depTree struct {
	ToProcess stringSet                // names still waiting on an AUR query
	Repo      map[string]*alpm.Package // resolved repo packages, keyed by name and by provide
	Aur       map[string]*rpc.Pkg      // resolved AUR packages, keyed by name
	Missing   stringSet                // names that could not be resolved anywhere
	Groups    stringSet                // targets that matched a package group
}
|
||||
|
||||
// depCatagories holds the ordered output of step two of dependency
// resolving (getDepCatagories): packages in the order they should be
// installed/built.
type depCatagories struct {
	Repo     []*alpm.Package       // repo packages in install order
	Aur      []*rpc.Pkg            // AUR packages (one per package base) in build order
	MakeOnly stringSet             // names only needed at build time
	Bases    map[string][]*rpc.Pkg // AUR packages grouped by PackageBase
}
|
||||
|
||||
func makeDepTree() *depTree {
|
||||
dt := depTree{
|
||||
make(stringSet),
|
||||
make(map[string]*alpm.Package),
|
||||
make(map[string]*rpc.Pkg),
|
||||
make(stringSet),
|
||||
make(stringSet),
|
||||
}
|
||||
|
||||
return &dt
|
||||
}
|
||||
|
||||
func makeDependCatagories() *depCatagories {
|
||||
dc := depCatagories{
|
||||
make([]*alpm.Package, 0),
|
||||
make([]*rpc.Pkg, 0),
|
||||
make(stringSet),
|
||||
make(map[string][]*rpc.Pkg),
|
||||
}
|
||||
|
||||
return &dc
|
||||
}
|
||||
|
||||
// splitNameFromDep cuts the version requirement from a dependency string,
// returning the bare name and the version constraint. The constraint is
// empty when the dependency carries no version requirement.
func splitNameFromDep(dep string) (string, string) {
	isVerSep := func(r rune) bool {
		return r == '>' || r == '<' || r == '='
	}

	parts := strings.FieldsFunc(dep, isVerSep)
	switch len(parts) {
	case 1:
		return parts[0], ""
	default:
		return parts[0], parts[1]
	}
}
|
||||
|
||||
// splitDbFromName splits a "db/package" target into its db and package
// parts. Only the first "/" is significant; when none is present the db
// part is returned empty.
func splitDbFromName(pkg string) (string, string) {
	idx := strings.Index(pkg, "/")
	if idx < 0 {
		return "", pkg
	}
	return pkg[:idx], pkg[idx+1:]
}
|
||||
|
||||
func getBases(pkgs map[string]*rpc.Pkg) map[string][]*rpc.Pkg {
|
||||
bases := make(map[string][]*rpc.Pkg)
|
||||
|
||||
for _, pkg := range pkgs {
|
||||
_, ok := bases[pkg.PackageBase]
|
||||
if !ok {
|
||||
bases[pkg.PackageBase] = make([]*rpc.Pkg, 0)
|
||||
}
|
||||
bases[pkg.PackageBase] = append(bases[pkg.PackageBase], pkg)
|
||||
}
|
||||
|
||||
return bases
|
||||
}
|
||||
|
||||
// Step two of dependency resolving. We already have all the information on the
|
||||
// packages we need, now it's just about ordering them correctly.
|
||||
// pkgs is a list of targets, the packages we want to install. Dependencies are
|
||||
// not included.
|
||||
// For each package we want we iterate down the tree until we hit the bottom.
|
||||
// This is done recursively for each branch.
|
||||
// The start of the tree is defined as the package we want.
|
||||
// When we hit the bottom of the branch we know thats the first package
|
||||
// we need to install so we add it to the start of the to install
|
||||
// list (dc.Aur and dc.Repo).
|
||||
// We work our way up until there is another branch to go down and do it all
|
||||
// again.
|
||||
//
|
||||
// Here is a visual example:
|
||||
//
|
||||
// a
|
||||
// / \
|
||||
// b c
|
||||
// / \
|
||||
// d e
|
||||
//
|
||||
// We see a and it needs b and c
|
||||
// We see b and it needs d and e
|
||||
// We see d - it needs nothing so we add d to our list and move up
|
||||
// We see e - it needs nothing so we add e to our list and move up
|
||||
// We see c - it needs nothing so we add c to our list and move up
|
||||
//
|
||||
// The final install order would come out as debca
|
||||
//
|
||||
// There is a little more to this, handling provides, multiple packages wanting the
|
||||
// same dependencies, etc. This is just the basic premise.
|
||||
// getDepCatagories orders the already-resolved dependency tree into
// install order (see the comment block above for the algorithm). pkgs are
// the explicit targets; dt is consumed (entries are deleted as they are
// placed into the returned depCatagories).
func getDepCatagories(pkgs []string, dt *depTree) (*depCatagories, error) {
	dc := makeDependCatagories()
	seen := make(stringSet) // package bases already appended to dc.Aur

	dc.Bases = getBases(dt.Aur)

	// Walk each explicit target, pulling its whole dependency branch
	// into dc.Repo/dc.Aur depth first (deepest dependencies first).
	for _, pkg := range pkgs {
		_, name := splitDbFromName(pkg)
		dep, _ := splitNameFromDep(name)
		alpmpkg, exists := dt.Repo[dep]
		if exists {
			repoDepCatagoriesRecursive(alpmpkg, dc, dt, false)
			dc.Repo = append(dc.Repo, alpmpkg)
			delete(dt.Repo, dep)
		}

		aurpkg, exists := dt.Aur[dep]
		if exists {
			depCatagoriesRecursive(aurpkg, dc, dt, false, seen)
			// Only append one package per package base; siblings of
			// a split package are tracked through dc.Bases.
			if !seen.get(aurpkg.PackageBase) {
				dc.Aur = append(dc.Aur, aurpkg)
				seen.set(aurpkg.PackageBase)
			}

			delete(dt.Aur, dep)
		}
	}

	// Anything that is a runtime dependency of an AUR package being
	// built is not make-only after all, so strip it from MakeOnly.
	for _, base := range dc.Bases {
		for _, pkg := range base {
			for _, dep := range pkg.Depends {
				dc.MakeOnly.remove(dep)
			}
		}
	}

	// Likewise for runtime dependencies of repo packages being installed.
	for _, pkg := range dc.Repo {
		pkg.Depends().ForEach(func(_dep alpm.Depend) error {
			dep := _dep.Name
			dc.MakeOnly.remove(dep)

			return nil
		})
	}

	// Explicit targets are never make-only.
	for _, pkg := range pkgs {
		dc.MakeOnly.remove(pkg)
	}

	// Deduplicate dc.Repo (a package can be reached through several
	// branches) while preserving install order.
	dupes := make(map[*alpm.Package]struct{})
	filteredRepo := make([]*alpm.Package, 0)

	for _, pkg := range dc.Repo {
		_, ok := dupes[pkg]
		if ok {
			continue
		}
		dupes[pkg] = struct{}{}
		filteredRepo = append(filteredRepo, pkg)
	}

	dc.Repo = filteredRepo

	return dc, nil
}
|
||||
|
||||
// repoDepCatagoriesRecursive walks pkg's repo dependency branch depth
// first, moving each not-yet-processed dependency out of dt.Repo and
// appending it to dc.Repo in install order (deepest first). When isMake
// is set the whole branch is only needed at build time, so every package
// placed is also recorded in dc.MakeOnly.
func repoDepCatagoriesRecursive(pkg *alpm.Package, dc *depCatagories, dt *depTree, isMake bool) {
	pkg.Depends().ForEach(func(_dep alpm.Depend) error {
		dep := _dep.Name
		alpmpkg, exists := dt.Repo[dep]
		if exists {
			// Remove from the tree before recursing so dependency
			// cycles terminate.
			delete(dt.Repo, dep)
			repoDepCatagoriesRecursive(alpmpkg, dc, dt, isMake)

			if isMake {
				dc.MakeOnly.set(alpmpkg.Name())
			}

			dc.Repo = append(dc.Repo, alpmpkg)
		}

		return nil
	})
}
|
||||
|
||||
// depCatagoriesRecursive walks an AUR package's dependency branch depth
// first, appending AUR and repo dependencies to dc in install order. All
// packages sharing _pkg's package base are processed together (via
// dc.Bases). seen tracks package bases already appended to dc.Aur so a
// split package is only queued for build once.
func depCatagoriesRecursive(_pkg *rpc.Pkg, dc *depCatagories, dt *depTree, isMake bool, seen stringSet) {
	for _, pkg := range dc.Bases[_pkg.PackageBase] {
		for _, deps := range [3][]string{pkg.Depends, pkg.MakeDepends, pkg.CheckDepends} {
			for _, _dep := range deps {
				dep, _ := splitNameFromDep(_dep)

				aurpkg, exists := dt.Aur[dep]
				if exists {
					// Remove before recursing so cycles terminate.
					delete(dt.Aur, dep)
					depCatagoriesRecursive(aurpkg, dc, dt, isMake, seen)

					if !seen.get(aurpkg.PackageBase) {
						dc.Aur = append(dc.Aur, aurpkg)
						seen.set(aurpkg.PackageBase)
					}

					if isMake {
						dc.MakeOnly.set(aurpkg.Name)
					}
				}

				alpmpkg, exists := dt.Repo[dep]
				if exists {
					delete(dt.Repo, dep)
					repoDepCatagoriesRecursive(alpmpkg, dc, dt, isMake)

					if isMake {
						dc.MakeOnly.set(alpmpkg.Name())
					}

					dc.Repo = append(dc.Repo, alpmpkg)
				}

			}
			// The first slice (Depends) is needed at runtime; the
			// remaining slices (make/check depends) and everything
			// they pull in are build-time only.
			isMake = true
		}
	}
}
|
||||
|
||||
// This is step one for dependency resolving. pkgs is a slice of the packages you
|
||||
// want to resolve the dependencies for. They can be a mix of aur and repo
|
||||
// dependencies. All unmet dependencies will be resolved.
|
||||
//
|
||||
// For Aur dependencies depends, makedepends and checkdepends are resolved but
|
||||
// for repo packages only depends are resolved as they are prebuilt.
|
||||
// The return will be split into three catagories: Repo, Aur and Missing.
|
||||
// The return is in no way ordered. This step is is just aimed at gathering the
|
||||
// packages we need.
|
||||
//
|
||||
// This has been designed to make the least amount of rpc requests as possible.
|
||||
// Web requests are probably going to be the bottleneck here so minimizing them
|
||||
// provides a nice speed boost.
|
||||
//
|
||||
// Here is a visual example of the request system.
|
||||
// Remember only unsatisfied packages are requested, if a package is already
|
||||
// installed we dont bother.
|
||||
//
|
||||
// a
|
||||
// / \
|
||||
// b c
|
||||
// / \
|
||||
// d e
|
||||
//
|
||||
// We see a so we send a request for a
|
||||
// We see a wants b and c so we send a request for b and c
|
||||
// We see d and e so we send a request for d and e
|
||||
//
|
||||
// Thats 5 packages in 3 requests. The amount of requests needed should always be
|
||||
// the same as the height of the tree.
|
||||
// The example does not really do this justice, In the real world where packages
|
||||
// have 10+ dependencies each this is a very nice optimization.
|
||||
// getDepTree is step one of dependency resolving: gather every package
// needed to satisfy the given targets (see the comment block above for
// the batching strategy). Targets may be prefixed "db/" to pin them to a
// specific repository or "aur/" to force AUR resolution.
func getDepTree(pkgs []string) (*depTree, error) {
	dt := makeDepTree()

	localDb, err := alpmHandle.LocalDb()
	if err != nil {
		return dt, err
	}
	syncDb, err := alpmHandle.SyncDbs()
	if err != nil {
		return dt, err
	}

	for _, pkg := range pkgs {
		db, name := splitDbFromName(pkg)
		var foundPkg *alpm.Package
		var singleDb *alpm.Db

		// "aur/" prefix: skip the repos entirely.
		if db == "aur" {
			dt.ToProcess.set(name)
			continue
		}

		// Check the repos for a matching dep
		if db != "" {
			singleDb, err = alpmHandle.SyncDbByName(db)
			if err != nil {
				return dt, err
			}
			foundPkg, err = singleDb.PkgCache().FindSatisfier(name)
		} else {
			foundPkg, err = syncDb.FindSatisfier(name)
		}

		if err == nil {
			repoTreeRecursive(foundPkg, dt, localDb, syncDb)
			continue
		} else {
			//would be better to check the groups from singleDb if
			//the user specified a db but theres no easy way to do
			//it without making alpm_lists so dont bother for now
			//db/group is probably a rare use case
			_, err := syncDb.PkgCachebyGroup(name)

			if err == nil {
				dt.Groups.set(pkg)
				continue
			}
		}

		// Neither a repo package nor a group: plain names fall
		// through to the AUR, db-qualified targets are missing.
		if db == "" {
			dt.ToProcess.set(name)
		} else {
			dt.Missing.set(pkg)
		}
	}

	if len(dt.ToProcess) > 0 {
		fmt.Println(bold(cyan("::") + " Querying AUR..."))
	}

	err = depTreeRecursive(dt, localDb, syncDb, false)
	if err != nil {
		return dt, err
	}

	// Version constraints are only validated unless dependency checking
	// was disabled on the command line (-d/--nodeps).
	if !cmdArgs.existsArg("d", "nodeps") {
		err = checkVersions(dt)
	}

	return dt, err
}
|
||||
|
||||
// Takes a repo package,
|
||||
// gives all of the non installed deps,
|
||||
// repeats on each sub dep.
|
||||
// repoTreeRecursive takes a repo package, records it (and every name it
// provides) in dt.Repo, then recurses over each of its dependencies that
// is not already installed or already processed. Dependencies that cannot
// be satisfied from the sync dbs are added to dt.Missing.
// NOTE(review): the named err result is never assigned anywhere in this
// function, so it always returns nil; errors from nested recursive calls
// are not propagated.
func repoTreeRecursive(pkg *alpm.Package, dt *depTree, localDb *alpm.Db, syncDb alpm.DbList) (err error) {
	_, exists := dt.Repo[pkg.Name()]
	if exists {
		return
	}

	dt.Repo[pkg.Name()] = pkg
	// Register every provide under its own name so later lookups by
	// provide hit this package.
	(*pkg).Provides().ForEach(func(dep alpm.Depend) (err error) {
		dt.Repo[dep.Name] = pkg
		return nil
	})

	(*pkg).Depends().ForEach(func(dep alpm.Depend) (err error) {
		_, exists := dt.Repo[dep.Name]
		if exists {
			return
		}

		// Skip dependencies already satisfied by an installed package.
		_, isInstalled := localDb.PkgCache().FindSatisfier(dep.String())
		if isInstalled == nil {
			return
		}

		repoPkg, inRepos := syncDb.FindSatisfier(dep.String())
		if inRepos == nil {
			repoTreeRecursive(repoPkg, dt, localDb, syncDb)
			return
		}

		dt.Missing.set(dep.String())

		return
	})

	return
}
|
||||
|
||||
func depTreeRecursive(dt *depTree, localDb *alpm.Db, syncDb alpm.DbList, isMake bool) (err error) {
|
||||
if len(dt.ToProcess) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
nextProcess := make(stringSet)
|
||||
currentProcess := make(stringSet)
|
||||
// Strip version conditions
|
||||
for _dep := range dt.ToProcess {
|
||||
dep, _ := splitNameFromDep(_dep)
|
||||
currentProcess.set(dep)
|
||||
}
|
||||
|
||||
// Assume toprocess only contains aur stuff we have not seen
|
||||
info, err := aurInfo(currentProcess.toSlice())
|
||||
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Cache the results
|
||||
for _, pkg := range info {
|
||||
dt.Aur[pkg.Name] = pkg
|
||||
|
||||
}
|
||||
|
||||
// Loop through to process and check if we now have
|
||||
// each packaged cached.
|
||||
// If not cached, we assume it is missing.
|
||||
for pkgName := range currentProcess {
|
||||
pkg, exists := dt.Aur[pkgName]
|
||||
|
||||
// Did not get it in the request.
|
||||
if !exists {
|
||||
dt.Missing.set(pkgName)
|
||||
continue
|
||||
}
|
||||
|
||||
// for each dep and makedep
|
||||
for _, deps := range [3][]string{pkg.Depends, pkg.MakeDepends, pkg.CheckDepends} {
|
||||
for _, versionedDep := range deps {
|
||||
dep, _ := splitNameFromDep(versionedDep)
|
||||
|
||||
_, exists = dt.Aur[dep]
|
||||
// We have it cached so skip.
|
||||
if exists {
|
||||
continue
|
||||
}
|
||||
|
||||
_, exists = dt.Repo[dep]
|
||||
// We have it cached so skip.
|
||||
if exists {
|
||||
continue
|
||||
}
|
||||
|
||||
_, exists = dt.Missing[dep]
|
||||
// We know it does not resolve so skip.
|
||||
if exists {
|
||||
continue
|
||||
}
|
||||
|
||||
// Check if already installed.
|
||||
_, isInstalled := localDb.PkgCache().FindSatisfier(versionedDep)
|
||||
if isInstalled == nil && config.ReBuild != "tree" {
|
||||
continue
|
||||
}
|
||||
|
||||
// Check the repos for a matching dep.
|
||||
repoPkg, inRepos := syncDb.FindSatisfier(versionedDep)
|
||||
if inRepos == nil {
|
||||
if isInstalled == nil && config.ReBuild == "tree" {
|
||||
continue
|
||||
}
|
||||
|
||||
repoTreeRecursive(repoPkg, dt, localDb, syncDb)
|
||||
continue
|
||||
}
|
||||
|
||||
// If all else fails add it to next search.
|
||||
nextProcess.set(versionedDep)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dt.ToProcess = nextProcess
|
||||
depTreeRecursive(dt, localDb, syncDb, true)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// checkVersions verifies that every versioned dependency in the tree can
// be satisfied by the version of some package that is going to be
// installed. Dependencies whose version constraint cannot be met are
// added to dt.Missing.
func checkVersions(dt *depTree) error {
	depStrings := make([]string, 0)
	// has maps a package (or provide) name to the versions that will
	// supply it after the install.
	has := make(map[string][]string)

	for _, pkg := range dt.Aur {
		for _, deps := range [3][]string{pkg.Depends, pkg.MakeDepends, pkg.CheckDepends} {
			for _, dep := range deps {
				_, _dep := splitNameFromDep(dep)
				// Only dependencies carrying a version
				// constraint need checking.
				if _dep != "" {
					depStrings = append(depStrings, dep)
				}
			}
		}

		addMapStringSlice(has, pkg.Name, pkg.Version)

		for _, name := range pkg.Provides {
			_name, _ver := splitNameFromDep(name)
			if _ver != "" {
				addMapStringSlice(has, _name, _ver)
			} else {
				// An unversioned provide satisfies any
				// constraint, so exempt the name from checking.
				delete(has, _name)
			}
		}
	}

	for _, pkg := range dt.Repo {
		pkg.Depends().ForEach(func(dep alpm.Depend) error {
			if dep.Mod != alpm.DepModAny {
				depStrings = append(depStrings, dep.String())
			}
			return nil
		})

		addMapStringSlice(has, pkg.Name(), pkg.Version())

		pkg.Provides().ForEach(func(dep alpm.Depend) error {
			if dep.Mod != alpm.DepModAny {
				addMapStringSlice(has, dep.Name, dep.Version)
			} else {
				// Unversioned provide: exempt from checking.
				delete(has, dep.Name)
			}

			return nil
		})

	}

	// NOTE(review): the parse error from ParseDeps is ignored here —
	// presumably unparseable constraints are deliberately skipped.
	deps, _ := gopkg.ParseDeps(depStrings)

	for _, dep := range deps {
		satisfied := false
		verStrs, ok := has[dep.Name]
		if !ok {
			continue
		}

		// Satisfied if any provider's version meets the constraint.
		for _, verStr := range verStrs {
			version, err := gopkg.NewCompleteVersion(verStr)
			if err != nil {
				return err
			}

			if version.Satisfies(dep) {
				satisfied = true
				break
			}
		}

		if !satisfied {
			dt.Missing.set(dep.String())
		}
	}

	return nil
}
|
899
doc/yay.8
899
doc/yay.8
@ -1,382 +1,599 @@
|
||||
'\" t
|
||||
.TH "YAY" "8" "2018-02-29" "Yay v3\&.460+" "Yay Manual"
|
||||
.TH "YAY" "8" "2019\-10\-21" "Yay v12.0+" "Yay Manual"
|
||||
.nh
|
||||
.ad l
|
||||
.SH "NAME"
|
||||
.SH NAME
|
||||
yay \- AUR Helper written in go
|
||||
.SH "SYNOPSIS"
|
||||
.sp
|
||||
|
||||
.SH SYNOPSIS
|
||||
\fIyay\fR <operation> [options] [targets]
|
||||
.sp
|
||||
\fIyay\fR <search pattern>
|
||||
.SH "DESCRIPTION"
|
||||
\fIyay\fR <search terms>
|
||||
.sp
|
||||
Yay is a Pacman wrapper with AUR support\&. It passes options to Makepkg and
|
||||
Pacman after resolving packages to install/upgrade\&.
|
||||
.sp
|
||||
This manpage only covers options unique to Yay\&. For other options see
|
||||
\fBpacman(8)\fR\&.
|
||||
.SH "YAY OPERATIONS"
|
||||
.PP
|
||||
\fB\-Y, --yay\fR
|
||||
.RS 4
|
||||
Perform yay specific operations\&. This is the default if no other operation is
|
||||
selected\&.
|
||||
\fIyay\fR
|
||||
|
||||
.SH DESCRIPTION
|
||||
Yay is a Pacman wrapper with AUR support. It passes options to Makepkg and
|
||||
Pacman after resolving packages to install/upgrade.
|
||||
|
||||
This manpage only covers options unique to Yay. For other options see
|
||||
\fBpacman(8)\fR.
|
||||
|
||||
.SH YAY OPERATIONS
|
||||
|
||||
.TP
|
||||
.B \-Y, \-\-yay
|
||||
Perform yay specific operations. This is the default if no other operation is
|
||||
selected and targets are defined.
|
||||
|
||||
.TP
|
||||
.B \-B, \-\-build
|
||||
Build a PKGBUILD in a given directory.
|
||||
|
||||
.TP
|
||||
.B \-P, \-\-show
|
||||
Perform yay specific print operations.
|
||||
|
||||
.TP
|
||||
.B \-G, \-\-getpkgbuild
|
||||
Downloads PKGBUILD from ABS or AUR. The ABS can only be used for Arch Linux repositories.
|
||||
|
||||
.TP
|
||||
.B \-W, \-\-web
|
||||
Web related operations such as voting for AUR packages.
|
||||
|
||||
.RE
|
||||
.PP
|
||||
\fB\-P, --print\fR
|
||||
.RS 4
|
||||
Perform yay specific print operations\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-G, --getpkgbuild\fR
|
||||
.RS 4
|
||||
Downloads PKGBUILD from ABS or AUR\&.
|
||||
.RE
|
||||
.PP
|
||||
If no operation is selected -Y will be assumed\&.
|
||||
.SH "EXTENDED PACMAN OPERATIONS"
|
||||
.PP
|
||||
\fB\-S, -Si, -Ss, -Su, -Qu\fR
|
||||
.RS 4
|
||||
These operations are extended to support both AUR and repo packages\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-R\fR
|
||||
.RS 4
|
||||
Yay will also remove cached data about devel packages\&.
|
||||
.RE
|
||||
.SH "YAY OPTIONS (APPLY TO -Y AND --YAY)"
|
||||
.PP
|
||||
\fB<NO OPTION>\fR
|
||||
.RS 4
|
||||
If no operation is specified 'yay \-Syu' will be performed
|
||||
|
||||
If no operation is specified and targets are provided \-Y will be assumed
|
||||
|
||||
.SH EXTENDED PACMAN OPERATIONS
|
||||
.TP
|
||||
.B \-S, \-Si, \-Sl, \-Ss, \-Su, \-Sc, \-Qu
|
||||
These operations are extended to support both AUR and repo packages.
|
||||
|
||||
.TP
|
||||
.B \-Sc
|
||||
Yay will also clean cached AUR package and any untracked Files in the
|
||||
cache. Cleaning untracked files will wipe any downloaded sources or
|
||||
built packages but will keep already downloaded vcs sources.
|
||||
|
||||
.TP
|
||||
.B \-R
|
||||
Yay will also remove cached data about devel packages.
|
||||
|
||||
.SH NEW OPTIONS
|
||||
.TP
|
||||
.B \-N, \-\-repo
|
||||
Assume all targets are from the repositories. Additionally Actions such as
|
||||
sysupgrade will only act on repository packages.
|
||||
|
||||
.TP
|
||||
.B \-a, \-\-aur
|
||||
Assume all targets are from the AUR. Additionally Actions such as
|
||||
sysupgrade will only act on AUR packages.
|
||||
|
||||
Note that dependency resolving will still act normally and include repository
|
||||
packages.
|
||||
|
||||
.SH YAY OPTIONS (APPLY TO \-Y AND \-\-YAY)
|
||||
|
||||
.TP
|
||||
.B <NO OPTION>
|
||||
Displays a list of packages matching the search terms and prompts the user on
|
||||
which packages to install (yogurt mode)\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB \-\-gendb\fR
|
||||
.RS 4
|
||||
Generate development package database\&. Tracks the latest commit for each
|
||||
development package, when there is a new commit Yay will know to update\&. This
|
||||
which packages to install (yogurt mode).
|
||||
|
||||
The first search term is used to query the different sources and
|
||||
the following search terms are used to narrow the search results
|
||||
through exact matching.
|
||||
|
||||
.TP
|
||||
.B \-\-gendb
|
||||
Generate development package database. Tracks the latest commit for each
|
||||
development package, when there is a new commit Yay will know to update. This
|
||||
is done per package whenever a package is synced. This option should only be
|
||||
used when migrating to Yay from another AUR helper.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-c \-\-clean\fR
|
||||
.RS 4
|
||||
Remove unneeded dependencies\&.
|
||||
.RE
|
||||
.SH "PRINT OPTIONS (APPLY TO -P AND --PRINT)"
|
||||
\fB\-c \-\-complete\fR
|
||||
.RS 4
|
||||
Print a list of all AUR and repo packages\&. This is to allow shell completion
|
||||
and is not intended to be used directly by the user\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-f \-\-fish\fR
|
||||
.RS 4
|
||||
During complete adjust the output for the fish shell\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-d \-\-defaultconfig\fR
|
||||
.RS 4
|
||||
Print default yay configuration\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-g \-\-config\fR
|
||||
.RS 4
|
||||
Print current yay configuration\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-n \-\-numberupgrades\fR
|
||||
.RS 4
|
||||
Print number of packages that need to be updated\&. Note this does not perform
|
||||
a database refresh\&. Run \fByay -Sy\fR Before this for an up to date result\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-s \-\-stats\fR
|
||||
.RS 4
|
||||
Displays information about installed packages and system health\&. If there are
|
||||
orphaned, out-of-date or packages that no longer exist on the AUR warnings will
|
||||
be displayed\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-u \-\-upgrades\fR
|
||||
.RS 4
|
||||
Print names of packages that need to be updated\&. Note this does not perform
|
||||
a database refresh\&. Run \fByay -Sy\fR Before this for an up to date result\&.
|
||||
.PP
|
||||
.SH "PERMANENT CONFIGURATION SETTINGS"
|
||||
.PP
|
||||
\fB\-\-save\fR
|
||||
.RS 4
|
||||
Causes the following options to be saved back to the config file\&. This
|
||||
|
||||
.TP
|
||||
.B \-c, \-\-clean
|
||||
Remove unneeded dependencies.
|
||||
|
||||
.TP
|
||||
.B \-cc
|
||||
Remove unneeded dependencies, including packages optionally required by any other package.
|
||||
|
||||
.SH SHOW OPTIONS (APPLY TO \-P AND \-\-show)
|
||||
.TP
|
||||
.B \-c, \-\-complete
|
||||
Print a list of all AUR and repo packages. This allows shell completion
|
||||
and is not intended to be used directly by the user.
|
||||
|
||||
.TP
|
||||
.B \-d, \-\-defaultconfig
|
||||
Print default yay configuration.
|
||||
|
||||
.TP
|
||||
.B \-g, \-\-currentconfig
|
||||
Print current yay configuration.
|
||||
|
||||
.TP
|
||||
.B \-s, \-\-stats
|
||||
Displays information about installed packages and system health. If there are
|
||||
orphaned, or out\-of\-date packages, or packages that no longer exist on the
|
||||
AUR; warnings will be displayed.
|
||||
|
||||
.TP
|
||||
.B \-w, \-\-news
|
||||
Print new news from the Archlinux homepage. News is considered new if it is
|
||||
newer than the build date of all native packages. Pass this twice to show all
|
||||
available news.
|
||||
|
||||
.TP
|
||||
.B \-q, \-\-quiet
|
||||
Only show titles when printing news.
|
||||
|
||||
.SH BUILD OPTIONS (APPLY TO \-B AND \-\-build)
|
||||
.TP
|
||||
.B \-i, \-\-install
|
||||
Build and install a PKGBUILD in a given directory
|
||||
|
||||
.SH GETPKGBUILD OPTIONS (APPLY TO \-G AND \-\-getpkgbuild)
|
||||
.TP
|
||||
.B \-f, \-\-force
|
||||
Force download for ABS packages that already exist in the current directory. This
|
||||
ensures directories are not accidentally overwritten.
|
||||
|
||||
.TP
|
||||
.B \-p, \-\-print
|
||||
Prints the PKGBUILD of the given packages to stdout.
|
||||
|
||||
.SH WEB OPTIONS (APPLY TO \-W AND \-\-web)
|
||||
|
||||
.TP
|
||||
Web related operations such as voting for AUR packages.
|
||||
Requires setting AUR_USERNAME and AUR_PASSWORD environment variables.
|
||||
|
||||
.TP
|
||||
.B \-u, \-\-unvote
|
||||
Remove vote from AUR package(s)
|
||||
|
||||
.TP
|
||||
.B \-v, \-\-vote
|
||||
Vote for AUR package(s)
|
||||
|
||||
.SH PERMANENT CONFIGURATION SETTINGS
|
||||
.TP
|
||||
.B \-\-save
|
||||
Causes the following options to be saved back to the config file. This
|
||||
provides an easy way to change config options without directly editing the
|
||||
file\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-builddir <dir>\fR
|
||||
.RS 4
|
||||
Directory to use for Building AUR Packages\&. This directory is also used as
|
||||
the AUR cache when deciding if Yay should should skip builds\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-editor <file>\fR
|
||||
.RS 4
|
||||
Editor to use when editing PKGBUILDs\&. If this is not set the \fBEDITOR\fR
|
||||
environment variable will be checked, followed by \fBVISUAL\fR\&. If none of
|
||||
these are set Yay will prompt the user for an editor\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-makepkg <file>\fR
|
||||
.RS 4
|
||||
file.
|
||||
|
||||
.TP
|
||||
.B \-\-aururl
|
||||
Set an alternative AUR URL.
|
||||
|
||||
.TP
|
||||
.B \-\-aurrpcurl
|
||||
Set an alternative URL for the AUR /rpc endpoint.
|
||||
|
||||
.TP
|
||||
.B \-\-builddir <dir>
|
||||
Directory to use for Building AUR Packages. This directory is also used as
|
||||
the AUR cache when deciding if Yay should skip builds.
|
||||
|
||||
.TP
|
||||
.B \-\-editor <command>
|
||||
Editor to use when editing PKGBUILDs. If this is not set the \fBVISUAL\fR
|
||||
environment variable will be checked, followed by \fBEDITOR\fR. If none of
|
||||
these are set Yay will prompt the user for an editor.
|
||||
|
||||
.TP
|
||||
.B \-\-editorflags <flags>
|
||||
Passes arguments to the editor. These flags get passed to every instance where
|
||||
the editor is called by Yay. Arguments are split on whitespace before being
|
||||
passed to the editor. Multiple arguments may be passed by supplying a space
|
||||
separated list that is quoted by the shell.
|
||||
|
||||
.TP
|
||||
.B \-\-makepkg <command>
|
||||
The command to use for \fBmakepkg\fR calls. This can be a command in
|
||||
\fBPATH\fR or an absolute path to the file\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-pacman <file>\fR
|
||||
.RS 4
|
||||
\fBPATH\fR or an absolute path to the file.
|
||||
|
||||
.TP
|
||||
.B \-\-pacman <command>
|
||||
The command to use for \fBpacman\fR calls. This can be a command in
|
||||
\fBPATH\fR or an absolute path to the file\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-tar <file>\fR
|
||||
.RS 4
|
||||
\fBPATH\fR or an absolute path to the file.
|
||||
|
||||
.TP
|
||||
.B \-\-tar <command>
|
||||
The command to use for \fBbsdtar\fR calls. This can be a command in
|
||||
\fBPATH\fR or an absolute path to the file\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-git <file>\fR
|
||||
.RS 4
|
||||
\fBPATH\fR or an absolute path to the file.
|
||||
|
||||
.TP
|
||||
.B \-\-git <command>
|
||||
The command to use for \fBgit\fR calls. This can be a command in
|
||||
\fBPATH\fR or an absolute path to the file\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-gpg <file>\fR
|
||||
.RS 4
|
||||
\fBPATH\fR or an absolute path to the file.
|
||||
|
||||
.TP
|
||||
.B \-\-gpg <command>
|
||||
The command to use for \fBgpg\fR calls. This can be a command in
|
||||
\fBPATH\fR or an absolute path to the file\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-config <file>\fR
|
||||
.RS 4
|
||||
The pacman config file to use\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-requestsplitn <n>\fR
|
||||
.RS 4
|
||||
The maximum amount of packages to request per AUR query\&. The higher the
|
||||
number the faster AUR requests will be\&. Requesting too many packages in one
|
||||
AUR query will cause an error\%. This should only make a noticeable difference
|
||||
with very large requests (>500) packages\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-topdown\fR
|
||||
.RS 4
|
||||
Display repository packages first and then AUR packages\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-bottomup\fR
|
||||
.RS 4
|
||||
Show AUR packages first and then repository packages\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-devel\fR
|
||||
.RS 4
|
||||
During sysupgrade also check AUR development packages for updates\&. Currently
|
||||
only GitHub packages are supported\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-nodevel\fR
|
||||
.RS 4
|
||||
Do not check for development packages updates during sysupgrade\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-afterclean\fR
|
||||
.RS 4
|
||||
Remove package sources after successful Install\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-noafterclean\fR
|
||||
.RS 4
|
||||
Do not remove package sources after successful Install\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-timeupdate\fR
|
||||
.RS 4
|
||||
\fBPATH\fR or an absolute path to the file.
|
||||
|
||||
.TP
|
||||
.B \-\-config <file>
|
||||
The pacman config file to use.
|
||||
|
||||
.TP
|
||||
.B \-\-makepkgconf <file>
|
||||
The config file for makepkg to use\%. If this is not set then the default
|
||||
config file will be used.
|
||||
|
||||
.TP
|
||||
.B \-\-nomakepkgconf
|
||||
Reset the makepkg config file back to its default.
|
||||
|
||||
.TP
|
||||
.B \-\-requestsplitn <number>
|
||||
The maximum amount of packages to request per AUR query. The higher the
|
||||
number the faster AUR requests will be. Requesting too many packages in one
|
||||
AUR query will cause an error. This should only make a noticeable difference
|
||||
with very large requests (>500) packages.
|
||||
|
||||
.TP
|
||||
.B \-\-completioninterval <days>
|
||||
Time in days to refresh the completion cache. Setting this to 0 will cause
|
||||
the cache to be refreshed every time, while setting this to -1 will cause the
|
||||
cache to never be refreshed.
|
||||
|
||||
.TP
|
||||
.B \-\-sortby <votes|popularity|id|baseid|name|base|submitted|modified>
|
||||
Sort AUR results by a specific field during search.
|
||||
|
||||
.TP
|
||||
.B \-\-searchby <name|name-desc|maintainer|depends|checkdepends|makedepends|optdepends|provides|conflicts|replaces|groups|keywords|comaintainers>
|
||||
Search for AUR packages by querying the specified field.
|
||||
|
||||
.TP
|
||||
.B \-\-answerclean <All|None|Installed|NotInstalled|...>
|
||||
Set a predetermined answer for the clean build menu question. This answer
|
||||
will be used instead of reading from standard input but will be parsed exactly
|
||||
the same.
|
||||
|
||||
.TP
|
||||
.B \-\-answerdiff <All|None|Installed|NotInstalled|...>
|
||||
Set a predetermined answer for the edit diff menu question. This answer
|
||||
will be used instead of reading from standard input but will be parsed exactly
|
||||
the same.
|
||||
|
||||
.TP
|
||||
.B \-\-answeredit <All|None|Installed|NotInstalled|...>
|
||||
Set a predetermined answer for the edit pkgbuild menu question. This answer
|
||||
will be used instead of reading from standard input but will be parsed exactly
|
||||
the same.
|
||||
|
||||
.TP
|
||||
.B \-\-answerupgrade <Repo|^Repo|None|...>
|
||||
Set a predetermined answer for the upgrade menu question. Selects which package
|
||||
ranges or repos to omit for updates. This answer will be used instead of
|
||||
reading from standard input but will be treated exactly the same.
|
||||
|
||||
.TP
|
||||
.B \-\-noanswerclean
|
||||
Unset the answer for the clean build menu.
|
||||
|
||||
.TP
|
||||
.B \-\-noanswerdiff
|
||||
Unset the answer for the diff menu.
|
||||
|
||||
.TP
|
||||
.B \-\-noansweredit
|
||||
Unset the answer for the edit pkgbuild menu.
|
||||
|
||||
.TP
|
||||
.B \-\-noanswerupgrade
|
||||
Unset the answer for the upgrade menu.
|
||||
|
||||
.TP
|
||||
.B \-\-cleanmenu
|
||||
Show the clean menu. This menu gives you the chance to fully delete the
|
||||
downloaded build files from Yay's cache before redownloading a fresh copy.
|
||||
|
||||
If 'cleanmenu' is enabled in the configuration file, you can temporarily disable it by
|
||||
using '--cleanmenu=false' on the command line
|
||||
|
||||
.TP
|
||||
.B \-\-diffmenu
|
||||
Show the diff menu. This menu gives you the option to view diffs from
|
||||
build files before building.
|
||||
|
||||
Diffs are shown via \fBgit diff\fR which uses
|
||||
less by default. This behaviour can be changed via git's config, the
|
||||
\fB$GIT_PAGER\fR or \fB$PAGER\fR environment variables.
|
||||
|
||||
.TP
|
||||
.B \-\-editmenu
|
||||
Show the edit menu. This menu gives you the option to edit or view PKGBUILDs
|
||||
before building.
|
||||
|
||||
\fBWarning\fR: Yay resolves dependencies ahead of time via the RPC. It is not
|
||||
recommended to edit pkgbuild variables unless you know what you are doing.
|
||||
|
||||
.TP
|
||||
.B \-\-askremovemake
|
||||
Ask to remove makedepends after installing packages.
|
||||
|
||||
.TP
|
||||
.B \-\-askyesremovemake
|
||||
Ask to remove makedepends after installing packages(with "Y" as default).
|
||||
|
||||
.TP
|
||||
.B \-\-removemake
|
||||
Remove makedepends after installing packages.
|
||||
|
||||
.TP
|
||||
.B \-\-noremovemake
|
||||
Do not remove makedepends after installing packages.
|
||||
|
||||
.TP
|
||||
.B \-\-topdown
|
||||
Display repository packages first and then AUR packages.
|
||||
|
||||
.TP
|
||||
.B \-\-bottomup
|
||||
Show AUR packages first and then repository packages.
|
||||
|
||||
.TP
|
||||
.B \-\-singlelineresults
|
||||
Override pacman's usual double-line search result format and list each result
|
||||
on its own line.
|
||||
|
||||
.TP
|
||||
.B \-\-doublelineresults
|
||||
Follow pacman's double-line search result format and list each result using
|
||||
two lines.
|
||||
|
||||
.TP
|
||||
.B \-\-devel
|
||||
During sysupgrade also check AUR development packages for updates. Currently
|
||||
only Git packages are supported.
|
||||
|
||||
Devel checking is done using \fBgit ls-remote\fR. The newest commit hash is
|
||||
compared against the hash at install time. This allows devel updates to be
|
||||
checked almost instantly and not require the original pkgbuild to be downloaded.
|
||||
|
||||
The slower pacaur-like devel checks can be implemented manually by piping
|
||||
a list of packages into yay (see \fBexamples\fR).
|
||||
|
||||
If 'devel' is enabled in the configuration file, you can temporarily disable it by
|
||||
using '--devel=false' on the command line
|
||||
|
||||
.TP
|
||||
.B \-\-cleanafter
|
||||
Remove untracked files after installation.
|
||||
|
||||
Untracked files are removed with the exception of directories.
|
||||
This allows VCS packages to easily pull an update
|
||||
instead of having to reclone the entire repo.
|
||||
|
||||
.TP
|
||||
.B \-\-keepsrc
|
||||
Keep pkg/ and src/ after building packages
|
||||
|
||||
.TP
|
||||
.B \-\-timeupdate
|
||||
During sysupgrade also compare the build time of installed packages against
|
||||
the last modification time of each package's AUR page\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-notimeupdate\fR
|
||||
.RS 4
|
||||
Do not consider build times during sysupgrade\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-redownload\fR
|
||||
.RS 4
|
||||
Always download pkgbuilds of targets even when a copy is available in cache\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-redownloadall\fR
|
||||
.RS 4
|
||||
the last modification time of each package's AUR page.
|
||||
|
||||
.TP
|
||||
.B \-\-separatesources
|
||||
Separate query results by source, AUR and sync
|
||||
|
||||
.TP
|
||||
.B \-\-redownload
|
||||
Always download pkgbuilds of targets even when a copy is available in cache.
|
||||
|
||||
.TP
|
||||
.B \-\-redownloadall
|
||||
Always download pkgbuilds of all AUR packages even when a copy is available
|
||||
in cache\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-noredownload\fR
|
||||
.RS 4
|
||||
in cache.
|
||||
|
||||
.TP
|
||||
.B \-\-noredownload
|
||||
When downloading pkgbuilds if the pkgbuild is found in cache and is equal or
|
||||
newer than the AUR's version use that instead of downloading a new one\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-rebuild\fR
|
||||
.RS 4
|
||||
Always build target packages even when a copy is available in cache\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-rebuildall\fR
|
||||
.RS 4
|
||||
newer than the AUR's version use that instead of downloading a new one.
|
||||
|
||||
.TP
|
||||
.B \-\-provides
|
||||
Look for matching providers when searching for AUR packages. When multiple
|
||||
providers are found a menu will appear prompting you to pick one. This
|
||||
increases dependency resolve time although this should not be noticeable.
|
||||
|
||||
.TP
|
||||
.B \-\-pgpfetch
|
||||
Prompt to import unknown PGP keys from the \fBvalidpgpkeys\fR field of each
|
||||
PKGBUILD.
|
||||
|
||||
.TP
|
||||
.B \-\-useask
|
||||
Use pacman's --ask flag to automatically confirm package conflicts. Yay lists
|
||||
conflicts ahead of time. It is possible that Yay does not detect
|
||||
a conflict, causing a package to be removed without the user's confirmation.
|
||||
However, this is very unlikely.
|
||||
|
||||
.TP
|
||||
.B \-\-combinedupgrade
|
||||
During sysupgrade, Yay will first perform a refresh, then show
|
||||
its combined menu of repo and AUR packages that will be upgraded. Then after
|
||||
reviewing the pkgbuilds, the repo and AUR upgrade will start with no need
|
||||
for manual intervention.
|
||||
|
||||
If Yay exits for any reason After the refresh without upgrading. It is then
|
||||
the user's responsibility to either resolve the reason Yay exited or run
|
||||
a sysupgrade through pacman directly.
|
||||
|
||||
.TP
|
||||
.B \-\-batchinstall
|
||||
When building and installing AUR packages instead of installing each package
|
||||
after building, queue each package for install. Then once either all packages
|
||||
are built or a package in the build queue is needed as a dependency to build
|
||||
another package, install all the packages in the install queue.
|
||||
|
||||
.TP
|
||||
.B \-\-rebuild
|
||||
Always build target packages even when a copy is available in cache.
|
||||
|
||||
.TP
|
||||
.B \-\-rebuildall
|
||||
Always build all AUR packages even when a copy is available
|
||||
in cache\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-rebuildtree\fR
|
||||
.RS 4
|
||||
in cache.
|
||||
|
||||
.TP
|
||||
.B \-\-rebuildtree
|
||||
When installing an AUR package rebuild and reinstall all of its AUR
|
||||
dependencies recursivley, even the ones already installed. This flag allows
|
||||
dependencies recursively, even the ones already installed. This flag allows
|
||||
you to easily rebuild packages against your current system's libraries if they
|
||||
have become incompatible.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-norebuild\fR
|
||||
.RS 4
|
||||
|
||||
.TP
|
||||
.B \-\-norebuild
|
||||
When building packages if the package is found in cache and is an equal version
|
||||
to the one wanted skip the package build and use the existing package\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-mflags <flags>\fR
|
||||
.RS 4
|
||||
Passes arguments to makepkg\&. These flags get passed to every instance where
|
||||
to the one wanted skip the package build and use the existing package.
|
||||
|
||||
.TP
|
||||
.B \-\-mflags <flags>
|
||||
Passes arguments to makepkg. These flags get passed to every instance where
|
||||
makepkg is called by Yay. Arguments are split on whitespace before being
|
||||
passed to makepkg. Multiple arguments may be passed by supplying a space
|
||||
separated list that is quoted by the shell.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-gpgflags <flags>\fR
|
||||
.RS 4
|
||||
Passes arguments to gpg\&. These flags get passed to every instance where
|
||||
|
||||
.TP
|
||||
.B \-\-gpgflags <flags>
|
||||
Passes arguments to gpg. These flags get passed to every instance where
|
||||
gpg is called by Yay. Arguments are split on whitespace before being
|
||||
passed to gpg. Multiple arguments may be passed by supplying a space
|
||||
separated list that is quoted by the shell.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-sudoloop\fR
|
||||
.RS 4
|
||||
|
||||
.TP
|
||||
.B \-\-sudo <command>
|
||||
The command to use for \fBsudo\fR calls. This can be a command in
|
||||
\fBPATH\fR or an absolute path to the file.
|
||||
The sudoloop is not guaranteed to work with a custom \fBsudo\fR command.
|
||||
|
||||
.TP
|
||||
.B \-\-sudoflags <flags>
|
||||
Passes arguments to sudo. These flags get passed to every instance where
|
||||
sudo is called by Yay. Arguments are split on whitespace before being
|
||||
passed to sudo. Multiple arguments may be passed by supplying a space
|
||||
separated list that is quoted by the shell.
|
||||
|
||||
.TP
|
||||
.B \-\-sudoloop
|
||||
Loop sudo calls in the background to prevent sudo from timing out during long
|
||||
builds\&.
|
||||
.RE
|
||||
.PP
|
||||
\fB\-\-nosudoloop\fR
|
||||
.RS 4
|
||||
Do not loop sudo calls in the background\&.
|
||||
.RE
|
||||
.SH "EXAMPLES"
|
||||
.PP
|
||||
builds.
|
||||
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
yay \fIfoo\fR
|
||||
.RS 4
|
||||
Search and install from the repos and the \fBAUR\fR\ using yogurt mode\&.
|
||||
.RE
|
||||
.PP
|
||||
yay -Syu
|
||||
.RS 4
|
||||
Update package list and upgrade all currently installed repo and \fBAUR\fR\&.
|
||||
.RE
|
||||
.PP
|
||||
yay -S \fIfoo\fR
|
||||
.RS 4
|
||||
Installs package \fIfoo\fR from the repos or the \fBAUR\fR\&.
|
||||
.RE
|
||||
.PP
|
||||
yay -Ss \fIfoo\fR
|
||||
.RS 4
|
||||
Searches for package \fIfoo\fR on the repos or the \fBAUR\fR\&.
|
||||
.RE
|
||||
.PP
|
||||
yay -Si \fIfoo\fR
|
||||
.RS 4
|
||||
Gets information about package \fIfoo\fR from the repos or the \fBAUR\fR\&.
|
||||
.RE
|
||||
.PP
|
||||
yay -S \fIfoo\fR --mflags "--skipchecksums --skippgpcheck"
|
||||
.RS 4
|
||||
Installs \fIfoo\fR while skipping checksums and pgp checks\&.
|
||||
.RE
|
||||
.PP
|
||||
yay --devel --save
|
||||
.RS 4
|
||||
Sets devel to true in the config\&.
|
||||
.RE
|
||||
.PP
|
||||
yay --stats
|
||||
.RS 4
|
||||
Shows statistics for installed packages and system health\&.
|
||||
.RE
|
||||
.SH "FILES"
|
||||
.sp
|
||||
\fBCONFIG DIRECTORY\fR
|
||||
.RS 4
|
||||
The config directory is \fI$XDG_CONFIG_HOME/yay/\fR\&. if
|
||||
Search and install from the repos and the \fBAUR\fR\ using yogurt mode.
|
||||
|
||||
.TP
|
||||
yay \-Syu
|
||||
Update package list and upgrade all currently installed repo and \fBAUR\fR.
|
||||
|
||||
.TP
|
||||
yay \-Sua
|
||||
Update all currently installed \fBAUR\fR packages.
|
||||
|
||||
.TP
|
||||
yay \-S \fIfoo\fR
|
||||
Installs package \fIfoo\fR from the repos or the \fBAUR\fR.
|
||||
|
||||
.TP
|
||||
yay \-Ss \fIfoo\fR
|
||||
Searches for package \fIfoo\fR on the repos or the \fBAUR\fR.
|
||||
|
||||
.TP
|
||||
yay \-Si \fIfoo\fR
|
||||
Gets information about package \fIfoo\fR from the repos or the \fBAUR\fR.
|
||||
|
||||
.TP
|
||||
yay \-S \fIfoo\fR \-\-mflags "\-\-skipchecksums \-\-skippgpcheck"
|
||||
Installs \fIfoo\fR while skipping checksums and pgp checks.
|
||||
|
||||
.TP
|
||||
yay \-\-devel \-\-save
|
||||
Sets devel to true in the config.
|
||||
|
||||
.TP
|
||||
yay \-P \-\-stats
|
||||
Shows statistics for installed packages and system health.
|
||||
|
||||
.TP
|
||||
pacman -Qmq | grep -Ee '-(cvs|svn|git|hg|bzr|darcs)$' | yay -S --needed -
|
||||
pacaur-like devel check.
|
||||
|
||||
.SH ENVIRONMENT VARIABLES
|
||||
.TP
|
||||
.B AURDEST
|
||||
Can be set to configure the build directory.
|
||||
|
||||
Overridden by \-\-builddir.
|
||||
|
||||
.TP
|
||||
.B VISUAL, EDITOR
|
||||
When editor is not configured, use these variables to pick what editor
|
||||
to use when editing PKGBUILDS.
|
||||
|
||||
.SH FILES
|
||||
.TP
|
||||
.B CONFIG DIRECTORY
|
||||
The config directory is \fI$XDG_CONFIG_HOME/yay/\fR. If
|
||||
\fB$XDG_CONFIG_HOME\fR is unset, the config directory will fall back to
|
||||
\fI$HOME/.config/yay\fR\%.
|
||||
.PP
|
||||
\fIconfig.json\fR\& Is used to store all of Yay's config options\&. Editing
|
||||
\fI$HOME/.config/yay\fR.
|
||||
|
||||
\fIconfig.json\fR Is used to store all of Yay's config options. Editing
|
||||
this file should be done through Yay, using the options
|
||||
mentioned in \fBPERMANENT CONFIGURATION SETTINGS\fR\&.
|
||||
.RE
|
||||
.PP
|
||||
\fBCACHE DIRECTORY\fR
|
||||
.RS 4
|
||||
The cache directory is \fI$XDG_CACHE_HOME/yay/\fR\&. if
|
||||
mentioned in \fBPERMANENT CONFIGURATION SETTINGS\fR.
|
||||
|
||||
.TP
|
||||
.B CACHE DIRECTORY
|
||||
The cache directory is \fI$XDG_CACHE_HOME/yay/\fR. If
|
||||
\fB$XDG_CACHE_HOME\fR is unset, the cache directory will fall back to
|
||||
\fI$HOME/.cache/yay\fR\&.
|
||||
.PP
|
||||
\fIaur_<shellname>\fR holds a list of of all packages, including the AUR,
|
||||
for shell completion\&. The completion files are refreshed every 48 hours\&.
|
||||
.PP
|
||||
\fI$HOME/.cache/yay\fR.
|
||||
|
||||
\fIcompletion.cache\fR holds a list of of all packages, including the AUR,
|
||||
for shell completion. By default the completion files are refreshed every
|
||||
7 days.
|
||||
|
||||
\fIvcs.json\fR tracks VCS packages and the latest commit of each source. If
|
||||
any of these commits change the package will be upgraded during a devel update.
|
||||
.RE
|
||||
.PP
|
||||
\fBBUILD DIRECTORY\fR
|
||||
.RS 4
|
||||
|
||||
.TP
|
||||
.B BUILD DIRECTORY
|
||||
Unless otherwise set this should be the same as \fBCACHE DIRECTORY\fR. This
|
||||
directory is used to store downloaded AUR Packages as well as any source files
|
||||
and built packages from those packages\&.
|
||||
.RE
|
||||
.PP
|
||||
\fBPACMAN.CONF\fR
|
||||
.RS 4
|
||||
and built packages from those packages.
|
||||
|
||||
.TP
|
||||
.B PACMAN.CONF
|
||||
Yay uses Pacman's config file to set certain pacman options either through
|
||||
go-alpm or Yay itself. Options inherited include most libalpm options and
|
||||
pacman options\&.
|
||||
.PP
|
||||
Notably \fBDatabases\fR, \fBColor\fR and \fB*Path/*Dir\fR options are used\&.
|
||||
.RE
|
||||
.PP
|
||||
.SH "SEE ALSO"
|
||||
.sp
|
||||
\fBmakepkg\fR(8)
|
||||
\fBPKGBUILD\fR(5)
|
||||
\fBpacman\fR(8)
|
||||
\fBpacman\&.conf\fR(5)
|
||||
.PP
|
||||
See the arch wiki at https://wiki\&.archlinux\&.org/index\&.php/Arch_User_Repository for more info on the \fBAUR\fR\&.
|
||||
.SH "BUGS"
|
||||
.PP
|
||||
Please report bugs to our GitHub page https://github\&.com/Jguer/yay
|
||||
.SH "AUTHORS"
|
||||
.sp
|
||||
Jguer <joaogg3@gmail\&.com>
|
||||
go\-alpm or Yay itself. Options inherited include most libalpm options and
|
||||
pacman options.
|
||||
|
||||
Notably: \fBDatabases\fR, \fBColor\fR and \fB*Path/*Dir\fR options are used.
|
||||
|
||||
.SH SEE ALSO
|
||||
.BR makepkg (8),
|
||||
.BR makepkg.conf (5),
|
||||
.BR PKGBUILD (5),
|
||||
.BR pacman (8),
|
||||
.BR pacman.conf (5)
|
||||
|
||||
See the arch wiki at https://wiki.archlinux.org/index.php/Arch_User_Repository for more info on the \fBAUR\fR.
|
||||
|
||||
.SH BUGS
|
||||
Please report bugs to our GitHub page https://github.com/Jguer/yay
|
||||
|
||||
.SH AUTHORS
|
||||
Jguer <joguer@proton.me>
|
||||
.br
|
||||
Morgana <morganamilo@gmail\&.com>
|
||||
Morgan <morganamilo@archlinux.org>
|
||||
|
137
download.go
137
download.go
@ -1,137 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func downloadFile(path string, url string) (err error) {
|
||||
// Create the file
|
||||
out, err := os.Create(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
// Get the data
|
||||
resp, err := http.Get(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
// Writer the body to file
|
||||
_, err = io.Copy(out, resp.Body)
|
||||
return err
|
||||
}
|
||||
|
||||
// DownloadAndUnpack downloads url tgz and extracts to path.
|
||||
func downloadAndUnpack(url string, path string, trim bool) (err error) {
|
||||
err = os.MkdirAll(path, 0755)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
tokens := strings.Split(url, "/")
|
||||
fileName := tokens[len(tokens)-1]
|
||||
|
||||
tarLocation := path + fileName
|
||||
defer os.Remove(tarLocation)
|
||||
|
||||
err = downloadFile(tarLocation, url)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if trim {
|
||||
err = exec.Command("/bin/sh", "-c",
|
||||
config.TarBin+" --strip-components 2 --include='*/"+fileName[:len(fileName)-7]+"/trunk/' -xf "+tarLocation+" -C "+path).Run()
|
||||
os.Rename(path+"trunk", path+fileName[:len(fileName)-7]) // kurwa
|
||||
} else {
|
||||
err = exec.Command(config.TarBin, "-xf", tarLocation, "-C", path).Run()
|
||||
}
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func getPkgbuilds(pkgs []string) error {
|
||||
//possibleAurs := make([]string, 0, 0)
|
||||
wd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
wd = wd + "/"
|
||||
|
||||
missing, err := getPkgbuildsfromABS(pkgs, wd)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = getPkgbuildsfromAUR(missing, wd)
|
||||
return err
|
||||
}
|
||||
|
||||
// GetPkgbuild downloads pkgbuild from the ABS.
|
||||
func getPkgbuildsfromABS(pkgs []string, path string) (missing []string, err error) {
|
||||
dbList, err := alpmHandle.SyncDbs()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
nextPkg:
|
||||
for _, pkgN := range pkgs {
|
||||
for _, db := range dbList.Slice() {
|
||||
pkg, err := db.PkgByName(pkgN)
|
||||
if err == nil {
|
||||
var url string
|
||||
name := pkg.Base()
|
||||
if name == "" {
|
||||
name = pkg.Name()
|
||||
}
|
||||
|
||||
if db.Name() == "core" || db.Name() == "extra" {
|
||||
url = "https://projects.archlinux.org/svntogit/packages.git/snapshot/packages/" + name + ".tar.gz"
|
||||
} else if db.Name() == "community" || db.Name() == "multilib" {
|
||||
url = "https://projects.archlinux.org/svntogit/community.git/snapshot/community-packages/" + name + ".tar.gz"
|
||||
} else {
|
||||
fmt.Println(pkgN + " not in standard repositories")
|
||||
continue nextPkg
|
||||
}
|
||||
|
||||
errD := downloadAndUnpack(url, path, true)
|
||||
if errD != nil {
|
||||
fmt.Println(bold(magenta(pkg.Name())), bold(green(errD.Error())))
|
||||
}
|
||||
|
||||
fmt.Println(bold(green(arrow)), bold(green("Downloaded")), bold(magenta(pkg.Name())), bold(green("from ABS")))
|
||||
continue nextPkg
|
||||
}
|
||||
}
|
||||
|
||||
missing = append(missing, pkgN)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// GetPkgbuild downloads pkgbuild from the AUR.
|
||||
func getPkgbuildsfromAUR(pkgs []string, dir string) (err error) {
|
||||
aq, err := aurInfo(pkgs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, pkg := range aq {
|
||||
downloadAndUnpack(baseURL+aq[0].URLPath, dir, false)
|
||||
fmt.Println(bold(green(arrow)), bold(green("Downloaded")), bold(magenta(pkg.Name)), bold(green("from AUR")))
|
||||
}
|
||||
|
||||
return
|
||||
}
|
9
errors.go
Normal file
9
errors.go
Normal file
@ -0,0 +1,9 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/leonelquinteros/gotext"
|
||||
)
|
||||
|
||||
var ErrPackagesNotFound = errors.New(gotext.Get("could not find all required packages"))
|
80
get.go
Normal file
80
get.go
Normal file
@ -0,0 +1,80 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/Jguer/aur"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/download"
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// yay -Gp.
|
||||
func printPkgbuilds(dbExecutor download.DBSearcher, aurClient aur.QueryClient,
|
||||
httpClient *http.Client, logger *text.Logger, targets []string,
|
||||
mode parser.TargetMode, aurURL string,
|
||||
) error {
|
||||
pkgbuilds, err := download.PKGBUILDs(dbExecutor, aurClient, httpClient, logger, targets, aurURL, mode)
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
}
|
||||
|
||||
for target, pkgbuild := range pkgbuilds {
|
||||
logger.Printf("\n\n# %s\n\n%s", target, string(pkgbuild))
|
||||
}
|
||||
|
||||
if len(pkgbuilds) != len(targets) {
|
||||
missing := []string{}
|
||||
|
||||
for _, target := range targets {
|
||||
if _, ok := pkgbuilds[target]; !ok {
|
||||
missing = append(missing, target)
|
||||
}
|
||||
}
|
||||
|
||||
logger.Warnln(gotext.Get("Unable to find the following packages:"), " ", strings.Join(missing, ", "))
|
||||
|
||||
return fmt.Errorf("")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// yay -G.
|
||||
func getPkgbuilds(ctx context.Context, dbExecutor download.DBSearcher, aurClient aur.QueryClient,
|
||||
run *runtime.Runtime, targets []string, force bool,
|
||||
) error {
|
||||
wd, err := os.Getwd()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
cloned, errD := download.PKGBUILDRepos(ctx, dbExecutor, aurClient,
|
||||
run.CmdBuilder, run.Logger, targets, run.Cfg.Mode, run.Cfg.AURURL, wd, force)
|
||||
if errD != nil {
|
||||
run.Logger.Errorln(errD)
|
||||
}
|
||||
|
||||
if len(targets) != len(cloned) {
|
||||
missing := []string{}
|
||||
|
||||
for _, target := range targets {
|
||||
if _, ok := cloned[target]; !ok {
|
||||
missing = append(missing, target)
|
||||
}
|
||||
}
|
||||
|
||||
run.Logger.Warnln(gotext.Get("Unable to find the following packages:"), " ", strings.Join(missing, ", "))
|
||||
|
||||
err = fmt.Errorf("")
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
35
go.mod
Normal file
35
go.mod
Normal file
@ -0,0 +1,35 @@
|
||||
module github.com/Jguer/yay/v12
|
||||
|
||||
require (
|
||||
github.com/Jguer/aur v1.2.3
|
||||
github.com/Jguer/go-alpm/v2 v2.2.2
|
||||
github.com/Jguer/votar v1.0.0
|
||||
github.com/Morganamilo/go-pacmanconf v0.0.0-20210502114700-cff030e927a5
|
||||
github.com/Morganamilo/go-srcinfo v1.0.0
|
||||
github.com/adrg/strutil v0.3.1
|
||||
github.com/bradleyjkemp/cupaloy v2.3.0+incompatible
|
||||
github.com/deckarep/golang-set/v2 v2.8.0
|
||||
github.com/hashicorp/go-multierror v1.1.1
|
||||
github.com/leonelquinteros/gotext v1.7.2
|
||||
github.com/stretchr/testify v1.10.0
|
||||
golang.org/x/net v0.41.0
|
||||
golang.org/x/sys v0.33.0
|
||||
golang.org/x/term v0.32.0
|
||||
gopkg.in/h2non/gock.v1 v1.1.2
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/itchyny/gojq v0.12.17 // indirect
|
||||
github.com/itchyny/timefmt-go v0.1.6 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/ohler55/ojg v1.26.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
||||
go 1.23.5
|
||||
|
||||
toolchain go1.24.0
|
68
go.sum
Normal file
68
go.sum
Normal file
@ -0,0 +1,68 @@
|
||||
github.com/Jguer/aur v1.2.3 h1:D+OGgLxnAnZnw88DsRvnRQsn0Poxsy9ng7pBcsA0krM=
|
||||
github.com/Jguer/aur v1.2.3/go.mod h1:Dahvb6L1yr0rR7svyYSDwaRJoQMeyvJblwJ3QH/7CUs=
|
||||
github.com/Jguer/go-alpm/v2 v2.2.2 h1:sPwUoZp1X5Tw6K6Ba1lWvVJfcgVNEGVcxARLBttZnC0=
|
||||
github.com/Jguer/go-alpm/v2 v2.2.2/go.mod h1:lfe8gSe83F/KERaQvEfrSqQ4n+8bES+ZIyKWR/gm3MI=
|
||||
github.com/Jguer/votar v1.0.0 h1:drPYpV5Py5BeAQS8xezmT6uCEfLzotNjLf5yfmlHKTg=
|
||||
github.com/Jguer/votar v1.0.0/go.mod h1:rc6vgVlTqNjI4nAnPbDTbdxw/N7kXkbB8BcUDjeFbYQ=
|
||||
github.com/Morganamilo/go-pacmanconf v0.0.0-20210502114700-cff030e927a5 h1:TMscPjkb1ThXN32LuFY5bEYIcXZx3YlwzhS1GxNpn/c=
|
||||
github.com/Morganamilo/go-pacmanconf v0.0.0-20210502114700-cff030e927a5/go.mod h1:Hk55m330jNiwxRodIlMCvw5iEyoRUCIY64W1p9D+tHc=
|
||||
github.com/Morganamilo/go-srcinfo v1.0.0 h1:Wh4nEF+HJWo+29hnxM18Q2hi+DUf0GejS13+Wg+dzmI=
|
||||
github.com/Morganamilo/go-srcinfo v1.0.0/go.mod h1:MP6VGY1NNpVUmYIEgoM9acix95KQqIRyqQ0hCLsyYUY=
|
||||
github.com/adrg/strutil v0.3.1 h1:OLvSS7CSJO8lBii4YmBt8jiK9QOtB9CzCzwl4Ic/Fz4=
|
||||
github.com/adrg/strutil v0.3.1/go.mod h1:8h90y18QLrs11IBffcGX3NW/GFBXCMcNg4M7H6MspPA=
|
||||
github.com/alexflint/go-arg v1.4.3/go.mod h1:3PZ/wp/8HuqRZMUUgu7I+e1qcpUbvmS258mRXkFH4IA=
|
||||
github.com/alexflint/go-scalar v1.1.0/go.mod h1:LoFvNMqS1CPrMVltza4LvnGKhaSpc3oyLEBUZVhhS2o=
|
||||
github.com/bradleyjkemp/cupaloy v2.3.0+incompatible h1:UafIjBvWQmS9i/xRg+CamMrnLTKNzo+bdmT/oH34c2Y=
|
||||
github.com/bradleyjkemp/cupaloy v2.3.0+incompatible/go.mod h1:Au1Xw1sgaJ5iSFktEhYsS0dbQiS1B0/XMXl+42y9Ilk=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/deckarep/golang-set/v2 v2.8.0 h1:swm0rlPCmdWn9mESxKOjWk8hXSqoxOp+ZlfuyaAdFlQ=
|
||||
github.com/deckarep/golang-set/v2 v2.8.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4=
|
||||
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw=
|
||||
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI=
|
||||
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
|
||||
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
|
||||
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
||||
github.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg=
|
||||
github.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY=
|
||||
github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q=
|
||||
github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg=
|
||||
github.com/leonelquinteros/gotext v1.7.2 h1:bDPndU8nt+/kRo1m4l/1OXiiy2v7Z7dfPQ9+YP7G1Mc=
|
||||
github.com/leonelquinteros/gotext v1.7.2/go.mod h1:9/haCkm5P7Jay1sxKDGJ5WIg4zkz8oZKw4ekNpALob8=
|
||||
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
||||
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4=
|
||||
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms=
|
||||
github.com/ohler55/ojg v1.26.1 h1:J5TaLmVEuvnpVH7JMdT1QdbpJU545Yp6cKiCO4aQILc=
|
||||
github.com/ohler55/ojg v1.26.1/go.mod h1:gQhDVpQLqrmnd2eqGAvJtn+NfKoYJbe/A4Sj3/Vro4o=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw=
|
||||
golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
|
||||
golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/h2non/gock.v1 v1.1.2 h1:jBbHXgGBK/AoPVfJh5x4r/WxIrElvbLel8TCZkkZJoY=
|
||||
gopkg.in/h2non/gock.v1 v1.1.2/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
679
install.go
679
install.go
@ -1,679 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
alpm "github.com/jguer/go-alpm"
|
||||
rpc "github.com/mikkeloscar/aur"
|
||||
gopkg "github.com/mikkeloscar/gopkgbuild"
|
||||
)
|
||||
|
||||
// Install handles package installs
|
||||
func install(parser *arguments) error {
|
||||
requestTargets := parser.targets.toSlice()
|
||||
var err error
|
||||
var incompatable stringSet
|
||||
var dc *depCatagories
|
||||
var toClean []*rpc.Pkg
|
||||
var toEdit []*rpc.Pkg
|
||||
|
||||
removeMake := false
|
||||
srcinfosStale := make(map[string]*gopkg.PKGBUILD)
|
||||
srcinfos := make(map[string]*gopkg.PKGBUILD)
|
||||
//remotenames: names of all non repo packages on the system
|
||||
_, _, _, remoteNames, err := filterPackages()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
//cache as a stringset. maybe make it return a string set in the first
|
||||
//place
|
||||
remoteNamesCache := sliceToStringSet(remoteNames)
|
||||
|
||||
//if we are doing -u also request every non repo package on the system
|
||||
if parser.existsArg("u", "sysupgrade") {
|
||||
requestTargets = append(requestTargets, remoteNames...)
|
||||
}
|
||||
|
||||
//if len(aurTargets) > 0 || parser.existsArg("u", "sysupgrade") && len(remoteNames) > 0 {
|
||||
// fmt.Println(bold(cyan("::") + " Querying AUR..."))
|
||||
//}
|
||||
dt, err := getDepTree(requestTargets)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Deptree will handle db/pkg prefixes. Now they can be striped from the
|
||||
// targets.
|
||||
for pkg := range parser.targets {
|
||||
_, name := splitDbFromName(pkg)
|
||||
parser.targets.remove(pkg)
|
||||
parser.targets.set(name)
|
||||
}
|
||||
|
||||
//only error if direct targets or deps are missing
|
||||
for missing := range dt.Missing {
|
||||
_, missingName := splitDbFromName(missing)
|
||||
if !remoteNamesCache.get(missingName) || parser.targets.get(missingName) {
|
||||
str := bold(red(arrow+" Error: ")) + "Could not find all required packages:"
|
||||
|
||||
for name := range dt.Missing {
|
||||
str += "\n\t" + name
|
||||
}
|
||||
|
||||
return fmt.Errorf("%s", str)
|
||||
}
|
||||
}
|
||||
|
||||
//create the arguments to pass for the repo install
|
||||
arguments := parser.copy()
|
||||
arguments.delArg("y", "refresh")
|
||||
arguments.op = "S"
|
||||
arguments.targets = make(stringSet)
|
||||
|
||||
if parser.existsArg("u", "sysupgrade") {
|
||||
ignore, aurUp, err := upgradePkgs(dt)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
arguments.addParam("ignore", strings.Join(ignore.toSlice(), ","))
|
||||
fmt.Println()
|
||||
|
||||
for pkg := range aurUp {
|
||||
parser.addTarget(pkg)
|
||||
}
|
||||
|
||||
//discard stuff thats
|
||||
//not a target and
|
||||
//not an upgrade and
|
||||
//is installed
|
||||
for pkg := range dt.Aur {
|
||||
if !parser.targets.get(pkg) && remoteNamesCache.get(pkg) {
|
||||
delete(dt.Aur, pkg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
hasAur := false
|
||||
for pkg := range parser.targets {
|
||||
_, ok := dt.Aur[pkg]
|
||||
if ok {
|
||||
hasAur = true
|
||||
}
|
||||
}
|
||||
|
||||
if hasAur && 0 == os.Geteuid() {
|
||||
return fmt.Errorf(red(arrow + " Refusing to install AUR Packages as root, Aborting."))
|
||||
}
|
||||
|
||||
dc, err = getDepCatagories(parser.formatTargets(), dt)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, pkg := range dc.Repo {
|
||||
arguments.addTarget(pkg.DB().Name() + "/" + pkg.Name())
|
||||
}
|
||||
|
||||
for pkg := range dt.Groups {
|
||||
arguments.addTarget(pkg)
|
||||
}
|
||||
|
||||
if len(dc.Aur) == 0 && len(arguments.targets) == 0 && !parser.existsArg("u", "sysupgrade") {
|
||||
fmt.Println("There is nothing to do")
|
||||
return nil
|
||||
}
|
||||
|
||||
if hasAur {
|
||||
printDepCatagories(dc)
|
||||
hasAur = len(dc.Aur) != 0
|
||||
fmt.Println()
|
||||
|
||||
err = checkForAllConflicts(dc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(dc.MakeOnly) > 0 {
|
||||
if !continueTask("Remove make dependencies after install?", "yY") {
|
||||
removeMake = true
|
||||
}
|
||||
}
|
||||
|
||||
toClean, toEdit, err = cleanEditNumberMenu(dc.Aur, dc.Bases, remoteNamesCache)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cleanBuilds(toClean)
|
||||
|
||||
err = downloadPkgBuilds(dc.Aur, parser.targets, dc.Bases)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(toEdit) > 0 {
|
||||
err = editPkgBuilds(toEdit)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
//inital srcinfo parse before pkgver() bump
|
||||
err = parsesrcinfosFile(dc.Aur, srcinfosStale, dc.Bases)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
incompatable, err = getIncompatable(dc.Aur, srcinfosStale, dc.Bases)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = checkPgpKeys(dc.Aur, dc.Bases, srcinfosStale)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if len(arguments.targets) > 0 || arguments.existsArg("u") {
|
||||
err := passToPacman(arguments)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error installing repo packages")
|
||||
}
|
||||
|
||||
depArguments := makeArguments()
|
||||
depArguments.addArg("D", "asdeps")
|
||||
|
||||
for _, pkg := range dc.Repo {
|
||||
if !parser.targets.get(pkg.Name()) {
|
||||
depArguments.addTarget(pkg.Name())
|
||||
}
|
||||
}
|
||||
|
||||
if len(depArguments.targets) > 0 {
|
||||
_, stderr, err := passToPacmanCapture(depArguments)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s%s", stderr, err)
|
||||
}
|
||||
}
|
||||
} else if hasAur {
|
||||
if len(toEdit) > 0 && !continueTask("Proceed with install?", "nN") {
|
||||
return fmt.Errorf("Aborting due to user")
|
||||
}
|
||||
}
|
||||
|
||||
if hasAur {
|
||||
//conflicts have been checked so answer y for them
|
||||
ask, _ := strconv.Atoi(cmdArgs.globals["ask"])
|
||||
uask := alpm.QuestionType(ask) | alpm.QuestionTypeConflictPkg
|
||||
cmdArgs.globals["ask"] = fmt.Sprint(uask)
|
||||
|
||||
err = downloadPkgBuildsSources(dc.Aur, dc.Bases, incompatable)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = parsesrcinfosGenerate(dc.Aur, srcinfos, dc.Bases)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = buildInstallPkgBuilds(dc.Aur, srcinfos, parser.targets, parser, dc.Bases, incompatable)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(dc.MakeOnly) > 0 {
|
||||
if !removeMake {
|
||||
return nil
|
||||
}
|
||||
|
||||
removeArguments := makeArguments()
|
||||
removeArguments.addArg("R", "u")
|
||||
|
||||
for pkg := range dc.MakeOnly {
|
||||
removeArguments.addTarget(pkg)
|
||||
}
|
||||
|
||||
oldValue := config.NoConfirm
|
||||
config.NoConfirm = true
|
||||
err = passToPacman(removeArguments)
|
||||
config.NoConfirm = oldValue
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if config.CleanAfter {
|
||||
clean(dc.Aur)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// getIncompatable returns the set of package bases whose .SRCINFO does not
// list the local pacman architecture (or "any"). When any such packages are
// found the user is shown the list and asked whether to build them anyway;
// refusing aborts with an error. The bases parameter is currently unused.
func getIncompatable(pkgs []*rpc.Pkg, srcinfos map[string]*gopkg.PKGBUILD, bases map[string][]*rpc.Pkg) (stringSet, error) {
	incompatable := make(stringSet)
	alpmArch, err := alpmHandle.Arch()
	if err != nil {
		return nil, err
	}

nextpkg:
	for _, pkg := range pkgs {
		// Any single matching arch entry makes the package compatible.
		for _, arch := range srcinfos[pkg.PackageBase].Arch {
			if arch == "any" || arch == alpmArch {
				continue nextpkg
			}
		}

		// No listed architecture matched: flag the whole package base.
		incompatable.set(pkg.PackageBase)
	}

	if len(incompatable) > 0 {
		fmt.Print(
			bold(green(("\nThe following packages are not compatable with your architecture:"))))
		for pkg := range incompatable {
			fmt.Print(" " + cyan(pkg))
		}

		fmt.Println()

		if !continueTask("Try to build them anyway?", "nN") {
			return nil, fmt.Errorf("Aborting due to user")
		}
	}

	return incompatable, nil
}
|
||||
|
||||
func cleanEditNumberMenu(pkgs []*rpc.Pkg, bases map[string][]*rpc.Pkg, installed stringSet) ([]*rpc.Pkg, []*rpc.Pkg, error) {
|
||||
toPrint := ""
|
||||
askClean := false
|
||||
|
||||
toClean := make([]*rpc.Pkg, 0)
|
||||
toEdit := make([]*rpc.Pkg, 0)
|
||||
|
||||
if config.NoConfirm {
|
||||
return toClean, toEdit, nil
|
||||
}
|
||||
|
||||
for n, pkg := range pkgs {
|
||||
dir := config.BuildDir + pkg.PackageBase + "/"
|
||||
|
||||
toPrint += fmt.Sprintf("%s %-40s", magenta(strconv.Itoa(len(pkgs)-n)),
|
||||
bold(formatPkgbase(pkg, bases)))
|
||||
if installed.get(pkg.Name) {
|
||||
toPrint += bold(green(" (Installed)"))
|
||||
}
|
||||
|
||||
if _, err := os.Stat(dir); !os.IsNotExist(err) {
|
||||
toPrint += bold(green(" (Build Files Exist)"))
|
||||
askClean = true
|
||||
}
|
||||
|
||||
toPrint += "\n"
|
||||
}
|
||||
|
||||
fmt.Print(toPrint)
|
||||
|
||||
if askClean {
|
||||
fmt.Println(bold(green(arrow + " Packages to cleanBuild?")))
|
||||
fmt.Println(bold(green(arrow) + cyan(" [N]one ") + green("[A]ll [Ab]ort [I]nstalled [No]tInstalled or (1 2 3, 1-3, ^4)")))
|
||||
fmt.Print(bold(green(arrow + " ")))
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
|
||||
numberBuf, overflow, err := reader.ReadLine()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
if overflow {
|
||||
return nil, nil, fmt.Errorf("Input too long")
|
||||
}
|
||||
|
||||
cleanInput := string(numberBuf)
|
||||
|
||||
cInclude, cExclude, cOtherInclude, cOtherExclude := parseNumberMenu(cleanInput)
|
||||
cIsInclude := len(cExclude) == 0 && len(cOtherExclude) == 0
|
||||
|
||||
if cOtherInclude.get("abort") || cOtherInclude.get("ab") {
|
||||
return nil, nil, fmt.Errorf("Aborting due to user")
|
||||
}
|
||||
|
||||
if !cOtherInclude.get("n") && !cOtherInclude.get("none") {
|
||||
for i, pkg := range pkgs {
|
||||
dir := config.BuildDir + pkg.PackageBase + "/"
|
||||
if _, err := os.Stat(dir); os.IsNotExist(err) {
|
||||
continue
|
||||
}
|
||||
|
||||
if !cIsInclude && cExclude.get(len(pkgs)-i) {
|
||||
continue
|
||||
}
|
||||
|
||||
if installed.get(pkg.Name) && (cOtherInclude.get("i") || cOtherInclude.get("installed")) {
|
||||
toClean = append(toClean, pkg)
|
||||
continue
|
||||
}
|
||||
|
||||
if !installed.get(pkg.Name) && (cOtherInclude.get("no") || cOtherInclude.get("notinstalled")) {
|
||||
toClean = append(toClean, pkg)
|
||||
continue
|
||||
}
|
||||
|
||||
if cOtherInclude.get("a") || cOtherInclude.get("all") {
|
||||
toClean = append(toClean, pkg)
|
||||
continue
|
||||
}
|
||||
|
||||
if cIsInclude && cInclude.get(len(pkgs)-i) {
|
||||
toClean = append(toClean, pkg)
|
||||
}
|
||||
|
||||
if !cIsInclude && !cExclude.get(len(pkgs)-i) {
|
||||
toClean = append(toClean, pkg)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Println(bold(green(arrow + " PKGBUILDs to edit?")))
|
||||
fmt.Println(bold(green(arrow) + cyan(" [N]one ") + green("[A]ll [Ab]ort [I]nstalled [No]tInstalled or (1 2 3, 1-3, ^4)")))
|
||||
|
||||
fmt.Print(bold(green(arrow + " ")))
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
|
||||
numberBuf, overflow, err := reader.ReadLine()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
if overflow {
|
||||
return nil, nil, fmt.Errorf("Input too long")
|
||||
}
|
||||
|
||||
editInput := string(numberBuf)
|
||||
|
||||
eInclude, eExclude, eOtherInclude, eOtherExclude := parseNumberMenu(editInput)
|
||||
eIsInclude := len(eExclude) == 0 && len(eOtherExclude) == 0
|
||||
|
||||
if eOtherInclude.get("abort") || eOtherInclude.get("ab") {
|
||||
return nil, nil, fmt.Errorf("Aborting due to user")
|
||||
}
|
||||
|
||||
if !eOtherInclude.get("n") && !eOtherInclude.get("none") {
|
||||
for i, pkg := range pkgs {
|
||||
if !eIsInclude && eExclude.get(len(pkgs)-i) {
|
||||
continue
|
||||
}
|
||||
|
||||
if installed.get(pkg.Name) && (eOtherInclude.get("i") || eOtherInclude.get("installed")) {
|
||||
toEdit = append(toEdit, pkg)
|
||||
continue
|
||||
}
|
||||
|
||||
if !installed.get(pkg.Name) && (eOtherInclude.get("no") || eOtherInclude.get("notinstalled")) {
|
||||
toEdit = append(toEdit, pkg)
|
||||
continue
|
||||
}
|
||||
|
||||
if eOtherInclude.get("a") || eOtherInclude.get("all") {
|
||||
toEdit = append(toEdit, pkg)
|
||||
continue
|
||||
}
|
||||
|
||||
if eIsInclude && eInclude.get(len(pkgs)-i) {
|
||||
toEdit = append(toEdit, pkg)
|
||||
}
|
||||
|
||||
if !eIsInclude && !eExclude.get(len(pkgs)-i) {
|
||||
toEdit = append(toEdit, pkg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return toClean, toEdit, nil
|
||||
}
|
||||
|
||||
func cleanBuilds(pkgs []*rpc.Pkg) {
|
||||
for i, pkg := range pkgs {
|
||||
dir := config.BuildDir + pkg.PackageBase
|
||||
fmt.Printf(bold(cyan("::")+" Deleting (%d/%d): %s\n"), i+1, len(pkgs), dir)
|
||||
os.RemoveAll(dir)
|
||||
}
|
||||
}
|
||||
|
||||
func editPkgBuilds(pkgs []*rpc.Pkg) error {
|
||||
pkgbuilds := make([]string, 0, len(pkgs))
|
||||
for _, pkg := range pkgs {
|
||||
dir := config.BuildDir + pkg.PackageBase + "/"
|
||||
pkgbuilds = append(pkgbuilds, dir+"PKGBUILD")
|
||||
}
|
||||
|
||||
editcmd := exec.Command(editor(), pkgbuilds...)
|
||||
editcmd.Stdin, editcmd.Stdout, editcmd.Stderr = os.Stdin, os.Stdout, os.Stderr
|
||||
err := editcmd.Run()
|
||||
if err != nil {
|
||||
return fmt.Errorf("Editor did not exit successfully, Abotring: %s", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func parsesrcinfosFile(pkgs []*rpc.Pkg, srcinfos map[string]*gopkg.PKGBUILD, bases map[string][]*rpc.Pkg) error {
|
||||
for k, pkg := range pkgs {
|
||||
dir := config.BuildDir + pkg.PackageBase + "/"
|
||||
|
||||
str := bold(cyan("::") + " Parsing SRCINFO (%d/%d): %s\n")
|
||||
fmt.Printf(str, k+1, len(pkgs), formatPkgbase(pkg, bases))
|
||||
|
||||
pkgbuild, err := gopkg.ParseSRCINFO(dir + ".SRCINFO")
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s: %s", pkg.Name, err)
|
||||
}
|
||||
|
||||
srcinfos[pkg.PackageBase] = pkgbuild
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func parsesrcinfosGenerate(pkgs []*rpc.Pkg, srcinfos map[string]*gopkg.PKGBUILD, bases map[string][]*rpc.Pkg) error {
|
||||
for k, pkg := range pkgs {
|
||||
dir := config.BuildDir + pkg.PackageBase + "/"
|
||||
|
||||
str := bold(cyan("::") + " Parsing SRCINFO (%d/%d): %s\n")
|
||||
fmt.Printf(str, k+1, len(pkgs), formatPkgbase(pkg, bases))
|
||||
|
||||
cmd := exec.Command(config.MakepkgBin, "--printsrcinfo")
|
||||
cmd.Stderr = os.Stderr
|
||||
cmd.Dir = dir
|
||||
srcinfo, err := cmd.Output()
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
pkgbuild, err := gopkg.ParseSRCINFOContent(srcinfo)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s: %s", pkg.Name, err)
|
||||
}
|
||||
|
||||
srcinfos[pkg.PackageBase] = pkgbuild
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// downloadPkgBuilds downloads the PKGBUILD snapshot of each AUR package into
// the build directory. Depending on config.ReDownload ("no" always skips
// up-to-date packages; "yes" skips only non-target packages), a package whose
// local .SRCINFO version already matches or exceeds the AUR version is not
// re-downloaded.
func downloadPkgBuilds(pkgs []*rpc.Pkg, targets stringSet, bases map[string][]*rpc.Pkg) error {
	for k, pkg := range pkgs {
		if config.ReDownload == "no" || (config.ReDownload == "yes" && !targets.get(pkg.Name)) {
			dir := config.BuildDir + pkg.PackageBase + "/.SRCINFO"
			pkgbuild, err := gopkg.ParseSRCINFO(dir)

			// Errors here are non-fatal: a missing or unparsable local
			// .SRCINFO simply means we fall through to downloading.
			if err == nil {
				version, err := gopkg.NewCompleteVersion(pkg.Version)
				if err == nil {
					// Skip when the AUR version is not strictly newer than
					// what the local PKGBUILD already provides.
					if !version.Newer(pkgbuild.Version()) {
						str := bold(cyan("::") + " PKGBUILD up to date, Skipping (%d/%d): %s\n")
						fmt.Printf(str, k+1, len(pkgs), formatPkgbase(pkg, bases))
						continue
					}
				}
			}
		}

		str := bold(cyan("::") + " Downloading PKGBUILD (%d/%d): %s\n")

		fmt.Printf(str, k+1, len(pkgs), formatPkgbase(pkg, bases))

		err := downloadAndUnpack(baseURL+pkg.URLPath, config.BuildDir, false)
		if err != nil {
			return err
		}
	}

	return nil
}
|
||||
|
||||
func downloadPkgBuildsSources(pkgs []*rpc.Pkg, bases map[string][]*rpc.Pkg, incompatable stringSet) (err error) {
|
||||
for _, pkg := range pkgs {
|
||||
dir := config.BuildDir + pkg.PackageBase + "/"
|
||||
args := []string{"--nobuild", "--nocheck", "--noprepare", "--nodeps"}
|
||||
|
||||
if incompatable.get(pkg.PackageBase) {
|
||||
args = append(args, "--ignorearch")
|
||||
}
|
||||
|
||||
err = passToMakepkg(dir, args...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error downloading sources: %s", formatPkgbase(pkg, bases))
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func buildInstallPkgBuilds(pkgs []*rpc.Pkg, srcinfos map[string]*gopkg.PKGBUILD, targets stringSet, parser *arguments, bases map[string][]*rpc.Pkg, incompatable stringSet) error {
|
||||
alpmArch, err := alpmHandle.Arch()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, pkg := range pkgs {
|
||||
var arch string
|
||||
dir := config.BuildDir + pkg.PackageBase + "/"
|
||||
built := true
|
||||
|
||||
srcinfo := srcinfos[pkg.PackageBase]
|
||||
version := srcinfo.CompleteVersion()
|
||||
|
||||
if srcinfos[pkg.PackageBase].Arch[0] == "any" {
|
||||
arch = "any"
|
||||
} else {
|
||||
arch = alpmArch
|
||||
}
|
||||
|
||||
if config.ReBuild == "no" || (config.ReBuild == "yes" && !targets.get(pkg.Name)) {
|
||||
for _, split := range bases[pkg.PackageBase] {
|
||||
file, err := completeFileName(dir, split.Name+"-"+version.String()+"-"+arch+".pkg")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if file == "" {
|
||||
built = false
|
||||
}
|
||||
}
|
||||
} else {
|
||||
built = false
|
||||
}
|
||||
|
||||
if built {
|
||||
fmt.Println(bold(red(arrow+" Warning:")),
|
||||
pkg.Name+"-"+pkg.Version+" Already made -- skipping build")
|
||||
} else {
|
||||
args := []string{"-Ccf", "--noconfirm"}
|
||||
|
||||
if incompatable.get(pkg.PackageBase) {
|
||||
args = append(args, "--ignorearch")
|
||||
}
|
||||
|
||||
err := passToMakepkg(dir, args...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Error making: %s", pkg.Name)
|
||||
}
|
||||
}
|
||||
|
||||
arguments := parser.copy()
|
||||
arguments.targets = make(stringSet)
|
||||
arguments.op = "U"
|
||||
arguments.delArg("confirm")
|
||||
arguments.delArg("c", "clean")
|
||||
arguments.delArg("q", "quiet")
|
||||
arguments.delArg("q", "quiet")
|
||||
arguments.delArg("y", "refresh")
|
||||
arguments.delArg("u", "sysupgrade")
|
||||
arguments.delArg("w", "downloadonly")
|
||||
|
||||
depArguments := makeArguments()
|
||||
depArguments.addArg("D", "asdeps")
|
||||
|
||||
for _, split := range bases[pkg.PackageBase] {
|
||||
file, err := completeFileName(dir, split.Name+"-"+version.String()+"-"+arch+".pkg")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if file == "" {
|
||||
return fmt.Errorf("Could not find built package " + split.Name + "-" + version.String() + "-" + arch + ".pkg")
|
||||
}
|
||||
|
||||
arguments.addTarget(file)
|
||||
if !targets.get(split.Name) {
|
||||
depArguments.addTarget(split.Name)
|
||||
}
|
||||
}
|
||||
|
||||
oldConfirm := config.NoConfirm
|
||||
config.NoConfirm = true
|
||||
err := passToPacman(arguments)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, pkg := range bases[pkg.PackageBase] {
|
||||
updateVCSData(pkg.Name, srcinfo.Source)
|
||||
}
|
||||
|
||||
if len(depArguments.targets) > 0 {
|
||||
_, stderr, err := passToPacmanCapture(depArguments)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s%s", stderr, err)
|
||||
}
|
||||
}
|
||||
config.NoConfirm = oldConfirm
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func clean(pkgs []*rpc.Pkg) {
|
||||
for _, pkg := range pkgs {
|
||||
dir := config.BuildDir + pkg.PackageBase + "/"
|
||||
|
||||
fmt.Println(bold(green(arrow +
|
||||
" CleanAfter enabled. Deleting " + pkg.Name + " source folder.")))
|
||||
os.RemoveAll(dir)
|
||||
}
|
||||
}
|
121
keys.go
121
keys.go
@ -1,121 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
|
||||
rpc "github.com/mikkeloscar/aur"
|
||||
gopkg "github.com/mikkeloscar/gopkgbuild"
|
||||
)
|
||||
|
||||
// pgpKeySet maps a PGP key with a list of PKGBUILDs that require it.
|
||||
// This is similar to stringSet, used throughout the code.
|
||||
type pgpKeySet map[string][]*rpc.Pkg
|
||||
|
||||
func (set pgpKeySet) toSlice() []string {
|
||||
slice := make([]string, 0, len(set))
|
||||
for v := range set {
|
||||
slice = append(slice, v)
|
||||
}
|
||||
return slice
|
||||
}
|
||||
|
||||
func (set pgpKeySet) set(key string, p *rpc.Pkg) {
|
||||
// Using ToUpper to make sure keys with a different case will be
|
||||
// considered the same.
|
||||
upperKey := strings.ToUpper(key)
|
||||
if _, exists := set[upperKey]; !exists {
|
||||
set[upperKey] = []*rpc.Pkg{}
|
||||
}
|
||||
set[key] = append(set[key], p)
|
||||
}
|
||||
|
||||
func (set pgpKeySet) get(key string) bool {
|
||||
upperKey := strings.ToUpper(key)
|
||||
_, exists := set[upperKey]
|
||||
return exists
|
||||
}
|
||||
|
||||
// checkPgpKeys iterates through the keys listed in the PKGBUILDs and if needed,
|
||||
// asks the user whether yay should try to import them.
|
||||
// checkPgpKeys iterates through the keys listed in the PKGBUILDs and, if
// needed, asks the user whether yay should try to import them. A key is
// considered problematic when `gpg --list-keys <key>` fails, i.e. the key
// is not present in the local keyring.
func checkPgpKeys(pkgs []*rpc.Pkg, bases map[string][]*rpc.Pkg, srcinfos map[string]*gopkg.PKGBUILD) error {
	// Let's check the keys individually, and then we can offer to import
	// the problematic ones.
	problematic := make(pgpKeySet)
	args := append(strings.Fields(config.GpgFlags), "--list-keys")

	// Mapping all the keys.
	for _, pkg := range pkgs {
		srcinfo := srcinfos[pkg.PackageBase]

		for _, key := range srcinfo.Validpgpkeys {
			// If key already marked as problematic, indicate the current
			// PKGBUILD requires it too, without re-running gpg.
			if problematic.get(key) {
				problematic.set(key, pkg)
				continue
			}

			// A non-zero gpg exit means the key is missing locally.
			cmd := exec.Command(config.GpgBin, append(args, key)...)
			err := cmd.Run()
			if err != nil {
				problematic.set(key, pkg)
			}
		}
	}

	// No key issues!
	if len(problematic) == 0 {
		return nil
	}

	fmt.Println()
	question, err := formatKeysToImport(problematic, bases)
	if err != nil {
		return err
	}
	if continueTask(question, "nN") {
		return importKeys(problematic.toSlice())
	}

	return nil
}
|
||||
|
||||
// importKeys tries to import the list of keys specified in its argument.
|
||||
func importKeys(keys []string) error {
|
||||
args := append(strings.Fields(config.GpgFlags), "--recv-keys")
|
||||
cmd := exec.Command(config.GpgBin, append(args, keys...)...)
|
||||
cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr
|
||||
|
||||
fmt.Printf("%s Importing keys with gpg...\n", bold(cyan("::")))
|
||||
err := cmd.Run()
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s Problem importing keys", bold(red(arrow+" Error:")))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// formatKeysToImport receives a set of keys and returns a string containing the
|
||||
// question asking the user wants to import the problematic keys.
|
||||
func formatKeysToImport(keys pgpKeySet, bases map[string][]*rpc.Pkg) (string, error) {
|
||||
if len(keys) == 0 {
|
||||
return "", fmt.Errorf("%s No keys to import", bold(red(arrow+" Error:")))
|
||||
}
|
||||
|
||||
var buffer bytes.Buffer
|
||||
buffer.WriteString(bold(green(("GPG keys need importing:\n"))))
|
||||
for key, pkgs := range keys {
|
||||
pkglist := ""
|
||||
for _, pkg := range pkgs {
|
||||
pkglist += formatPkgbase(pkg, bases) + " "
|
||||
}
|
||||
pkglist = strings.TrimRight(pkglist, " ")
|
||||
buffer.WriteString(fmt.Sprintf("\t%s, required by: %s\n", green(key), cyan(pkglist)))
|
||||
}
|
||||
buffer.WriteString(bold(green(fmt.Sprintf("%s Import?", arrow))))
|
||||
return buffer.String(), nil
|
||||
}
|
309
keys_test.go
309
keys_test.go
@ -1,309 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"regexp"
|
||||
"testing"
|
||||
|
||||
rpc "github.com/mikkeloscar/aur"
|
||||
gopkg "github.com/mikkeloscar/gopkgbuild"
|
||||
)
|
||||
|
||||
const (
|
||||
// The default port used by the PGP key server.
|
||||
gpgServerPort = 11371
|
||||
)
|
||||
|
||||
func init() {
|
||||
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
|
||||
regex := regexp.MustCompile(`search=0[xX]([a-fA-F0-9]+)`)
|
||||
matches := regex.FindStringSubmatch(r.RequestURI)
|
||||
data := ""
|
||||
if matches != nil {
|
||||
data = getPgpKey(matches[1])
|
||||
}
|
||||
w.Header().Set("Content-Type", "application/pgp-keys")
|
||||
w.Write([]byte(data))
|
||||
})
|
||||
}
|
||||
|
||||
func newPkg(basename string) *rpc.Pkg {
|
||||
return &rpc.Pkg{Name: basename, PackageBase: basename}
|
||||
}
|
||||
|
||||
func newSplitPkg(basename, name string) *rpc.Pkg {
|
||||
return &rpc.Pkg{Name: name, PackageBase: basename}
|
||||
}
|
||||
|
||||
// getPgpKey reads the test key material stored at testdata/keys/<key> and
// wraps it in an armored PGP public key block. It returns the empty string
// when the file cannot be read.
func getPgpKey(key string) string {
	contents, err := ioutil.ReadFile(path.Join("testdata", "keys", key))
	if err != nil {
		return ""
	}

	var armored bytes.Buffer
	armored.WriteString("-----BEGIN PGP PUBLIC KEY BLOCK-----\n")
	armored.WriteString("Version: SKS 1.1.6\n")
	armored.WriteString("Comment: Hostname: yay\n\n")
	armored.Write(contents)
	armored.WriteString("\n-----END PGP PUBLIC KEY BLOCK-----\n")
	return armored.String()
}
|
||||
|
||||
func startPgpKeyServer() *http.Server {
|
||||
srv := &http.Server{Addr: fmt.Sprintf("127.0.0.1:%d", gpgServerPort)}
|
||||
|
||||
go func() {
|
||||
srv.ListenAndServe()
|
||||
}()
|
||||
return srv
|
||||
}
|
||||
|
||||
func TestFormatKeysToImport(t *testing.T) {
|
||||
casetests := []struct {
|
||||
keySet pgpKeySet
|
||||
bases map[string][]*rpc.Pkg
|
||||
expected string
|
||||
alternate string
|
||||
wantError bool
|
||||
}{
|
||||
// Single key, required by single package.
|
||||
{
|
||||
keySet: pgpKeySet{"KEY-1": []*rpc.Pkg{newPkg("PKG-foo")}},
|
||||
expected: fmt.Sprintf("GPG keys need importing:\n\tKEY-1, required by: PKG-foo\n%s Import?", arrow),
|
||||
wantError: false,
|
||||
},
|
||||
// Single key, required by two packages.
|
||||
{
|
||||
keySet: pgpKeySet{"KEY-1": []*rpc.Pkg{newPkg("PKG-foo"), newPkg("PKG-bar")}},
|
||||
expected: fmt.Sprintf("GPG keys need importing:\n\tKEY-1, required by: PKG-foo PKG-bar\n%s Import?", arrow),
|
||||
wantError: false,
|
||||
},
|
||||
// Two keys, each required by a single package. Since iterating the map
|
||||
// does not force any particular order, we cannot really predict the
|
||||
// order in which the elements will appear. As we have only two cases,
|
||||
// let's add the second possibility to the alternate variable, to check
|
||||
// if there are any errors.
|
||||
{
|
||||
keySet: pgpKeySet{"KEY-1": []*rpc.Pkg{newPkg("PKG-foo")}, "KEY-2": []*rpc.Pkg{newPkg("PKG-bar")}},
|
||||
expected: fmt.Sprintf("GPG keys need importing:\n\tKEY-1, required by: PKG-foo\n\tKEY-2, required by: PKG-bar\n%s Import?", arrow),
|
||||
alternate: fmt.Sprintf("GPG keys need importing:\n\tKEY-2, required by: PKG-bar\n\tKEY-1, required by: PKG-foo\n%s Import?", arrow),
|
||||
wantError: false,
|
||||
},
|
||||
// Two keys required by single package.
|
||||
{
|
||||
keySet: pgpKeySet{"KEY-1": []*rpc.Pkg{newPkg("PKG-foo")}, "KEY-2": []*rpc.Pkg{newPkg("PKG-foo")}},
|
||||
expected: fmt.Sprintf("GPG keys need importing:\n\tKEY-1, required by: PKG-foo\n\tKEY-2, required by: PKG-foo\n%s Import?", arrow),
|
||||
alternate: fmt.Sprintf("GPG keys need importing:\n\tKEY-2, required by: PKG-foo\n\tKEY-1, required by: PKG-foo\n%s Import?", arrow),
|
||||
wantError: false,
|
||||
},
|
||||
// Two keys, one of them required by two packages.
|
||||
{
|
||||
keySet: pgpKeySet{"KEY-1": []*rpc.Pkg{newPkg("PKG-foo"), newPkg("PKG-bar")}, "KEY-2": []*rpc.Pkg{newPkg("PKG-bar")}},
|
||||
expected: fmt.Sprintf("GPG keys need importing:\n\tKEY-1, required by: PKG-foo PKG-bar\n\tKEY-2, required by: PKG-bar\n%s Import?", arrow),
|
||||
alternate: fmt.Sprintf("GPG keys need importing:\n\tKEY-2, required by: PKG-bar\n\tKEY-1, required by: PKG-foo PKG-bar\n%s Import?", arrow),
|
||||
wantError: false,
|
||||
},
|
||||
// Two keys, split package (linux-ck/linux-ck-headers).
|
||||
{
|
||||
keySet: pgpKeySet{"ABAF11C65A2970B130ABE3C479BE3E4300411886": []*rpc.Pkg{newPkg("linux-ck")}, "647F28654894E3BD457199BE38DBBDC86092693E": []*rpc.Pkg{newPkg("linux-ck")}},
|
||||
|
||||
bases: map[string][]*rpc.Pkg{"linux-ck": {newSplitPkg("linux-ck", "linux-ck-headers"), newPkg("linux-ck")}},
|
||||
expected: fmt.Sprintf("GPG keys need importing:\n\tABAF11C65A2970B130ABE3C479BE3E4300411886, required by: linux-ck (linux-ck-headers linux-ck)\n\t647F28654894E3BD457199BE38DBBDC86092693E, required by: linux-ck (linux-ck-headers linux-ck)\n%s Import?", arrow),
|
||||
alternate: fmt.Sprintf("GPG keys need importing:\n\t647F28654894E3BD457199BE38DBBDC86092693E, required by: linux-ck (linux-ck-headers linux-ck)\n\tABAF11C65A2970B130ABE3C479BE3E4300411886, required by: linux-ck (linux-ck-headers linux-ck)\n%s Import?", arrow),
|
||||
wantError: false,
|
||||
},
|
||||
// One key, three split packages.
|
||||
{
|
||||
keySet: pgpKeySet{"KEY-1": []*rpc.Pkg{newPkg("PKG-foo")}},
|
||||
bases: map[string][]*rpc.Pkg{"PKG-foo": {newPkg("PKG-foo"), newSplitPkg("PKG-foo", "PKG-foo-1"), newSplitPkg("PKG-foo", "PKG-foo-2")}},
|
||||
expected: fmt.Sprintf("GPG keys need importing:\n\tKEY-1, required by: PKG-foo (PKG-foo PKG-foo-1 PKG-foo-2)\n%s Import?", arrow),
|
||||
wantError: false,
|
||||
},
|
||||
// No keys, should fail.
|
||||
{
|
||||
keySet: pgpKeySet{},
|
||||
expected: "",
|
||||
wantError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range casetests {
|
||||
question, err := formatKeysToImport(tt.keySet, tt.bases)
|
||||
if !tt.wantError {
|
||||
if err != nil {
|
||||
t.Fatalf("Got error %q, want no error", err)
|
||||
}
|
||||
|
||||
if question != tt.expected && question != tt.alternate {
|
||||
t.Fatalf("Got %q\n, expected: %q", question, tt.expected)
|
||||
}
|
||||
continue
|
||||
}
|
||||
// Here, we want to see the error.
|
||||
if err == nil {
|
||||
t.Fatalf("Got no error; want error")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestImportKeys(t *testing.T) {
|
||||
keyringDir, err := ioutil.TempDir("/tmp", "yay-test-keyring")
|
||||
if err != nil {
|
||||
t.Fatalf("Unable to init test keyring %q: %v\n", keyringDir, err)
|
||||
}
|
||||
defer os.RemoveAll(keyringDir)
|
||||
|
||||
config.GpgBin = "gpg"
|
||||
config.GpgFlags = fmt.Sprintf("--homedir %s --keyserver 127.0.0.1", keyringDir)
|
||||
|
||||
server := startPgpKeyServer()
|
||||
defer server.Shutdown(nil)
|
||||
|
||||
casetests := []struct {
|
||||
keys []string
|
||||
wantError bool
|
||||
}{
|
||||
// Single key, should succeed.
|
||||
// C52048C0C0748FEE227D47A2702353E0F7E48EDB: Thomas Dickey.
|
||||
{
|
||||
keys: []string{"C52048C0C0748FEE227D47A2702353E0F7E48EDB"},
|
||||
wantError: false,
|
||||
},
|
||||
// Two keys, should succeed as well.
|
||||
// 11E521D646982372EB577A1F8F0871F202119294: Tom Stellard.
|
||||
// B6C8F98282B944E3B0D5C2530FC3042E345AD05D: Hans Wennborg.
|
||||
{
|
||||
keys: []string{"11E521D646982372EB577A1F8F0871F202119294",
|
||||
"B6C8F98282B944E3B0D5C2530FC3042E345AD05D"},
|
||||
wantError: false,
|
||||
},
|
||||
// Single invalid key, should fail.
|
||||
{
|
||||
keys: []string{"THIS-SHOULD-FAIL"},
|
||||
wantError: true,
|
||||
},
|
||||
// Two invalid keys, should fail.
|
||||
{
|
||||
keys: []string{"THIS-SHOULD-FAIL", "THIS-ONE-SHOULD-FAIL-TOO"},
|
||||
wantError: true,
|
||||
},
|
||||
// Invalid + valid key. Should fail as well.
|
||||
// 647F28654894E3BD457199BE38DBBDC86092693E: Greg Kroah-Hartman.
|
||||
{
|
||||
keys: []string{"THIS-SHOULD-FAIL",
|
||||
"647F28654894E3BD457199BE38DBBDC86092693E"},
|
||||
wantError: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range casetests {
|
||||
err := importKeys(tt.keys)
|
||||
if !tt.wantError {
|
||||
if err != nil {
|
||||
t.Fatalf("Got error %q, want no error", err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
// Here, we want to see the error.
|
||||
if err == nil {
|
||||
t.Fatalf("Got no error; want error")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestCheckPgpKeys exercises checkPgpKeys against a local PGP key server
// (startPgpKeyServer) using a throwaway GnuPG keyring, covering single and
// multiple valid keys, keys shared between packages, keys already present
// in the keyring, and invalid key fingerprints.
func TestCheckPgpKeys(t *testing.T) {
	// Fresh keyring so the test never touches the user's real GnuPG home.
	keyringDir, err := ioutil.TempDir("/tmp", "yay-test-keyring")
	if err != nil {
		t.Fatalf("Unable to init test keyring: %v\n", err)
	}
	defer os.RemoveAll(keyringDir)

	// Point gpg at the temp keyring and at the local key server started below.
	config.GpgBin = "gpg"
	config.GpgFlags = fmt.Sprintf("--homedir %s --keyserver 127.0.0.1", keyringDir)

	server := startPgpKeyServer()
	// NOTE(review): Shutdown is passed a nil context — confirm the server's
	// Shutdown accepts nil (http.Server.Shutdown does not document it).
	defer server.Shutdown(nil)

	casetests := []struct {
		pkgs      []*rpc.Pkg                  // AUR packages under test
		srcinfos  map[string]*gopkg.PKGBUILD  // parsed PKGBUILDs keyed by pkgbase
		bases     map[string][]*rpc.Pkg       // pkgbase -> split packages
		wantError bool                        // whether checkPgpKeys must fail
	}{
		// cower: single package, one valid key not yet in the keyring.
		// 487EACC08557AD082088DABA1EB2638FF56C0C53: Dave Reisner.
		{
			pkgs:      []*rpc.Pkg{newPkg("cower")},
			srcinfos:  map[string]*gopkg.PKGBUILD{"cower": &gopkg.PKGBUILD{Pkgbase: "cower", Validpgpkeys: []string{"487EACC08557AD082088DABA1EB2638FF56C0C53"}}},
			bases:     map[string][]*rpc.Pkg{"cower": {newPkg("cower")}},
			wantError: false,
		},
		// libc++: single package, two valid keys not yet in the keyring.
		// 11E521D646982372EB577A1F8F0871F202119294: Tom Stellard.
		// B6C8F98282B944E3B0D5C2530FC3042E345AD05D: Hans Wennborg.
		{
			pkgs:      []*rpc.Pkg{newPkg("libc++")},
			srcinfos:  map[string]*gopkg.PKGBUILD{"libc++": &gopkg.PKGBUILD{Pkgbase: "libc++", Validpgpkeys: []string{"11E521D646982372EB577A1F8F0871F202119294", "B6C8F98282B944E3B0D5C2530FC3042E345AD05D"}}},
			bases:     map[string][]*rpc.Pkg{"libc++": {newPkg("libc++")}},
			wantError: false,
		},
		// Two dummy packages requiring the same key.
		// ABAF11C65A2970B130ABE3C479BE3E4300411886: Linus Torvalds.
		{
			pkgs:      []*rpc.Pkg{newPkg("dummy-1"), newPkg("dummy-2")},
			srcinfos:  map[string]*gopkg.PKGBUILD{"dummy-1": &gopkg.PKGBUILD{Pkgbase: "dummy-1", Validpgpkeys: []string{"ABAF11C65A2970B130ABE3C479BE3E4300411886"}}, "dummy-2": &gopkg.PKGBUILD{Pkgbase: "dummy-2", Validpgpkeys: []string{"ABAF11C65A2970B130ABE3C479BE3E4300411886"}}},
			bases:     map[string][]*rpc.Pkg{"dummy-1": {newPkg("dummy-1")}, "dummy-2": {newPkg("dummy-2")}},
			wantError: false,
		},
		// dummy package: single package, two valid keys, one of them already
		// in the keyring.
		// 11E521D646982372EB577A1F8F0871F202119294: Tom Stellard.
		// C52048C0C0748FEE227D47A2702353E0F7E48EDB: Thomas Dickey.
		{
			pkgs:      []*rpc.Pkg{newPkg("dummy-3")},
			srcinfos:  map[string]*gopkg.PKGBUILD{"dummy-3": &gopkg.PKGBUILD{Pkgbase: "dummy-3", Validpgpkeys: []string{"11E521D646982372EB577A1F8F0871F202119294", "C52048C0C0748FEE227D47A2702353E0F7E48EDB"}}},
			bases:     map[string][]*rpc.Pkg{"dummy-3": {newPkg("dummy-3")}},
			wantError: false,
		},
		// Two dummy packages with existing keys.
		{
			pkgs:      []*rpc.Pkg{newPkg("dummy-4"), newPkg("dummy-5")},
			srcinfos:  map[string]*gopkg.PKGBUILD{"dummy-4": &gopkg.PKGBUILD{Pkgbase: "dummy-4", Validpgpkeys: []string{"11E521D646982372EB577A1F8F0871F202119294"}}, "dummy-5": &gopkg.PKGBUILD{Pkgbase: "dummy-5", Validpgpkeys: []string{"C52048C0C0748FEE227D47A2702353E0F7E48EDB"}}},
			bases:     map[string][]*rpc.Pkg{"dummy-4": {newPkg("dummy-4")}, "dummy-5": {newPkg("dummy-5")}},
			wantError: false,
		},
		// Dummy package with invalid key, should fail.
		{
			pkgs:      []*rpc.Pkg{newPkg("dummy-7")},
			srcinfos:  map[string]*gopkg.PKGBUILD{"dummy-7": &gopkg.PKGBUILD{Pkgbase: "dummy-7", Validpgpkeys: []string{"THIS-SHOULD-FAIL"}}},
			bases:     map[string][]*rpc.Pkg{"dummy-7": {newPkg("dummy-7")}},
			wantError: true,
		},
		// Dummy package with both an invalid an another valid key, should fail.
		// A314827C4E4250A204CE6E13284FC34C8E4B1A25: Thomas Bächler.
		{
			pkgs:      []*rpc.Pkg{newPkg("dummy-8")},
			srcinfos:  map[string]*gopkg.PKGBUILD{"dummy-8": &gopkg.PKGBUILD{Pkgbase: "dummy-8", Validpgpkeys: []string{"A314827C4E4250A204CE6E13284FC34C8E4B1A25", "THIS-SHOULD-FAIL"}}},
			bases:     map[string][]*rpc.Pkg{"dummy-8": {newPkg("dummy-8")}},
			wantError: true,
		},
	}

	for _, tt := range casetests {
		err := checkPgpKeys(tt.pkgs, tt.bases, tt.srcinfos)
		if !tt.wantError {
			if err != nil {
				t.Fatalf("Got error %q, want no error", err)
			}
			continue
		}
		// Here, we want to see the error.
		if err == nil {
			t.Fatalf("Got no error; want error")
		}
	}
}
|
108
local_install.go
Normal file
108
local_install.go
Normal file
@ -0,0 +1,108 @@
|
||||
// Experimental code for install local with dependency refactoring
|
||||
// Not at feature parity with install.go
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
"github.com/Jguer/yay/v12/pkg/dep"
|
||||
"github.com/Jguer/yay/v12/pkg/multierror"
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
"github.com/Jguer/yay/v12/pkg/sync"
|
||||
|
||||
gosrc "github.com/Morganamilo/go-srcinfo"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
)
|
||||
|
||||
var ErrNoBuildFiles = errors.New(gotext.Get("cannot find PKGBUILD and .SRCINFO in directory"))
|
||||
|
||||
func srcinfoExists(ctx context.Context,
|
||||
cmdBuilder exe.ICmdBuilder, targetDir string,
|
||||
) error {
|
||||
srcInfoDir := filepath.Join(targetDir, ".SRCINFO")
|
||||
pkgbuildDir := filepath.Join(targetDir, "PKGBUILD")
|
||||
if _, err := os.Stat(srcInfoDir); err == nil {
|
||||
if _, err := os.Stat(pkgbuildDir); err == nil {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
if _, err := os.Stat(pkgbuildDir); err == nil {
|
||||
// run makepkg to generate .SRCINFO
|
||||
srcinfo, stderr, err := cmdBuilder.Capture(cmdBuilder.BuildMakepkgCmd(ctx, targetDir, "--printsrcinfo"))
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to generate .SRCINFO: %w - %s", err, stderr)
|
||||
}
|
||||
|
||||
if srcinfo == "" {
|
||||
return fmt.Errorf("generated .SRCINFO is empty, check your PKGBUILD for errors")
|
||||
}
|
||||
|
||||
if err := os.WriteFile(srcInfoDir, []byte(srcinfo), 0o600); err != nil {
|
||||
return fmt.Errorf("unable to write .SRCINFO: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("%w: %s", ErrNoBuildFiles, targetDir)
|
||||
}
|
||||
|
||||
// installLocalPKGBUILD builds and installs the PKGBUILDs found in the target
// directories given on the command line, resolving their dependencies first.
// Experimental: not at feature parity with install.go.
func installLocalPKGBUILD(
	ctx context.Context,
	run *runtime.Runtime,
	cmdArgs *parser.Arguments,
	dbExecutor db.Executor,
) error {
	aurCache := run.AURClient
	// "--nocheck" present in the makepkg flags skips check dependencies below.
	noCheck := strings.Contains(run.Cfg.MFlags, "--nocheck")

	if len(cmdArgs.Targets) < 1 {
		return errors.New(gotext.Get("no target directories specified"))
	}

	// Parse (generating first if needed) a .SRCINFO for every target dir.
	srcInfos := map[string]*gosrc.Srcinfo{}
	for _, targetDir := range cmdArgs.Targets {
		if err := srcinfoExists(ctx, run.CmdBuilder, targetDir); err != nil {
			return err
		}

		pkgbuild, err := gosrc.ParseFile(filepath.Join(targetDir, ".SRCINFO"))
		if err != nil {
			return fmt.Errorf("%s: %w", gotext.Get("failed to parse .SRCINFO"), err)
		}

		srcInfos[targetDir] = pkgbuild
	}

	// Build the dependency graph for all parsed srcinfos.
	grapher := dep.NewGrapher(dbExecutor, aurCache, false, settings.NoConfirm,
		cmdArgs.ExistsDouble("d", "nodeps"), noCheck, cmdArgs.ExistsArg("needed"),
		run.Logger.Child("grapher"))
	graph, err := grapher.GraphFromSrcInfos(ctx, nil, srcInfos)
	if err != nil {
		return err
	}

	opService := sync.NewOperationService(ctx, dbExecutor, run)
	multiErr := &multierror.MultiError{}
	// Topologically order the install layers, collecting every unresolvable
	// dependency instead of aborting on the first one.
	targets := graph.TopoSortedLayerMap(func(name string, ii *dep.InstallInfo) error {
		if ii.Source == dep.Missing {
			multiErr.Add(fmt.Errorf("%w: %s %s", ErrPackagesNotFound, name, ii.Version))
		}
		return nil
	})

	// Abort before installing anything if any dependency was missing.
	if err := multiErr.Return(); err != nil {
		return err
	}
	return opService.Run(ctx, run, cmdArgs, targets, []string{})
}
|
1019
local_install_test.go
Normal file
1019
local_install_test.go
Normal file
File diff suppressed because it is too large
Load Diff
337
main.go
337
main.go
@ -1,229 +1,154 @@
|
||||
package main
|
||||
package main // import "github.com/Jguer/yay"
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"context"
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"os/exec"
|
||||
"runtime/debug"
|
||||
"strings"
|
||||
|
||||
alpm "github.com/jguer/go-alpm"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db/ialpm"
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
func initPaths() {
|
||||
if configHome = os.Getenv("XDG_CONFIG_HOME"); configHome != "" {
|
||||
if info, err := os.Stat(configHome); err == nil && info.IsDir() {
|
||||
configHome = configHome + "/yay"
|
||||
} else {
|
||||
configHome = os.Getenv("HOME") + "/.config/yay"
|
||||
var (
|
||||
yayVersion = "12.0.4" // To be set by compiler.
|
||||
localePath = "/usr/share/locale" // To be set by compiler.
|
||||
)
|
||||
|
||||
func initGotext() {
|
||||
if envLocalePath := os.Getenv("LOCALE_PATH"); envLocalePath != "" {
|
||||
localePath = envLocalePath
|
||||
}
|
||||
|
||||
if lc := os.Getenv("LANGUAGE"); lc != "" {
|
||||
// Split LANGUAGE by ':' and prioritize the first locale
|
||||
// Should fix in gotext to support this
|
||||
locales := strings.Split(lc, ":")
|
||||
if len(locales) > 0 && locales[0] != "" {
|
||||
gotext.Configure(localePath, locales[0], "yay")
|
||||
}
|
||||
} else if lc := os.Getenv("LC_ALL"); lc != "" {
|
||||
gotext.Configure(localePath, lc, "yay")
|
||||
} else if lc := os.Getenv("LC_MESSAGES"); lc != "" {
|
||||
gotext.Configure(localePath, lc, "yay")
|
||||
} else {
|
||||
configHome = os.Getenv("HOME") + "/.config/yay"
|
||||
gotext.Configure(localePath, os.Getenv("LANG"), "yay")
|
||||
}
|
||||
|
||||
if cacheHome = os.Getenv("XDG_CACHE_HOME"); cacheHome != "" {
|
||||
if info, err := os.Stat(cacheHome); err == nil && info.IsDir() {
|
||||
cacheHome = cacheHome + "/yay"
|
||||
} else {
|
||||
cacheHome = os.Getenv("HOME") + "/.cache/yay"
|
||||
}
|
||||
} else {
|
||||
cacheHome = os.Getenv("HOME") + "/.cache/yay"
|
||||
}
|
||||
|
||||
configFile = configHome + "/" + configFileName
|
||||
vcsFile = cacheHome + "/" + vcsFileName
|
||||
completionFile = cacheHome + "/" + completionFilePrefix
|
||||
}
|
||||
|
||||
func initConfig() (err error) {
|
||||
defaultSettings(&config)
|
||||
|
||||
if _, err = os.Stat(configFile); os.IsNotExist(err) {
|
||||
err = os.MkdirAll(filepath.Dir(configFile), 0755)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("Unable to create config directory:\n%s\n"+
|
||||
"The error was:\n%s", filepath.Dir(configFile), err)
|
||||
return
|
||||
}
|
||||
// Save the default config if nothing is found
|
||||
config.saveConfig()
|
||||
} else {
|
||||
cfile, errf := os.OpenFile(configFile, os.O_RDWR|os.O_CREATE, 0644)
|
||||
if errf != nil {
|
||||
fmt.Printf("Error reading config: %s\n", err)
|
||||
} else {
|
||||
defer cfile.Close()
|
||||
decoder := json.NewDecoder(cfile)
|
||||
err = decoder.Decode(&config)
|
||||
if err != nil {
|
||||
fmt.Println("Loading default Settings.\nError reading config:",
|
||||
err)
|
||||
defaultSettings(&config)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func initVCS() (err error) {
|
||||
if _, err = os.Stat(vcsFile); os.IsNotExist(err) {
|
||||
err = os.MkdirAll(filepath.Dir(vcsFile), 0755)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("Unable to create vcs directory:\n%s\n"+
|
||||
"The error was:\n%s", filepath.Dir(configFile), err)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
vfile, err := os.OpenFile(vcsFile, os.O_RDONLY|os.O_CREATE, 0644)
|
||||
if err == nil {
|
||||
defer vfile.Close()
|
||||
decoder := json.NewDecoder(vfile)
|
||||
_ = decoder.Decode(&savedInfo)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func initAlpm() (err error) {
|
||||
var value string
|
||||
var exists bool
|
||||
//var double bool
|
||||
|
||||
value, _, exists = cmdArgs.getArg("config")
|
||||
if exists {
|
||||
config.PacmanConf = value
|
||||
}
|
||||
|
||||
alpmConf, err = readAlpmConfig(config.PacmanConf)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("Unable to read Pacman conf: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
value, _, exists = cmdArgs.getArg("dbpath", "b")
|
||||
if exists {
|
||||
alpmConf.DBPath = value
|
||||
}
|
||||
|
||||
value, _, exists = cmdArgs.getArg("root", "r")
|
||||
if exists {
|
||||
alpmConf.RootDir = value
|
||||
}
|
||||
|
||||
value, _, exists = cmdArgs.getArg("arch")
|
||||
if exists {
|
||||
alpmConf.Architecture = value
|
||||
}
|
||||
|
||||
value, _, exists = cmdArgs.getArg("ignore")
|
||||
if exists {
|
||||
alpmConf.IgnorePkg = append(alpmConf.IgnorePkg, strings.Split(value, ",")...)
|
||||
}
|
||||
|
||||
value, _, exists = cmdArgs.getArg("ignoregroup")
|
||||
if exists {
|
||||
alpmConf.IgnoreGroup = append(alpmConf.IgnoreGroup, strings.Split(value, ",")...)
|
||||
}
|
||||
|
||||
//TODO
|
||||
//current system does not allow duplicate arguments
|
||||
//but pacman allows multiple cachdirs to be passed
|
||||
//for now only handle one cache dir
|
||||
value, _, exists = cmdArgs.getArg("cachdir")
|
||||
if exists {
|
||||
alpmConf.CacheDir = []string{value}
|
||||
}
|
||||
|
||||
value, _, exists = cmdArgs.getArg("gpgdir")
|
||||
if exists {
|
||||
alpmConf.GPGDir = value
|
||||
}
|
||||
|
||||
alpmHandle, err = alpmConf.CreateHandle()
|
||||
if err != nil {
|
||||
err = fmt.Errorf("Unable to CreateHandle: %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
value, _, exists = cmdArgs.getArg("color")
|
||||
if value == "always" || value == "auto" {
|
||||
useColor = true
|
||||
} else if value == "never" {
|
||||
useColor = false
|
||||
} else {
|
||||
useColor = alpmConf.Options&alpm.ConfColor > 0
|
||||
}
|
||||
|
||||
alpmHandle.SetQuestionCallback(questionCallback)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func main() {
|
||||
var status int
|
||||
var err error
|
||||
fallbackLog := text.NewLogger(os.Stdout, os.Stderr, os.Stdin, false, "fallback")
|
||||
var (
|
||||
err error
|
||||
ctx = context.Background()
|
||||
ret = 0
|
||||
)
|
||||
|
||||
if 0 == os.Geteuid() {
|
||||
fmt.Println("Please avoid running yay as root/sudo.")
|
||||
}
|
||||
|
||||
err = cmdArgs.parseCommandLine()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
status = 1
|
||||
goto cleanup
|
||||
}
|
||||
|
||||
initPaths()
|
||||
|
||||
err = initConfig()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
status = 1
|
||||
goto cleanup
|
||||
}
|
||||
|
||||
err = initVCS()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
status = 1
|
||||
goto cleanup
|
||||
|
||||
}
|
||||
|
||||
err = initAlpm()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
status = 1
|
||||
goto cleanup
|
||||
}
|
||||
|
||||
err = handleCmd()
|
||||
if err != nil {
|
||||
if err.Error() != "" {
|
||||
fmt.Println(err)
|
||||
defer func() {
|
||||
if rec := recover(); rec != nil {
|
||||
fallbackLog.Errorln("Panic occurred:", rec)
|
||||
fallbackLog.Errorln("Stack trace:", string(debug.Stack()))
|
||||
ret = 1
|
||||
}
|
||||
|
||||
status = 1
|
||||
goto cleanup
|
||||
os.Exit(ret)
|
||||
}()
|
||||
|
||||
initGotext()
|
||||
|
||||
if os.Geteuid() == 0 {
|
||||
fallbackLog.Warnln(gotext.Get("Avoid running yay as root/sudo."))
|
||||
}
|
||||
|
||||
cleanup:
|
||||
//cleanup
|
||||
//from here on out dont exit if an error occurs
|
||||
//if we fail to save the configuration
|
||||
//at least continue on and try clean up other parts
|
||||
configPath := settings.GetConfigPath()
|
||||
// Parse config
|
||||
cfg, err := settings.NewConfig(fallbackLog, configPath, yayVersion)
|
||||
if err != nil {
|
||||
if str := err.Error(); str != "" {
|
||||
fallbackLog.Errorln(str)
|
||||
}
|
||||
|
||||
if alpmHandle != nil {
|
||||
err = alpmHandle.Release()
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
status = 1
|
||||
ret = 1
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if errS := cfg.RunMigrations(fallbackLog,
|
||||
settings.DefaultMigrations(), configPath, yayVersion); errS != nil {
|
||||
fallbackLog.Errorln(errS)
|
||||
}
|
||||
|
||||
cmdArgs := parser.MakeArguments()
|
||||
|
||||
// Parse command line
|
||||
if err = cfg.ParseCommandLine(cmdArgs); err != nil {
|
||||
if str := err.Error(); str != "" {
|
||||
fallbackLog.Errorln(str)
|
||||
}
|
||||
|
||||
ret = 1
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if cfg.SaveConfig {
|
||||
if errS := cfg.Save(configPath, yayVersion); errS != nil {
|
||||
fallbackLog.Errorln(errS)
|
||||
}
|
||||
}
|
||||
|
||||
os.Exit(status)
|
||||
// Build run
|
||||
run, err := runtime.NewRuntime(cfg, cmdArgs, yayVersion)
|
||||
if err != nil {
|
||||
if str := err.Error(); str != "" {
|
||||
fallbackLog.Errorln(str)
|
||||
}
|
||||
|
||||
ret = 1
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
dbExecutor, err := ialpm.NewExecutor(run.PacmanConf, run.Logger.Child("db"))
|
||||
if err != nil {
|
||||
if str := err.Error(); str != "" {
|
||||
fallbackLog.Errorln(str)
|
||||
}
|
||||
|
||||
ret = 1
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if rec := recover(); rec != nil {
|
||||
fallbackLog.Errorln("Panic occurred in DB operation:", rec)
|
||||
fallbackLog.Errorln("Stack trace:", string(debug.Stack()))
|
||||
}
|
||||
|
||||
dbExecutor.Cleanup()
|
||||
}()
|
||||
|
||||
if err = handleCmd(ctx, run, cmdArgs, dbExecutor); err != nil {
|
||||
if str := err.Error(); str != "" {
|
||||
fallbackLog.Errorln(str)
|
||||
}
|
||||
|
||||
exitError := &exec.ExitError{}
|
||||
if errors.As(err, &exitError) {
|
||||
// mirror pacman exit code when applicable
|
||||
ret = exitError.ExitCode()
|
||||
return
|
||||
}
|
||||
|
||||
// fallback
|
||||
ret = 1
|
||||
}
|
||||
}
|
||||
|
645
parser.go
645
parser.go
@ -1,645 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// stringSet is a basic set of strings.
// This is used a lot so it deserves its own type.
// Other types of sets are used throughout the code but do not have their
// own typedef. String sets and <type>sets should be used when applicable:
// they are far more flexible than slices and give easy membership lookup.
type stringSet map[string]struct{}

// set adds v to the set.
func (s stringSet) set(v string) {
	s[v] = struct{}{}
}

// get reports whether v is a member of the set.
func (s stringSet) get(v string) bool {
	_, ok := s[v]
	return ok
}

// remove deletes v from the set; removing an absent member is a no-op.
func (s stringSet) remove(v string) {
	delete(s, v)
}

// toSlice returns the members of the set in unspecified order.
func (s stringSet) toSlice() []string {
	members := make([]string, 0, len(s))
	for member := range s {
		members = append(members, member)
	}

	return members
}

// sliceToStringSet builds a set containing every element of in.
func sliceToStringSet(in []string) stringSet {
	out := make(stringSet)
	for _, v := range in {
		out.set(v)
	}

	return out
}

// makeStringSet builds a set from its variadic arguments.
func makeStringSet(in ...string) stringSet {
	return sliceToStringSet(in)
}
|
||||
|
||||
// Parses command line arguments in a way we can interact with programmatically but
// also in a way that can easily be passed to pacman later on.
type arguments struct {
	op      string            // operation letter/name, e.g. "S" or "sync"; empty until parsed
	options map[string]string // operation-specific flags mapped to their (possibly empty) values
	globals map[string]string // flags pacman accepts for every operation (see isGlobal)
	doubles stringSet         // Tracks args passed twice such as -yy and -dd
	targets stringSet         // positional targets (package names, directories, ...)
}
|
||||
|
||||
func makeArguments() *arguments {
|
||||
return &arguments{
|
||||
"",
|
||||
make(map[string]string),
|
||||
make(map[string]string),
|
||||
make(stringSet),
|
||||
make(stringSet),
|
||||
}
|
||||
}
|
||||
|
||||
func (parser *arguments) copy() (cp *arguments) {
|
||||
cp = makeArguments()
|
||||
|
||||
cp.op = parser.op
|
||||
|
||||
for k, v := range parser.options {
|
||||
cp.options[k] = v
|
||||
}
|
||||
|
||||
for k, v := range parser.globals {
|
||||
cp.globals[k] = v
|
||||
}
|
||||
|
||||
for k, v := range parser.targets {
|
||||
cp.targets[k] = v
|
||||
}
|
||||
|
||||
for k, v := range parser.doubles {
|
||||
cp.doubles[k] = v
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (parser *arguments) delArg(options ...string) {
|
||||
for _, option := range options {
|
||||
delete(parser.options, option)
|
||||
delete(parser.globals, option)
|
||||
delete(parser.doubles, option)
|
||||
}
|
||||
}
|
||||
|
||||
func (parser *arguments) needRoot() bool {
|
||||
if parser.existsArg("h", "help") {
|
||||
return false
|
||||
}
|
||||
|
||||
if parser.existsArg("p", "print") {
|
||||
return false
|
||||
}
|
||||
|
||||
switch parser.op {
|
||||
case "V", "version":
|
||||
return false
|
||||
case "D", "database":
|
||||
return true
|
||||
case "F", "files":
|
||||
if parser.existsArg("y", "refresh") {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
case "Q", "query":
|
||||
return false
|
||||
case "R", "remove":
|
||||
return true
|
||||
case "S", "sync":
|
||||
if parser.existsArg("y", "refresh") {
|
||||
return true
|
||||
}
|
||||
if parser.existsArg("u", "sysupgrade") {
|
||||
return true
|
||||
}
|
||||
if parser.existsArg("s", "search") {
|
||||
return false
|
||||
}
|
||||
if parser.existsArg("l", "list") {
|
||||
return false
|
||||
}
|
||||
if parser.existsArg("i", "info") {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
case "T", "deptest":
|
||||
return false
|
||||
case "U", "upgrade":
|
||||
return true
|
||||
|
||||
// yay specific
|
||||
case "Y", "yay":
|
||||
return false
|
||||
case "P", "print":
|
||||
return false
|
||||
case "G", "getpkgbuild":
|
||||
return false
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func (parser *arguments) addOP(op string) (err error) {
|
||||
if parser.op != "" {
|
||||
err = fmt.Errorf("only one operation may be used at a time")
|
||||
return
|
||||
}
|
||||
|
||||
parser.op = op
|
||||
return
|
||||
}
|
||||
|
||||
func (parser *arguments) addParam(option string, arg string) (err error) {
|
||||
if isOp(option) {
|
||||
err = parser.addOP(option)
|
||||
return
|
||||
}
|
||||
|
||||
if parser.existsArg(option) {
|
||||
parser.doubles[option] = struct{}{}
|
||||
} else if isGlobal(option) {
|
||||
parser.globals[option] = arg
|
||||
} else {
|
||||
parser.options[option] = arg
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (parser *arguments) addArg(options ...string) (err error) {
|
||||
for _, option := range options {
|
||||
err = parser.addParam(option, "")
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Multiple args acts as an OR operator
|
||||
func (parser *arguments) existsArg(options ...string) bool {
|
||||
for _, option := range options {
|
||||
_, exists := parser.options[option]
|
||||
if exists {
|
||||
return true
|
||||
}
|
||||
|
||||
_, exists = parser.globals[option]
|
||||
if exists {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (parser *arguments) getArg(options ...string) (arg string, double bool, exists bool) {
|
||||
for _, option := range options {
|
||||
arg, exists = parser.options[option]
|
||||
|
||||
if exists {
|
||||
_, double = parser.doubles[option]
|
||||
return
|
||||
}
|
||||
|
||||
arg, exists = parser.globals[option]
|
||||
|
||||
if exists {
|
||||
_, double = parser.doubles[option]
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (parser *arguments) addTarget(targets ...string) {
|
||||
for _, target := range targets {
|
||||
parser.targets[target] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
func (parser *arguments) delTarget(targets ...string) {
|
||||
for _, target := range targets {
|
||||
delete(parser.targets, target)
|
||||
}
|
||||
}
|
||||
|
||||
// Multiple args acts as an OR operator
|
||||
func (parser *arguments) existsDouble(options ...string) bool {
|
||||
for _, option := range options {
|
||||
_, exists := parser.doubles[option]
|
||||
if exists {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (parser *arguments) formatTargets() (args []string) {
|
||||
for target := range parser.targets {
|
||||
args = append(args, target)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (parser *arguments) formatArgs() (args []string) {
|
||||
var op string
|
||||
|
||||
if parser.op != "" {
|
||||
op = formatArg(parser.op)
|
||||
}
|
||||
|
||||
args = append(args, op)
|
||||
|
||||
for option, arg := range parser.options {
|
||||
if option == "--" {
|
||||
continue
|
||||
}
|
||||
|
||||
formatedOption := formatArg(option)
|
||||
args = append(args, formatedOption)
|
||||
|
||||
if hasParam(option) {
|
||||
args = append(args, arg)
|
||||
}
|
||||
|
||||
if parser.existsDouble(option) {
|
||||
args = append(args, formatedOption)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (parser *arguments) formatGlobals() (args []string) {
|
||||
for option, arg := range parser.globals {
|
||||
formatedOption := formatArg(option)
|
||||
args = append(args, formatedOption)
|
||||
|
||||
if hasParam(option) {
|
||||
args = append(args, arg)
|
||||
}
|
||||
|
||||
if parser.existsDouble(option) {
|
||||
args = append(args, formatedOption)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
|
||||
}
|
||||
|
||||
// formatArg prefixes a flag name with "-" (single-letter) or "--"
// (long form) so it can be passed on a command line.
func formatArg(arg string) string {
	prefix := "-"
	if len(arg) > 1 {
		prefix = "--"
	}

	return prefix + arg
}
|
||||
|
||||
// isOp reports whether op is a recognized operation, either one of
// pacman's or one of yay's own.
func isOp(op string) bool {
	switch op {
	case "V", "version",
		"D", "database",
		"F", "files",
		"Q", "query",
		"R", "remove",
		"S", "sync",
		"T", "deptest",
		"U", "upgrade",
		// yay specific
		"Y", "yay",
		"P", "print",
		"G", "getpkgbuild":
		return true
	default:
		return false
	}
}
|
||||
|
||||
// isGlobal reports whether op is a flag pacman accepts regardless of the
// operation (stored in arguments.globals rather than arguments.options).
func isGlobal(op string) bool {
	switch op {
	case "b", "dbpath",
		"r", "root",
		"v", "verbose",
		"arch",
		"cachedir",
		"color",
		"config",
		"debug",
		"gpgdir",
		"hookdir",
		"logfile",
		"noconfirm",
		"confirm":
		return true
	default:
		return false
	}
}
|
||||
|
||||
// hasParam reports whether arg is a flag that consumes a value
// (e.g. --dbpath /some/path). Covers both pacman's and yay's flags.
func hasParam(arg string) bool {
	switch arg {
	// pacman params
	case "dbpath", "b",
		"root", "r",
		"sysroot",
		"config",
		"ignore",
		"assume-installed",
		"overwrite",
		"ask",
		"cachedir",
		"hookdir",
		"logfile",
		"ignoregroup",
		"arch",
		"print-format",
		"gpgdir",
		"color":
		return true
	//yay params
	case "mflags",
		"gpgflags",
		"builddir",
		"editor",
		"makepkg",
		"pacman",
		"tar",
		"git",
		"gpg",
		"requestsplitn":
		return true
	default:
		return false
	}
}
|
||||
|
||||
// Parses short hand options such as:
// -Syu -b/some/path -
//
// Each letter of a bundled short option is added individually. The first
// letter that takes a value (hasParam) consumes the rest of the word as its
// value, or the next command-line argument when the word ends there
// (usedNext=true tells the caller that argument was consumed).
func (parser *arguments) parseShortOption(arg string, param string) (usedNext bool, err error) {
	// A lone "-" is the read-targets-from-stdin marker; keep it verbatim.
	if arg == "-" {
		err = parser.addArg("-")
		return
	}

	// Strip the leading "-".
	arg = arg[1:]

	for k, _char := range arg {
		char := string(_char)

		if hasParam(char) {
			if k < len(arg)-2 {
				// Value glued onto the flag inside the same word.
				// NOTE(review): arg[k+2:] skips the character right after
				// the flag letter (so "-b/path" yields value "path", not
				// "/path") — confirm this matches the intended short-flag
				// value syntax. Also assumes single-byte flag letters.
				err = parser.addParam(char, arg[k+2:])
			} else {
				// Flag is at (or near) the end of the word: take the next
				// command-line argument as its value.
				usedNext = true
				err = parser.addParam(char, param)
			}

			// A value-taking flag ends the bundle.
			break
		} else {
			err = parser.addArg(char)

			if err != nil {
				return
			}
		}
	}

	return
}
|
||||
|
||||
// Parses full length options such as:
|
||||
// --sync --refresh --sysupgrade --dbpath /some/path --
|
||||
func (parser *arguments) parseLongOption(arg string, param string) (usedNext bool, err error) {
|
||||
if arg == "--" {
|
||||
err = parser.addArg(arg)
|
||||
return
|
||||
}
|
||||
|
||||
arg = arg[2:]
|
||||
|
||||
if hasParam(arg) {
|
||||
err = parser.addParam(arg, param)
|
||||
usedNext = true
|
||||
} else {
|
||||
err = parser.addArg(arg)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (parser *arguments) parseStdin() (err error) {
|
||||
for {
|
||||
var target string
|
||||
_, err = fmt.Scan(&target)
|
||||
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
err = nil
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
parser.addTarget(target)
|
||||
}
|
||||
}
|
||||
|
||||
func (parser *arguments) parseCommandLine() (err error) {
|
||||
args := os.Args[1:]
|
||||
usedNext := false
|
||||
|
||||
if len(args) < 1 {
|
||||
err = fmt.Errorf("no operation specified (use -h for help)")
|
||||
return
|
||||
}
|
||||
|
||||
for k, arg := range args {
|
||||
var nextArg string
|
||||
|
||||
if usedNext {
|
||||
usedNext = false
|
||||
continue
|
||||
}
|
||||
|
||||
if k+1 < len(args) {
|
||||
nextArg = args[k+1]
|
||||
}
|
||||
|
||||
if parser.existsArg("--") {
|
||||
parser.addTarget(arg)
|
||||
} else if strings.HasPrefix(arg, "--") {
|
||||
usedNext, err = parser.parseLongOption(arg, nextArg)
|
||||
} else if strings.HasPrefix(arg, "-") {
|
||||
usedNext, err = parser.parseShortOption(arg, nextArg)
|
||||
} else {
|
||||
parser.addTarget(arg)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if parser.op == "" {
|
||||
parser.op = "Y"
|
||||
}
|
||||
|
||||
if cmdArgs.existsArg("-") {
|
||||
var file *os.File
|
||||
err = cmdArgs.parseStdin()
|
||||
cmdArgs.delArg("-")
|
||||
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
file, err = os.Open("/dev/tty")
|
||||
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
os.Stdin = file
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// parseNumberMenu parses input for number menus.
//
// Supported syntax:
//   - individual selection: 1 2 3 4
//   - range selection:      1-4 10-20
//   - negation:             ^1 ^1-4
//
// Returns (include, exclude, otherInclude, otherExclude): the number ranges
// to add and to remove, plus any words that could not be parsed as numbers
// (lowercased), in their positive and negated forms. This is intended to
// allow words inside of number menus, e.g. 'all', 'none', 'abort'; their
// interpretation is up to the caller — this function merely parses and
// organizes the input.
func parseNumberMenu(input string) (intRanges, intRanges, stringSet, stringSet) {
	include := make(intRanges, 0, 0)
	exclude := make(intRanges, 0, 0)
	otherInclude := make(stringSet)
	otherExclude := make(stringSet)

	// strings.Fields never yields empty words, so word[0] below is safe.
	words := strings.Fields(input)

	for _, word := range words {
		var num1 int
		var num2 int
		var err error
		invert := false
		other := otherInclude

		// A leading '^' negates the whole word.
		if word[0] == '^' {
			invert = true
			other = otherExclude
			word = word[1:]
		}

		// At most one '-' split: "1-4" -> ["1", "4"], "5" -> ["5"].
		ranges := strings.SplitN(word, "-", 2)

		num1, err = strconv.Atoi(ranges[0])
		if err != nil {
			// Not a number: keep the word itself (lowercased).
			other.set(strings.ToLower(word))
			continue
		}

		if len(ranges) == 2 {
			num2, err = strconv.Atoi(ranges[1])
			if err != nil {
				other.set(strings.ToLower(word))
				continue
			}
		} else {
			// A single number is the degenerate range n-n.
			num2 = num1
		}

		// Bounds may be given in either order: 10-5 == 5-10.
		mi := min(num1, num2)
		ma := max(num1, num2)

		if !invert {
			include = append(include, makeIntRange(mi, ma))
		} else {
			exclude = append(exclude, makeIntRange(mi, ma))
		}
	}

	return include, exclude, otherInclude, otherExclude
}
|
@ -1,99 +0,0 @@
|
||||
package main
|
||||
|
||||
import "testing"
|
||||
|
||||
func intRangesEqual(a, b intRanges) bool {
|
||||
if a == nil && b == nil {
|
||||
return true
|
||||
}
|
||||
|
||||
if a == nil || b == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if len(a) != len(b) {
|
||||
return false
|
||||
}
|
||||
|
||||
for n := range a {
|
||||
r1 := a[n]
|
||||
r2 := b[n]
|
||||
|
||||
if r1.min != r1.min || r1.max != r2.max {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func stringSetEqual(a, b stringSet) bool {
|
||||
if a == nil && b == nil {
|
||||
return true
|
||||
}
|
||||
|
||||
if a == nil || b == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if len(a) != len(b) {
|
||||
return false
|
||||
}
|
||||
|
||||
for n := range a {
|
||||
if !b.get(n) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func TestParseNumberMenu(t *testing.T) {
|
||||
type result struct {
|
||||
Include intRanges
|
||||
Exclude intRanges
|
||||
OtherInclude stringSet
|
||||
OtherExclude stringSet
|
||||
}
|
||||
|
||||
inputs := []string{
|
||||
"1 2 3 4 5",
|
||||
"1-10 5-15",
|
||||
"10-5 90-85",
|
||||
"1 ^2 ^10-5 99 ^40-38 ^123 60-62",
|
||||
"abort all none",
|
||||
"a-b ^a-b ^abort",
|
||||
"1\t2 3 4\t\t \t 5",
|
||||
"",
|
||||
" \t ",
|
||||
"A B C D E",
|
||||
}
|
||||
|
||||
expected := []result{
|
||||
{intRanges{makeIntRange(1, 1), makeIntRange(2, 2), makeIntRange(3, 3), makeIntRange(4, 4), makeIntRange(5, 5)}, intRanges{}, make(stringSet), make(stringSet)},
|
||||
{intRanges{makeIntRange(1, 10), makeIntRange(5, 15)}, intRanges{}, make(stringSet), make(stringSet)},
|
||||
{intRanges{makeIntRange(5, 10), makeIntRange(85, 90)}, intRanges{}, make(stringSet), make(stringSet)},
|
||||
{intRanges{makeIntRange(1, 1), makeIntRange(99, 99), makeIntRange(60, 62)}, intRanges{makeIntRange(2, 2), makeIntRange(5, 10), makeIntRange(38, 40), makeIntRange(123, 123)}, make(stringSet), make(stringSet)},
|
||||
{intRanges{}, intRanges{}, makeStringSet("abort", "all", "none"), make(stringSet)},
|
||||
{intRanges{}, intRanges{}, makeStringSet("a-b"), makeStringSet("abort", "a-b")},
|
||||
{intRanges{makeIntRange(1, 1), makeIntRange(2, 2), makeIntRange(3, 3), makeIntRange(4, 4), makeIntRange(5, 5)}, intRanges{}, make(stringSet), make(stringSet)},
|
||||
{intRanges{}, intRanges{}, make(stringSet), make(stringSet)},
|
||||
{intRanges{}, intRanges{}, make(stringSet), make(stringSet)},
|
||||
{intRanges{}, intRanges{}, makeStringSet("a", "b", "c", "d", "e"), make(stringSet)},
|
||||
}
|
||||
|
||||
for n, in := range inputs {
|
||||
res := expected[n]
|
||||
include, exclude, otherInclude, otherExclude := parseNumberMenu(in)
|
||||
|
||||
if !intRangesEqual(include, res.Include) ||
|
||||
!intRangesEqual(exclude, res.Exclude) ||
|
||||
!stringSetEqual(otherInclude, res.OtherInclude) ||
|
||||
!stringSetEqual(otherExclude, res.OtherExclude) {
|
||||
|
||||
t.Fatalf("Test %d Failed: Expected: include=%+v exclude=%+v otherInclude=%+v otherExclude=%+v got include=%+v excluive=%+v otherInclude=%+v otherExclude=%+v",
|
||||
n+1, res.Include, res.Exclude, res.OtherInclude, res.OtherExclude, include, exclude, otherInclude, otherExclude)
|
||||
}
|
||||
}
|
||||
}
|
82
pkg/cmd/graph/main.go
Normal file
82
pkg/cmd/graph/main.go
Normal file
@ -0,0 +1,82 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db/ialpm"
|
||||
"github.com/Jguer/yay/v12/pkg/dep"
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
|
||||
"github.com/Jguer/aur/metadata"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
)
|
||||
|
||||
// handleCmd wires up the standalone graph tool: it loads yay's config,
// parses the command line, builds the runtime, an ALPM executor and an AUR
// metadata cache, then delegates to graphPackage for the requested target.
func handleCmd(logger *text.Logger) error {
	cfg, err := settings.NewConfig(logger, settings.GetConfigPath(), "")
	if err != nil {
		return err
	}

	cmdArgs := parser.MakeArguments()
	if errP := cfg.ParseCommandLine(cmdArgs); errP != nil {
		return errP
	}

	run, err := runtime.NewRuntime(cfg, cmdArgs, "1.0.0")
	if err != nil {
		return err
	}

	dbExecutor, err := ialpm.NewExecutor(run.PacmanConf, logger)
	if err != nil {
		return err
	}

	// AUR metadata is cached on disk next to the build dir.
	aurCache, err := metadata.New(
		metadata.WithCacheFilePath(
			filepath.Join(cfg.BuildDir, "aur.json")))
	if err != nil {
		return fmt.Errorf("%s: %w", gotext.Get("failed to retrieve aur Cache"), err)
	}

	grapher := dep.NewGrapher(dbExecutor, aurCache, true, settings.NoConfirm,
		cmdArgs.ExistsDouble("d", "nodeps"), false, false,
		run.Logger.Child("grapher"))

	return graphPackage(context.Background(), grapher, cmdArgs.Targets)
}
|
||||
|
||||
// main runs the graph tool with a fallback logger on the standard streams,
// printing any error and exiting non-zero on failure.
func main() {
	fallbackLog := text.NewLogger(os.Stdout, os.Stderr, os.Stdin, false, "fallback")
	if err := handleCmd(fallbackLog); err != nil {
		fallbackLog.Errorln(err)
		os.Exit(1)
	}
}
|
||||
|
||||
// graphPackage builds the AUR dependency graph for exactly one target and
// prints the graph plus its topologically sorted layer map to stdout.
// More or fewer than one target is an error.
func graphPackage(
	ctx context.Context,
	grapher *dep.Grapher,
	targets []string,
) error {
	if len(targets) != 1 {
		return errors.New(gotext.Get("only one target is allowed"))
	}

	graph, err := grapher.GraphFromAUR(ctx, nil, []string{targets[0]})
	if err != nil {
		return err
	}

	fmt.Fprintln(os.Stdout, graph.String())
	fmt.Fprintln(os.Stdout, "\nlayers map\n", graph.TopoSortedLayerMap(nil))

	return nil
}
|
130
pkg/completion/completion.go
Normal file
130
pkg/completion/completion.go
Normal file
@ -0,0 +1,130 @@
|
||||
package completion
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
)
|
||||
|
||||
// PkgSynchronizer supplies sync-database packages; it is the slice of the
// db.Executor interface needed to build the repo part of the completion cache.
type PkgSynchronizer interface {
	SyncPackages(...string) []db.IPackage
}

// httpRequestDoer abstracts *http.Client so tests can stub HTTP calls.
type httpRequestDoer interface {
	Do(req *http.Request) (*http.Response, error)
}
|
||||
|
||||
// Show provides completion info for shells.
|
||||
func Show(ctx context.Context, httpClient httpRequestDoer,
|
||||
dbExecutor PkgSynchronizer, aurURL, completionPath string, interval int, force bool,
|
||||
) error {
|
||||
err := Update(ctx, httpClient, dbExecutor, aurURL, completionPath, interval, force)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
in, err := os.OpenFile(completionPath, os.O_RDWR|os.O_CREATE, 0o644)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer in.Close()
|
||||
|
||||
_, err = io.Copy(os.Stdout, in)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// Update updates completion cache to be used by Complete.
|
||||
func Update(ctx context.Context, httpClient httpRequestDoer,
|
||||
dbExecutor PkgSynchronizer, aurURL, completionPath string, interval int, force bool,
|
||||
) error {
|
||||
info, err := os.Stat(completionPath)
|
||||
|
||||
if os.IsNotExist(err) || (interval != -1 && time.Since(info.ModTime()).Hours() >= float64(interval*24)) || force {
|
||||
errd := os.MkdirAll(filepath.Dir(completionPath), 0o755)
|
||||
if errd != nil {
|
||||
return errd
|
||||
}
|
||||
|
||||
out, errf := os.Create(completionPath)
|
||||
if errf != nil {
|
||||
return errf
|
||||
}
|
||||
|
||||
if createAURList(ctx, httpClient, aurURL, out) != nil {
|
||||
defer os.Remove(completionPath)
|
||||
}
|
||||
|
||||
erra := createRepoList(dbExecutor, out)
|
||||
|
||||
out.Close()
|
||||
|
||||
return erra
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// CreateAURList creates a new completion file.
|
||||
func createAURList(ctx context.Context, client httpRequestDoer, aurURL string, out io.Writer) error {
|
||||
u, err := url.Parse(aurURL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
u.Path = path.Join(u.Path, "packages.gz")
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), http.NoBody)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("invalid status code: %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
scanner := bufio.NewScanner(resp.Body)
|
||||
|
||||
scanner.Scan()
|
||||
|
||||
for scanner.Scan() {
|
||||
text := scanner.Text()
|
||||
if strings.HasPrefix(text, "#") {
|
||||
continue
|
||||
}
|
||||
|
||||
if _, err := io.WriteString(out, text+"\tAUR\n"); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// createRepoList appends Repo packages to completion cache.
|
||||
func createRepoList(dbExecutor PkgSynchronizer, out io.Writer) error {
|
||||
for _, pkg := range dbExecutor.SyncPackages() {
|
||||
_, err := io.WriteString(out, pkg.Name()+"\t"+pkg.DB().Name()+"\n")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
99
pkg/completion/completion_test.go
Normal file
99
pkg/completion/completion_test.go
Normal file
@ -0,0 +1,99 @@
|
||||
//go:build !integration
|
||||
// +build !integration
|
||||
|
||||
package completion
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
const samplePackageResp = `
|
||||
# AUR package list, generated on Fri, 24 Jul 2020 22:05:22 GMT
|
||||
cytadela
|
||||
bitefusion
|
||||
globs-svn
|
||||
ri-li
|
||||
globs-benchmarks-svn
|
||||
dunelegacy
|
||||
lumina
|
||||
eternallands-sound
|
||||
`
|
||||
|
||||
const expectPackageCompletion = `cytadela AUR
|
||||
bitefusion AUR
|
||||
globs-svn AUR
|
||||
ri-li AUR
|
||||
globs-benchmarks-svn AUR
|
||||
dunelegacy AUR
|
||||
lumina AUR
|
||||
eternallands-sound AUR
|
||||
`
|
||||
|
||||
// mockDoer is a stub httpRequestDoer that asserts the requested URL and
// returns a canned body, status code, and error.
type mockDoer struct {
	t                *testing.T
	returnBody       string
	returnStatusCode int
	returnErr        error
	wantUrl          string
}

// Do asserts the request URL equals wantUrl, then returns the canned
// response and returnErr.
func (m *mockDoer) Do(req *http.Request) (*http.Response, error) {
	assert.Equal(m.t, m.wantUrl, req.URL.String())
	return &http.Response{
		StatusCode: m.returnStatusCode,
		Body:       io.NopCloser(bytes.NewBufferString(m.returnBody)),
	}, m.returnErr
}
|
||||
|
||||
// Test_createAURList verifies the happy path: packages.gz is fetched from
// the AUR URL, the header line is skipped, and each package is emitted as
// "name\tAUR".
func Test_createAURList(t *testing.T) {
	t.Parallel()
	doer := &mockDoer{
		t:                t,
		wantUrl:          "https://aur.archlinux.org/packages.gz",
		returnStatusCode: 200,
		returnBody:       samplePackageResp,
		returnErr:        nil,
	}
	out := &bytes.Buffer{}
	err := createAURList(context.Background(), doer, "https://aur.archlinux.org", out)
	assert.NoError(t, err)
	gotOut := out.String()
	assert.Equal(t, expectPackageCompletion, gotOut)
}

// Test_createAURListHTTPError verifies transport errors are propagated.
func Test_createAURListHTTPError(t *testing.T) {
	t.Parallel()
	doer := &mockDoer{
		t:                t,
		wantUrl:          "https://aur.archlinux.org/packages.gz",
		returnStatusCode: 200,
		returnBody:       samplePackageResp,
		returnErr:        errors.New("Not available"),
	}

	out := &bytes.Buffer{}
	err := createAURList(context.Background(), doer, "https://aur.archlinux.org", out)
	assert.EqualError(t, err, "Not available")
}

// Test_createAURListStatusError verifies a non-200 response becomes an error.
func Test_createAURListStatusError(t *testing.T) {
	t.Parallel()
	doer := &mockDoer{
		t:                t,
		wantUrl:          "https://aur.archlinux.org/packages.gz",
		returnStatusCode: 503,
		returnBody:       samplePackageResp,
		returnErr:        nil,
	}

	out := &bytes.Buffer{}
	err := createAURList(context.Background(), doer, "https://aur.archlinux.org", out)
	assert.EqualError(t, err, "invalid status code: 503")
}
|
68
pkg/db/executor.go
Normal file
68
pkg/db/executor.go
Normal file
@ -0,0 +1,68 @@
|
||||
package db
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
alpm "github.com/Jguer/go-alpm/v2"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
type (
	// IPackage and Depend re-export the go-alpm types so consumers can
	// depend on this package instead of on go-alpm directly.
	IPackage = alpm.IPackage
	Depend   = alpm.Depend
)

// VerCmp performs version comparison according to Pacman conventions. Return
// value is <0 if and only if v1 is older than v2.
func VerCmp(v1, v2 string) int {
	return alpm.VerCmp(v1, v2)
}

// Upgrade describes a single pending package upgrade.
type Upgrade struct {
	Name          string
	Base          string
	Repository    string
	LocalVersion  string
	RemoteVersion string
	Reason        alpm.PkgReason
	Extra         string // Extra information to be displayed
}

// SyncUpgrade pairs a sync-database package with the locally installed
// version and install reason it would replace.
type SyncUpgrade struct {
	Package      alpm.IPackage
	LocalVersion string
	Reason       alpm.PkgReason
}

// Executor abstracts every ALPM database operation used by yay, allowing
// the concrete ialpm implementation to be mocked in tests.
type Executor interface {
	AlpmArchitectures() ([]string, error)
	BiggestPackages() []IPackage
	Cleanup()
	InstalledRemotePackageNames() []string
	InstalledRemotePackages() map[string]IPackage
	InstalledSyncPackageNames() []string
	IsCorrectVersionInstalled(string, string) bool
	LastBuildTime() time.Time
	LocalPackage(string) IPackage
	LocalPackages() []IPackage
	LocalSatisfierExists(string) bool
	PackageDepends(IPackage) []Depend
	PackageGroups(IPackage) []string
	PackageOptionalDepends(IPackage) []Depend
	PackageProvides(IPackage) []Depend
	PackagesFromGroup(string) []IPackage
	PackagesFromGroupAndDB(string, string) ([]IPackage, error)
	RefreshHandle() error
	SyncUpgrades(enableDowngrade bool) (
		map[string]SyncUpgrade, error)
	Repos() []string
	SatisfierFromDB(string, string) (IPackage, error)
	SyncPackage(string) IPackage
	SyncPackageFromDB(string, string) IPackage
	SyncPackages(...string) []IPackage
	SyncSatisfier(string) IPackage
	SyncSatisfierExists(string) bool

	SetLogger(logger *text.Logger)
}
|
543
pkg/db/ialpm/alpm.go
Normal file
543
pkg/db/ialpm/alpm.go
Normal file
@ -0,0 +1,543 @@
|
||||
package ialpm
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
alpm "github.com/Jguer/go-alpm/v2"
|
||||
pacmanconf "github.com/Morganamilo/go-pacmanconf"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
type AlpmExecutor struct {
|
||||
handle *alpm.Handle
|
||||
localDB alpm.IDB
|
||||
syncDB alpm.IDBList
|
||||
syncDBsCache []alpm.IDB
|
||||
conf *pacmanconf.Config
|
||||
log *text.Logger
|
||||
|
||||
installedRemotePkgNames []string
|
||||
installedRemotePkgMap map[string]alpm.IPackage
|
||||
installedSyncPkgNames []string
|
||||
}
|
||||
|
||||
// NewExecutor builds an AlpmExecutor from a parsed pacman.conf, initializing
// the ALPM handle via RefreshHandle and caching the local and sync DB handles.
func NewExecutor(pacmanConf *pacmanconf.Config, logger *text.Logger) (*AlpmExecutor, error) {
	ae := &AlpmExecutor{
		handle:                  nil,
		localDB:                 nil,
		syncDB:                  nil,
		syncDBsCache:            []alpm.IDB{},
		conf:                    pacmanConf,
		log:                     logger,
		installedRemotePkgNames: nil,
		installedRemotePkgMap:   nil,
		installedSyncPkgNames:   nil,
	}

	// RefreshHandle creates the handle, registers repos and sets callbacks.
	if err := ae.RefreshHandle(); err != nil {
		return nil, err
	}

	var err error
	ae.localDB, err = ae.handle.LocalDB()
	if err != nil {
		return nil, err
	}

	ae.syncDB, err = ae.handle.SyncDBs()
	if err != nil {
		return nil, err
	}

	return ae, nil
}
|
||||
|
||||
func toUsage(usages []string) alpm.Usage {
|
||||
if len(usages) == 0 {
|
||||
return alpm.UsageAll
|
||||
}
|
||||
|
||||
var ret alpm.Usage
|
||||
|
||||
for _, usage := range usages {
|
||||
switch usage {
|
||||
case "Sync":
|
||||
ret |= alpm.UsageSync
|
||||
case "Search":
|
||||
ret |= alpm.UsageSearch
|
||||
case "Install":
|
||||
ret |= alpm.UsageInstall
|
||||
case "Upgrade":
|
||||
ret |= alpm.UsageUpgrade
|
||||
case "All":
|
||||
ret |= alpm.UsageAll
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
// configureAlpm applies a parsed pacman.conf to an ALPM handle: it registers
// each repo as a sync DB and mirrors cache/hook/gpg/log paths, ignore lists,
// architectures, NoUpgrade/NoExtract, syslog, and CheckSpace settings.
func configureAlpm(pacmanConf *pacmanconf.Config, alpmHandle *alpm.Handle) error {
	for _, repo := range pacmanConf.Repos {
		// TODO: set SigLevel
		alpmDB, err := alpmHandle.RegisterSyncDB(repo.Name, 0)
		if err != nil {
			return err
		}

		alpmDB.SetServers(repo.Servers)
		alpmDB.SetUsage(toUsage(repo.Usage))
	}

	if err := alpmHandle.SetCacheDirs(pacmanConf.CacheDir); err != nil {
		return err
	}

	// add hook directories 1-by-1 to avoid overwriting the system directory
	for _, dir := range pacmanConf.HookDir {
		if err := alpmHandle.AddHookDir(dir); err != nil {
			return err
		}
	}

	if err := alpmHandle.SetGPGDir(pacmanConf.GPGDir); err != nil {
		return err
	}

	if err := alpmHandle.SetLogFile(pacmanConf.LogFile); err != nil {
		return err
	}

	if err := alpmHandle.SetIgnorePkgs(pacmanConf.IgnorePkg); err != nil {
		return err
	}

	if err := alpmHandle.SetIgnoreGroups(pacmanConf.IgnoreGroup); err != nil {
		return err
	}

	if err := alpmSetArchitecture(alpmHandle, pacmanConf.Architecture); err != nil {
		return err
	}

	if err := alpmHandle.SetNoUpgrades(pacmanConf.NoUpgrade); err != nil {
		return err
	}

	if err := alpmHandle.SetNoExtracts(pacmanConf.NoExtract); err != nil {
		return err
	}

	if err := alpmHandle.SetUseSyslog(pacmanConf.UseSyslog); err != nil {
		return err
	}

	return alpmHandle.SetCheckSpace(pacmanConf.CheckSpace)
}

// logCallback routes ALPM warnings and errors to the executor's logger;
// other ALPM log levels are deliberately dropped.
func (ae *AlpmExecutor) logCallback() func(level alpm.LogLevel, str string) {
	return func(level alpm.LogLevel, str string) {
		switch level {
		case alpm.LogWarning:
			ae.log.Warn(str)
		case alpm.LogError:
			ae.log.Error(str)
		}
	}
}
|
||||
|
||||
func (ae *AlpmExecutor) questionCallback() func(question alpm.QuestionAny) {
|
||||
return func(question alpm.QuestionAny) {
|
||||
if qi, err := question.QuestionInstallIgnorepkg(); err == nil {
|
||||
qi.SetInstall(true)
|
||||
}
|
||||
|
||||
qp, err := question.QuestionSelectProvider()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if settings.HideMenus {
|
||||
return
|
||||
}
|
||||
|
||||
size := 0
|
||||
|
||||
_ = qp.Providers(ae.handle).ForEach(func(pkg alpm.IPackage) error {
|
||||
size++
|
||||
return nil
|
||||
})
|
||||
|
||||
str := text.Bold(gotext.Get("There are %[1]d providers available for %[2]s:", size, qp.Dep()))
|
||||
|
||||
size = 1
|
||||
|
||||
var dbName string
|
||||
|
||||
_ = qp.Providers(ae.handle).ForEach(func(pkg alpm.IPackage) error {
|
||||
thisDB := pkg.DB().Name()
|
||||
|
||||
if dbName != thisDB {
|
||||
dbName = thisDB
|
||||
str += "\n"
|
||||
str += ae.log.SprintOperationInfo(gotext.Get("Repository"), " ", dbName, "\n ")
|
||||
}
|
||||
str += fmt.Sprintf("%d) %s ", size, pkg.Name())
|
||||
size++
|
||||
return nil
|
||||
})
|
||||
|
||||
ae.log.OperationInfoln(str)
|
||||
|
||||
for {
|
||||
ae.log.Println(gotext.Get("\nEnter a number (default=1): "))
|
||||
|
||||
// TODO: reenable noconfirm
|
||||
if settings.NoConfirm {
|
||||
ae.log.Println()
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
numberBuf, err := ae.log.GetInput("", false)
|
||||
if err != nil {
|
||||
ae.log.Errorln(err)
|
||||
break
|
||||
}
|
||||
|
||||
if numberBuf == "" {
|
||||
break
|
||||
}
|
||||
|
||||
num, err := strconv.Atoi(numberBuf)
|
||||
if err != nil {
|
||||
ae.log.Errorln(gotext.Get("invalid number: %s", numberBuf))
|
||||
continue
|
||||
}
|
||||
|
||||
if num < 1 || num > size {
|
||||
ae.log.Errorln(gotext.Get("invalid value: %d is not between %d and %d", num, 1, size))
|
||||
continue
|
||||
}
|
||||
|
||||
qp.SetUseIndex(num - 1)
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// RefreshHandle releases any existing ALPM handle and creates a fresh one,
// re-applying the pacman configuration and callbacks and re-acquiring the
// local and sync database handles.
func (ae *AlpmExecutor) RefreshHandle() error {
	if ae.handle != nil {
		if errRelease := ae.handle.Release(); errRelease != nil {
			return errRelease
		}
	}

	alpmHandle, err := alpm.Initialize(ae.conf.RootDir, ae.conf.DBPath)
	if err != nil {
		return errors.New(gotext.Get("unable to CreateHandle: %s", err))
	}

	if errConf := configureAlpm(ae.conf, alpmHandle); errConf != nil {
		return errConf
	}

	alpmSetQuestionCallback(alpmHandle, ae.questionCallback())
	alpmSetLogCallback(alpmHandle, ae.logCallback())
	ae.handle = alpmHandle
	// Invalidate the sync-DB slice cache; syncDBs() rebuilds it lazily.
	ae.syncDBsCache = nil

	ae.syncDB, err = alpmHandle.SyncDBs()
	if err != nil {
		return err
	}

	ae.localDB, err = alpmHandle.LocalDB()

	return err
}
|
||||
|
||||
// LocalSatisfierExists reports whether any installed package satisfies
// pkgName (a name or versioned dependency string).
func (ae *AlpmExecutor) LocalSatisfierExists(pkgName string) bool {
	if _, err := ae.localDB.PkgCache().FindSatisfier(pkgName); err != nil {
		return false
	}

	return true
}

// SyncSatisfierExists reports whether any sync-database package satisfies
// pkgName.
func (ae *AlpmExecutor) SyncSatisfierExists(pkgName string) bool {
	if _, err := ae.syncDB.FindSatisfier(pkgName); err != nil {
		return false
	}

	return true
}

// IsCorrectVersionInstalled reports whether pkgName is installed at exactly
// versionRequired.
func (ae *AlpmExecutor) IsCorrectVersionInstalled(pkgName, versionRequired string) bool {
	alpmPackage := ae.localDB.Pkg(pkgName)
	if alpmPackage == nil {
		return false
	}

	return alpmPackage.Version() == versionRequired
}

// SyncSatisfier returns a sync-database package satisfying pkgName, or nil
// when none does.
func (ae *AlpmExecutor) SyncSatisfier(pkgName string) alpm.IPackage {
	foundPkg, err := ae.syncDB.FindSatisfier(pkgName)
	if err != nil {
		return nil
	}

	return foundPkg
}
|
||||
|
||||
// PackagesFromGroup returns every sync package belonging to groupName,
// searching all sync databases.
func (ae *AlpmExecutor) PackagesFromGroup(groupName string) []alpm.IPackage {
	groupPackages := []alpm.IPackage{}
	_ = ae.syncDB.FindGroupPkgs(groupName).ForEach(func(pkg alpm.IPackage) error {
		groupPackages = append(groupPackages, pkg)

		return nil
	})

	return groupPackages
}

// PackagesFromGroupAndDB returns every package of groupName found in the
// single sync database dbName; the error reports an unknown database.
func (ae *AlpmExecutor) PackagesFromGroupAndDB(groupName, dbName string) ([]alpm.IPackage, error) {
	singleDBList, err := ae.handle.SyncDBListByDBName(dbName)
	if err != nil {
		return nil, err
	}

	groupPackages := []alpm.IPackage{}
	_ = singleDBList.FindGroupPkgs(groupName).ForEach(func(pkg alpm.IPackage) error {
		groupPackages = append(groupPackages, pkg)

		return nil
	})

	return groupPackages, nil
}
|
||||
|
||||
// LocalPackages returns every installed (local database) package.
func (ae *AlpmExecutor) LocalPackages() []alpm.IPackage {
	localPackages := []alpm.IPackage{}
	_ = ae.localDB.PkgCache().ForEach(func(pkg alpm.IPackage) error {
		localPackages = append(localPackages, pkg)
		return nil
	})

	return localPackages
}

// SyncPackages searches SyncDB for packages or returns all packages if no search param is given.
func (ae *AlpmExecutor) SyncPackages(pkgNames ...string) []alpm.IPackage {
	repoPackages := []alpm.IPackage{}
	_ = ae.syncDB.ForEach(func(alpmDB alpm.IDB) error {
		if len(pkgNames) == 0 {
			// No search terms: dump the whole package cache of each DB.
			_ = alpmDB.PkgCache().ForEach(func(pkg alpm.IPackage) error {
				repoPackages = append(repoPackages, pkg)
				return nil
			})
		} else {
			// With terms: delegate matching to ALPM's Search.
			_ = alpmDB.Search(pkgNames).ForEach(func(pkg alpm.IPackage) error {
				repoPackages = append(repoPackages, pkg)
				return nil
			})
		}
		return nil
	})

	return repoPackages
}
|
||||
|
||||
func (ae *AlpmExecutor) LocalPackage(pkgName string) alpm.IPackage {
|
||||
pkg := ae.localDB.Pkg(pkgName)
|
||||
if pkg == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return pkg
|
||||
}
|
||||
|
||||
// syncDBs returns the sync databases as a slice, memoized until the handle
// is refreshed (RefreshHandle resets the cache to nil).
func (ae *AlpmExecutor) syncDBs() []alpm.IDB {
	if ae.syncDBsCache == nil {
		ae.syncDBsCache = ae.syncDB.Slice()
	}

	return ae.syncDBsCache
}

// SyncPackage finds pkgName by exact name in any sync database, returning
// the first match or nil.
func (ae *AlpmExecutor) SyncPackage(pkgName string) alpm.IPackage {
	for _, db := range ae.syncDBs() {
		if dbPkg := db.Pkg(pkgName); dbPkg != nil {
			return dbPkg
		}
	}

	return nil
}
|
||||
|
||||
// SyncPackageFromDB finds pkgName by exact name in the single sync database
// dbName, returning nil when the database is unknown or has no such package.
func (ae *AlpmExecutor) SyncPackageFromDB(pkgName, dbName string) alpm.IPackage {
	singleDB, err := ae.handle.SyncDBByName(dbName)
	if err != nil {
		return nil
	}

	return singleDB.Pkg(pkgName)
}

// SatisfierFromDB returns a package from dbName that satisfies pkgName.
// NOTE(review): a failed FindSatisfier is reported as (nil, nil), so callers
// must nil-check the package rather than rely on the error — confirm this
// swallowing is intentional.
func (ae *AlpmExecutor) SatisfierFromDB(pkgName, dbName string) (alpm.IPackage, error) {
	singleDBList, err := ae.handle.SyncDBListByDBName(dbName)
	if err != nil {
		return nil, err
	}

	foundPkg, err := singleDBList.FindSatisfier(pkgName)
	if err != nil {
		return nil, nil
	}

	return foundPkg, nil
}
|
||||
|
||||
// PackageDepends returns pkg's hard dependencies.
// NOTE(review): these four accessors type-assert pkg to *alpm.Package and
// will panic on any other IPackage implementation — acceptable only while
// every caller passes concrete ALPM packages.
func (ae *AlpmExecutor) PackageDepends(pkg alpm.IPackage) []alpm.Depend {
	alpmPackage := pkg.(*alpm.Package)
	return alpmPackage.Depends().Slice()
}

// PackageOptionalDepends returns pkg's optional dependencies.
func (ae *AlpmExecutor) PackageOptionalDepends(pkg alpm.IPackage) []alpm.Depend {
	alpmPackage := pkg.(*alpm.Package)
	return alpmPackage.OptionalDepends().Slice()
}

// PackageProvides returns the dependency strings pkg provides.
func (ae *AlpmExecutor) PackageProvides(pkg alpm.IPackage) []alpm.Depend {
	alpmPackage := pkg.(*alpm.Package)
	return alpmPackage.Provides().Slice()
}

// PackageGroups returns the package groups pkg belongs to.
func (ae *AlpmExecutor) PackageGroups(pkg alpm.IPackage) []string {
	alpmPackage := pkg.(*alpm.Package)
	return alpmPackage.Groups().Slice()
}
|
||||
|
||||
// upRepo gathers local packages and checks if they have new versions.
|
||||
// Output: Upgrade type package list.
|
||||
func (ae *AlpmExecutor) SyncUpgrades(enableDowngrade bool) (
|
||||
map[string]db.SyncUpgrade, error,
|
||||
) {
|
||||
ups := map[string]db.SyncUpgrade{}
|
||||
var errReturn error
|
||||
|
||||
localDB, errDB := ae.handle.LocalDB()
|
||||
if errDB != nil {
|
||||
return ups, errDB
|
||||
}
|
||||
|
||||
if err := ae.handle.TransInit(alpm.TransFlagNoLock); err != nil {
|
||||
return ups, err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
errReturn = ae.handle.TransRelease()
|
||||
}()
|
||||
|
||||
if err := ae.handle.SyncSysupgrade(enableDowngrade); err != nil {
|
||||
return ups, err
|
||||
}
|
||||
|
||||
_ = ae.handle.TransGetAdd().ForEach(func(pkg alpm.IPackage) error {
|
||||
localVer := "-"
|
||||
reason := alpm.PkgReasonExplicit
|
||||
|
||||
if localPkg := localDB.Pkg(pkg.Name()); localPkg != nil {
|
||||
localVer = localPkg.Version()
|
||||
reason = localPkg.Reason()
|
||||
}
|
||||
|
||||
ups[pkg.Name()] = db.SyncUpgrade{
|
||||
Package: pkg,
|
||||
Reason: reason,
|
||||
LocalVersion: localVer,
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
return ups, errReturn
|
||||
}
|
||||
|
||||
// BiggestPackages returns installed packages ordered by installed size
// (via ALPM's SortBySize).
func (ae *AlpmExecutor) BiggestPackages() []alpm.IPackage {
	localPackages := []alpm.IPackage{}
	_ = ae.localDB.PkgCache().SortBySize().ForEach(func(pkg alpm.IPackage) error {
		localPackages = append(localPackages, pkg)
		return nil
	})

	return localPackages
}

// LastBuildTime returns the most recent build date across all packages in
// all sync databases (zero time when there are none).
func (ae *AlpmExecutor) LastBuildTime() time.Time {
	var lastTime time.Time

	_ = ae.syncDB.ForEach(func(db alpm.IDB) error {
		_ = db.PkgCache().ForEach(func(pkg alpm.IPackage) error {
			thisTime := pkg.BuildDate()
			if thisTime.After(lastTime) {
				lastTime = thisTime
			}
			return nil
		})
		return nil
	})

	return lastTime
}
|
||||
|
||||
// Cleanup releases the ALPM handle at shutdown; a release error is only
// printed to stderr since callers can do nothing about it at that point.
func (ae *AlpmExecutor) Cleanup() {
	if ae.handle != nil {
		if err := ae.handle.Release(); err != nil {
			fmt.Fprintln(os.Stderr, err)
		}
	}
}

// Repos returns the names of all registered sync repositories.
func (ae *AlpmExecutor) Repos() (repos []string) {
	_ = ae.syncDB.ForEach(func(db alpm.IDB) error {
		repos = append(repos, db.Name())
		return nil
	})

	return
}
|
||||
|
||||
// alpmSetArchitecture sets the allowed package architectures on the handle.
func alpmSetArchitecture(alpmHandle *alpm.Handle, arch []string) error {
	return alpmHandle.SetArchitectures(arch)
}

// AlpmArchitectures returns the architectures configured on the handle.
func (ae *AlpmExecutor) AlpmArchitectures() ([]string, error) {
	architectures, err := ae.handle.GetArchitectures()

	return architectures.Slice(), err
}

// alpmSetLogCallback adapts a plain Go callback to ALPM's context-pointer
// callback convention: cb is passed as the context and recovered inside the
// adapter via a type assertion.
func alpmSetLogCallback(alpmHandle *alpm.Handle, cb func(alpm.LogLevel, string)) {
	alpmHandle.SetLogCallback(func(ctx interface{}, lvl alpm.LogLevel, msg string) {
		cbo := ctx.(func(alpm.LogLevel, string))
		cbo(lvl, msg)
	}, cb)
}

// alpmSetQuestionCallback adapts a question callback the same way.
func alpmSetQuestionCallback(alpmHandle *alpm.Handle, cb func(alpm.QuestionAny)) {
	alpmHandle.SetQuestionCallback(func(ctx interface{}, q alpm.QuestionAny) {
		cbo := ctx.(func(alpm.QuestionAny))
		cbo(q)
	}, cb)
}
|
116
pkg/db/ialpm/alpm_test.go
Normal file
116
pkg/db/ialpm/alpm_test.go
Normal file
@ -0,0 +1,116 @@
|
||||
//go:build !integration
|
||||
// +build !integration
|
||||
|
||||
package ialpm
|
||||
|
||||
import (
|
||||
"io"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
alpm "github.com/Jguer/go-alpm/v2"
|
||||
"github.com/Morganamilo/go-pacmanconf"
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// TestAlpmExecutor builds an executor from a synthetic pacman.conf and
// verifies every configured value round-trips through the ALPM handle.
// NOTE(review): this constructs a real ALPM handle against the host's
// /var/lib/pacman — it requires libalpm and that path to exist.
func TestAlpmExecutor(t *testing.T) {
	t.Parallel()
	pacmanConf := &pacmanconf.Config{
		RootDir:                "/",
		DBPath:                 "/var/lib/pacman/",
		CacheDir:               []string{"/cachedir/", "/another/"},
		HookDir:                []string{"/hookdir/"},
		GPGDir:                 "/gpgdir/",
		LogFile:                "/logfile",
		HoldPkg:                []string(nil),
		IgnorePkg:              []string{"ignore", "this", "package"},
		IgnoreGroup:            []string{"ignore", "this", "group"},
		Architecture:           []string{"8086"},
		XferCommand:            "",
		NoUpgrade:              []string{"noupgrade"},
		NoExtract:              []string{"noextract"},
		CleanMethod:            []string{"KeepInstalled"},
		SigLevel:               []string{"PackageOptional", "PackageTrustedOnly", "DatabaseOptional", "DatabaseTrustedOnly"},
		LocalFileSigLevel:      []string(nil),
		RemoteFileSigLevel:     []string(nil),
		UseSyslog:              false,
		Color:                  false,
		UseDelta:               0,
		TotalDownload:          true,
		CheckSpace:             true,
		VerbosePkgLists:        true,
		DisableDownloadTimeout: false,
		Repos: []pacmanconf.Repository{
			{Name: "repo1", Servers: []string{"repo1"}, SigLevel: []string(nil), Usage: []string{"All"}},
			{Name: "repo2", Servers: []string{"repo2"}, SigLevel: []string(nil), Usage: []string{"All"}},
		},
	}

	aExec, err := NewExecutor(pacmanConf, text.NewLogger(io.Discard, io.Discard, strings.NewReader(""), false, "test"))
	assert.NoError(t, err)

	assert.NotNil(t, aExec.conf)
	assert.EqualValues(t, pacmanConf, aExec.conf)

	assert.NotNil(t, aExec.localDB)
	assert.NotNil(t, aExec.syncDB)
	assert.NotNil(t, aExec.questionCallback)
	h := aExec.handle
	assert.NotNil(t, h)

	root, err := h.Root()
	assert.Nil(t, err)
	assert.Equal(t, "/", root)

	dbPath, err := h.DBPath()
	assert.Nil(t, err)
	assert.Equal(t, "/var/lib/pacman/", dbPath)

	cache, err := h.CacheDirs()
	assert.Nil(t, err)
	assert.Equal(t, []string{"/cachedir/", "/another/"}, cache.Slice())

	log, err := h.LogFile()
	assert.Nil(t, err)
	assert.Equal(t, "/logfile", log)

	gpg, err := h.GPGDir()
	assert.Nil(t, err)
	assert.Equal(t, "/gpgdir/", gpg)

	// AddHookDir appends, so the system hook dir remains first.
	hook, err := h.HookDirs()
	assert.Nil(t, err)
	assert.Equal(t, []string{"/usr/share/libalpm/hooks/", "/hookdir/"}, hook.Slice())

	arch, err := alpmTestGetArch(h)
	assert.Nil(t, err)
	assert.Equal(t, []string{"8086"}, arch)

	ignorePkg, err := h.IgnorePkgs()
	assert.Nil(t, err)
	assert.Equal(t, []string{"ignore", "this", "package"}, ignorePkg.Slice())

	ignoreGroup, err := h.IgnoreGroups()
	assert.Nil(t, err)
	assert.Equal(t, []string{"ignore", "this", "group"}, ignoreGroup.Slice())

	noUp, err := h.NoUpgrades()
	assert.Nil(t, err)
	assert.Equal(t, []string{"noupgrade"}, noUp.Slice())

	noEx, err := h.NoExtracts()
	assert.Nil(t, err)
	assert.Equal(t, []string{"noextract"}, noEx.Slice())

	check, err := h.CheckSpace()
	assert.Nil(t, err)
	assert.Equal(t, true, check)
}

// alpmTestGetArch reads back the architectures configured on a handle.
func alpmTestGetArch(h *alpm.Handle) ([]string, error) {
	architectures, err := h.GetArchitectures()

	return architectures.Slice(), err
}
|
54
pkg/db/ialpm/high_level.go
Normal file
54
pkg/db/ialpm/high_level.go
Normal file
@ -0,0 +1,54 @@
|
||||
package ialpm
|
||||
|
||||
import (
|
||||
alpm "github.com/Jguer/go-alpm/v2"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// GetPackageNamesBySource returns package names with and without correspondence in SyncDBS respectively.
|
||||
func (ae *AlpmExecutor) getPackageNamesBySource() {
|
||||
if ae.installedRemotePkgMap == nil {
|
||||
ae.installedRemotePkgMap = map[string]alpm.IPackage{}
|
||||
}
|
||||
for _, localpkg := range ae.LocalPackages() {
|
||||
pkgName := localpkg.Name()
|
||||
if ae.SyncPackage(pkgName) != nil {
|
||||
ae.installedSyncPkgNames = append(ae.installedSyncPkgNames, pkgName)
|
||||
} else {
|
||||
ae.installedRemotePkgNames = append(ae.installedRemotePkgNames, pkgName)
|
||||
ae.installedRemotePkgMap[pkgName] = localpkg
|
||||
}
|
||||
}
|
||||
|
||||
ae.log.Debugln("populating db executor package caches.",
|
||||
"sync_len", len(ae.installedSyncPkgNames), "remote_len", len(ae.installedRemotePkgNames))
|
||||
}
|
||||
|
||||
func (ae *AlpmExecutor) InstalledRemotePackages() map[string]alpm.IPackage {
|
||||
if ae.installedRemotePkgMap == nil {
|
||||
ae.getPackageNamesBySource()
|
||||
}
|
||||
|
||||
return ae.installedRemotePkgMap
|
||||
}
|
||||
|
||||
func (ae *AlpmExecutor) InstalledRemotePackageNames() []string {
|
||||
if ae.installedRemotePkgNames == nil {
|
||||
ae.getPackageNamesBySource()
|
||||
}
|
||||
|
||||
return ae.installedRemotePkgNames
|
||||
}
|
||||
|
||||
func (ae *AlpmExecutor) InstalledSyncPackageNames() []string {
|
||||
if ae.installedSyncPkgNames == nil {
|
||||
ae.getPackageNamesBySource()
|
||||
}
|
||||
|
||||
return ae.installedSyncPkgNames
|
||||
}
|
||||
|
||||
// SetLogger replaces the executor's logger.
func (ae *AlpmExecutor) SetLogger(logger *text.Logger) {
	ae.log = logger
}
|
214
pkg/db/mock/executor.go
Normal file
214
pkg/db/mock/executor.go
Normal file
@ -0,0 +1,214 @@
|
||||
package mock
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
|
||||
"github.com/Jguer/go-alpm/v2"
|
||||
)
|
||||
|
||||
// Convenience aliases so mock users don't have to import alpm/db directly.
type (
	IPackage = alpm.IPackage
	Depend   = alpm.Depend
	Upgrade  = db.Upgrade
)

// DBExecutor is a configurable test double for db.Executor. Each method
// forwards to the matching *Fn field when it is set and panics otherwise,
// so a test only has to stub the calls it expects to happen.
type DBExecutor struct {
	db.Executor

	AlpmArchitecturesFn           func() ([]string, error)
	InstalledRemotePackageNamesFn func() []string
	InstalledRemotePackagesFn     func() map[string]IPackage
	IsCorrectVersionInstalledFn   func(string, string) bool
	LocalPackageFn                func(string) IPackage
	LocalPackagesFn               func() []IPackage
	LocalSatisfierExistsFn        func(string) bool
	PackageDependsFn              func(IPackage) []Depend
	PackageOptionalDependsFn      func(alpm.IPackage) []alpm.Depend
	PackageProvidesFn             func(IPackage) []Depend
	PackagesFromGroupFn           func(string) []IPackage
	PackagesFromGroupAndDBFn      func(string, string) ([]IPackage, error)
	RefreshHandleFn               func() error
	ReposFn                       func() []string
	SyncPackageFn                 func(string) IPackage
	SyncPackagesFn                func(...string) []IPackage
	SyncSatisfierFn               func(string) IPackage
	SatisfierFromDBFn             func(string, string) (IPackage, error)
	SyncUpgradesFn                func(bool) (map[string]db.SyncUpgrade, error)
	SetLoggerFn                   func(*text.Logger)
}
|
||||
|
||||
// Every method below follows the same delegation pattern: call the stubbed
// *Fn field when set, otherwise panic so an unexpected call fails the test
// loudly. Methods with no *Fn field (BiggestPackages, Cleanup, LastBuildTime,
// PackageConflicts) always panic.

func (t *DBExecutor) InstalledRemotePackageNames() []string {
	if t.InstalledRemotePackageNamesFn != nil {
		return t.InstalledRemotePackageNamesFn()
	}
	panic("implement me")
}

func (t *DBExecutor) InstalledRemotePackages() map[string]IPackage {
	if t.InstalledRemotePackagesFn != nil {
		return t.InstalledRemotePackagesFn()
	}
	panic("implement me")
}

func (t *DBExecutor) AlpmArchitectures() ([]string, error) {
	if t.AlpmArchitecturesFn != nil {
		return t.AlpmArchitecturesFn()
	}
	panic("implement me")
}

func (t *DBExecutor) BiggestPackages() []IPackage {
	panic("implement me")
}

func (t *DBExecutor) Cleanup() {
	panic("implement me")
}

func (t *DBExecutor) IsCorrectVersionInstalled(s, s2 string) bool {
	if t.IsCorrectVersionInstalledFn != nil {
		return t.IsCorrectVersionInstalledFn(s, s2)
	}
	panic("implement me")
}

func (t *DBExecutor) LastBuildTime() time.Time {
	panic("implement me")
}

func (t *DBExecutor) LocalPackage(s string) IPackage {
	if t.LocalPackageFn != nil {
		return t.LocalPackageFn(s)
	}

	panic("implement me")
}

func (t *DBExecutor) LocalPackages() []IPackage {
	if t.LocalPackagesFn != nil {
		return t.LocalPackagesFn()
	}

	panic("implement me")
}

func (t *DBExecutor) LocalSatisfierExists(s string) bool {
	if t.LocalSatisfierExistsFn != nil {
		return t.LocalSatisfierExistsFn(s)
	}
	panic("implement me")
}

func (t *DBExecutor) PackageConflicts(iPackage IPackage) []Depend {
	panic("implement me")
}

func (t *DBExecutor) PackageDepends(iPackage IPackage) []Depend {
	if t.PackageDependsFn != nil {
		return t.PackageDependsFn(iPackage)
	}

	panic("implement me")
}

// PackageGroups always reports no groups instead of panicking.
func (t *DBExecutor) PackageGroups(iPackage IPackage) []string {
	return []string{}
}

func (t *DBExecutor) PackageOptionalDepends(iPackage IPackage) []Depend {
	if t.PackageOptionalDependsFn != nil {
		return t.PackageOptionalDependsFn(iPackage)
	}

	panic("implement me")
}

func (t *DBExecutor) PackageProvides(iPackage IPackage) []Depend {
	if t.PackageProvidesFn != nil {
		return t.PackageProvidesFn(iPackage)
	}

	panic("implement me")
}

func (t *DBExecutor) PackagesFromGroup(s string) []IPackage {
	if t.PackagesFromGroupFn != nil {
		return t.PackagesFromGroupFn(s)
	}

	panic("implement me")
}

func (t *DBExecutor) PackagesFromGroupAndDB(s, s2 string) ([]IPackage, error) {
	if t.PackagesFromGroupAndDBFn != nil {
		return t.PackagesFromGroupAndDBFn(s, s2)
	}
	panic("implement me")
}

func (t *DBExecutor) RefreshHandle() error {
	if t.RefreshHandleFn != nil {
		return t.RefreshHandleFn()
	}
	panic("implement me")
}

func (t *DBExecutor) SyncUpgrades(b bool) (map[string]db.SyncUpgrade, error) {
	if t.SyncUpgradesFn != nil {
		return t.SyncUpgradesFn(b)
	}
	panic("implement me")
}

func (t *DBExecutor) Repos() []string {
	if t.ReposFn != nil {
		return t.ReposFn()
	}
	panic("implement me")
}

func (t *DBExecutor) SatisfierFromDB(s, s2 string) (IPackage, error) {
	if t.SatisfierFromDBFn != nil {
		return t.SatisfierFromDBFn(s, s2)
	}
	panic("implement me")
}

func (t *DBExecutor) SyncPackage(s string) IPackage {
	if t.SyncPackageFn != nil {
		return t.SyncPackageFn(s)
	}
	panic("implement me")
}

func (t *DBExecutor) SyncPackages(s ...string) []IPackage {
	if t.SyncPackagesFn != nil {
		return t.SyncPackagesFn(s...)
	}
	panic("implement me")
}

func (t *DBExecutor) SyncSatisfier(s string) IPackage {
	if t.SyncSatisfierFn != nil {
		return t.SyncSatisfierFn(s)
	}
	panic("implement me")
}

// SyncSatisfierExists intentionally reuses SyncSatisfierFn: a satisfier
// exists exactly when the stub returns a non-nil package.
func (t *DBExecutor) SyncSatisfierExists(s string) bool {
	if t.SyncSatisfierFn != nil {
		return t.SyncSatisfierFn(s) != nil
	}
	panic("implement me")
}

func (t *DBExecutor) SetLogger(logger *text.Logger) {
	if t.SetLoggerFn != nil {
		t.SetLoggerFn(logger)
		return
	}
	panic("implement me")
}
|
229
pkg/db/mock/repo.go
Normal file
229
pkg/db/mock/repo.go
Normal file
@ -0,0 +1,229 @@
|
||||
package mock
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
alpm "github.com/Jguer/go-alpm/v2"
|
||||
)
|
||||
|
||||
// DependList is a mock alpm.IDependList backed by a plain slice.
type DependList struct {
	Depends []Depend
}

// Slice returns the underlying dependency slice.
func (d DependList) Slice() []alpm.Depend {
	return d.Depends
}

// ForEach invokes f on a pointer to each dependency in order and stops at
// the first error, which is returned.
func (d DependList) ForEach(f func(*alpm.Depend) error) error {
	for i := range d.Depends {
		dep := &d.Depends[i]
		err := f(dep)
		if err != nil {
			return err
		}
	}

	return nil
}
|
||||
|
||||
// Package is a mock alpm.IPackage whose getters return the matching
// P-prefixed fields; everything the tests don't need panics with
// "not implemented".
type Package struct {
	PBase         string
	PBuildDate    time.Time
	PDB           *DB
	PDescription  string
	PISize        int64
	PName         string
	PShouldIgnore bool
	PSize         int64
	PVersion      string
	PReason       alpm.PkgReason
	PDepends      alpm.IDependList
	PProvides     alpm.IDependList
}

// Base returns the package base name.
func (p *Package) Base() string {
	return p.PBase
}

// BuildDate returns the package build date.
func (p *Package) BuildDate() time.Time {
	return p.PBuildDate
}

// DB returns the database the package belongs to.
func (p *Package) DB() alpm.IDB {
	return p.PDB
}

// Description returns the package description.
func (p *Package) Description() string {
	return p.PDescription
}

// ISize returns the installed size of the package.
func (p *Package) ISize() int64 {
	return p.PISize
}

// Name returns the package name.
func (p *Package) Name() string {
	return p.PName
}

// ShouldIgnore reports whether the package is configured to be ignored.
func (p *Package) ShouldIgnore() bool {
	return p.PShouldIgnore
}

// Size returns the package (download) size.
func (p *Package) Size() int64 {
	return p.PSize
}

// Version returns the package version string.
func (p *Package) Version() string {
	return p.PVersion
}

// Reason returns the package install reason (explicit or dependency).
func (p *Package) Reason() alpm.PkgReason {
	return p.PReason
}

func (p *Package) FileName() string {
	panic("not implemented")
}

func (p *Package) Base64Signature() string {
	panic("not implemented")
}

func (p *Package) Validation() alpm.Validation {
	panic("not implemented")
}

// Architecture returns the package target Architecture.
func (p *Package) Architecture() string {
	panic("not implemented")
}

// Backup returns a list of package backups.
func (p *Package) Backup() alpm.BackupList {
	panic("not implemented")
}

// Conflicts returns the conflicts of the package as a DependList.
func (p *Package) Conflicts() alpm.IDependList {
	panic("not implemented")
}

// Depends returns the package's dependency list; an empty DependList is
// returned when PDepends was never set.
func (p *Package) Depends() alpm.IDependList {
	if p.PDepends != nil {
		return p.PDepends
	}
	return alpm.DependList{}
}

// OptionalDepends returns the package's optional dependency list.
func (p *Package) OptionalDepends() alpm.IDependList {
	panic("not implemented")
}

// CheckDepends returns the package's check dependency list.
func (p *Package) CheckDepends() alpm.IDependList {
	panic("not implemented")
}

// MakeDepends returns the package's make dependency list.
func (p *Package) MakeDepends() alpm.IDependList {
	panic("not implemented")
}

// Files returns the file list of the package.
func (p *Package) Files() []alpm.File {
	panic("not implemented")
}

// ContainsFile checks if the path is in the package filelist.
func (p *Package) ContainsFile(path string) (alpm.File, error) {
	panic("not implemented")
}

// Groups returns the groups the package belongs to.
func (p *Package) Groups() alpm.StringList {
	panic("not implemented")
}

// InstallDate returns the package install date.
func (p *Package) InstallDate() time.Time {
	panic("not implemented")
}

// Licenses returns the package license list.
func (p *Package) Licenses() alpm.StringList {
	panic("not implemented")
}

// SHA256Sum returns package SHA256Sum.
func (p *Package) SHA256Sum() string {
	panic("not implemented")
}

// MD5Sum returns package MD5Sum.
func (p *Package) MD5Sum() string {
	panic("not implemented")
}

// Packager returns package packager name.
func (p *Package) Packager() string {
	panic("not implemented")
}

// Provides returns DependList of packages provides by package; an empty
// DependList is returned when PProvides was never set.
func (p *Package) Provides() alpm.IDependList {
	if p.PProvides == nil {
		return alpm.DependList{}
	}
	return p.PProvides
}

// Origin returns package origin.
func (p *Package) Origin() alpm.PkgFrom {
	panic("not implemented")
}

// Replaces returns a DependList with the packages this package replaces.
func (p *Package) Replaces() alpm.IDependList {
	panic("not implemented")
}

// URL returns the upstream URL of the package.
func (p *Package) URL() string {
	panic("not implemented")
}

// ComputeRequiredBy returns the names of reverse dependencies of a package.
func (p *Package) ComputeRequiredBy() []string {
	panic("not implemented")
}

// ComputeOptionalFor returns the names of packages that optionally
// require the given package.
func (p *Package) ComputeOptionalFor() []string {
	panic("not implemented")
}

// SyncNewVersion checks if there is a new version of the
// package in a given DBlist.
func (p *Package) SyncNewVersion(l alpm.IDBList) alpm.IPackage {
	panic("not implemented")
}

func (p *Package) Type() string {
	panic("not implemented")
}
|
||||
|
||||
// DB is a mock alpm.IDB that only implements Name; everything else falls
// through to the embedded (nil) interface and will panic if called.
type DB struct {
	alpm.IDB
	name string
}

// NewDB returns a mock database with the given name.
func NewDB(name string) *DB {
	return &DB{name: name}
}

// Name returns the database name.
func (d *DB) Name() string {
	return d.name
}
|
15
pkg/db/types.go
Normal file
15
pkg/db/types.go
Normal file
@ -0,0 +1,15 @@
|
||||
package db
|
||||
|
||||
// ArchIsSupported reports whether arch is one of the pacman architectures in
// alpmArch. Packages built for "any" are always supported.
func ArchIsSupported(alpmArch []string, arch string) bool {
	if arch == "any" {
		return true
	}

	for i := range alpmArch {
		if alpmArch[i] == arch {
			return true
		}
	}

	return false
}
|
87
pkg/dep/dep.go
Normal file
87
pkg/dep/dep.go
Normal file
@ -0,0 +1,87 @@
|
||||
package dep
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
aur "github.com/Jguer/yay/v12/pkg/query"
|
||||
)
|
||||
|
||||
// splitDep tears a dependency string such as "foo>=1.2" into package name,
// version operator, and version; missing parts come back as empty strings.
func splitDep(dep string) (pkg, mod, ver string) {
	// NOTE(review): like the original, this relies on FieldsFunc visiting
	// runes in a single left-to-right pass so the operator characters are
	// accumulated into mod in input order (the docs don't guarantee order).
	parts := strings.FieldsFunc(dep, func(c rune) bool {
		switch c {
		case '>', '<', '=':
			mod += string(c)
			return true
		default:
			return false
		}
	})

	switch len(parts) {
	case 0:
		return "", "", ""
	case 1:
		// Name only (possibly a trailing operator with no version).
		return parts[0], "", ""
	default:
		return parts[0], mod, parts[1]
	}
}
|
||||
|
||||
func pkgSatisfies(name, version, dep string) bool {
|
||||
depName, depMod, depVersion := splitDep(dep)
|
||||
|
||||
if depName != name {
|
||||
return false
|
||||
}
|
||||
|
||||
return verSatisfies(version, depMod, depVersion)
|
||||
}
|
||||
|
||||
// provideSatisfies reports whether a provide entry (e.g. "foo=1.2") of a
// package whose own version is pkgVersion fulfills the dependency string dep.
func provideSatisfies(provide, dep, pkgVersion string) bool {
	depName, depMod, depVersion := splitDep(dep)
	provideName, provideMod, provideVersion := splitDep(provide)

	if provideName != depName {
		return false
	}

	// An unversioned provide is treated as tracking the package's own
	// version, so a versioned dep is checked against pkgVersion instead of
	// being rejected outright.
	if provideMod == "" && depMod != "" {
		provideVersion = pkgVersion // Example package: pagure
	}

	return verSatisfies(provideVersion, depMod, depVersion)
}
|
||||
|
||||
func verSatisfies(ver1, mod, ver2 string) bool {
|
||||
switch mod {
|
||||
case "=":
|
||||
return db.VerCmp(ver1, ver2) == 0
|
||||
case "<":
|
||||
return db.VerCmp(ver1, ver2) < 0
|
||||
case "<=":
|
||||
return db.VerCmp(ver1, ver2) <= 0
|
||||
case ">":
|
||||
return db.VerCmp(ver1, ver2) > 0
|
||||
case ">=":
|
||||
return db.VerCmp(ver1, ver2) >= 0
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func satisfiesAur(dep string, pkg *aur.Pkg) bool {
|
||||
if pkgSatisfies(pkg.Name, pkg.Version, dep) {
|
||||
return true
|
||||
}
|
||||
|
||||
for _, provide := range pkg.Provides {
|
||||
if provideSatisfies(provide, dep, pkg.Version) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
853
pkg/dep/dep_graph.go
Normal file
853
pkg/dep/dep_graph.go
Normal file
@ -0,0 +1,853 @@
|
||||
package dep
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
aurc "github.com/Jguer/aur"
|
||||
alpm "github.com/Jguer/go-alpm/v2"
|
||||
gosrc "github.com/Morganamilo/go-srcinfo"
|
||||
mapset "github.com/deckarep/golang-set/v2"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
"github.com/Jguer/yay/v12/pkg/dep/topo"
|
||||
"github.com/Jguer/yay/v12/pkg/intrange"
|
||||
aur "github.com/Jguer/yay/v12/pkg/query"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// InstallInfo describes how and why a single graph node (package) is to be
// installed.
type InstallInfo struct {
	Source       Source  // where the package comes from (AUR, sync db, srcinfo, ...)
	Reason       Reason  // explicit target or pulled in as a (make/check) dependency
	Version      string  // version to be installed
	LocalVersion string  // currently installed version (set for upgrades)
	SrcinfoPath  *string // PKGBUILD directory (set for SrcInfo source)
	AURBase      *string // AUR package base (set for AUR/SrcInfo sources)
	SyncDBName   *string // sync database name (set for Sync source)

	IsGroup bool // node is a package group rather than a single package
	Upgrade bool // node upgrades an already-installed package
	Devel   bool // devel/VCS package flag — not set anywhere in this view; confirm usage
}

// String implements fmt.Stringer for debug output.
func (i *InstallInfo) String() string {
	return fmt.Sprintf("InstallInfo{Source: %v, Reason: %v}", i.Source, i.Reason)
}
|
||||
|
||||
type (
	// Reason records why a package enters the graph (explicit target or a
	// kind of dependency).
	Reason uint
	// Source identifies where a graph node's package comes from.
	Source int
)

// String returns the localized display name of the reason.
func (r Reason) String() string {
	return ReasonNames[r]
}

// String returns the localized display name of the source.
func (s Source) String() string {
	return SourceNames[s]
}

const (
	Explicit Reason = iota // 0
	Dep                    // 1
	MakeDep                // 2
	CheckDep               // 3
)

// ReasonNames maps each Reason to its translated display name.
var ReasonNames = map[Reason]string{
	Explicit: gotext.Get("Explicit"),
	Dep:      gotext.Get("Dependency"),
	MakeDep:  gotext.Get("Make Dependency"),
	CheckDep: gotext.Get("Check Dependency"),
}

const (
	AUR Source = iota
	Sync
	Local
	SrcInfo
	Missing
)

// SourceNames maps each Source to its translated display name.
var SourceNames = map[Source]string{
	AUR:     gotext.Get("AUR"),
	Sync:    gotext.Get("Sync"),
	Local:   gotext.Get("Local"),
	SrcInfo: gotext.Get("SRCINFO"),
	Missing: gotext.Get("Missing"),
}

// bgColorMap selects a node background color per package source
// (presumably for graphviz-style rendering — confirm in the topo package).
var bgColorMap = map[Source]string{
	AUR:     "lightblue",
	Sync:    "lemonchiffon",
	Local:   "darkolivegreen1",
	Missing: "tomato",
}

// colorMap selects a node color per install reason.
var colorMap = map[Reason]string{
	Explicit: "black",
	Dep:      "deeppink",
	MakeDep:  "navyblue",
	CheckDep: "forestgreen",
}
|
||||
|
||||
// Grapher builds dependency graphs for install targets out of the sync
// databases, .SRCINFO files, and the AUR.
type Grapher struct {
	logger        *text.Logger
	providerCache map[string][]aur.Pkg // memoizes AUR lookups per target/dep string

	dbExecutor  db.Executor
	aurClient   aurc.QueryClient
	fullGraph   bool // If true, the graph will include all dependencies including already installed ones or repo
	noConfirm   bool // If true, the graph will not prompt for confirmation
	noDeps      bool // If true, the graph will not include dependencies
	noCheckDeps bool // If true, the graph will not include check dependencies
	needed      bool // If true, the graph will only include packages that are not installed
}

// NewGrapher creates a Grapher over the given data sources with the given
// behavior flags.
func NewGrapher(dbExecutor db.Executor, aurCache aurc.QueryClient,
	fullGraph, noConfirm, noDeps, noCheckDeps, needed bool,
	logger *text.Logger,
) *Grapher {
	return &Grapher{
		dbExecutor:    dbExecutor,
		aurClient:     aurCache,
		fullGraph:     fullGraph,
		noConfirm:     noConfirm,
		noDeps:        noDeps,
		noCheckDeps:   noCheckDeps,
		needed:        needed,
		providerCache: make(map[string][]aurc.Pkg, 5),
		logger:        logger,
	}
}

// NewGraph returns an empty package dependency graph.
func NewGraph() *topo.Graph[string, *InstallInfo] {
	return topo.New[string, *InstallInfo]()
}
|
||||
|
||||
// GraphFromTargets resolves target strings into graph nodes. Sync packages
// and groups are added directly; targets with no db prefix that don't match
// a sync package/group — and targets explicitly prefixed "aur/" — are
// resolved through the AUR in one batch at the end.
func (g *Grapher) GraphFromTargets(ctx context.Context,
	graph *topo.Graph[string, *InstallInfo], targets []string,
) (*topo.Graph[string, *InstallInfo], error) {
	if graph == nil {
		graph = NewGraph()
	}

	aurTargets := make([]string, 0, len(targets))

	for _, targetString := range targets {
		target := ToTarget(targetString)

		switch target.DB {
		case "": // unspecified db
			// Prefer a sync satisfier, then a sync group, then fall through
			// to treating the target as an AUR package.
			if pkg := g.dbExecutor.SyncSatisfier(target.Name); pkg != nil {
				g.GraphSyncPkg(ctx, graph, pkg, nil)

				continue
			}

			groupPackages := g.dbExecutor.PackagesFromGroup(target.Name)
			if len(groupPackages) > 0 {
				dbName := groupPackages[0].DB().Name()
				g.GraphSyncGroup(ctx, graph, target.Name, dbName)

				continue
			}

			fallthrough
		case "aur":
			aurTargets = append(aurTargets, target.Name)
		default:
			// An explicit repo was requested: look only in that db.
			pkg, err := g.dbExecutor.SatisfierFromDB(target.Name, target.DB)
			if err != nil {
				return nil, err
			}
			if pkg != nil {
				g.GraphSyncPkg(ctx, graph, pkg, nil)
				continue
			}

			groupPackages, err := g.dbExecutor.PackagesFromGroupAndDB(target.Name, target.DB)
			if err != nil {
				return nil, err
			}
			if len(groupPackages) > 0 {
				g.GraphSyncGroup(ctx, graph, target.Name, target.DB)

				continue
			}

			g.logger.Errorln(gotext.Get("No package found for"), " ", target)
		}
	}

	var errA error
	graph, errA = g.GraphFromAUR(ctx, graph, aurTargets)
	if errA != nil {
		return nil, errA
	}

	return graph, nil
}
|
||||
|
||||
// pickSrcInfoPkgs lists the given (split) packages and asks the user which
// to exclude using the pacman-style number-menu syntax, returning the kept
// packages.
func (g *Grapher) pickSrcInfoPkgs(pkgs []*aurc.Pkg) ([]*aurc.Pkg, error) {
	final := make([]*aurc.Pkg, 0, len(pkgs))
	for i := range pkgs {
		g.logger.Println(text.Magenta(strconv.Itoa(i+1)+" ") + text.Bold(pkgs[i].Name) +
			" " + text.Cyan(pkgs[i].Version))
		g.logger.Println(" " + pkgs[i].Description)
	}
	g.logger.Infoln(gotext.Get("Packages to exclude") + " (eg: \"1 2 3\", \"1-3\", \"^4\"):")

	numberBuf, err := g.logger.GetInput("", g.noConfirm)
	if err != nil {
		return nil, err
	}

	include, exclude, _, otherExclude := intrange.ParseNumberMenu(numberBuf)
	// With no explicit exclusions, the typed numbers are the ones to drop.
	isInclude := len(exclude) == 0 && otherExclude.Cardinality() == 0

	// Menu numbers are 1-based; pkgs indices are 0-based.
	for i := 1; i <= len(pkgs); i++ {
		target := i - 1

		if isInclude && !include.Get(i) {
			final = append(final, pkgs[target])
		}

		if !isInclude && (exclude.Get(i)) {
			final = append(final, pkgs[target])
		}
	}

	return final, nil
}
|
||||
|
||||
// addAurPkgProvides registers every "provides" entry of the AUR package as a
// provider edge in the graph, so dependencies on those names can resolve to
// pkg.
func (g *Grapher) addAurPkgProvides(pkg *aurc.Pkg, graph *topo.Graph[string, *InstallInfo]) {
	for i := range pkg.Provides {
		depName, mod, version := splitDep(pkg.Provides[i])
		g.logger.Debugln(pkg.String() + " provides: " + depName)
		graph.Provides(depName, &alpm.Depend{
			Name:    depName,
			Version: version,
			Mod:     aurDepModToAlpmDep(mod),
		}, pkg.Name)
	}
}
|
||||
|
||||
// GraphFromSrcInfos adds the packages described by the given .SRCINFO files
// (keyed by their PKGBUILD directory) to the graph, letting the user skip
// unwanted split packages, and then resolves their dependencies.
func (g *Grapher) GraphFromSrcInfos(ctx context.Context, graph *topo.Graph[string, *InstallInfo],
	srcInfos map[string]*gosrc.Srcinfo,
) (*topo.Graph[string, *InstallInfo], error) {
	if graph == nil {
		graph = NewGraph()
	}

	aurPkgsAdded := []*aurc.Pkg{}
	for pkgBuildDir, pkgbuild := range srcInfos {
		pkgBuildDir := pkgBuildDir // copy: its address is stored in InstallInfo below

		aurPkgs, err := makeAURPKGFromSrcinfo(g.dbExecutor, pkgbuild)
		if err != nil {
			return nil, err
		}

		// Split packages: ask which of the resulting packages to keep.
		if len(aurPkgs) > 1 {
			var errPick error
			aurPkgs, errPick = g.pickSrcInfoPkgs(aurPkgs)
			if errPick != nil {
				return nil, errPick
			}
		}

		for _, pkg := range aurPkgs {
			pkg := pkg

			// Preserve the install reason of an already-installed package.
			reason := Explicit
			if pkg := g.dbExecutor.LocalPackage(pkg.Name); pkg != nil {
				reason = Reason(pkg.Reason())
			}

			graph.AddNode(pkg.Name)

			g.addAurPkgProvides(pkg, graph)

			g.ValidateAndSetNodeInfo(graph, pkg.Name, &topo.NodeInfo[*InstallInfo]{
				Color:      colorMap[reason],
				Background: bgColorMap[AUR],
				Value: &InstallInfo{
					Source:      SrcInfo,
					Reason:      reason,
					SrcinfoPath: &pkgBuildDir,
					AURBase:     &pkg.PackageBase,
					Version:     pkg.Version,
				},
			})
		}

		aurPkgsAdded = append(aurPkgsAdded, aurPkgs...)
	}

	g.AddDepsForPkgs(ctx, aurPkgsAdded, graph)

	return graph, nil
}
|
||||
|
||||
func (g *Grapher) AddDepsForPkgs(ctx context.Context, pkgs []*aur.Pkg, graph *topo.Graph[string, *InstallInfo]) {
|
||||
for _, pkg := range pkgs {
|
||||
g.addDepNodes(ctx, pkg, graph)
|
||||
}
|
||||
}
|
||||
|
||||
func (g *Grapher) addDepNodes(ctx context.Context, pkg *aur.Pkg, graph *topo.Graph[string, *InstallInfo]) {
|
||||
if len(pkg.MakeDepends) > 0 {
|
||||
g.addNodes(ctx, graph, pkg.Name, pkg.MakeDepends, MakeDep)
|
||||
}
|
||||
|
||||
if !g.noDeps && len(pkg.Depends) > 0 {
|
||||
g.addNodes(ctx, graph, pkg.Name, pkg.Depends, Dep)
|
||||
}
|
||||
|
||||
if !g.noCheckDeps && !g.noDeps && len(pkg.CheckDepends) > 0 {
|
||||
g.addNodes(ctx, graph, pkg.Name, pkg.CheckDepends, CheckDep)
|
||||
}
|
||||
}
|
||||
|
||||
// GraphSyncPkg adds a sync-database package to the graph. When upgradeInfo
// is non-nil the node is marked as an upgrade carrying the local version and
// the upgrade's reason; otherwise the reason is taken from the installed
// local package, if any.
func (g *Grapher) GraphSyncPkg(ctx context.Context,
	graph *topo.Graph[string, *InstallInfo],
	pkg alpm.IPackage, upgradeInfo *db.SyncUpgrade,
) *topo.Graph[string, *InstallInfo] {
	if graph == nil {
		graph = NewGraph()
	}

	graph.AddNode(pkg.Name())
	// Register provides so dependency strings can resolve to this package.
	_ = pkg.Provides().ForEach(func(p *alpm.Depend) error {
		g.logger.Debugln(pkg.Name() + " provides: " + p.String())
		graph.Provides(p.Name, p, pkg.Name())
		return nil
	})

	dbName := pkg.DB().Name()
	info := &InstallInfo{
		Source:     Sync,
		Reason:     Explicit,
		Version:    pkg.Version(),
		SyncDBName: &dbName,
	}

	if upgradeInfo == nil {
		// Preserve the install reason of an already-installed package.
		if localPkg := g.dbExecutor.LocalPackage(pkg.Name()); localPkg != nil {
			info.Reason = Reason(localPkg.Reason())
		}
	} else {
		info.Upgrade = true
		info.Reason = Reason(upgradeInfo.Reason)
		info.LocalVersion = upgradeInfo.LocalVersion
	}

	g.ValidateAndSetNodeInfo(graph, pkg.Name(), &topo.NodeInfo[*InstallInfo]{
		Color:      colorMap[info.Reason],
		Background: bgColorMap[info.Source],
		Value:      info,
	})

	return graph
}
|
||||
|
||||
// GraphSyncGroup adds a sync-database package group as a single explicit
// group node (IsGroup set, no version).
func (g *Grapher) GraphSyncGroup(ctx context.Context,
	graph *topo.Graph[string, *InstallInfo],
	groupName, dbName string,
) *topo.Graph[string, *InstallInfo] {
	if graph == nil {
		graph = NewGraph()
	}

	graph.AddNode(groupName)

	g.ValidateAndSetNodeInfo(graph, groupName, &topo.NodeInfo[*InstallInfo]{
		Color:      colorMap[Explicit],
		Background: bgColorMap[Sync],
		Value: &InstallInfo{
			Source:     Sync,
			Reason:     Explicit,
			Version:    "",
			SyncDBName: &dbName,
			IsGroup:    true,
		},
	})

	return graph
}
|
||||
|
||||
func (g *Grapher) GraphAURTarget(ctx context.Context,
|
||||
graph *topo.Graph[string, *InstallInfo],
|
||||
pkg *aurc.Pkg, instalInfo *InstallInfo,
|
||||
) *topo.Graph[string, *InstallInfo] {
|
||||
if graph == nil {
|
||||
graph = NewGraph()
|
||||
}
|
||||
|
||||
graph.AddNode(pkg.Name)
|
||||
|
||||
g.addAurPkgProvides(pkg, graph)
|
||||
|
||||
g.ValidateAndSetNodeInfo(graph, pkg.Name, &topo.NodeInfo[*InstallInfo]{
|
||||
Color: colorMap[instalInfo.Reason],
|
||||
Background: bgColorMap[AUR],
|
||||
Value: instalInfo,
|
||||
})
|
||||
|
||||
return graph
|
||||
}
|
||||
|
||||
// GraphFromAUR resolves the given target names against the AUR — by exact
// name first, then by provides, prompting when multiple providers match —
// and adds the chosen packages plus their dependencies to the graph.
func (g *Grapher) GraphFromAUR(ctx context.Context,
	graph *topo.Graph[string, *InstallInfo],
	targets []string,
) (*topo.Graph[string, *InstallInfo], error) {
	if graph == nil {
		graph = NewGraph()
	}

	if len(targets) == 0 {
		return graph, nil
	}

	// Batch name lookup; a failure is only logged because each target still
	// gets a per-target provider search below.
	aurPkgs, errCache := g.aurClient.Get(ctx, &aurc.Query{By: aurc.Name, Needles: targets})
	if errCache != nil {
		g.logger.Errorln(errCache)
	}

	// Seed the provider cache with exact name matches.
	for i := range aurPkgs {
		pkg := &aurPkgs[i]
		if _, ok := g.providerCache[pkg.Name]; !ok {
			g.providerCache[pkg.Name] = []aurc.Pkg{*pkg}
		}
	}

	aurPkgsAdded := []*aurc.Pkg{}

	for _, target := range targets {
		if cachedProvidePkg, ok := g.providerCache[target]; ok {
			aurPkgs = cachedProvidePkg
		} else {
			var errA error
			aurPkgs, errA = g.aurClient.Get(ctx, &aurc.Query{By: aurc.Provides, Needles: []string{target}, Contains: true})
			if errA != nil {
				g.logger.Errorln(gotext.Get("Failed to find AUR package for"), " ", target, ":", errA)
			}
		}

		if len(aurPkgs) == 0 {
			g.logger.Errorln(gotext.Get("No AUR package found for"), " ", target)

			continue
		}

		aurPkg := &aurPkgs[0]
		// Multiple candidates: let the user pick a provider and remember
		// the choice.
		if len(aurPkgs) > 1 {
			chosen := g.provideMenu(target, aurPkgs)
			aurPkg = chosen
			g.providerCache[target] = []aurc.Pkg{*aurPkg}
		}

		reason := Explicit
		if pkg := g.dbExecutor.LocalPackage(aurPkg.Name); pkg != nil {
			reason = Reason(pkg.Reason())

			// --needed: skip AUR targets that are already up to date.
			if g.needed {
				if db.VerCmp(pkg.Version(), aurPkg.Version) >= 0 {
					g.logger.Warnln(gotext.Get("%s is up to date -- skipping", text.Cyan(pkg.Name()+"-"+pkg.Version())))
					continue
				}
			}
		}

		graph = g.GraphAURTarget(ctx, graph, aurPkg, &InstallInfo{
			AURBase: &aurPkg.PackageBase,
			Reason:  reason,
			Source:  AUR,
			Version: aurPkg.Version,
		})
		aurPkgsAdded = append(aurPkgsAdded, aurPkg)
	}

	g.AddDepsForPkgs(ctx, aurPkgsAdded, graph)

	return graph, nil
}
|
||||
|
||||
// findDepsFromAUR resolves dependency strings against the AUR.
// Removes found deps from the deps mapset and returns the found deps.
// Results are memoized in g.providerCache so a dep string is only ever
// resolved (and its provider menu shown) once per Grapher.
func (g *Grapher) findDepsFromAUR(ctx context.Context,
	deps mapset.Set[string],
) []aurc.Pkg {
	pkgsToAdd := make([]aurc.Pkg, 0, deps.Cardinality())
	if deps.Cardinality() == 0 {
		return []aurc.Pkg{}
	}

	// Batch a single by-name RPC query for every dep not already cached.
	missingNeedles := make([]string, 0, deps.Cardinality())
	for _, depString := range deps.ToSlice() {
		if _, ok := g.providerCache[depString]; !ok {
			depName, _, _ := splitDep(depString)
			missingNeedles = append(missingNeedles, depName)
		}
	}

	if len(missingNeedles) != 0 {
		g.logger.Debugln("deps to find", missingNeedles)
		// provider search is more demanding than a simple search
		// try to find name match if possible and then try to find provides.
		aurPkgs, errCache := g.aurClient.Get(ctx, &aurc.Query{
			By: aurc.Name, Needles: missingNeedles, Contains: false,
		})
		if errCache != nil {
			// Best-effort: a failed batch query falls through to the
			// per-dep provider search below.
			g.logger.Errorln(errCache)
		}

		// Index results into the cache both under their own name and under
		// every distinct name they provide.
		for i := range aurPkgs {
			pkg := &aurPkgs[i]
			if deps.Contains(pkg.Name) {
				g.providerCache[pkg.Name] = append(g.providerCache[pkg.Name], *pkg)
			}

			for _, val := range pkg.Provides {
				if val == pkg.Name {
					// Self-provide; already cached under the name above.
					continue
				}
				if deps.Contains(val) {
					g.providerCache[val] = append(g.providerCache[val], *pkg)
				}
			}
		}
	}

	for _, depString := range deps.ToSlice() {
		var aurPkgs []aurc.Pkg
		depName, _, _ := splitDep(depString)

		if cachedProvidePkg, ok := g.providerCache[depString]; ok {
			aurPkgs = cachedProvidePkg
		} else {
			// Fallback: search by "provides" for deps the name query missed.
			var errA error
			aurPkgs, errA = g.aurClient.Get(ctx, &aurc.Query{By: aurc.Provides, Needles: []string{depName}, Contains: true})
			if errA != nil {
				g.logger.Errorln(gotext.Get("Failed to find AUR package for"), depString, ":", errA)
			}
		}

		// remove packages that don't satisfy the dependency
		// (e.g. a version constraint such as "foo>=2.0").
		satisfyingPkgs := make([]aurc.Pkg, 0, len(aurPkgs))
		for i := range aurPkgs {
			if satisfiesAur(depString, &aurPkgs[i]) {
				satisfyingPkgs = append(satisfyingPkgs, aurPkgs[i])
			}
		}
		aurPkgs = satisfyingPkgs

		if len(aurPkgs) == 0 {
			// Unresolvable dep: leave it in the set for the caller to mark
			// as missing.
			g.logger.Errorln(gotext.Get("No AUR package found for"), " ", depString)

			continue
		}

		pkg := aurPkgs[0]
		if len(aurPkgs) > 1 {
			// Multiple candidates: let the user pick a provider.
			chosen := g.provideMenu(depString, aurPkgs)
			pkg = *chosen
		}

		// Pin the chosen provider so future lookups are deterministic.
		g.providerCache[depString] = []aurc.Pkg{pkg}
		deps.Remove(depString)
		pkgsToAdd = append(pkgsToAdd, pkg)
	}

	return pkgsToAdd
}
|
||||
|
||||
func (g *Grapher) ValidateAndSetNodeInfo(graph *topo.Graph[string, *InstallInfo],
|
||||
node string, nodeInfo *topo.NodeInfo[*InstallInfo],
|
||||
) {
|
||||
info := graph.GetNodeInfo(node)
|
||||
if info != nil && info.Value != nil {
|
||||
if info.Value.Reason < nodeInfo.Value.Reason {
|
||||
return // refuse to downgrade reason
|
||||
}
|
||||
|
||||
if info.Value.Upgrade {
|
||||
return // refuse to overwrite an upgrade
|
||||
}
|
||||
}
|
||||
|
||||
graph.SetNodeInfo(node, nodeInfo)
|
||||
}
|
||||
|
||||
// addNodes resolves deps of parentPkgName and links them into the graph.
// Resolution is attempted in order: nodes already present in the graph,
// locally installed packages, sync repositories, the AUR, and anything
// still unresolved is added as a Missing node. depType records why each
// dep is being pulled in (Dep, MakeDep, CheckDep, ...).
func (g *Grapher) addNodes(
	ctx context.Context,
	graph *topo.Graph[string, *InstallInfo],
	parentPkgName string,
	deps []string,
	depType Reason,
) {
	targetsToFind := mapset.NewThreadUnsafeSet(deps...)
	// Check if in graph already
	for _, depString := range targetsToFind.ToSlice() {
		depName, _, _ := splitDep(depString)
		if !graph.Exists(depName) && !graph.ProvidesExists(depName) {
			continue
		}

		if graph.Exists(depName) {
			if err := graph.DependOn(depName, parentPkgName); err != nil {
				g.logger.Warnln(depString, parentPkgName, err)
			}

			targetsToFind.Remove(depString)
		}

		// The dep may be satisfied by a virtual "provides" node rather than
		// a node with the literal name; depend on the provider in that case.
		if p := graph.GetProviderNode(depName); p != nil {
			if provideSatisfies(p.String(), depString, p.Version) {
				if err := graph.DependOn(p.Provider, parentPkgName); err != nil {
					g.logger.Warnln(p.Provider, parentPkgName, err)
				}

				targetsToFind.Remove(depString)
			}
		}
	}

	// Check installed
	for _, depString := range targetsToFind.ToSlice() {
		depName, _, _ := splitDep(depString)
		if !g.dbExecutor.LocalSatisfierExists(depString) {
			continue
		}

		// Installed deps only become graph nodes when building the full
		// graph; otherwise they are simply considered satisfied and dropped.
		if g.fullGraph {
			g.ValidateAndSetNodeInfo(
				graph,
				depName,
				&topo.NodeInfo[*InstallInfo]{Color: colorMap[depType], Background: bgColorMap[Local]})

			if err := graph.DependOn(depName, parentPkgName); err != nil {
				g.logger.Warnln(depName, parentPkgName, err)
			}
		}

		targetsToFind.Remove(depString)
	}

	// Check Sync
	for _, depString := range targetsToFind.ToSlice() {
		alpmPkg := g.dbExecutor.SyncSatisfier(depString)
		if alpmPkg == nil {
			continue
		}

		if err := graph.DependOn(alpmPkg.Name(), parentPkgName); err != nil {
			g.logger.Warnln("repo dep warn:", depString, parentPkgName, err)
		}

		dbName := alpmPkg.DB().Name()
		g.ValidateAndSetNodeInfo(
			graph,
			alpmPkg.Name(),
			&topo.NodeInfo[*InstallInfo]{
				Color:      colorMap[depType],
				Background: bgColorMap[Sync],
				Value: &InstallInfo{
					Source:     Sync,
					Reason:     depType,
					Version:    alpmPkg.Version(),
					SyncDBName: &dbName,
				},
			})

		// Recurse into the repo package's own deps when building the full
		// graph; transitive repo deps are always Reason Dep.
		if newDeps := alpmPkg.Depends().Slice(); len(newDeps) != 0 && g.fullGraph {
			newDepsSlice := make([]string, 0, len(newDeps))
			for _, newDep := range newDeps {
				newDepsSlice = append(newDepsSlice, newDep.Name)
			}

			g.addNodes(ctx, graph, alpmPkg.Name(), newDepsSlice, Dep)
		}

		targetsToFind.Remove(depString)
	}

	// Check AUR
	// findDepsFromAUR removes every dep it resolves from targetsToFind.
	pkgsToAdd := g.findDepsFromAUR(ctx, targetsToFind)
	for i := range pkgsToAdd {
		aurPkg := &pkgsToAdd[i]
		if err := graph.DependOn(aurPkg.Name, parentPkgName); err != nil {
			g.logger.Warnln("aur dep warn:", aurPkg.Name, parentPkgName, err)
		}

		graph.SetNodeInfo(
			aurPkg.Name,
			&topo.NodeInfo[*InstallInfo]{
				Color:      colorMap[depType],
				Background: bgColorMap[AUR],
				Value: &InstallInfo{
					Source:  AUR,
					Reason:  depType,
					AURBase: &aurPkg.PackageBase,
					Version: aurPkg.Version,
				},
			})

		// Recurse into the AUR package's own dependency lists.
		g.addDepNodes(ctx, aurPkg, graph)
	}

	// Add missing to graph
	for _, depString := range targetsToFind.ToSlice() {
		depName, mod, ver := splitDep(depString)
		// no dep found. add as missing
		if err := graph.DependOn(depName, parentPkgName); err != nil {
			g.logger.Warnln("missing dep warn:", depString, parentPkgName, err)
		}
		graph.SetNodeInfo(depName, &topo.NodeInfo[*InstallInfo]{
			Color:      colorMap[depType],
			Background: bgColorMap[Missing],
			Value: &InstallInfo{
				Source: Missing,
				Reason: depType,
				// Keep the unmet constraint (e.g. ">=1.2") for diagnostics.
				Version: fmt.Sprintf("%s%s", mod, ver),
			},
		})
	}
}
|
||||
|
||||
// provideMenu asks the user to choose between several AUR packages that can
// satisfy dep. A single option (or noConfirm mode, or empty/erroring input)
// selects the first candidate without interaction; otherwise the prompt
// repeats until a number within range is entered.
func (g *Grapher) provideMenu(dep string, options []aur.Pkg) *aur.Pkg {
	size := len(options)
	if size == 1 {
		return &options[0]
	}

	str := text.Bold(gotext.Get("There are %[1]d providers available for %[2]s:", size, dep))
	str += "\n"

	// Reuse size as the running 1-based menu index while building the list.
	size = 1
	str += g.logger.SprintOperationInfo(gotext.Get("Repository AUR"), "\n ")

	for i := range options {
		str += fmt.Sprintf("%d) %s ", size, options[i].Name)
		size++
	}

	// After the loop, size == len(options)+1, so valid picks are 1..size-1.
	g.logger.OperationInfoln(str)

	for {
		g.logger.Println(gotext.Get("\nEnter a number (default=1): "))

		if g.noConfirm {
			// Non-interactive mode: echo and take the default.
			g.logger.Println("1")

			return &options[0]
		}

		numberBuf, err := g.logger.GetInput("", false)
		if err != nil {
			// Input failure (e.g. closed stdin): fall back to the default.
			g.logger.Errorln(err)

			return &options[0]
		}

		if numberBuf == "" {
			// Bare Enter accepts the default.
			return &options[0]
		}

		num, err := strconv.Atoi(numberBuf)
		if err != nil {
			g.logger.Errorln(gotext.Get("invalid number: %s", numberBuf))

			continue
		}

		if num < 1 || num >= size {
			g.logger.Errorln(gotext.Get("invalid value: %d is not between %d and %d",
				num, 1, size-1))

			continue
		}

		return &options[num-1]
	}
}
|
||||
|
||||
// makeAURPKGFromSrcinfo synthesizes aur.Pkg values from a parsed .SRCINFO
// so a local PKGBUILD can flow through the same dependency machinery as
// AUR RPC results. One Pkg is produced per (split) package; architecture-
// qualified fields are filtered down to the architectures alpm supports.
// Returns an error only if the local architectures cannot be read.
func makeAURPKGFromSrcinfo(dbExecutor db.Executor, srcInfo *gosrc.Srcinfo) ([]*aur.Pkg, error) {
	pkgs := make([]*aur.Pkg, 0, 1)

	alpmArch, err := dbExecutor.AlpmArchitectures()
	if err != nil {
		return nil, err
	}

	alpmArch = append(alpmArch, "") // srcinfo assumes no value as ""

	// Per-package description, falling back to the pkgbase description.
	getDesc := func(pkg *gosrc.Package) string {
		if pkg.Pkgdesc != "" {
			return pkg.Pkgdesc
		}

		return srcInfo.Pkgdesc
	}

	for i := range srcInfo.Packages {
		pkg := &srcInfo.Packages[i]

		// Relation fields merge the package-specific entries with the
		// pkgbase-level ones; make/checkdepends only exist at pkgbase level.
		// ID fields are zeroed: these packages do not come from the RPC.
		pkgs = append(pkgs, &aur.Pkg{
			ID:            0,
			Name:          pkg.Pkgname,
			PackageBaseID: 0,
			PackageBase:   srcInfo.Pkgbase,
			Version:       srcInfo.Version(),
			Description:   getDesc(pkg),
			URL:           pkg.URL,
			Depends: append(archStringToString(alpmArch, pkg.Depends),
				archStringToString(alpmArch, srcInfo.Depends)...),
			MakeDepends:  archStringToString(alpmArch, srcInfo.MakeDepends),
			CheckDepends: archStringToString(alpmArch, srcInfo.CheckDepends),
			Conflicts: append(archStringToString(alpmArch, pkg.Conflicts),
				archStringToString(alpmArch, srcInfo.Conflicts)...),
			Provides: append(archStringToString(alpmArch, pkg.Provides),
				archStringToString(alpmArch, srcInfo.Provides)...),
			Replaces: append(archStringToString(alpmArch, pkg.Replaces),
				archStringToString(alpmArch, srcInfo.Replaces)...),
			OptDepends: append(archStringToString(alpmArch, pkg.OptDepends),
				archStringToString(alpmArch, srcInfo.OptDepends)...),
			Groups:   pkg.Groups,
			License:  pkg.License,
			Keywords: []string{},
		})
	}

	return pkgs, nil
}
|
||||
|
||||
func archStringToString(alpmArches []string, archString []gosrc.ArchString) []string {
|
||||
pkgs := make([]string, 0, len(archString))
|
||||
|
||||
for _, arch := range archString {
|
||||
if db.ArchIsSupported(alpmArches, arch.Arch) {
|
||||
pkgs = append(pkgs, arch.Value)
|
||||
}
|
||||
}
|
||||
|
||||
return pkgs
|
||||
}
|
||||
|
||||
func aurDepModToAlpmDep(mod string) alpm.DepMod {
|
||||
switch mod {
|
||||
case "=":
|
||||
return alpm.DepModEq
|
||||
case ">=":
|
||||
return alpm.DepModGE
|
||||
case "<=":
|
||||
return alpm.DepModLE
|
||||
case ">":
|
||||
return alpm.DepModGT
|
||||
case "<":
|
||||
return alpm.DepModLT
|
||||
}
|
||||
return alpm.DepModAny
|
||||
}
|
811
pkg/dep/dep_graph_test.go
Normal file
811
pkg/dep/dep_graph_test.go
Normal file
@ -0,0 +1,811 @@
|
||||
//go:build !integration
|
||||
// +build !integration
|
||||
|
||||
package dep
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
aurc "github.com/Jguer/aur"
|
||||
alpm "github.com/Jguer/go-alpm/v2"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
"github.com/Jguer/yay/v12/pkg/db/mock"
|
||||
mockaur "github.com/Jguer/yay/v12/pkg/dep/mock"
|
||||
aur "github.com/Jguer/yay/v12/pkg/query"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// ptrString returns a pointer to a copy of s, handy for filling test
// fixture fields typed as *string.
func ptrString(s string) *string {
	v := s

	return &v
}
|
||||
|
||||
func getFromFile(t *testing.T, filePath string) mockaur.GetFunc {
|
||||
f, err := os.Open(filePath)
|
||||
require.NoError(t, err)
|
||||
|
||||
fBytes, err := io.ReadAll(f)
|
||||
require.NoError(t, err)
|
||||
|
||||
pkgs := []aur.Pkg{}
|
||||
err = json.Unmarshal(fBytes, &pkgs)
|
||||
require.NoError(t, err)
|
||||
|
||||
return func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
|
||||
return pkgs, nil
|
||||
}
|
||||
}
|
||||
|
||||
// TestGrapher_GraphFromTargets_jellyfin graphs the AUR split package
// "jellyfin" against mocked repo/local databases and checks the topological
// layers both with dependency resolution disabled (noDeps) and enabled.
func TestGrapher_GraphFromTargets_jellyfin(t *testing.T) {
	mockDB := &mock.DBExecutor{
		SyncPackageFn: func(string) mock.IPackage { return nil },
		SyncSatisfierFn: func(s string) mock.IPackage {
			// Only the two dotnet packages exist in the sync repos.
			switch s {
			case "jellyfin":
				return nil
			case "dotnet-runtime-6.0":
				return &mock.Package{
					PName:    "dotnet-runtime-6.0",
					PBase:    "dotnet-runtime-6.0",
					PVersion: "6.0.100-1",
					PDB:      mock.NewDB("community"),
				}
			case "dotnet-sdk-6.0":
				return &mock.Package{
					PName:    "dotnet-sdk-6.0",
					PBase:    "dotnet-sdk-6.0",
					PVersion: "6.0.100-1",
					PDB:      mock.NewDB("community"),
				}
			}

			return nil
		},
		PackagesFromGroupFn: func(string) []mock.IPackage { return nil },
		LocalSatisfierExistsFn: func(s string) bool {
			// Everything except the listed deps is considered installed.
			switch s {
			case "dotnet-sdk-6.0", "dotnet-runtime-6.0", "jellyfin-server=10.8.8", "jellyfin-web=10.8.8":
				return false
			}

			return true
		},
		LocalPackageFn: func(string) mock.IPackage { return nil },
	}

	// AUR answers come from on-disk JSON fixtures per needle.
	mockAUR := &mockaur.MockAUR{GetFn: func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
		if query.Needles[0] == "jellyfin" {
			jfinFn := getFromFile(t, "testdata/jellyfin.json")
			return jfinFn(ctx, query)
		}

		if query.Needles[0] == "jellyfin-web" {
			jfinWebFn := getFromFile(t, "testdata/jellyfin-web.json")
			return jfinWebFn(ctx, query)
		}

		if query.Needles[0] == "jellyfin-server" {
			jfinServerFn := getFromFile(t, "testdata/jellyfin-server.json")
			return jfinServerFn(ctx, query)
		}

		panic(fmt.Sprintf("implement me %v", query.Needles))
	}}

	type fields struct {
		dbExecutor  db.Executor
		aurCache    aurc.QueryClient
		noDeps      bool
		noCheckDeps bool
	}
	type args struct {
		targets []string
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		want    []map[string]*InstallInfo // expected topological layers, targets first
		wantErr bool
	}{
		{
			name: "noDeps",
			fields: fields{
				dbExecutor:  mockDB,
				aurCache:    mockAUR,
				noDeps:      true,
				noCheckDeps: false,
			},
			args: args{
				targets: []string{"jellyfin"},
			},
			want: []map[string]*InstallInfo{
				{
					"jellyfin": {
						Source:  AUR,
						Reason:  Explicit,
						Version: "10.8.8-1",
						AURBase: ptrString("jellyfin"),
					},
				},
				{
					"dotnet-sdk-6.0": {
						Source:     Sync,
						Reason:     MakeDep,
						Version:    "6.0.100-1",
						SyncDBName: ptrString("community"),
					},
				},
			},
			wantErr: false,
		},
		{
			name: "deps",
			fields: fields{
				dbExecutor:  mockDB,
				aurCache:    mockAUR,
				noDeps:      false,
				noCheckDeps: false,
			},
			args: args{
				targets: []string{"jellyfin"},
			},
			want: []map[string]*InstallInfo{
				{
					"jellyfin": {
						Source:  AUR,
						Reason:  Explicit,
						Version: "10.8.8-1",
						AURBase: ptrString("jellyfin"),
					},
				},
				{
					"jellyfin-web": {
						Source:  AUR,
						Reason:  Dep,
						Version: "10.8.8-1",
						AURBase: ptrString("jellyfin"),
					},
					"jellyfin-server": {
						Source:  AUR,
						Reason:  Dep,
						Version: "10.8.8-1",
						AURBase: ptrString("jellyfin"),
					},
				},
				{
					"dotnet-sdk-6.0": {
						Source:     Sync,
						Reason:     MakeDep,
						Version:    "6.0.100-1",
						SyncDBName: ptrString("community"),
					},
					"dotnet-runtime-6.0": {
						Source:     Sync,
						Reason:     Dep,
						Version:    "6.0.100-1",
						SyncDBName: ptrString("community"),
					},
				},
			},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			g := NewGrapher(tt.fields.dbExecutor,
				tt.fields.aurCache, false, true,
				tt.fields.noDeps, tt.fields.noCheckDeps, false,
				text.NewLogger(io.Discard, io.Discard, &os.File{}, true, "test"))
			got, err := g.GraphFromTargets(context.Background(), nil, tt.args.targets)
			require.NoError(t, err)
			layers := got.TopoSortedLayerMap(nil)
			require.EqualValues(t, tt.want, layers, layers)
		})
	}
}
|
||||
|
||||
// TestGrapher_GraphProvides_androidsdk verifies that a virtual dependency
// ("java-environment") is satisfied by a sync package that provides it
// (jdk11-openjdk) instead of being queried directly.
func TestGrapher_GraphProvides_androidsdk(t *testing.T) {
	mockDB := &mock.DBExecutor{
		SyncPackageFn: func(string) mock.IPackage { return nil },
		SyncSatisfierFn: func(s string) mock.IPackage {
			switch s {
			case "android-sdk":
				return nil
			case "jdk11-openjdk":
				return &mock.Package{
					PName:    "jdk11-openjdk",
					PVersion: "11.0.12.u7-1",
					PDB:      mock.NewDB("community"),
					PProvides: mock.DependList{
						Depends: []alpm.Depend{
							{Name: "java-environment", Version: "11", Mod: alpm.DepModEq},
							{Name: "java-environment-openjdk", Version: "11", Mod: alpm.DepModEq},
							{Name: "jdk11-openjdk", Version: "11.0.19.u7-1", Mod: alpm.DepModEq},
						},
					},
				}
			case "java-environment":
				// Must be resolved via the provider node, never looked up.
				panic("not supposed to be called")
			}
			panic("implement me " + s)
		},
		PackagesFromGroupFn: func(string) []mock.IPackage { return nil },
		LocalSatisfierExistsFn: func(s string) bool {
			switch s {
			case "java-environment":
				return false
			}

			switch s {
			case "libxtst", "fontconfig", "freetype2", "lib32-gcc-libs", "lib32-glibc", "libx11", "libxext", "libxrender", "zlib", "gcc-libs":
				return true
			}

			panic("implement me " + s)
		},
		LocalPackageFn: func(string) mock.IPackage { return nil },
	}

	mockAUR := &mockaur.MockAUR{GetFn: func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
		if query.Needles[0] == "android-sdk" {
			jfinFn := getFromFile(t, "testdata/android-sdk.json")
			return jfinFn(ctx, query)
		}

		panic(fmt.Sprintf("implement me %v", query.Needles))
	}}

	type fields struct {
		dbExecutor  db.Executor
		aurCache    aurc.QueryClient
		noDeps      bool
		noCheckDeps bool
	}
	type args struct {
		targets []string
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		want    []map[string]*InstallInfo
		wantErr bool
	}{
		{
			name: "explicit dep",
			fields: fields{
				dbExecutor:  mockDB,
				aurCache:    mockAUR,
				noDeps:      false,
				noCheckDeps: false,
			},
			args: args{
				targets: []string{"android-sdk", "jdk11-openjdk"},
			},
			want: []map[string]*InstallInfo{
				{
					"android-sdk": {
						Source:  AUR,
						Reason:  Explicit,
						Version: "26.1.1-2",
						AURBase: ptrString("android-sdk"),
					},
				},
				{
					"jdk11-openjdk": {
						Source:     Sync,
						Reason:     Explicit,
						Version:    "11.0.12.u7-1",
						SyncDBName: ptrString("community"),
					},
				},
			},
			wantErr: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			g := NewGrapher(tt.fields.dbExecutor,
				tt.fields.aurCache, false, true,
				tt.fields.noDeps, tt.fields.noCheckDeps, false,
				text.NewLogger(io.Discard, io.Discard, &os.File{}, true, "test"))
			got, err := g.GraphFromTargets(context.Background(), nil, tt.args.targets)
			require.NoError(t, err)
			layers := got.TopoSortedLayerMap(nil)
			require.EqualValues(t, tt.want, layers, layers)
		})
	}
}
|
||||
|
||||
// TestGrapher_GraphFromAUR_Deps_ceph_bin exercises provider selection among
// AUR split packages (ceph vs ceph-bin families) and checks that the chosen
// provider, dependency reason, and layer ordering are stable regardless of
// the order the targets are given in.
func TestGrapher_GraphFromAUR_Deps_ceph_bin(t *testing.T) {
	mockDB := &mock.DBExecutor{
		SyncPackageFn:       func(string) mock.IPackage { return nil },
		PackagesFromGroupFn: func(string) []mock.IPackage { return []mock.IPackage{} },
		SyncSatisfierFn: func(s string) mock.IPackage {
			// Nothing ceph-related exists in the sync repos.
			switch s {
			case "ceph-bin", "ceph-libs-bin":
				return nil
			case "ceph", "ceph-libs", "ceph-libs=17.2.6-2":
				return nil
			}

			panic("implement me " + s)
		},

		LocalSatisfierExistsFn: func(s string) bool {
			switch s {
			case "ceph-libs", "ceph-libs=17.2.6-2":
				return false
			case "dep1", "dep2", "dep3", "makedep1", "makedep2", "checkdep1":
				return true
			}

			panic("implement me " + s)
		},
		LocalPackageFn: func(string) mock.IPackage { return nil },
	}

	mockAUR := &mockaur.MockAUR{GetFn: func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
		mockPkgs := map[string]aur.Pkg{
			"ceph-bin": {
				Name:        "ceph-bin",
				PackageBase: "ceph-bin",
				Version:     "17.2.6-2",
				Depends:     []string{"ceph-libs=17.2.6-2", "dep1"},
				Provides:    []string{"ceph=17.2.6-2"},
			},
			"ceph-libs-bin": {
				Name:        "ceph-libs-bin",
				PackageBase: "ceph-bin",
				Version:     "17.2.6-2",
				Depends:     []string{"dep1", "dep2"},
				Provides:    []string{"ceph-libs=17.2.6-2"},
			},
			"ceph": {
				Name:         "ceph",
				PackageBase:  "ceph",
				Version:      "17.2.6-2",
				Depends:      []string{"ceph-libs=17.2.6-2", "dep1"},
				MakeDepends:  []string{"makedep1"},
				CheckDepends: []string{"checkdep1"},
				Provides:     []string{"ceph=17.2.6-2"},
			},
			"ceph-libs": {
				Name:         "ceph-libs",
				PackageBase:  "ceph",
				Version:      "17.2.6-2",
				Depends:      []string{"dep1", "dep2", "dep3"},
				MakeDepends:  []string{"makedep1", "makedep2"},
				CheckDepends: []string{"checkdep1"},
				Provides:     []string{"ceph-libs=17.2.6-2"},
			},
		}

		pkgs := []aur.Pkg{}
		for _, needle := range query.Needles {
			if pkg, ok := mockPkgs[needle]; ok {
				pkgs = append(pkgs, pkg)
			} else {
				panic(fmt.Sprintf("implement me %v", needle))
			}
		}

		return pkgs, nil
	}}

	// Expected InstallInfo values, keyed "name reason" for reuse below.
	installInfos := map[string]*InstallInfo{
		"ceph-bin exp": {
			Source:  AUR,
			Reason:  Explicit,
			Version: "17.2.6-2",
			AURBase: ptrString("ceph-bin"),
		},
		"ceph-libs-bin exp": {
			Source:  AUR,
			Reason:  Explicit,
			Version: "17.2.6-2",
			AURBase: ptrString("ceph-bin"),
		},
		"ceph exp": {
			Source:  AUR,
			Reason:  Explicit,
			Version: "17.2.6-2",
			AURBase: ptrString("ceph"),
		},
		"ceph-libs exp": {
			Source:  AUR,
			Reason:  Explicit,
			Version: "17.2.6-2",
			AURBase: ptrString("ceph"),
		},
		"ceph-libs dep": {
			Source:  AUR,
			Reason:  Dep,
			Version: "17.2.6-2",
			AURBase: ptrString("ceph"),
		},
	}

	tests := []struct {
		name       string
		targets    []string
		wantLayers []map[string]*InstallInfo
		wantErr    bool
	}{
		{
			name:    "ceph-bin ceph-libs-bin",
			targets: []string{"ceph-bin", "ceph-libs-bin"},
			wantLayers: []map[string]*InstallInfo{
				{"ceph-bin": installInfos["ceph-bin exp"]},
				{"ceph-libs-bin": installInfos["ceph-libs-bin exp"]},
			},
			wantErr: false,
		},
		{
			name:    "ceph-libs-bin ceph-bin (reversed order)",
			targets: []string{"ceph-libs-bin", "ceph-bin"},
			wantLayers: []map[string]*InstallInfo{
				{"ceph-bin": installInfos["ceph-bin exp"]},
				{"ceph-libs-bin": installInfos["ceph-libs-bin exp"]},
			},
			wantErr: false,
		},
		{
			name:    "ceph",
			targets: []string{"ceph"},
			wantLayers: []map[string]*InstallInfo{
				{"ceph": installInfos["ceph exp"]},
				{"ceph-libs": installInfos["ceph-libs dep"]},
			},
			wantErr: false,
		},
		{
			name:    "ceph-bin",
			targets: []string{"ceph-bin"},
			wantLayers: []map[string]*InstallInfo{
				{"ceph-bin": installInfos["ceph-bin exp"]},
				{"ceph-libs": installInfos["ceph-libs dep"]},
			},
			wantErr: false,
		},
		{
			name:    "ceph-bin ceph-libs",
			targets: []string{"ceph-bin", "ceph-libs"},
			wantLayers: []map[string]*InstallInfo{
				{"ceph-bin": installInfos["ceph-bin exp"]},
				{"ceph-libs": installInfos["ceph-libs exp"]},
			},
			wantErr: false,
		},
		{
			name:    "ceph-libs ceph-bin (reversed order)",
			targets: []string{"ceph-libs", "ceph-bin"},
			wantLayers: []map[string]*InstallInfo{
				{"ceph-bin": installInfos["ceph-bin exp"]},
				{"ceph-libs": installInfos["ceph-libs exp"]},
			},
			wantErr: false,
		},
		{
			name:    "ceph ceph-libs-bin",
			targets: []string{"ceph", "ceph-libs-bin"},
			wantLayers: []map[string]*InstallInfo{
				{"ceph": installInfos["ceph exp"]},
				{"ceph-libs-bin": installInfos["ceph-libs-bin exp"]},
			},
			wantErr: false,
		},
		{
			name:    "ceph-libs-bin ceph (reversed order)",
			targets: []string{"ceph-libs-bin", "ceph"},
			wantLayers: []map[string]*InstallInfo{
				{"ceph": installInfos["ceph exp"]},
				{"ceph-libs-bin": installInfos["ceph-libs-bin exp"]},
			},
			wantErr: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			g := NewGrapher(mockDB, mockAUR,
				false, true, false, false, false,
				text.NewLogger(io.Discard, io.Discard, &os.File{}, true, "test"))
			got, err := g.GraphFromTargets(context.Background(), nil, tt.targets)
			require.NoError(t, err)
			layers := got.TopoSortedLayerMap(nil)
			require.EqualValues(t, tt.wantLayers, layers, layers)
		})
	}
}
|
||||
|
||||
// TestGrapher_GraphFromAUR_Deps_gourou checks that an AUR package's dep
// ("libzip") resolves to the sync repo by default, becomes Explicit when
// also targeted, and can be overridden by an AUR provider (libzip-git)
// when that provider is an explicit target — in either target order.
func TestGrapher_GraphFromAUR_Deps_gourou(t *testing.T) {
	mockDB := &mock.DBExecutor{
		SyncPackageFn:       func(string) mock.IPackage { return nil },
		PackagesFromGroupFn: func(string) []mock.IPackage { return []mock.IPackage{} },
		SyncSatisfierFn: func(s string) mock.IPackage {
			switch s {
			case "gourou", "libzip-git":
				return nil
			case "libzip":
				return &mock.Package{
					PName:    "libzip",
					PVersion: "1.9.2-1",
					PDB:      mock.NewDB("extra"),
				}
			}

			panic("implement me " + s)
		},

		LocalSatisfierExistsFn: func(s string) bool {
			switch s {
			case "gourou", "libzip", "libzip-git":
				return false
			case "dep1", "dep2":
				return true
			}

			panic("implement me " + s)
		},
		LocalPackageFn: func(string) mock.IPackage { return nil },
	}

	mockAUR := &mockaur.MockAUR{GetFn: func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
		mockPkgs := map[string]aur.Pkg{
			"gourou": {
				Name:        "gourou",
				PackageBase: "gourou",
				Version:     "0.8.1",
				Depends:     []string{"libzip"},
			},
			"libzip-git": {
				Name:        "libzip-git",
				PackageBase: "libzip-git",
				Version:     "1.9.2.r159.gb3ac716c-1",
				Depends:     []string{"dep1", "dep2"},
				Provides:    []string{"libzip=1.9.2.r159.gb3ac716c"},
			},
		}

		pkgs := []aur.Pkg{}
		for _, needle := range query.Needles {
			if pkg, ok := mockPkgs[needle]; ok {
				pkgs = append(pkgs, pkg)
			} else {
				panic(fmt.Sprintf("implement me %v", needle))
			}
		}

		return pkgs, nil
	}}

	// Expected InstallInfo values, keyed "name reason" for reuse below.
	installInfos := map[string]*InstallInfo{
		"gourou exp": {
			Source:  AUR,
			Reason:  Explicit,
			Version: "0.8.1",
			AURBase: ptrString("gourou"),
		},
		"libzip dep": {
			Source:     Sync,
			Reason:     Dep,
			Version:    "1.9.2-1",
			SyncDBName: ptrString("extra"),
		},
		"libzip exp": {
			Source:     Sync,
			Reason:     Explicit,
			Version:    "1.9.2-1",
			SyncDBName: ptrString("extra"),
		},
		"libzip-git exp": {
			Source:  AUR,
			Reason:  Explicit,
			Version: "1.9.2.r159.gb3ac716c-1",
			AURBase: ptrString("libzip-git"),
		},
	}

	tests := []struct {
		name       string
		targets    []string
		wantLayers []map[string]*InstallInfo
		wantErr    bool
	}{
		{
			name:    "gourou",
			targets: []string{"gourou"},
			wantLayers: []map[string]*InstallInfo{
				{"gourou": installInfos["gourou exp"]},
				{"libzip": installInfos["libzip dep"]},
			},
			wantErr: false,
		},
		{
			name:    "gourou libzip",
			targets: []string{"gourou", "libzip"},
			wantLayers: []map[string]*InstallInfo{
				{"gourou": installInfos["gourou exp"]},
				{"libzip": installInfos["libzip exp"]},
			},
			wantErr: false,
		},
		{
			name:    "gourou libzip-git",
			targets: []string{"gourou", "libzip-git"},
			wantLayers: []map[string]*InstallInfo{
				{"gourou": installInfos["gourou exp"]},
				{"libzip-git": installInfos["libzip-git exp"]},
			},
			wantErr: false,
		},
		{
			name:    "libzip-git gourou (reversed order)",
			targets: []string{"libzip-git", "gourou"},
			wantLayers: []map[string]*InstallInfo{
				{"gourou": installInfos["gourou exp"]},
				{"libzip-git": installInfos["libzip-git exp"]},
			},
			wantErr: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			g := NewGrapher(mockDB, mockAUR,
				false, true, false, false, false,
				text.NewLogger(io.Discard, io.Discard, &os.File{}, true, "test"))
			got, err := g.GraphFromTargets(context.Background(), nil, tt.targets)
			require.NoError(t, err)
			layers := got.TopoSortedLayerMap(nil)
			require.EqualValues(t, tt.wantLayers, layers, layers)
		})
	}
}
|
||||
|
||||
// TestGrapher_GraphFromTargets_ReinstalledDeps reinstalls packages that are
// already installed as dependencies and checks the graph keeps their local
// install reason (Dep, not Explicit), with and without explicit db/ prefixes
// on the targets.
func TestGrapher_GraphFromTargets_ReinstalledDeps(t *testing.T) {
	mockDB := &mock.DBExecutor{
		SyncPackageFn:       func(string) mock.IPackage { return nil },
		PackagesFromGroupFn: func(string) []mock.IPackage { return []mock.IPackage{} },
		SyncSatisfierFn: func(s string) mock.IPackage {
			switch s {
			case "gourou":
				return nil
			case "libzip":
				return &mock.Package{
					PName:    "libzip",
					PVersion: "1.9.2-1",
					PDB:      mock.NewDB("extra"),
				}
			}

			panic("implement me " + s)
		},
		// Used for the db-prefixed target form ("extra/libzip").
		SatisfierFromDBFn: func(s, s2 string) (mock.IPackage, error) {
			if s2 == "extra" {
				switch s {
				case "libzip":
					return &mock.Package{
						PName:    "libzip",
						PVersion: "1.9.2-1",
						PDB:      mock.NewDB("extra"),
					}, nil
				}
			}

			panic("implement me " + s2 + "/" + s)
		},

		LocalSatisfierExistsFn: func(s string) bool {
			switch s {
			case "gourou", "libzip":
				return true
			}

			panic("implement me " + s)
		},
		// Both packages are locally installed with reason "dependency".
		LocalPackageFn: func(s string) mock.IPackage {
			switch s {
			case "libzip":
				return &mock.Package{
					PName:    "libzip",
					PVersion: "1.9.2-1",
					PDB:      mock.NewDB("extra"),
					PReason:  alpm.PkgReasonDepend,
				}
			case "gourou":
				return &mock.Package{
					PName:    "gourou",
					PVersion: "0.8.1",
					PDB:      mock.NewDB("aur"),
					PReason:  alpm.PkgReasonDepend,
				}
			}
			return nil
		},
	}

	mockAUR := &mockaur.MockAUR{GetFn: func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
		mockPkgs := map[string]aur.Pkg{
			"gourou": {
				Name:        "gourou",
				PackageBase: "gourou",
				Version:     "0.8.1",
				Depends:     []string{"libzip"},
			},
		}

		pkgs := []aur.Pkg{}
		for _, needle := range query.Needles {
			if pkg, ok := mockPkgs[needle]; ok {
				pkgs = append(pkgs, pkg)
			} else {
				panic(fmt.Sprintf("implement me %v", needle))
			}
		}

		return pkgs, nil
	}}

	// Expected InstallInfo values; reasons stay Dep because the local
	// database records both packages as dependency-installed.
	installInfos := map[string]*InstallInfo{
		"gourou dep": {
			Source:  AUR,
			Reason:  Dep,
			Version: "0.8.1",
			AURBase: ptrString("gourou"),
		},
		"libzip dep": {
			Source:     Sync,
			Reason:     Dep,
			Version:    "1.9.2-1",
			SyncDBName: ptrString("extra"),
		},
	}

	tests := []struct {
		name       string
		targets    []string
		wantLayers []map[string]*InstallInfo
		wantErr    bool
	}{
		{
			name:    "gourou libzip",
			targets: []string{"gourou", "libzip"},
			wantLayers: []map[string]*InstallInfo{
				{"gourou": installInfos["gourou dep"]},
				{"libzip": installInfos["libzip dep"]},
			},
			wantErr: false,
		},
		{
			name:    "aur/gourou extra/libzip",
			targets: []string{"aur/gourou", "extra/libzip"},
			wantLayers: []map[string]*InstallInfo{
				{"gourou": installInfos["gourou dep"]},
				{"libzip": installInfos["libzip dep"]},
			},
			wantErr: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			g := NewGrapher(mockDB, mockAUR,
				false, true, false, false, false,
				text.NewLogger(io.Discard, io.Discard, &os.File{}, true, "test"))
			got, err := g.GraphFromTargets(context.Background(), nil, tt.targets)
			require.NoError(t, err)
			layers := got.TopoSortedLayerMap(nil)
			require.EqualValues(t, tt.wantLayers, layers, layers)
		})
	}
}
|
21
pkg/dep/mock/aur.go
Normal file
21
pkg/dep/mock/aur.go
Normal file
@ -0,0 +1,21 @@
|
||||
package mock
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/Jguer/aur"
|
||||
)
|
||||
|
||||
type GetFunc func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error)
|
||||
|
||||
type MockAUR struct {
|
||||
GetFn GetFunc
|
||||
}
|
||||
|
||||
func (m *MockAUR) Get(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
|
||||
if m.GetFn != nil {
|
||||
return m.GetFn(ctx, query)
|
||||
}
|
||||
|
||||
panic("implement me")
|
||||
}
|
34
pkg/dep/target_handler.go
Normal file
34
pkg/dep/target_handler.go
Normal file
@ -0,0 +1,34 @@
|
||||
package dep
|
||||
|
||||
import "github.com/Jguer/yay/v12/pkg/text"
|
||||
|
||||
type Target struct {
|
||||
DB string
|
||||
Name string
|
||||
Mod string
|
||||
Version string
|
||||
}
|
||||
|
||||
func ToTarget(pkg string) Target {
|
||||
dbName, depString := text.SplitDBFromName(pkg)
|
||||
name, mod, depVersion := splitDep(depString)
|
||||
|
||||
return Target{
|
||||
DB: dbName,
|
||||
Name: name,
|
||||
Mod: mod,
|
||||
Version: depVersion,
|
||||
}
|
||||
}
|
||||
|
||||
func (t Target) DepString() string {
|
||||
return t.Name + t.Mod + t.Version
|
||||
}
|
||||
|
||||
func (t Target) String() string {
|
||||
if t.DB != "" {
|
||||
return t.DB + "/" + t.DepString()
|
||||
}
|
||||
|
||||
return t.DepString()
|
||||
}
|
3
pkg/dep/testdata/android-sdk.json
vendored
Normal file
3
pkg/dep/testdata/android-sdk.json
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
[
|
||||
{"ID":1055234,"Name":"android-sdk","PackageBaseID":13751,"PackageBase":"android-sdk","Version":"26.1.1-2","Description":"Google Android SDK","URL":"https://developer.android.com/studio/releases/sdk-tools.html","NumVotes":1487,"Popularity":0.802316,"OutOfDate":null,"Maintainer":"dreamingincode","Submitter":null,"FirstSubmitted":1194895596,"LastModified":1647982720,"URLPath":"/cgit/aur.git/snapshot/android-sdk.tar.gz","Depends":["java-environment","libxtst","fontconfig","freetype2","lib32-gcc-libs","lib32-glibc","libx11","libxext","libxrender","zlib","gcc-libs"],"OptDepends":["android-emulator","android-sdk-platform-tools","android-udev"],"License":["custom"],"Keywords":["android","development"]}
|
||||
]
|
3
pkg/dep/testdata/jellyfin-server.json
vendored
Normal file
3
pkg/dep/testdata/jellyfin-server.json
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
[
|
||||
{"ID":1176791,"Name":"jellyfin-server","PackageBaseID":138631,"PackageBase":"jellyfin","Version":"10.8.8-1","Description":"Jellyfin server component","URL":"https://github.com/jellyfin/jellyfin","NumVotes":84,"Popularity":1.272964,"OutOfDate":null,"Maintainer":"z3ntu","Submitter":"z3ntu","FirstSubmitted":1547053171,"LastModified":1669830147,"URLPath":"/cgit/aur.git/snapshot/jellyfin-server.tar.gz","Depends":["dotnet-runtime-6.0","aspnet-runtime-6.0","ffmpeg","sqlite"],"MakeDepends":["dotnet-sdk-6.0","nodejs","npm","git"],"License":["GPL2"]}
|
||||
]
|
3
pkg/dep/testdata/jellyfin-web.json
vendored
Normal file
3
pkg/dep/testdata/jellyfin-web.json
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
[
|
||||
{"ID":1176790,"Name":"jellyfin-web","PackageBaseID":138631,"PackageBase":"jellyfin","Version":"10.8.8-1","Description":"Jellyfin web client","URL":"https://github.com/jellyfin/jellyfin","NumVotes":84,"Popularity":1.272964,"OutOfDate":null,"Maintainer":"z3ntu","Submitter":"z3ntu","FirstSubmitted":1547053171,"LastModified":1669830147,"URLPath":"/cgit/aur.git/snapshot/jellyfin-web.tar.gz","MakeDepends":["dotnet-sdk-6.0","nodejs","npm","git"],"License":["GPL2"]}
|
||||
]
|
3
pkg/dep/testdata/jellyfin.json
vendored
Normal file
3
pkg/dep/testdata/jellyfin.json
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
[
|
||||
{"ID":1176789,"Name":"jellyfin","PackageBaseID":138631,"PackageBase":"jellyfin","Version":"10.8.8-1","Description":"The Free Software Media System","URL":"https://github.com/jellyfin/jellyfin","NumVotes":84,"Popularity":1.272964,"OutOfDate":null,"Maintainer":"z3ntu","Submitter":"z3ntu","FirstSubmitted":1547053171,"LastModified":1669830147,"URLPath":"/cgit/aur.git/snapshot/jellyfin.tar.gz","Depends":["jellyfin-web=10.8.8","jellyfin-server=10.8.8"],"MakeDepends":["dotnet-sdk-6.0","nodejs","npm","git"],"License":["GPL2"]}
|
||||
]
|
371
pkg/dep/topo/dep.go
Normal file
371
pkg/dep/topo/dep.go
Normal file
@ -0,0 +1,371 @@
|
||||
package topo
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/Jguer/go-alpm/v2"
|
||||
)
|
||||
|
||||
type (
|
||||
NodeSet[T comparable] map[T]bool
|
||||
ProvidesMap[T comparable] map[T]*DependencyInfo[T]
|
||||
DepMap[T comparable] map[T]NodeSet[T]
|
||||
)
|
||||
|
||||
func (n NodeSet[T]) Slice() []T {
|
||||
var slice []T
|
||||
|
||||
for node := range n {
|
||||
slice = append(slice, node)
|
||||
}
|
||||
|
||||
return slice
|
||||
}
|
||||
|
||||
type NodeInfo[V any] struct {
|
||||
Color string
|
||||
Background string
|
||||
Value V
|
||||
}
|
||||
|
||||
type DependencyInfo[T comparable] struct {
|
||||
Provider T
|
||||
alpm.Depend
|
||||
}
|
||||
|
||||
type CheckFn[T comparable, V any] func(T, V) error
|
||||
|
||||
type Graph[T comparable, V any] struct {
|
||||
nodes NodeSet[T]
|
||||
|
||||
// node info map
|
||||
nodeInfo map[T]*NodeInfo[V]
|
||||
|
||||
// `provides` tracks provides -> node.
|
||||
provides ProvidesMap[T]
|
||||
|
||||
// `dependencies` tracks child -> parents.
|
||||
dependencies DepMap[T]
|
||||
// `dependents` tracks parent -> children.
|
||||
dependents DepMap[T]
|
||||
}
|
||||
|
||||
func New[T comparable, V any]() *Graph[T, V] {
|
||||
return &Graph[T, V]{
|
||||
nodes: make(NodeSet[T]),
|
||||
dependencies: make(DepMap[T]),
|
||||
dependents: make(DepMap[T]),
|
||||
nodeInfo: make(map[T]*NodeInfo[V]),
|
||||
provides: make(ProvidesMap[T]),
|
||||
}
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) Len() int {
|
||||
return len(g.nodes)
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) Exists(node T) bool {
|
||||
_, ok := g.nodes[node]
|
||||
|
||||
return ok
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) AddNode(node T) {
|
||||
g.nodes[node] = true
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) ProvidesExists(provides T) bool {
|
||||
_, ok := g.provides[provides]
|
||||
|
||||
return ok
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) GetProviderNode(provides T) *DependencyInfo[T] {
|
||||
return g.provides[provides]
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) Provides(provides T, depInfo *alpm.Depend, node T) {
|
||||
g.provides[provides] = &DependencyInfo[T]{
|
||||
Provider: node,
|
||||
Depend: *depInfo,
|
||||
}
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) ForEach(f CheckFn[T, V]) error {
|
||||
for node := range g.nodes {
|
||||
if err := f(node, g.nodeInfo[node].Value); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) SetNodeInfo(node T, nodeInfo *NodeInfo[V]) {
|
||||
g.nodeInfo[node] = nodeInfo
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) GetNodeInfo(node T) *NodeInfo[V] {
|
||||
return g.nodeInfo[node]
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) DependOn(child, parent T) error {
|
||||
if child == parent {
|
||||
return ErrSelfReferential
|
||||
}
|
||||
|
||||
if g.DependsOn(parent, child) {
|
||||
return ErrCircular
|
||||
}
|
||||
|
||||
g.AddNode(parent)
|
||||
g.AddNode(child)
|
||||
|
||||
// Add edges.
|
||||
g.dependents.addNodeToNodeset(parent, child)
|
||||
g.dependencies.addNodeToNodeset(child, parent)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) String() string {
|
||||
var sb strings.Builder
|
||||
|
||||
sb.WriteString("digraph {\n")
|
||||
sb.WriteString("compound=true;\n")
|
||||
sb.WriteString("concentrate=true;\n")
|
||||
sb.WriteString("node [shape = record, ordering=out];\n")
|
||||
|
||||
for node := range g.nodes {
|
||||
extra := ""
|
||||
|
||||
if info, ok := g.nodeInfo[node]; ok {
|
||||
if info.Background != "" || info.Color != "" {
|
||||
extra = fmt.Sprintf("[color = %s, style = filled, fillcolor = %s]", info.Color, info.Background)
|
||||
}
|
||||
}
|
||||
|
||||
sb.WriteString(fmt.Sprintf("\t\"%v\"%s;\n", node, extra))
|
||||
}
|
||||
|
||||
for parent, children := range g.dependencies {
|
||||
for child := range children {
|
||||
sb.WriteString(fmt.Sprintf("\t\"%v\" -> \"%v\";\n", parent, child))
|
||||
}
|
||||
}
|
||||
|
||||
sb.WriteString("}")
|
||||
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) DependsOn(child, parent T) bool {
|
||||
deps := g.Dependencies(child)
|
||||
_, ok := deps[parent]
|
||||
|
||||
return ok
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) HasDependent(parent, child T) bool {
|
||||
deps := g.Dependents(parent)
|
||||
_, ok := deps[child]
|
||||
|
||||
return ok
|
||||
}
|
||||
|
||||
// leavesMap returns a map of leaves with the node as key and the node info value as value.
|
||||
func (g *Graph[T, V]) leavesMap() map[T]V {
|
||||
leaves := make(map[T]V, 0)
|
||||
|
||||
for node := range g.nodes {
|
||||
if _, ok := g.dependencies[node]; !ok {
|
||||
nodeInfo := g.GetNodeInfo(node)
|
||||
if nodeInfo == nil {
|
||||
nodeInfo = &NodeInfo[V]{}
|
||||
}
|
||||
|
||||
leaves[node] = nodeInfo.Value
|
||||
}
|
||||
}
|
||||
|
||||
return leaves
|
||||
}
|
||||
|
||||
// TopoSortedLayerMap returns a slice of all of the graph nodes in topological sort order with their node info.
|
||||
func (g *Graph[T, V]) TopoSortedLayerMap(checkFn CheckFn[T, V]) []map[T]V {
|
||||
layers := []map[T]V{}
|
||||
|
||||
// Copy the graph
|
||||
shrinkingGraph := g.clone()
|
||||
|
||||
for {
|
||||
leaves := shrinkingGraph.leavesMap()
|
||||
if len(leaves) == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
layers = append(layers, leaves)
|
||||
|
||||
for leafNode := range leaves {
|
||||
if checkFn != nil {
|
||||
if err := checkFn(leafNode, leaves[leafNode]); err != nil {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
shrinkingGraph.remove(leafNode)
|
||||
}
|
||||
}
|
||||
|
||||
return layers
|
||||
}
|
||||
|
||||
// returns if it was the last
|
||||
func (dm DepMap[T]) removeFromDepmap(key, node T) bool {
|
||||
if nodes := dm[key]; len(nodes) == 1 {
|
||||
// The only element in the nodeset must be `node`, so we
|
||||
// can delete the entry entirely.
|
||||
delete(dm, key)
|
||||
return true
|
||||
} else {
|
||||
// Otherwise, remove the single node from the nodeset.
|
||||
delete(nodes, node)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Prune removes the node,
|
||||
// its dependencies if there are no other dependents
|
||||
// and its dependents
|
||||
func (g *Graph[T, V]) Prune(node T) []T {
|
||||
pruned := []T{node}
|
||||
// Remove edges from things that depend on `node`.
|
||||
for dependent := range g.dependents[node] {
|
||||
last := g.dependencies.removeFromDepmap(dependent, node)
|
||||
if last {
|
||||
pruned = append(pruned, g.Prune(dependent)...)
|
||||
}
|
||||
}
|
||||
|
||||
delete(g.dependents, node)
|
||||
|
||||
// Remove all edges from node to the things it depends on.
|
||||
for dependency := range g.dependencies[node] {
|
||||
last := g.dependents.removeFromDepmap(dependency, node)
|
||||
if last {
|
||||
pruned = append(pruned, g.Prune(dependency)...)
|
||||
}
|
||||
}
|
||||
|
||||
delete(g.dependencies, node)
|
||||
|
||||
// Finally, remove the node itself.
|
||||
delete(g.nodes, node)
|
||||
return pruned
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) remove(node T) {
|
||||
// Remove edges from things that depend on `node`.
|
||||
for dependent := range g.dependents[node] {
|
||||
g.dependencies.removeFromDepmap(dependent, node)
|
||||
}
|
||||
|
||||
delete(g.dependents, node)
|
||||
|
||||
// Remove all edges from node to the things it depends on.
|
||||
for dependency := range g.dependencies[node] {
|
||||
g.dependents.removeFromDepmap(dependency, node)
|
||||
}
|
||||
|
||||
delete(g.dependencies, node)
|
||||
|
||||
// Finally, remove the node itself.
|
||||
delete(g.nodes, node)
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) Dependencies(child T) NodeSet[T] {
|
||||
return g.buildTransitive(child, g.ImmediateDependencies)
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) ImmediateDependencies(node T) NodeSet[T] {
|
||||
return g.dependencies[node]
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) Dependents(parent T) NodeSet[T] {
|
||||
return g.buildTransitive(parent, g.immediateDependents)
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) immediateDependents(node T) NodeSet[T] {
|
||||
return g.dependents[node]
|
||||
}
|
||||
|
||||
func (g *Graph[T, V]) clone() *Graph[T, V] {
|
||||
return &Graph[T, V]{
|
||||
dependencies: g.dependencies.copy(),
|
||||
dependents: g.dependents.copy(),
|
||||
nodes: g.nodes.copy(),
|
||||
nodeInfo: g.nodeInfo, // not copied, as it is not modified
|
||||
}
|
||||
}
|
||||
|
||||
// buildTransitive starts at `root` and continues calling `nextFn` to keep discovering more nodes until
|
||||
// the graph cannot produce any more. It returns the set of all discovered nodes.
|
||||
func (g *Graph[T, V]) buildTransitive(root T, nextFn func(T) NodeSet[T]) NodeSet[T] {
|
||||
if _, ok := g.nodes[root]; !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
out := make(NodeSet[T])
|
||||
searchNext := []T{root}
|
||||
|
||||
for len(searchNext) > 0 {
|
||||
// List of new nodes from this layer of the dependency graph. This is
|
||||
// assigned to `searchNext` at the end of the outer "discovery" loop.
|
||||
discovered := []T{}
|
||||
|
||||
for _, node := range searchNext {
|
||||
// For each node to discover, find the next nodes.
|
||||
for nextNode := range nextFn(node) {
|
||||
// If we have not seen the node before, add it to the output as well
|
||||
// as the list of nodes to traverse in the next iteration.
|
||||
if _, ok := out[nextNode]; !ok {
|
||||
out[nextNode] = true
|
||||
|
||||
discovered = append(discovered, nextNode)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
searchNext = discovered
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
func (s NodeSet[T]) copy() NodeSet[T] {
|
||||
out := make(NodeSet[T], len(s))
|
||||
for k, v := range s {
|
||||
out[k] = v
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
func (dm DepMap[T]) copy() DepMap[T] {
|
||||
out := make(DepMap[T], len(dm))
|
||||
for k := range dm {
|
||||
out[k] = dm[k].copy()
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
func (dm DepMap[T]) addNodeToNodeset(key, node T) {
|
||||
nodes, ok := dm[key]
|
||||
if !ok {
|
||||
nodes = make(NodeSet[T])
|
||||
dm[key] = nodes
|
||||
}
|
||||
|
||||
nodes[node] = true
|
||||
}
|
9
pkg/dep/topo/errors.go
Normal file
9
pkg/dep/topo/errors.go
Normal file
@ -0,0 +1,9 @@
|
||||
package topo
|
||||
|
||||
import "errors"
|
||||
|
||||
var (
|
||||
ErrSelfReferential = errors.New(" self-referential dependencies not allowed")
|
||||
ErrConflictingAlias = errors.New(" alias already defined")
|
||||
ErrCircular = errors.New(" circular dependencies not allowed")
|
||||
)
|
92
pkg/download/abs.go
Normal file
92
pkg/download/abs.go
Normal file
@ -0,0 +1,92 @@
|
||||
package download
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"regexp"
|
||||
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
)
|
||||
|
||||
const (
|
||||
MaxConcurrentFetch = 20
|
||||
absPackageURL = "https://gitlab.archlinux.org/archlinux/packaging/packages"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrInvalidRepository = errors.New(gotext.Get("invalid repository"))
|
||||
ErrABSPackageNotFound = errors.New(gotext.Get("package not found in repos"))
|
||||
)
|
||||
|
||||
type regexReplace struct {
|
||||
repl string
|
||||
match *regexp.Regexp
|
||||
}
|
||||
|
||||
// regex replacements for Gitlab URLs
|
||||
// info: https://gitlab.archlinux.org/archlinux/devtools/-/blob/6ce666a1669235749c17d5c44d8a24dea4a135da/src/lib/api/gitlab.sh#L84
|
||||
var gitlabRepl = []regexReplace{
|
||||
{repl: `$1-$2`, match: regexp.MustCompile(`([a-zA-Z0-9]+)\+([a-zA-Z]+)`)},
|
||||
{repl: `plus`, match: regexp.MustCompile(`\+`)},
|
||||
{repl: `-`, match: regexp.MustCompile(`[^a-zA-Z0-9_\-.]`)},
|
||||
{repl: `-`, match: regexp.MustCompile(`[_\-]{2,}`)},
|
||||
{repl: `unix-tree`, match: regexp.MustCompile(`^tree$`)},
|
||||
}
|
||||
|
||||
// Return format for pkgbuild
|
||||
// https://gitlab.archlinux.org/archlinux/packaging/packages/0ad/-/raw/main/PKGBUILD
|
||||
func getPackagePKGBUILDURL(pkgName string) string {
|
||||
return fmt.Sprintf("%s/%s/-/raw/main/PKGBUILD", absPackageURL, convertPkgNameForURL(pkgName))
|
||||
}
|
||||
|
||||
// Return format for pkgbuild repo
|
||||
// https://gitlab.archlinux.org/archlinux/packaging/packages/0ad.git
|
||||
func getPackageRepoURL(pkgName string) string {
|
||||
return fmt.Sprintf("%s/%s.git", absPackageURL, convertPkgNameForURL(pkgName))
|
||||
}
|
||||
|
||||
// convert pkgName for Gitlab URL path (repo name)
|
||||
func convertPkgNameForURL(pkgName string) string {
|
||||
for _, regex := range gitlabRepl {
|
||||
pkgName = regex.match.ReplaceAllString(pkgName, regex.repl)
|
||||
}
|
||||
return pkgName
|
||||
}
|
||||
|
||||
// ABSPKGBUILD retrieves the PKGBUILD file to a dest directory.
|
||||
func ABSPKGBUILD(httpClient httpRequestDoer, dbName, pkgName string) ([]byte, error) {
|
||||
packageURL := getPackagePKGBUILDURL(pkgName)
|
||||
|
||||
resp, err := httpClient.Get(packageURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, ErrABSPackageNotFound
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
|
||||
pkgBuild, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return pkgBuild, nil
|
||||
}
|
||||
|
||||
// ABSPKGBUILDRepo retrieves the PKGBUILD repository to a dest directory.
|
||||
func ABSPKGBUILDRepo(ctx context.Context, cmdBuilder exe.GitCmdBuilder,
|
||||
dbName, pkgName, dest string, force bool,
|
||||
) (bool, error) {
|
||||
pkgURL := getPackageRepoURL(pkgName)
|
||||
|
||||
return downloadGitRepo(ctx, cmdBuilder, pkgURL,
|
||||
pkgName, dest, force, "--single-branch")
|
||||
}
|
331
pkg/download/abs_test.go
Normal file
331
pkg/download/abs_test.go
Normal file
@ -0,0 +1,331 @@
|
||||
//go:build !integration
|
||||
// +build !integration
|
||||
|
||||
package download
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
)
|
||||
|
||||
const gitExtrasPKGBUILD = `pkgname=git-extras
|
||||
pkgver=6.1.0
|
||||
pkgrel=1
|
||||
pkgdesc="GIT utilities -- repo summary, commit counting, repl, changelog population and more"
|
||||
arch=('any')
|
||||
url="https://github.com/tj/${pkgname}"
|
||||
license=('MIT')
|
||||
depends=('git')
|
||||
source=("${pkgname}-${pkgver}.tar.gz::${url}/archive/${pkgver}.tar.gz")
|
||||
sha256sums=('7be0b15ee803d76d2c2e8036f5d9db6677f2232bb8d2c4976691ff7ae026a22f')
|
||||
b2sums=('3450edecb3116e19ffcf918b118aee04f025c06d812e29e8701f35a3c466b13d2578d41c8e1ee93327743d0019bf98bb3f397189e19435f89e3a259ff1b82747')
|
||||
|
||||
package() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
|
||||
# avoid annoying interactive prompts if an alias is in your gitconfig
|
||||
export GIT_CONFIG=/dev/null
|
||||
make DESTDIR="${pkgdir}" PREFIX=/usr SYSCONFDIR=/etc install
|
||||
install -Dm644 LICENSE "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
|
||||
}`
|
||||
|
||||
func Test_getPackageURL(t *testing.T) {
|
||||
t.Parallel()
|
||||
type args struct {
|
||||
db string
|
||||
pkgName string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "extra package",
|
||||
args: args{
|
||||
db: "extra",
|
||||
pkgName: "kitty",
|
||||
},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/kitty/-/raw/main/PKGBUILD",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "core package",
|
||||
args: args{
|
||||
db: "core",
|
||||
pkgName: "linux",
|
||||
},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/linux/-/raw/main/PKGBUILD",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "personal repo package",
|
||||
args: args{
|
||||
db: "sweswe",
|
||||
pkgName: "zabix",
|
||||
},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/zabix/-/raw/main/PKGBUILD",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "special name +",
|
||||
args: args{
|
||||
db: "core",
|
||||
pkgName: "my+package",
|
||||
},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package/-/raw/main/PKGBUILD",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "special name %",
|
||||
args: args{
|
||||
db: "core",
|
||||
pkgName: "my%package",
|
||||
},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package/-/raw/main/PKGBUILD",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "special name _-",
|
||||
args: args{
|
||||
db: "core",
|
||||
pkgName: "my_-package",
|
||||
},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package/-/raw/main/PKGBUILD",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "special name ++",
|
||||
args: args{
|
||||
db: "core",
|
||||
pkgName: "my++package",
|
||||
},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/mypluspluspackage/-/raw/main/PKGBUILD",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "special name tree",
|
||||
args: args{
|
||||
db: "sweswe",
|
||||
pkgName: "tree",
|
||||
},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/unix-tree/-/raw/main/PKGBUILD",
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
tt := tt
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
got := getPackagePKGBUILDURL(tt.args.pkgName)
|
||||
assert.Equal(t, tt.want, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetABSPkgbuild(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
type args struct {
|
||||
dbName string
|
||||
body string
|
||||
status int
|
||||
pkgName string
|
||||
wantURL string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "found package",
|
||||
args: args{
|
||||
dbName: "core",
|
||||
body: gitExtrasPKGBUILD,
|
||||
status: 200,
|
||||
pkgName: "git-extras",
|
||||
wantURL: "https://gitlab.archlinux.org/archlinux/packaging/packages/git-extras/-/raw/main/PKGBUILD",
|
||||
},
|
||||
want: gitExtrasPKGBUILD,
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "not found package",
|
||||
args: args{
|
||||
dbName: "core",
|
||||
body: "",
|
||||
status: 404,
|
||||
pkgName: "git-git",
|
||||
wantURL: "https://gitlab.archlinux.org/archlinux/packaging/packages/git-git/-/raw/main/PKGBUILD",
|
||||
},
|
||||
want: "",
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
tt := tt
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
httpClient := &testClient{
|
||||
t: t,
|
||||
wantURL: tt.args.wantURL,
|
||||
body: tt.args.body,
|
||||
status: tt.args.status,
|
||||
}
|
||||
got, err := ABSPKGBUILD(httpClient, tt.args.dbName, tt.args.pkgName)
|
||||
if tt.wantErr {
|
||||
assert.Error(t, err)
|
||||
} else {
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
assert.Equal(t, tt.want, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_getPackageRepoURL(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
type args struct {
|
||||
pkgName string
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "extra package",
|
||||
args: args{pkgName: "zoxide"},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/zoxide.git",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "core package",
|
||||
args: args{pkgName: "linux"},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/linux.git",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "personal repo package",
|
||||
args: args{pkgName: "sweswe"},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/sweswe.git",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "special name +",
|
||||
args: args{pkgName: "my+package"},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package.git",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "special name %",
|
||||
args: args{pkgName: "my%package"},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package.git",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "special name _-",
|
||||
args: args{pkgName: "my_-package"},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package.git",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "special name ++",
|
||||
args: args{pkgName: "my++package"},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/mypluspluspackage.git",
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "special name tree",
|
||||
args: args{pkgName: "tree"},
|
||||
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/unix-tree.git",
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
tt := tt
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
got := getPackageRepoURL(tt.args.pkgName)
|
||||
assert.Equal(t, tt.want, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// GIVEN no previous existing folder
|
||||
// WHEN ABSPKGBUILDRepo is called
|
||||
// THEN a clone command should be formed
|
||||
func TestABSPKGBUILDRepo(t *testing.T) {
|
||||
t.Parallel()
|
||||
cmdRunner := &testRunner{}
|
||||
want := "/usr/local/bin/git --no-replace-objects -C /tmp/doesnt-exist clone --no-progress --single-branch https://gitlab.archlinux.org/archlinux/packaging/packages/linux.git linux"
|
||||
if os.Getuid() == 0 {
|
||||
ld := "systemd-run"
|
||||
if path, _ := exec.LookPath(ld); path != "" {
|
||||
ld = path
|
||||
}
|
||||
want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty --quiet -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C /tmp/doesnt-exist clone --no-progress --single-branch https://gitlab.archlinux.org/archlinux/packaging/packages/linux.git linux", ld)
|
||||
}
|
||||
|
||||
cmdBuilder := &testGitBuilder{
|
||||
index: 0,
|
||||
test: t,
|
||||
want: want,
|
||||
parentBuilder: &exe.CmdBuilder{
|
||||
Runner: cmdRunner,
|
||||
GitBin: "/usr/local/bin/git",
|
||||
GitFlags: []string{"--no-replace-objects"},
|
||||
},
|
||||
}
|
||||
newClone, err := ABSPKGBUILDRepo(context.Background(), cmdBuilder, "core", "linux", "/tmp/doesnt-exist", false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, true, newClone)
|
||||
}
|
||||
|
||||
// GIVEN a previous existing folder with permissions
|
||||
// WHEN ABSPKGBUILDRepo is called
|
||||
// THEN a pull command should be formed
|
||||
func TestABSPKGBUILDRepoExistsPerms(t *testing.T) {
|
||||
t.Parallel()
|
||||
dir := t.TempDir()
|
||||
|
||||
os.MkdirAll(filepath.Join(dir, "linux", ".git"), 0o777)
|
||||
|
||||
want := fmt.Sprintf("/usr/local/bin/git --no-replace-objects -C %s/linux pull --rebase --autostash", dir)
|
||||
if os.Getuid() == 0 {
|
||||
ld := "systemd-run"
|
||||
if path, _ := exec.LookPath(ld); path != "" {
|
||||
ld = path
|
||||
}
|
||||
want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty --quiet -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C %s/linux pull --rebase --autostash", ld, dir)
|
||||
}
|
||||
|
||||
cmdRunner := &testRunner{}
|
||||
cmdBuilder := &testGitBuilder{
|
||||
index: 0,
|
||||
test: t,
|
||||
want: want,
|
||||
parentBuilder: &exe.CmdBuilder{
|
||||
Runner: cmdRunner,
|
||||
GitBin: "/usr/local/bin/git",
|
||||
GitFlags: []string{"--no-replace-objects"},
|
||||
},
|
||||
}
|
||||
newClone, err := ABSPKGBUILDRepo(context.Background(), cmdBuilder, "core", "linux", dir, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, false, newClone)
|
||||
}
|
100
pkg/download/aur.go
Normal file
100
pkg/download/aur.go
Normal file
@ -0,0 +1,100 @@
|
||||
package download
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"sync"
|
||||
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/multierror"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
func AURPKGBUILD(httpClient httpRequestDoer, pkgName, aurURL string) ([]byte, error) {
|
||||
values := url.Values{}
|
||||
values.Set("h", pkgName)
|
||||
pkgURL := aurURL + "/cgit/aur.git/plain/PKGBUILD?" + values.Encode()
|
||||
|
||||
resp, err := httpClient.Get(pkgURL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, ErrAURPackageNotFound{pkgName: pkgName}
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
|
||||
pkgBuild, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return pkgBuild, nil
|
||||
}
|
||||
|
||||
// AURPkgbuildRepo retrieves the PKGBUILD repository to a dest directory.
|
||||
func AURPKGBUILDRepo(ctx context.Context, cmdBuilder exe.GitCmdBuilder, aurURL, pkgName, dest string, force bool) (bool, error) {
|
||||
pkgURL := fmt.Sprintf("%s/%s.git", aurURL, pkgName)
|
||||
|
||||
return downloadGitRepo(ctx, cmdBuilder, pkgURL, pkgName, dest, force)
|
||||
}
|
||||
|
||||
func AURPKGBUILDRepos(
|
||||
ctx context.Context,
|
||||
cmdBuilder exe.GitCmdBuilder, logger *text.Logger,
|
||||
targets []string, aurURL, dest string, force bool,
|
||||
) (map[string]bool, error) {
|
||||
cloned := make(map[string]bool, len(targets))
|
||||
|
||||
var (
|
||||
mux sync.Mutex
|
||||
errs multierror.MultiError
|
||||
wg sync.WaitGroup
|
||||
)
|
||||
|
||||
sem := make(chan uint8, MaxConcurrentFetch)
|
||||
|
||||
for _, target := range targets {
|
||||
sem <- 1
|
||||
wg.Add(1)
|
||||
|
||||
go func(target string) {
|
||||
defer func() {
|
||||
<-sem
|
||||
wg.Done()
|
||||
}()
|
||||
|
||||
newClone, err := AURPKGBUILDRepo(ctx, cmdBuilder, aurURL, target, dest, force)
|
||||
|
||||
mux.Lock()
|
||||
progress := len(cloned)
|
||||
if err != nil {
|
||||
errs.Add(err)
|
||||
mux.Unlock()
|
||||
logger.OperationInfoln(
|
||||
gotext.Get("(%d/%d) Failed to download PKGBUILD: %s",
|
||||
progress, len(targets), text.Cyan(target)))
|
||||
return
|
||||
}
|
||||
|
||||
cloned[target] = newClone
|
||||
progress = len(cloned)
|
||||
mux.Unlock()
|
||||
|
||||
logger.OperationInfoln(
|
||||
gotext.Get("(%d/%d) Downloaded PKGBUILD: %s",
|
||||
progress, len(targets), text.Cyan(target)))
|
||||
}(target)
|
||||
}
|
||||
|
||||
wg.Wait()
|
||||
|
||||
return cloned, errs.Return()
|
||||
}
|
165
pkg/download/aur_test.go
Normal file
165
pkg/download/aur_test.go
Normal file
@ -0,0 +1,165 @@
|
||||
//go:build !integration
|
||||
// +build !integration
|
||||
|
||||
package download
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
)
|
||||
|
||||
// TestGetAURPkgbuild exercises AURPKGBUILD against a stubbed HTTP client,
// covering a successful fetch and a 404 for a missing package.
func TestGetAURPkgbuild(t *testing.T) {
	t.Parallel()

	type args struct {
		body    string // canned response body served by the stub client
		status  int    // canned HTTP status code
		pkgName string
		wantURL string // exact cgit URL the client is expected to request
	}
	tests := []struct {
		name    string
		args    args
		want    string
		wantErr bool
	}{
		{
			name: "found package",
			args: args{
				body:    gitExtrasPKGBUILD,
				status:  200,
				pkgName: "git-extras",
				wantURL: "https://aur.archlinux.org/cgit/aur.git/plain/PKGBUILD?h=git-extras",
			},
			want:    gitExtrasPKGBUILD,
			wantErr: false,
		},
		{
			name: "not found package",
			args: args{
				body:    "",
				status:  404,
				pkgName: "git-git",
				wantURL: "https://aur.archlinux.org/cgit/aur.git/plain/PKGBUILD?h=git-git",
			},
			want:    "",
			wantErr: true,
		},
	}
	for _, tt := range tests {
		tt := tt // capture range variable for the parallel subtest
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			httpClient := &testClient{
				t:       t,
				wantURL: tt.args.wantURL,
				body:    tt.args.body,
				status:  tt.args.status,
			}
			got, err := AURPKGBUILD(httpClient, tt.args.pkgName, "https://aur.archlinux.org")
			if tt.wantErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
			}

			assert.Equal(t, tt.want, string(got))
		})
	}
}
|
||||
|
||||
// GIVEN no previous existing folder
// WHEN AURPKGBUILDRepo is called
// THEN a clone command should be formed
func TestAURPKGBUILDRepo(t *testing.T) {
	t.Parallel()
	want := "/usr/local/bin/git --no-replace-objects -C /tmp/doesnt-exist clone --no-progress https://aur.archlinux.org/yay-bin.git yay-bin"
	if os.Getuid() == 0 {
		// When running as root the builder wraps git in systemd-run,
		// so the expected command line changes accordingly.
		ld := "systemd-run"
		if path, _ := exec.LookPath(ld); path != "" {
			ld = path
		}

		want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty --quiet -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C /tmp/doesnt-exist clone --no-progress https://aur.archlinux.org/yay-bin.git yay-bin", ld)
	}

	cmdRunner := &testRunner{}
	// testGitBuilder asserts that the built git command matches want.
	cmdBuilder := &testGitBuilder{
		index: 0,
		test:  t,
		want:  want,
		parentBuilder: &exe.CmdBuilder{
			Runner:   cmdRunner,
			GitBin:   "/usr/local/bin/git",
			GitFlags: []string{"--no-replace-objects"},
		},
	}
	newCloned, err := AURPKGBUILDRepo(context.Background(), cmdBuilder, "https://aur.archlinux.org", "yay-bin", "/tmp/doesnt-exist", false)
	assert.NoError(t, err)
	assert.Equal(t, true, newCloned)
}
|
||||
|
||||
// GIVEN a previous existing folder with permissions
|
||||
// WHEN AURPKGBUILDRepo is called
|
||||
// THEN a pull command should be formed
|
||||
func TestAURPKGBUILDRepoExistsPerms(t *testing.T) {
|
||||
t.Parallel()
|
||||
dir := t.TempDir()
|
||||
|
||||
os.MkdirAll(filepath.Join(dir, "yay-bin", ".git"), 0o777)
|
||||
|
||||
want := fmt.Sprintf("/usr/local/bin/git --no-replace-objects -C %s/yay-bin pull --rebase --autostash", dir)
|
||||
if os.Getuid() == 0 {
|
||||
ld := "systemd-run"
|
||||
if path, _ := exec.LookPath(ld); path != "" {
|
||||
ld = path
|
||||
}
|
||||
want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty --quiet -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C %s/yay-bin pull --rebase --autostash", ld, dir)
|
||||
}
|
||||
|
||||
cmdRunner := &testRunner{}
|
||||
cmdBuilder := &testGitBuilder{
|
||||
index: 0,
|
||||
test: t,
|
||||
want: want,
|
||||
parentBuilder: &exe.CmdBuilder{
|
||||
Runner: cmdRunner,
|
||||
GitBin: "/usr/local/bin/git",
|
||||
GitFlags: []string{"--no-replace-objects"},
|
||||
},
|
||||
}
|
||||
cloned, err := AURPKGBUILDRepo(context.Background(), cmdBuilder, "https://aur.archlinux.org", "yay-bin", dir, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, false, cloned)
|
||||
}
|
||||
|
||||
func TestAURPKGBUILDRepos(t *testing.T) {
|
||||
t.Parallel()
|
||||
dir := t.TempDir()
|
||||
|
||||
os.MkdirAll(filepath.Join(dir, "yay-bin", ".git"), 0o777)
|
||||
|
||||
targets := []string{"yay", "yay-bin", "yay-git"}
|
||||
cmdRunner := &testRunner{}
|
||||
cmdBuilder := &testGitBuilder{
|
||||
index: 0,
|
||||
test: t,
|
||||
want: "",
|
||||
parentBuilder: &exe.CmdBuilder{
|
||||
Runner: cmdRunner,
|
||||
GitBin: "/usr/local/bin/git",
|
||||
GitFlags: []string{},
|
||||
},
|
||||
}
|
||||
cloned, err := AURPKGBUILDRepos(context.Background(), cmdBuilder, newTestLogger(), targets, "https://aur.archlinux.org", dir, false)
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, map[string]bool{"yay": true, "yay-bin": false, "yay-git": true}, cloned)
|
||||
}
|
31
pkg/download/errors.go
Normal file
31
pkg/download/errors.go
Normal file
@ -0,0 +1,31 @@
|
||||
package download
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/leonelquinteros/gotext"
|
||||
)
|
||||
|
||||
// ErrAURPackageNotFound means that package was not found in AUR.
type ErrAURPackageNotFound struct {
	pkgName string // name of the package that was looked up
}
|
||||
|
||||
func (e ErrAURPackageNotFound) Error() string {
|
||||
return fmt.Sprintln(gotext.Get("package not found in AUR"), ":", e.pkgName)
|
||||
}
|
||||
|
||||
// ErrGetPKGBUILDRepo is returned when cloning or updating a PKGBUILD
// repository fails. It carries the underlying error along with the
// stderr output captured from the git command.
type ErrGetPKGBUILDRepo struct {
	inner   error  // underlying cause
	pkgName string // package whose repository was being fetched
	errOut  string // stderr captured from the failing git command
}
|
||||
|
||||
func (e ErrGetPKGBUILDRepo) Error() string {
|
||||
return fmt.Sprintln(gotext.Get("error fetching %s: %s", e.pkgName, e.errOut),
|
||||
"\n\t context:", e.inner.Error())
|
||||
}
|
||||
|
||||
func (e *ErrGetPKGBUILDRepo) Unwrap() error {
|
||||
return e.inner
|
||||
}
|
251
pkg/download/unified.go
Normal file
251
pkg/download/unified.go
Normal file
@ -0,0 +1,251 @@
|
||||
package download
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/aur"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
"github.com/Jguer/yay/v12/pkg/multierror"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// httpRequestDoer abstracts the subset of *http.Client used by this
// package, letting tests substitute a stub that serves canned responses.
type httpRequestDoer interface {
	Get(string) (*http.Response, error)
}
|
||||
|
||||
// DBSearcher looks packages up in the sync databases: SyncPackage searches
// every repository, SyncPackageFromDB restricts the search to one named
// repository. Both return nil when the package is not found.
type DBSearcher interface {
	SyncPackage(string) db.IPackage
	SyncPackageFromDB(string, string) db.IPackage
}
|
||||
|
||||
// downloadGitRepo clones pkgURL into dest/pkgName, or runs `git pull` when
// a checkout already exists there. With force set, an existing non-git
// directory at the destination is removed before cloning. It returns true
// for a fresh clone and false for an update of an existing checkout.
func downloadGitRepo(ctx context.Context, cmdBuilder exe.GitCmdBuilder,
	pkgURL, pkgName, dest string, force bool, gitArgs ...string,
) (bool, error) {
	finalDir := filepath.Join(dest, pkgName)
	newClone := true

	switch _, err := os.Stat(filepath.Join(finalDir, ".git")); {
	case os.IsNotExist(err) || (err == nil && force):
		// No usable checkout (or the caller forces a re-clone).
		// Remove any leftover directory first when forcing.
		if _, errD := os.Stat(finalDir); force && errD == nil {
			if errR := os.RemoveAll(finalDir); errR != nil {
				return false, ErrGetPKGBUILDRepo{inner: errR, pkgName: pkgName, errOut: ""}
			}
		}

		gitArgs = append(gitArgs, pkgURL, pkgName)

		cloneArgs := make([]string, 0, len(gitArgs)+4)
		cloneArgs = append(cloneArgs, "clone", "--no-progress")
		cloneArgs = append(cloneArgs, gitArgs...)
		cmd := cmdBuilder.BuildGitCmd(ctx, dest, cloneArgs...)

		_, stderr, errCapture := cmdBuilder.Capture(cmd)
		if errCapture != nil {
			return false, ErrGetPKGBUILDRepo{inner: errCapture, pkgName: pkgName, errOut: stderr}
		}
	case err != nil:
		// Stat failed for a reason other than non-existence (e.g. permissions).
		return false, ErrGetPKGBUILDRepo{
			inner:   err,
			pkgName: pkgName,
			errOut:  gotext.Get("error reading %s", filepath.Join(dest, pkgName, ".git")),
		}
	default:
		// Existing checkout: update it in place.
		cmd := cmdBuilder.BuildGitCmd(ctx, filepath.Join(dest, pkgName), "pull", "--rebase", "--autostash")

		_, stderr, errCmd := cmdBuilder.Capture(cmd)
		if errCmd != nil {
			return false, ErrGetPKGBUILDRepo{inner: errCmd, pkgName: pkgName, errOut: stderr}
		}

		newClone = false
	}

	return newClone, nil
}
|
||||
|
||||
func getURLName(pkg db.IPackage) string {
|
||||
name := pkg.Base()
|
||||
if name == "" {
|
||||
name = pkg.Name()
|
||||
}
|
||||
|
||||
return name
|
||||
}
|
||||
|
||||
// PKGBUILDs concurrently fetches the raw PKGBUILD file for every target,
// first resolving each target to either an AUR or an ABS (repo) source.
// Unresolvable targets are skipped. Successful fetches are keyed by the
// original target string; fetch failures are aggregated into the returned
// multierror.
func PKGBUILDs(dbExecutor DBSearcher, aurClient aur.QueryClient, httpClient *http.Client,
	logger *text.Logger, targets []string, aurURL string, mode parser.TargetMode,
) (map[string][]byte, error) {
	pkgbuilds := make(map[string][]byte, len(targets))

	var (
		mux  sync.Mutex // guards pkgbuilds
		errs multierror.MultiError
		wg   sync.WaitGroup
	)

	// Bounded-concurrency semaphore: at most MaxConcurrentFetch downloads.
	sem := make(chan uint8, MaxConcurrentFetch)

	for _, target := range targets {
		// Probably replaceable by something in query.
		dbName, name, isAUR, toSkip := getPackageUsableName(dbExecutor, aurClient, logger, target, mode)
		if toSkip {
			continue
		}

		sem <- 1

		wg.Add(1)

		go func(target, dbName, pkgName string, aur bool) {
			var (
				err      error
				pkgbuild []byte
			)

			if aur {
				pkgbuild, err = AURPKGBUILD(httpClient, pkgName, aurURL)
			} else {
				pkgbuild, err = ABSPKGBUILD(httpClient, dbName, pkgName)
			}

			if err == nil {
				mux.Lock()
				pkgbuilds[target] = pkgbuild
				mux.Unlock()
			} else {
				errs.Add(err)
			}

			<-sem
			wg.Done()
		}(target, dbName, name, isAUR)
	}

	wg.Wait()

	return pkgbuilds, errs.Return()
}
|
||||
|
||||
// PKGBUILDRepos concurrently clones or updates the PKGBUILD git repository
// for every target, first resolving each target to an AUR or ABS source.
// The returned map is keyed by the original target string and records
// whether the repository was freshly cloned (true) or updated (false);
// failures are aggregated into the returned multierror.
func PKGBUILDRepos(ctx context.Context, dbExecutor DBSearcher, aurClient aur.QueryClient,
	cmdBuilder exe.GitCmdBuilder, logger *text.Logger,
	targets []string, mode parser.TargetMode, aurURL, dest string, force bool,
) (map[string]bool, error) {
	cloned := make(map[string]bool, len(targets))

	var (
		mux  sync.Mutex // guards cloned
		errs multierror.MultiError
		wg   sync.WaitGroup
	)

	// Bounded-concurrency semaphore: at most MaxConcurrentFetch git operations.
	sem := make(chan uint8, MaxConcurrentFetch)

	for _, target := range targets {
		// Probably replaceable by something in query.
		dbName, name, isAUR, toSkip := getPackageUsableName(dbExecutor, aurClient, logger, target, mode)
		if toSkip {
			continue
		}

		sem <- 1

		wg.Add(1)

		go func(target, dbName, pkgName string, aur bool) {
			var (
				err      error
				newClone bool
			)

			if aur {
				newClone, err = AURPKGBUILDRepo(ctx, cmdBuilder, aurURL, pkgName, dest, force)
			} else {
				newClone, err = ABSPKGBUILDRepo(ctx, cmdBuilder, dbName, pkgName, dest, force)
			}

			// On failure progress stays 0; the log line below still prints.
			progress := 0

			if err != nil {
				errs.Add(err)
			} else {
				mux.Lock()
				cloned[target] = newClone
				progress = len(cloned)
				mux.Unlock()
			}

			if aur {
				logger.OperationInfoln(
					gotext.Get("(%d/%d) Downloaded PKGBUILD: %s",
						progress, len(targets), text.Cyan(pkgName)))
			} else {
				logger.OperationInfoln(
					gotext.Get("(%d/%d) Downloaded PKGBUILD from ABS: %s",
						progress, len(targets), text.Cyan(pkgName)))
			}

			<-sem

			wg.Done()
		}(target, dbName, name, isAUR)
	}

	wg.Wait()

	return cloned, errs.Return()
}
|
||||
|
||||
// TODO: replace with dep.ResolveTargets.
// getPackageUsableName resolves a "db/name" target into the database and
// package name to download from. isAUR reports that the package should be
// fetched from the AUR rather than ABS; toSkip tells the caller to ignore
// this target entirely (not found where it was expected, or out of scope
// for the current mode).
func getPackageUsableName(dbExecutor DBSearcher, aurClient aur.QueryClient,
	logger *text.Logger, target string, mode parser.TargetMode,
) (dbname, pkgname string, isAUR, toSkip bool) {
	dbName, name := text.SplitDBFromName(target)
	if dbName != "aur" && mode.AtLeastRepo() {
		var pkg db.IPackage
		if dbName != "" {
			pkg = dbExecutor.SyncPackageFromDB(name, dbName)
		} else {
			pkg = dbExecutor.SyncPackage(name)
		}

		if pkg != nil {
			// Found in a sync db: use pkgbase (fallback to name) and the
			// actual repository it came from.
			name = getURLName(pkg)
			dbName = pkg.DB().Name()
			return dbName, name, false, false
		}

		// If the package is not found in the database and it was expected to be
		if pkg == nil && dbName != "" {
			return dbName, name, true, true
		}
	}

	if mode == parser.ModeRepo {
		// Repo-only mode: anything not resolved above is skipped.
		return dbName, name, true, true
	}

	// NOTE(review): uses context.Background() instead of a caller-supplied
	// context, so cancellation does not propagate to this AUR query.
	pkgs, err := aurClient.Get(context.Background(), &aur.Query{
		By:       aur.Name,
		Contains: false,
		Needles:  []string{name},
	})
	if err != nil {
		logger.Warnln(err)
		return dbName, name, true, true
	}

	if len(pkgs) == 0 {
		return dbName, name, true, true
	}

	return "aur", name, true, false
}
|
106
pkg/download/unified_integration_test.go
Normal file
106
pkg/download/unified_integration_test.go
Normal file
@ -0,0 +1,106 @@
|
||||
//go:build integration
|
||||
// +build integration
|
||||
|
||||
package download
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/Jguer/aur"
|
||||
|
||||
mockaur "github.com/Jguer/yay/v12/pkg/dep/mock"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// TestIntegrationPKGBUILDReposDefinedDBClone clones real repositories: one
// repo package addressed with an explicit db ("core/linux") and two AUR
// packages. All three are expected to be reported as fresh clones.
func TestIntegrationPKGBUILDReposDefinedDBClone(t *testing.T) {
	dir := t.TempDir()

	mockClient := &mockaur.MockAUR{
		GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
			return []aur.Pkg{{}}, nil // fakes a package found for all
		},
	}
	targets := []string{"core/linux", "yay-bin", "yay-git"}

	testLogger := text.NewLogger(os.Stdout, os.Stderr, strings.NewReader(""), true, "test")
	cmdRunner := &exe.OSRunner{Log: testLogger}
	cmdBuilder := &exe.CmdBuilder{
		Runner:   cmdRunner,
		GitBin:   "git",
		GitFlags: []string{},
		Log:      testLogger,
	}
	// Stub db resolves "linux" to the "core" repository.
	searcher := &testDBSearcher{
		absPackagesDB: map[string]string{"linux": "core"},
	}
	cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
		cmdBuilder, testLogger.Child("test"),
		targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)

	assert.NoError(t, err)
	assert.EqualValues(t, map[string]bool{"core/linux": true, "yay-bin": true, "yay-git": true}, cloned)
}
|
||||
|
||||
// TestIntegrationPKGBUILDReposNotExist fetches two AUR targets plus a
// repo target ("core/yay") whose download is expected to fail: the AUR
// targets end up cloned while the overall call reports an error and the
// failed target is absent from the result map.
func TestIntegrationPKGBUILDReposNotExist(t *testing.T) {
	dir := t.TempDir()

	mockClient := &mockaur.MockAUR{
		GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
			return []aur.Pkg{{}}, nil // fakes a package found for all
		},
	}
	targets := []string{"core/yay", "yay-bin", "yay-git"}
	testLogger := text.NewLogger(os.Stdout, os.Stderr, strings.NewReader(""), true, "test")
	cmdRunner := &exe.OSRunner{Log: testLogger}
	cmdBuilder := &exe.CmdBuilder{
		Runner:   cmdRunner,
		GitBin:   "git",
		GitFlags: []string{},
		Log:      testLogger,
	}

	searcher := &testDBSearcher{
		absPackagesDB: map[string]string{"yay": "core"},
	}
	cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
		cmdBuilder, testLogger.Child("test"),
		targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)

	assert.Error(t, err)
	assert.EqualValues(t, map[string]bool{"yay-bin": true, "yay-git": true}, cloned)
}
|
||||
|
||||
// GIVEN 2 aur packages and 1 in repo
// WHEN defining as specified targets
// THEN all aur be found and cloned
func TestIntegrationPKGBUILDFull(t *testing.T) {
	mockClient := &mockaur.MockAUR{
		GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
			return []aur.Pkg{{}}, nil
		},
	}

	testLogger := text.NewLogger(os.Stdout, os.Stderr, strings.NewReader(""), true, "test")
	targets := []string{"core/linux", "aur/yay-bin", "yay-git"}
	// Stub db resolves "linux" to the "core" repository.
	searcher := &testDBSearcher{
		absPackagesDB: map[string]string{"linux": "core"},
	}

	fetched, err := PKGBUILDs(searcher, mockClient, &http.Client{}, testLogger.Child("test"),
		targets, "https://aur.archlinux.org", parser.ModeAny)

	assert.NoError(t, err)

	// Every target must be present with a non-empty PKGBUILD body.
	for _, target := range targets {
		assert.Contains(t, fetched, target)
		assert.NotEmpty(t, fetched[target])
	}
}
|
281
pkg/download/unified_test.go
Normal file
281
pkg/download/unified_test.go
Normal file
@ -0,0 +1,281 @@
|
||||
//go:build !integration
|
||||
// +build !integration
|
||||
|
||||
package download
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"gopkg.in/h2non/gock.v1"
|
||||
|
||||
"github.com/Jguer/aur"
|
||||
|
||||
mockaur "github.com/Jguer/yay/v12/pkg/dep/mock"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// newTestLogger returns a logger that discards all output and reads from
// an empty stdin, keeping unit tests quiet.
func newTestLogger() *text.Logger {
	return text.NewLogger(io.Discard, io.Discard, strings.NewReader(""), true, "test")
}
|
||||
|
||||
// GIVEN 2 aur packages and 1 in repo
|
||||
// GIVEN package in repo is already present
|
||||
// WHEN defining package db as a target
|
||||
// THEN all should be found and cloned, except the repo one
|
||||
func TestPKGBUILDReposDefinedDBPull(t *testing.T) {
|
||||
t.Parallel()
|
||||
dir := t.TempDir()
|
||||
|
||||
mockClient := &mockaur.MockAUR{
|
||||
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
|
||||
return []aur.Pkg{{}}, nil // fakes a package found for all
|
||||
},
|
||||
}
|
||||
|
||||
testLogger := text.NewLogger(os.Stdout, os.Stderr, strings.NewReader(""), true, "test")
|
||||
|
||||
os.MkdirAll(filepath.Join(dir, "yay", ".git"), 0o777)
|
||||
|
||||
targets := []string{"core/yay", "yay-bin", "yay-git"}
|
||||
cmdRunner := &testRunner{}
|
||||
cmdBuilder := &testGitBuilder{
|
||||
index: 0,
|
||||
test: t,
|
||||
parentBuilder: &exe.CmdBuilder{
|
||||
Runner: cmdRunner,
|
||||
GitBin: "/usr/local/bin/git",
|
||||
GitFlags: []string{},
|
||||
Log: testLogger,
|
||||
},
|
||||
}
|
||||
searcher := &testDBSearcher{
|
||||
absPackagesDB: map[string]string{"yay": "core"},
|
||||
}
|
||||
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
|
||||
cmdBuilder, newTestLogger(),
|
||||
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, map[string]bool{"core/yay": false, "yay-bin": true, "yay-git": true}, cloned)
|
||||
}
|
||||
|
||||
// GIVEN 2 aur packages and 1 in repo
|
||||
// WHEN defining package db as a target
|
||||
// THEN all should be found and cloned
|
||||
func TestPKGBUILDReposDefinedDBClone(t *testing.T) {
|
||||
t.Parallel()
|
||||
dir := t.TempDir()
|
||||
|
||||
mockClient := &mockaur.MockAUR{
|
||||
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
|
||||
return []aur.Pkg{{}}, nil // fakes a package found for all
|
||||
},
|
||||
}
|
||||
targets := []string{"core/yay", "yay-bin", "yay-git"}
|
||||
cmdRunner := &testRunner{}
|
||||
cmdBuilder := &testGitBuilder{
|
||||
index: 0,
|
||||
test: t,
|
||||
parentBuilder: &exe.CmdBuilder{
|
||||
Runner: cmdRunner,
|
||||
GitBin: "/usr/local/bin/git",
|
||||
GitFlags: []string{},
|
||||
},
|
||||
}
|
||||
searcher := &testDBSearcher{
|
||||
absPackagesDB: map[string]string{"yay": "core"},
|
||||
}
|
||||
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
|
||||
cmdBuilder, newTestLogger(),
|
||||
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, map[string]bool{"core/yay": true, "yay-bin": true, "yay-git": true}, cloned)
|
||||
}
|
||||
|
||||
// GIVEN 2 aur packages and 1 in repo
|
||||
// WHEN defining as non specified targets
|
||||
// THEN all should be found and cloned
|
||||
func TestPKGBUILDReposClone(t *testing.T) {
|
||||
t.Parallel()
|
||||
dir := t.TempDir()
|
||||
|
||||
mockClient := &mockaur.MockAUR{
|
||||
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
|
||||
return []aur.Pkg{{}}, nil // fakes a package found for all
|
||||
},
|
||||
}
|
||||
targets := []string{"yay", "yay-bin", "yay-git"}
|
||||
cmdRunner := &testRunner{}
|
||||
cmdBuilder := &testGitBuilder{
|
||||
index: 0,
|
||||
test: t,
|
||||
parentBuilder: &exe.CmdBuilder{
|
||||
Runner: cmdRunner,
|
||||
GitBin: "/usr/local/bin/git",
|
||||
GitFlags: []string{},
|
||||
},
|
||||
}
|
||||
searcher := &testDBSearcher{
|
||||
absPackagesDB: map[string]string{"yay": "core"},
|
||||
}
|
||||
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
|
||||
cmdBuilder, newTestLogger(),
|
||||
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, map[string]bool{"yay": true, "yay-bin": true, "yay-git": true}, cloned)
|
||||
}
|
||||
|
||||
// GIVEN 2 aur packages and 1 in repo but wrong db
|
||||
// WHEN defining as non specified targets
|
||||
// THEN all aur be found and cloned
|
||||
func TestPKGBUILDReposNotFound(t *testing.T) {
|
||||
t.Parallel()
|
||||
dir := t.TempDir()
|
||||
|
||||
mockClient := &mockaur.MockAUR{
|
||||
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
|
||||
return []aur.Pkg{{}}, nil // fakes a package found for all
|
||||
},
|
||||
}
|
||||
targets := []string{"extra/yay", "yay-bin", "yay-git"}
|
||||
cmdRunner := &testRunner{}
|
||||
cmdBuilder := &testGitBuilder{
|
||||
index: 0,
|
||||
test: t,
|
||||
parentBuilder: &exe.CmdBuilder{
|
||||
Runner: cmdRunner,
|
||||
GitBin: "/usr/local/bin/git",
|
||||
GitFlags: []string{},
|
||||
},
|
||||
}
|
||||
searcher := &testDBSearcher{
|
||||
absPackagesDB: map[string]string{"yay": "core"},
|
||||
}
|
||||
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
|
||||
cmdBuilder, newTestLogger(),
|
||||
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
|
||||
|
||||
assert.NoError(t, err)
|
||||
assert.EqualValues(t, map[string]bool{"yay-bin": true, "yay-git": true}, cloned)
|
||||
}
|
||||
|
||||
// GIVEN 2 aur packages and 1 in repo
// WHEN defining as non specified targets in repo mode
// THEN only repo should be cloned
func TestPKGBUILDReposRepoMode(t *testing.T) {
	t.Parallel()
	dir := t.TempDir()

	mockClient := &mockaur.MockAUR{
		GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
			return []aur.Pkg{}, nil // no AUR results; irrelevant in repo mode anyway
		},
	}
	targets := []string{"yay", "yay-bin", "yay-git"}
	cmdRunner := &testRunner{}
	cmdBuilder := &testGitBuilder{
		index: 0,
		test:  t,
		parentBuilder: &exe.CmdBuilder{
			Runner:   cmdRunner,
			GitBin:   "/usr/local/bin/git",
			GitFlags: []string{},
		},
	}
	// Only "yay" resolves in the stub sync db; the others are AUR-only
	// and must be skipped in ModeRepo.
	searcher := &testDBSearcher{
		absPackagesDB: map[string]string{"yay": "core"},
	}
	cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
		cmdBuilder, newTestLogger(),
		targets, parser.ModeRepo, "https://aur.archlinux.org", dir, false)

	assert.NoError(t, err)
	assert.EqualValues(t, map[string]bool{"yay": true}, cloned)
}
|
||||
|
||||
// GIVEN 2 aur packages and 1 in repo
// WHEN defining as specified targets
// THEN all aur be found and cloned
func TestPKGBUILDFull(t *testing.T) {
	t.Parallel()

	mockClient := &mockaur.MockAUR{
		GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
			return []aur.Pkg{{}}, nil
		},
	}
	// Stub the AUR cgit endpoints for the two AUR packages...
	gock.New("https://aur.archlinux.org").
		Get("/cgit/aur.git/plain/PKGBUILD").MatchParam("h", "yay-git").
		Reply(200).
		BodyString("example_yay-git")
	gock.New("https://aur.archlinux.org").
		Get("/cgit/aur.git/plain/PKGBUILD").MatchParam("h", "yay-bin").
		Reply(200).
		BodyString("example_yay-bin")

	// ...and the gitlab raw endpoint for the repo package.
	gock.New("https://gitlab.archlinux.org/").
		Get("archlinux/packaging/packages/yay/-/raw/main/PKGBUILD").
		Reply(200).
		BodyString("example_yay")

	defer gock.Off()
	targets := []string{"core/yay", "aur/yay-bin", "yay-git"}
	searcher := &testDBSearcher{
		absPackagesDB: map[string]string{"yay": "core"},
	}

	fetched, err := PKGBUILDs(searcher, mockClient, &http.Client{}, newTestLogger(),
		targets, "https://aur.archlinux.org", parser.ModeAny)

	assert.NoError(t, err)
	assert.EqualValues(t, map[string][]byte{
		"core/yay":    []byte("example_yay"),
		"aur/yay-bin": []byte("example_yay-bin"),
		"yay-git":     []byte("example_yay-git"),
	}, fetched)
}
|
||||
|
||||
// GIVEN 2 aur packages and 1 in repo
// WHEN aur packages are not found
// only repo should be cloned
func TestPKGBUILDReposMissingAUR(t *testing.T) {
	t.Parallel()
	dir := t.TempDir()

	mockClient := &mockaur.MockAUR{
		GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
			return []aur.Pkg{}, nil // simulates no AUR match for any query
		},
	}
	targets := []string{"core/yay", "aur/yay-bin", "aur/yay-git"}
	cmdRunner := &testRunner{}
	cmdBuilder := &testGitBuilder{
		index: 0,
		test:  t,
		parentBuilder: &exe.CmdBuilder{
			Runner:   cmdRunner,
			GitBin:   "/usr/local/bin/git",
			GitFlags: []string{},
		},
	}
	searcher := &testDBSearcher{
		absPackagesDB: map[string]string{"yay": "core"},
	}
	cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
		cmdBuilder, newTestLogger(),
		targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)

	assert.NoError(t, err)
	assert.EqualValues(t, map[string]bool{"core/yay": true}, cloned)
}
|
120
pkg/download/utils_test.go
Normal file
120
pkg/download/utils_test.go
Normal file
@ -0,0 +1,120 @@
|
||||
package download
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"net/http"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
|
||||
"github.com/Jguer/go-alpm/v2"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
)
|
||||
|
||||
// testRunner is a no-op exe.Runner: it records nothing and reports every
// command as having succeeded with empty output.
type testRunner struct{}

// Capture pretends the command ran successfully with no output.
func (t *testRunner) Capture(cmd *exec.Cmd) (stdout string, stderr string, err error) {
	return "", "", nil
}

// Show pretends the command ran successfully.
func (t *testRunner) Show(cmd *exec.Cmd) error {
	return nil
}
|
||||
|
||||
type testGitBuilder struct {
|
||||
index int
|
||||
test *testing.T
|
||||
want string
|
||||
parentBuilder *exe.CmdBuilder
|
||||
}
|
||||
|
||||
func (t *testGitBuilder) BuildGitCmd(ctx context.Context, dir string, extraArgs ...string) *exec.Cmd {
|
||||
cmd := t.parentBuilder.BuildGitCmd(ctx, dir, extraArgs...)
|
||||
|
||||
if t.want != "" {
|
||||
assert.Equal(t.test, t.want, cmd.String())
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (c *testGitBuilder) Show(cmd *exec.Cmd) error {
|
||||
return c.parentBuilder.Show(cmd)
|
||||
}
|
||||
|
||||
func (c *testGitBuilder) Capture(cmd *exec.Cmd) (stdout, stderr string, err error) {
|
||||
return c.parentBuilder.Capture(cmd)
|
||||
}
|
||||
|
||||
type (
	// testDB is a minimal alpm.IDB stub that only reports a name.
	testDB struct {
		alpm.IDB
		name string
	}
	// testPackage is a minimal db.IPackage stub carrying name, pkgbase
	// and owning database.
	testPackage struct {
		db.IPackage
		name string
		base string
		db   *testDB
	}
	// testDBSearcher resolves package names from a static name -> repo map.
	testDBSearcher struct {
		absPackagesDB map[string]string
	}

	// testClient is an httpRequestDoer stub that asserts the requested
	// URL and serves a canned status/body.
	testClient struct {
		t       *testing.T
		wantURL string
		body    string
		status  int
	}
)
|
||||
|
||||
// Name returns the stubbed database name.
func (d *testDB) Name() string {
	return d.name
}

// Name returns the stubbed package name.
func (p *testPackage) Name() string {
	return p.name
}

// Base returns the stubbed pkgbase.
func (p *testPackage) Base() string {
	return p.base
}

// DB returns the stubbed owning database.
func (p *testPackage) DB() alpm.IDB {
	return p.db
}
|
||||
|
||||
func (d *testDBSearcher) SyncPackage(name string) db.IPackage {
|
||||
if v, ok := d.absPackagesDB[name]; ok {
|
||||
return &testPackage{
|
||||
name: name,
|
||||
base: name,
|
||||
db: &testDB{name: v},
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *testDBSearcher) SyncPackageFromDB(name string, db string) db.IPackage {
|
||||
if v, ok := d.absPackagesDB[name]; ok && v == db {
|
||||
return &testPackage{
|
||||
name: name,
|
||||
base: name,
|
||||
db: &testDB{name: v},
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Get asserts the requested URL matches the expectation and serves the
// canned status and body; it never returns a transport error.
func (t *testClient) Get(url string) (*http.Response, error) {
	assert.Equal(t.t, t.wantURL, url)
	return &http.Response{StatusCode: t.status, Body: io.NopCloser(strings.NewReader(t.body))}, nil
}
|
112
pkg/intrange/intrange.go
Normal file
112
pkg/intrange/intrange.go
Normal file
@ -0,0 +1,112 @@
|
||||
package intrange
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
mapset "github.com/deckarep/golang-set/v2"
|
||||
)
|
||||
|
||||
// IntRange stores a max and min amount for range.
type IntRange struct {
	min int
	max int
}

// IntRanges is a slice of IntRange.
type IntRanges []IntRange

// makeIntRange builds an IntRange spanning [minVal, maxVal].
func makeIntRange(minVal, maxVal int) IntRange {
	return IntRange{min: minVal, max: maxVal}
}

// Get reports whether n lies within the closed interval [min, max].
func (r IntRange) Get(n int) bool {
	return r.min <= n && n <= r.max
}

// Get reports whether n lies within any of the closed intervals in rs.
func (rs IntRanges) Get(n int) bool {
	for i := range rs {
		if rs[i].Get(n) {
			return true
		}
	}

	return false
}
|
||||
|
||||
// ParseNumberMenu parses input for number menus split by spaces or commas
// supports individual selection: 1 2 3 4
// supports range selections: 1-4 10-20
// supports negation: ^1 ^1-4
//
// include and exclude holds numbers that should be added and should not be added
// respectively. other holds anything that can't be parsed as an int. This is
// intended to allow words inside of number menus. e.g. 'all' 'none' 'abort'
// of course the implementation is up to the caller, this function merely parses
// the input and organizes it.
func ParseNumberMenu(input string) (include, exclude IntRanges,
	otherInclude, otherExclude mapset.Set[string],
) {
	include = make(IntRanges, 0)
	exclude = make(IntRanges, 0)
	otherInclude = mapset.NewThreadUnsafeSet[string]()
	otherExclude = mapset.NewThreadUnsafeSet[string]()

	// Tokens are separated by any whitespace or commas.
	words := strings.FieldsFunc(input, func(c rune) bool {
		return unicode.IsSpace(c) || c == ','
	})

	for _, word := range words {
		var (
			num1 int
			num2 int
			err  error
		)

		invert := false
		other := otherInclude

		// A leading '^' negates the selection; unparsable negated words
		// are routed to the exclusion "other" set accordingly.
		if word[0] == '^' {
			invert = true
			other = otherExclude
			word = word[1:]
		}

		// Split at most once on '-' so "1-4" yields two halves.
		ranges := strings.SplitN(word, "-", 2)

		num1, err = strconv.Atoi(ranges[0])
		if err != nil {
			// Not a number: keep the lowercased word (e.g. "all", "none").
			other.Add(strings.ToLower(word))
			continue
		}

		if len(ranges) == 2 {
			num2, err = strconv.Atoi(ranges[1])
			if err != nil {
				other.Add(strings.ToLower(word))
				continue
			}
		} else {
			// Single number: degenerate range n-n.
			num2 = num1
		}

		// Accept ranges written in either order, e.g. "10-5".
		mi := min(num1, num2)
		ma := max(num1, num2)

		if !invert {
			include = append(include, makeIntRange(mi, ma))
		} else {
			exclude = append(exclude, makeIntRange(mi, ma))
		}
	}

	return include, exclude, otherInclude, otherExclude
}
|
194
pkg/intrange/intrange_test.go
Normal file
194
pkg/intrange/intrange_test.go
Normal file
@ -0,0 +1,194 @@
|
||||
//go:build !integration
|
||||
// +build !integration
|
||||
|
||||
package intrange
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
mapset "github.com/deckarep/golang-set/v2"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// TestParseNumberMenu drives ParseNumberMenu through a table of raw menu
// inputs and compares all four returned collections against expectations;
// inputs[i] corresponds to expected[i].
func TestParseNumberMenu(t *testing.T) {
	t.Parallel()
	// result bundles the four values ParseNumberMenu returns.
	type result struct {
		Include      IntRanges
		Exclude      IntRanges
		OtherInclude mapset.Set[string]
		OtherExclude mapset.Set[string]
	}

	inputs := []string{
		"1 2 3 4 5",
		"1-10 5-15",
		"10-5 90-85", // reversed ranges are normalized
		"1 ^2 ^10-5 99 ^40-38 ^123 60-62",
		"abort all none",
		"a-b ^a-b ^abort",
		"-9223372036854775809-9223372036854775809", // does not parse as int; kept as a word
		"1\t2 3 4\t\t \t 5",                        // tabs also separate
		"1 2,3, 4, 5,6 ,7 ,8",                      // commas and spaces both separate
		"",
		" \t ",
		"A B C D E", // words are lowercased
	}

	expected := []result{
		{IntRanges{
			makeIntRange(1, 1),
			makeIntRange(2, 2),
			makeIntRange(3, 3),
			makeIntRange(4, 4),
			makeIntRange(5, 5),
		}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()},
		{IntRanges{
			makeIntRange(1, 10),
			makeIntRange(5, 15),
		}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()},
		{IntRanges{
			makeIntRange(5, 10),
			makeIntRange(85, 90),
		}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()},
		{
			IntRanges{
				makeIntRange(1, 1),
				makeIntRange(99, 99),
				makeIntRange(60, 62),
			},
			IntRanges{
				makeIntRange(2, 2),
				makeIntRange(5, 10),
				makeIntRange(38, 40),
				makeIntRange(123, 123),
			},
			mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string](),
		},
		{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet("abort", "all", "none"), mapset.NewThreadUnsafeSet[string]()},
		{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet("a-b"), mapset.NewThreadUnsafeSet("abort", "a-b")},
		{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet("-9223372036854775809-9223372036854775809"), mapset.NewThreadUnsafeSet[string]()},
		{IntRanges{
			makeIntRange(1, 1),
			makeIntRange(2, 2),
			makeIntRange(3, 3),
			makeIntRange(4, 4),
			makeIntRange(5, 5),
		}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()},
		{IntRanges{
			makeIntRange(1, 1),
			makeIntRange(2, 2),
			makeIntRange(3, 3),
			makeIntRange(4, 4),
			makeIntRange(5, 5),
			makeIntRange(6, 6),
			makeIntRange(7, 7),
			makeIntRange(8, 8),
		}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()},
		{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()},
		{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()},
		{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet("a", "b", "c", "d", "e"), mapset.NewThreadUnsafeSet[string]()},
	}

	for n, in := range inputs {
		res := expected[n]
		include, exclude, otherInclude, otherExclude := ParseNumberMenu(in)

		assert.True(t, intRangesEqual(include, res.Include), "Test %d Failed: Expected: include=%+v got include=%+v", n+1, res.Include, include)
		assert.True(t, intRangesEqual(exclude, res.Exclude), "Test %d Failed: Expected: exclude=%+v got exclude=%+v", n+1, res.Exclude, exclude)
		assert.True(t, otherInclude.Equal(res.OtherInclude), "Test %d Failed: Expected: otherInclude=%+v got otherInclude=%+v", n+1, res.OtherInclude, otherInclude)
		assert.True(t, otherExclude.Equal(res.OtherExclude), "Test %d Failed: Expected: otherExclude=%+v got otherExclude=%+v", n+1, res.OtherExclude, otherExclude)
	}
}
|
||||
|
||||
// TestIntRange_Get checks IntRange.Get inclusivity at, inside, and just
// outside both bounds.
func TestIntRange_Get(t *testing.T) {
	t.Parallel()
	type fields struct {
		min int
		max int
	}
	type args struct {
		n int
	}
	tests := []struct {
		name   string
		fields fields
		args   args
		want   bool
	}{
		{name: "normal range true", fields: fields{0, 10}, args: args{5}, want: true},
		{name: "normal start range true", fields: fields{0, 10}, args: args{0}, want: true},
		{name: "normal end range true", fields: fields{0, 10}, args: args{10}, want: true},
		{name: "small range true", fields: fields{1, 1}, args: args{1}, want: true},
		{name: "normal start range false", fields: fields{1, 2}, args: args{0}, want: false},
		{name: "normal end range false", fields: fields{1, 2}, args: args{3}, want: false},
	}
	for _, tt := range tests {
		tt := tt // capture range variable for the parallel subtest
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			r := IntRange{
				min: tt.fields.min,
				max: tt.fields.max,
			}
			if got := r.Get(tt.args.n); got != tt.want {
				t.Errorf("IntRange.Get() = %v, want %v", got, tt.want)
			}
		})
	}
}
|
||||
|
||||
func intRangesEqual(a, b IntRanges) bool {
|
||||
if a == nil && b == nil {
|
||||
return true
|
||||
}
|
||||
|
||||
if a == nil || b == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
if len(a) != len(b) {
|
||||
return false
|
||||
}
|
||||
|
||||
for n := range a {
|
||||
r1 := a[n]
|
||||
r2 := b[n]
|
||||
|
||||
if r1.min != r2.min || r1.max != r2.max {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// TestIntRanges_Get checks IntRanges.Get across single and multiple member
// ranges, including the gap between two adjacent ranges.
func TestIntRanges_Get(t *testing.T) {
	t.Parallel()
	type args struct {
		n int
	}
	tests := []struct {
		name string
		rs   IntRanges
		args args
		want bool
	}{
		{name: "normal range true", rs: IntRanges{{0, 10}}, args: args{5}, want: true},
		{name: "normal ranges in between true", rs: IntRanges{{0, 4}, {5, 10}}, args: args{5}, want: true},
		{name: "normal ranges in between false", rs: IntRanges{{0, 4}, {6, 10}}, args: args{5}, want: false},
		{name: "normal start range true", rs: IntRanges{{0, 10}}, args: args{0}, want: true},
		{name: "normal end range true", rs: IntRanges{{0, 10}}, args: args{10}, want: true},
		{name: "small range true", rs: IntRanges{{1, 1}, {3, 3}}, args: args{1}, want: true},
		{name: "normal start range false", rs: IntRanges{{1, 2}}, args: args{0}, want: false},
		{name: "normal end range false", rs: IntRanges{{1, 2}}, args: args{3}, want: false},
	}
	for _, tt := range tests {
		tt := tt // capture range variable for the parallel subtest
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			if got := tt.rs.Get(tt.args.n); got != tt.want {
				t.Errorf("IntRanges.Get() = %v, want %v", got, tt.want)
			}
		})
	}
}
|
78
pkg/menus/clean_menu.go
Normal file
78
pkg/menus/clean_menu.go
Normal file
@ -0,0 +1,78 @@
|
||||
// Clean Build Menu functions
|
||||
package menus
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"os"
|
||||
|
||||
mapset "github.com/deckarep/golang-set/v2"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// anyExistInCache reports whether at least one of the given PKGBUILD
// directories is present on disk.
func anyExistInCache(pkgbuildDirs map[string]string) bool {
	for _, cacheDir := range pkgbuildDirs {
		_, err := os.Stat(cacheDir)
		if !os.IsNotExist(err) {
			return true
		}
	}

	return false
}
|
||||
|
||||
// CleanFn presents the "clean build" menu: it asks which of the given
// package bases should have their cached build directories reset, then runs
// `git reset --hard origin/HEAD` followed by `git clean -fdx` in each chosen
// directory. It stops and returns the error of the first failing git command.
func CleanFn(ctx context.Context, run *runtime.Runtime, w io.Writer,
	pkgbuildDirsByBase map[string]string, installed mapset.Set[string],
) error {
	if len(pkgbuildDirsByBase) == 0 {
		return nil // no work to do
	}

	// Skip the menu entirely when nothing is cached on disk yet.
	if !anyExistInCache(pkgbuildDirsByBase) {
		return nil
	}

	// Bases whose directory does not exist have nothing to clean.
	skipFunc := func(pkg string) bool {
		dir := pkgbuildDirsByBase[pkg]
		// TOFIX: new install engine dir will always exist, check if unclean instead
		if _, err := os.Stat(dir); os.IsNotExist(err) {
			return true
		}

		return false
	}

	bases := make([]string, 0, len(pkgbuildDirsByBase))
	for pkg := range pkgbuildDirsByBase {
		bases = append(bases, pkg)
	}

	toClean, errClean := selectionMenu(run.Logger, pkgbuildDirsByBase, bases, installed,
		gotext.Get("Packages to cleanBuild?"),
		settings.NoConfirm, run.Cfg.AnswerClean, skipFunc)
	if errClean != nil {
		return errClean
	}

	for i, base := range toClean {
		dir := pkgbuildDirsByBase[base]
		run.Logger.OperationInfoln(gotext.Get("Deleting (%d/%d): %s", i+1, len(toClean), text.Cyan(dir)))

		// First discard local modifications...
		if err := run.CmdBuilder.Show(run.CmdBuilder.BuildGitCmd(ctx, dir, "reset", "--hard", "origin/HEAD")); err != nil {
			run.Logger.Warnln(gotext.Get("Unable to clean:"), dir)

			return err
		}

		// ...then remove untracked and ignored files.
		if err := run.CmdBuilder.Show(run.CmdBuilder.BuildGitCmd(ctx, dir, "clean", "-fdx")); err != nil {
			run.Logger.Warnln(gotext.Get("Unable to clean:"), dir)

			return err
		}
	}

	return nil
}
|
181
pkg/menus/diff_menu.go
Normal file
181
pkg/menus/diff_menu.go
Normal file
@ -0,0 +1,181 @@
|
||||
// file dedicated to diff menu
|
||||
package menus
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
mapset "github.com/deckarep/golang-set/v2"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/multierror"
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/settings/exe"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
const (
	// gitEmptyTree is the well-known object hash of git's empty tree; it is
	// used as the diff base when no diff has ever been reviewed.
	gitEmptyTree = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"
	// gitDiffRefName is the ref under which the last reviewed commit is kept.
	gitDiffRefName = "AUR_SEEN"
)
|
||||
|
||||
// showPkgbuildDiffs shows, for every base, the git diff between the last
// reviewed state (AUR_SEEN, or the empty tree if never reviewed) and
// HEAD@{upstream}. Bases with no changes since the last review are skipped
// with a warning. Per-base errors are accumulated so every diff is attempted.
func showPkgbuildDiffs(ctx context.Context, cmdBuilder exe.ICmdBuilder, logger *text.Logger,
	pkgbuildDirs map[string]string, bases []string,
) error {
	var errMulti multierror.MultiError

	for _, pkg := range bases {
		dir := pkgbuildDirs[pkg]

		start, err := getLastSeenHash(ctx, cmdBuilder, dir)
		if err != nil {
			errMulti.Add(err)

			continue
		}

		// A start of gitEmptyTree means nothing was ever reviewed, so the
		// diff is always shown; otherwise skip when nothing changed.
		if start != gitEmptyTree {
			hasDiff, err := gitHasDiff(ctx, cmdBuilder, dir)
			if err != nil {
				errMulti.Add(err)

				continue
			}

			if !hasDiff {
				logger.Warnln(gotext.Get("%s: No changes -- skipping", text.Cyan(pkg)))

				continue
			}
		}

		// Prefix paths with the package dir and leave .SRCINFO out of the diff.
		args := []string{
			"diff",
			start + "..HEAD@{upstream}", "--src-prefix",
			dir + "/", "--dst-prefix", dir + "/", "--", ".", ":(exclude).SRCINFO",
		}
		if text.UseColor {
			args = append(args, "--color=always")
		} else {
			args = append(args, "--color=never")
		}

		// Best effort: a failing `git diff` should not abort the other diffs.
		_ = cmdBuilder.Show(cmdBuilder.BuildGitCmd(ctx, dir, args...))
	}

	return errMulti.Return()
}
|
||||
|
||||
// Check whether or not a diff exists between the last reviewed diff and
|
||||
// HEAD@{upstream}.
|
||||
func gitHasDiff(ctx context.Context, cmdBuilder exe.ICmdBuilder, dir string) (bool, error) {
|
||||
if gitHasLastSeenRef(ctx, cmdBuilder, dir) {
|
||||
stdout, stderr, err := cmdBuilder.Capture(
|
||||
cmdBuilder.BuildGitCmd(ctx, dir, "rev-parse", gitDiffRefName, "HEAD@{upstream}"))
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("%s%w", stderr, err)
|
||||
}
|
||||
|
||||
lines := strings.Split(stdout, "\n")
|
||||
lastseen := lines[0]
|
||||
upstream := lines[1]
|
||||
|
||||
return lastseen != upstream, nil
|
||||
}
|
||||
// If AUR_SEEN does not exists, we have never reviewed a diff for this package
|
||||
// and should display it.
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// Return whether or not we have reviewed a diff yet. It checks for the existence of
|
||||
// AUR_SEEN in the git ref-list.
|
||||
func gitHasLastSeenRef(ctx context.Context, cmdBuilder exe.ICmdBuilder, dir string) bool {
|
||||
_, _, err := cmdBuilder.Capture(
|
||||
cmdBuilder.BuildGitCmd(ctx,
|
||||
dir, "rev-parse", "--quiet", "--verify", gitDiffRefName))
|
||||
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// Returns the last reviewed hash. If AUR_SEEN exists it will return this hash.
|
||||
// If it does not it will return empty tree as no diff have been reviewed yet.
|
||||
func getLastSeenHash(ctx context.Context, cmdBuilder exe.ICmdBuilder, dir string) (string, error) {
|
||||
if gitHasLastSeenRef(ctx, cmdBuilder, dir) {
|
||||
stdout, stderr, err := cmdBuilder.Capture(
|
||||
cmdBuilder.BuildGitCmd(ctx,
|
||||
dir, "rev-parse", gitDiffRefName))
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("%s %w", stderr, err)
|
||||
}
|
||||
|
||||
lines := strings.Split(stdout, "\n")
|
||||
|
||||
return lines[0], nil
|
||||
}
|
||||
|
||||
return gitEmptyTree, nil
|
||||
}
|
||||
|
||||
// Update the AUR_SEEN ref to HEAD. We use this ref to determine which diff were
|
||||
// reviewed by the user.
|
||||
func gitUpdateSeenRef(ctx context.Context, cmdBuilder exe.ICmdBuilder, dir string) error {
|
||||
_, stderr, err := cmdBuilder.Capture(
|
||||
cmdBuilder.BuildGitCmd(ctx,
|
||||
dir, "update-ref", gitDiffRefName, "HEAD"))
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s %w", stderr, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func updatePkgbuildSeenRef(ctx context.Context, cmdBuilder exe.ICmdBuilder, pkgbuildDirs map[string]string, bases []string) error {
|
||||
var errMulti multierror.MultiError
|
||||
|
||||
for _, pkg := range bases {
|
||||
dir := pkgbuildDirs[pkg]
|
||||
if err := gitUpdateSeenRef(ctx, cmdBuilder, dir); err != nil {
|
||||
errMulti.Add(err)
|
||||
}
|
||||
}
|
||||
|
||||
return errMulti.Return()
|
||||
}
|
||||
|
||||
// DiffFn presents the diff review menu: it asks which bases to diff, shows
// each PKGBUILD diff, asks for confirmation, and only on acceptance records
// the reviewed state by moving each base's AUR_SEEN ref to HEAD.
func DiffFn(ctx context.Context, run *runtime.Runtime, w io.Writer,
	pkgbuildDirsByBase map[string]string, installed mapset.Set[string],
) error {
	if len(pkgbuildDirsByBase) == 0 {
		return nil // no work to do
	}

	bases := make([]string, 0, len(pkgbuildDirsByBase))
	for base := range pkgbuildDirsByBase {
		bases = append(bases, base)
	}

	toDiff, errMenu := selectionMenu(run.Logger, pkgbuildDirsByBase, bases, installed, gotext.Get("Diffs to show?"),
		settings.NoConfirm, run.Cfg.AnswerDiff, nil)
	if errMenu != nil || len(toDiff) == 0 {
		return errMenu
	}

	if errD := showPkgbuildDiffs(ctx, run.CmdBuilder, run.Logger, pkgbuildDirsByBase, toDiff); errD != nil {
		return errD
	}

	run.Logger.Println()

	if !run.Logger.ContinueTask(gotext.Get("Proceed with install?"), true, false) {
		return settings.ErrUserAbort{}
	}

	// Only mark the diffs as seen after the user has accepted them.
	if errUpd := updatePkgbuildSeenRef(ctx, run.CmdBuilder, pkgbuildDirsByBase, toDiff); errUpd != nil {
		return errUpd
	}

	return nil
}
|
148
pkg/menus/edit_menu.go
Normal file
148
pkg/menus/edit_menu.go
Normal file
@ -0,0 +1,148 @@
|
||||
// edit menu
|
||||
package menus
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
gosrc "github.com/Morganamilo/go-srcinfo"
|
||||
mapset "github.com/deckarep/golang-set/v2"
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/runtime"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// editor returns the preferred system editor and the arguments to pass it.
//
// Resolution order, falling through whenever a candidate cannot be resolved
// on PATH:
//  1. the configured editor (with editorFlags),
//  2. $VISUAL,
//  3. $EDITOR,
//  4. an interactive prompt repeated until a resolvable editor is entered.
func editor(log *text.Logger, editorConfig, editorFlags string, noConfirm bool) (editor string, args []string) {
	switch {
	case editorConfig != "":
		editor, err := exec.LookPath(editorConfig)
		if err != nil {
			log.Errorln(err)
		} else {
			return editor, strings.Fields(editorFlags)
		}

		fallthrough
	case os.Getenv("VISUAL") != "":
		// $VISUAL may carry its own arguments, e.g. "code --wait".
		if editorArgs := strings.Fields(os.Getenv("VISUAL")); len(editorArgs) != 0 {
			editor, err := exec.LookPath(editorArgs[0])
			if err != nil {
				log.Errorln(err)
			} else {
				return editor, editorArgs[1:]
			}
		}

		fallthrough
	case os.Getenv("EDITOR") != "":
		if editorArgs := strings.Fields(os.Getenv("EDITOR")); len(editorArgs) != 0 {
			editor, err := exec.LookPath(editorArgs[0])
			if err != nil {
				log.Errorln(err)
			} else {
				return editor, editorArgs[1:]
			}
		}

		fallthrough
	default:
		log.Errorln("\n", gotext.Get("%s is not set", text.Bold(text.Cyan("$EDITOR"))))
		log.Warnln(gotext.Get("Add %s or %s to your environment variables", text.Bold(text.Cyan("$EDITOR")), text.Bold(text.Cyan("$VISUAL"))))

		// Keep prompting until the user supplies a resolvable editor.
		for {
			log.Infoln(gotext.Get("Edit PKGBUILD with?"))

			editorInput, err := log.GetInput("", noConfirm)
			if err != nil {
				log.Errorln(err)
				continue
			}

			editorArgs := strings.Fields(editorInput)
			if len(editorArgs) == 0 {
				continue
			}

			editor, err := exec.LookPath(editorArgs[0])
			if err != nil {
				log.Errorln(err)
				continue
			}

			return editor, editorArgs[1:]
		}
	}
}
|
||||
|
||||
func editPkgbuilds(log *text.Logger, pkgbuildDirs map[string]string, bases []string, editorConfig,
|
||||
editorFlags string, srcinfos map[string]*gosrc.Srcinfo, noConfirm bool,
|
||||
) error {
|
||||
pkgbuilds := make([]string, 0, len(bases))
|
||||
|
||||
for _, pkg := range bases {
|
||||
dir := pkgbuildDirs[pkg]
|
||||
pkgbuilds = append(pkgbuilds, filepath.Join(dir, "PKGBUILD"))
|
||||
|
||||
if srcinfos != nil {
|
||||
for _, splitPkg := range srcinfos[pkg].SplitPackages() {
|
||||
if splitPkg.Install != "" {
|
||||
pkgbuilds = append(pkgbuilds, filepath.Join(dir, splitPkg.Install))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(pkgbuilds) > 0 {
|
||||
editor, editorArgs := editor(log, editorConfig, editorFlags, noConfirm)
|
||||
editorArgs = append(editorArgs, pkgbuilds...)
|
||||
editcmd := exec.Command(editor, editorArgs...)
|
||||
editcmd.Stdin, editcmd.Stdout, editcmd.Stderr = os.Stdin, os.Stdout, os.Stderr
|
||||
|
||||
if err := editcmd.Run(); err != nil {
|
||||
return errors.New(gotext.Get("editor did not exit successfully, aborting: %s", err))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// EditFn presents the PKGBUILD edit menu: it asks which bases to edit, opens
// them in the user's editor, and then asks whether to proceed with the
// install, returning settings.ErrUserAbort on refusal.
func EditFn(ctx context.Context, run *runtime.Runtime, w io.Writer,
	pkgbuildDirsByBase map[string]string, installed mapset.Set[string],
) error {
	if len(pkgbuildDirsByBase) == 0 {
		return nil // no work to do
	}

	bases := make([]string, 0, len(pkgbuildDirsByBase))
	for pkg := range pkgbuildDirsByBase {
		bases = append(bases, pkg)
	}

	toEdit, errMenu := selectionMenu(run.Logger, pkgbuildDirsByBase, bases, installed,
		gotext.Get("PKGBUILDs to edit?"), settings.NoConfirm, run.Cfg.AnswerEdit, nil)
	if errMenu != nil || len(toEdit) == 0 {
		return errMenu
	}

	// TOFIX: remove or use srcinfo data
	if errEdit := editPkgbuilds(run.Logger, pkgbuildDirsByBase,
		toEdit, run.Cfg.Editor, run.Cfg.EditorFlags, nil, settings.NoConfirm); errEdit != nil {
		return errEdit
	}

	run.Logger.Println()

	if !run.Logger.ContinueTask(gotext.Get("Proceed with install?"), true, false) {
		return settings.ErrUserAbort{}
	}

	return nil
}
|
103
pkg/menus/menu.go
Normal file
103
pkg/menus/menu.go
Normal file
@ -0,0 +1,103 @@
|
||||
package menus
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/intrange"
|
||||
"github.com/Jguer/yay/v12/pkg/settings"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
|
||||
mapset "github.com/deckarep/golang-set/v2"
|
||||
)
|
||||
|
||||
func pkgbuildNumberMenu(logger *text.Logger, pkgbuildDirs map[string]string,
|
||||
bases []string, installed mapset.Set[string],
|
||||
) {
|
||||
toPrint := ""
|
||||
|
||||
for n, pkgBase := range bases {
|
||||
dir := pkgbuildDirs[pkgBase]
|
||||
toPrint += fmt.Sprintf(text.Magenta("%3d")+" %-40s", len(pkgbuildDirs)-n,
|
||||
text.Bold(pkgBase))
|
||||
|
||||
if installed.Contains(pkgBase) {
|
||||
toPrint += text.Bold(text.Green(gotext.Get(" (Installed)")))
|
||||
}
|
||||
|
||||
// TODO: remove or refactor to check if git dir is unclean
|
||||
if _, err := os.Stat(dir); !os.IsNotExist(err) {
|
||||
toPrint += text.Bold(text.Green(gotext.Get(" (Build Files Exist)")))
|
||||
}
|
||||
|
||||
toPrint += "\n"
|
||||
}
|
||||
|
||||
logger.Print(toPrint)
|
||||
}
|
||||
|
||||
// selectionMenu prints the numbered package menu and asks the user which of
// bases to select. It understands numbers and ranges (parsed by intrange),
// the words all/none/abort, installed/notinstalled filters, and literal
// package-base names. skipFunc, when non-nil, excludes bases before the
// selection is applied. It returns the selected bases, or
// settings.ErrUserAbort when the user aborts.
func selectionMenu(logger *text.Logger, pkgbuildDirs map[string]string, bases []string, installed mapset.Set[string],
	message string, noConfirm bool, defaultAnswer string, skipFunc func(string) bool,
) ([]string, error) {
	selected := make([]string, 0)

	pkgbuildNumberMenu(logger, pkgbuildDirs, bases, installed)

	logger.Infoln(message)
	logger.Infoln(gotext.Get("%s [A]ll [Ab]ort [I]nstalled [No]tInstalled or (1 2 3, 1-3, ^4)", text.Cyan(gotext.Get("[N]one"))))

	selectInput, err := logger.GetInput(defaultAnswer, noConfirm)
	if err != nil {
		return nil, err
	}

	eInclude, eExclude, eOtherInclude, eOtherExclude := intrange.ParseNumberMenu(selectInput)
	// With no exclusions at all, the input is treated as an include-list;
	// otherwise everything not explicitly excluded gets selected.
	eIsInclude := len(eExclude) == 0 && eOtherExclude.Cardinality() == 0

	if eOtherInclude.Contains("abort") || eOtherInclude.Contains("ab") {
		return nil, settings.ErrUserAbort{}
	}

	if eOtherInclude.Contains("n") || eOtherInclude.Contains("none") {
		return selected, nil
	}

	for i, pkgBase := range bases {
		if skipFunc != nil && skipFunc(pkgBase) {
			continue
		}

		anyInstalled := installed.Contains(pkgBase)

		// Menu numbers count down (see pkgbuildNumberMenu), hence len(bases)-i.
		if !eIsInclude && eExclude.Get(len(bases)-i) {
			continue
		}

		if anyInstalled && (eOtherInclude.Contains("i") || eOtherInclude.Contains("installed")) {
			selected = append(selected, pkgBase)
			continue
		}

		if !anyInstalled && (eOtherInclude.Contains("no") || eOtherInclude.Contains("notinstalled")) {
			selected = append(selected, pkgBase)
			continue
		}

		if eOtherInclude.Contains("a") || eOtherInclude.Contains("all") {
			selected = append(selected, pkgBase)
			continue
		}

		// Include mode: selected when its number or its name was given.
		if eIsInclude && (eInclude.Get(len(bases)-i) || eOtherInclude.Contains(pkgBase)) {
			selected = append(selected, pkgBase)
		}

		// Exclude mode: selected unless its number or name was negated.
		if !eIsInclude && (!eExclude.Get(len(bases)-i) && !eOtherExclude.Contains(pkgBase)) {
			selected = append(selected, pkgBase)
		}
	}

	return selected, nil
}
|
41
pkg/multierror/multierror.go
Normal file
41
pkg/multierror/multierror.go
Normal file
@ -0,0 +1,41 @@
|
||||
package multierror
|
||||
|
||||
import "sync"
|
||||
|
||||
// MultiError type handles error accumulation from goroutines.
// The zero value is ready to use.
type MultiError struct {
	Errors []error
	mux    sync.Mutex
}

// Error turns the MultiError structure into a string, one accumulated error
// per line. It returns the empty string when no errors were accumulated.
func (err *MultiError) Error() string {
	if len(err.Errors) == 0 {
		// Guard: the slicing below would panic on an empty string.
		return ""
	}

	str := ""

	for _, e := range err.Errors {
		str += e.Error() + "\n"
	}

	// Drop the trailing newline added by the loop.
	return str[:len(str)-1]
}

// Add adds an error to the MultiError structure; nil errors are ignored.
// It is safe for concurrent use.
func (err *MultiError) Add(e error) {
	if e == nil {
		return
	}

	err.mux.Lock()
	err.Errors = append(err.Errors, e)
	err.mux.Unlock()
}

// Return is used as a wrapper on return: it yields the MultiError structure
// if errors exist, or nil instead of delivering an empty structure.
func (err *MultiError) Return() error {
	if len(err.Errors) > 0 {
		return err
	}

	return nil
}
|
11
pkg/news/.snapshots/TestPrintNewsFeed-all-quiet
Normal file
11
pkg/news/.snapshots/TestPrintNewsFeed-all-quiet
Normal file
@ -0,0 +1,11 @@
|
||||
[1m[35m2019-12-20[0m[0m [1mXorg cleanup requires manual intervention[0m
|
||||
[1m[35m2020-01-04[0m[0m [1mNow using Zstandard instead of xz for package compression[0m
|
||||
[1m[35m2020-01-15[0m[0m [1mrsync compatibility[0m
|
||||
[1m[35m2020-02-17[0m[0m [1msshd needs restarting after upgrading to openssh-8.2p1[0m
|
||||
[1m[35m2020-02-22[0m[0m [1mPlanet Arch Linux migration[0m
|
||||
[1m[35m2020-02-24[0m[0m [1mThe Future of the Arch Linux Project Leader[0m
|
||||
[1m[35m2020-03-01[0m[0m [1mfirewalld>=0.8.1-2 update requires manual intervention[0m
|
||||
[1m[35m2020-03-19[0m[0m [1mhplip 3.20.3-2 update requires manual intervention[0m
|
||||
[1m[35m2020-04-13[0m[0m [1mnss>=3.51.1-1 and lib32-nss>=3.51.1-1 updates require manual intervention[0m
|
||||
[1m[35m2020-04-14[0m[0m [1mzn_poly 0.9.2-2 update requires manual intervention[0m
|
||||
|
114
pkg/news/.snapshots/TestPrintNewsFeed-all-verbose
Normal file
114
pkg/news/.snapshots/TestPrintNewsFeed-all-verbose
Normal file
@ -0,0 +1,114 @@
|
||||
[1m[35m2019-12-20[0m[0m [1mXorg cleanup requires manual intervention[0m
|
||||
In the process of Xorg cleanup the update requires manual
|
||||
intervention when you hit this message:
|
||||
|
||||
[36m:: installing xorgproto (2019.2-2) breaks dependency 'inputproto' required by lib32-libxi
|
||||
:: installing xorgproto (2019.2-2) breaks dependency 'dmxproto' required by libdmx
|
||||
:: installing xorgproto (2019.2-2) breaks dependency 'xf86dgaproto' required by libxxf86dga
|
||||
:: installing xorgproto (2019.2-2) breaks dependency 'xf86miscproto' required by libxxf86misc
|
||||
[0m
|
||||
when updating, use: [36mpacman -Rdd libdmx libxxf86dga libxxf86misc && pacman -Syu[0m to perform the upgrade.
|
||||
[0m
|
||||
[1m[35m2020-01-04[0m[0m [1mNow using Zstandard instead of xz for package compression[0m
|
||||
As announced on the mailing list, on Friday, Dec 27 2019, our package compression scheme has changed from xz (.pkg.tar.xz) to zstd (.pkg.tar.zst).
|
||||
|
||||
zstd and xz trade blows in their compression ratio. Recompressing all packages to zstd with our options yields a total ~0.8% increase in package size on all of our packages combined, but the decompression time for all packages saw a ~1300% speedup.
|
||||
|
||||
We already have more than 545 zstd-compressed packages in our repositories, and as packages get updated more will keep rolling in. We have not found any user-facing issues as of yet, so things appear to be working.
|
||||
|
||||
As a packager, you will automatically start building .pkg.tar.zst packages if you are using the latest version of devtools (>= 20191227).
|
||||
As an end-user no manual intervention is required, assuming that you have read and followed the news post from late last year.
|
||||
|
||||
If you nevertheless haven't updated libarchive since 2018, all hope is not lost! Binary builds of pacman-static are available from Eli Schwartz' personal repository (or direct link to binary), signed with their Trusted User keys, with which you can perform the update.
|
||||
[0m
|
||||
[1m[35m2020-01-15[0m[0m [1mrsync compatibility[0m
|
||||
Our [36mrsync[0m package was shipped with bundled [36mzlib[0m to provide compatibility
|
||||
with the old-style [36m--compress[0m option up to version 3.1.0. Version 3.1.1 was
|
||||
released on 2014-06-22 and is shipped by all major distributions now.
|
||||
|
||||
So we decided to finally drop the bundled library and ship a package with
|
||||
system [36mzlib[0m. This also fixes security issues, actual ones and in future. Go
|
||||
and blame those running old versions if you encounter errors with [36mrsync
|
||||
3.1.3-3[0m.
|
||||
[0m
|
||||
[1m[35m2020-02-17[0m[0m [1msshd needs restarting after upgrading to openssh-8.2p1[0m
|
||||
After upgrading to openssh-8.2p1, the existing SSH daemon will be unable to accept new connections. (See FS#65517.) When upgrading remote hosts, please make sure to restart the SSH daemon using [36msystemctl restart sshd[0m right after running [36mpacman -Syu[0m. If you are upgrading to openssh-8.2p1-3 or higher, this restart will happen automatically.
|
||||
[0m
|
||||
[1m[35m2020-02-22[0m[0m [1mPlanet Arch Linux migration[0m
|
||||
The software behind planet.archlinux.org was implemented in Python 2 and is no longer maintained upstream. This functionality has now been implemented in archlinux.org's archweb backend which is actively maintained but offers a slightly different experience.
|
||||
|
||||
The most notable changes are the offered feeds and the feed location. Archweb only offers an Atom feed which is located at here.
|
||||
[0m
|
||||
[1m[35m2020-02-24[0m[0m [1mThe Future of the Arch Linux Project Leader[0m
|
||||
Hello everyone,
|
||||
|
||||
Some of you may know me from the days when I was much more involved in Arch, but most of you probably just know me as a name on the website. I’ve been with Arch for some time, taking the leadership of this beast over from Judd back in 2007. But, as these things often go, my involvement has slid down to minimal levels over time. It’s high time that changes.
|
||||
|
||||
Arch Linux needs involved leadership to make hard decisions and direct the project where it needs to go. And I am not in a position to do this.
|
||||
|
||||
In a team effort, the Arch Linux staff devised a new process for determining future leaders. From now on, leaders will be elected by the staff for a term length of two years. Details of this new process can be found here
|
||||
|
||||
In the first official vote with Levente Polyak (anthraxx), Gaetan Bisson (vesath), Giancarlo Razzolini (grazzolini), and Sven-Hendrik Haase (svenstaro) as candidates, and through 58 verified votes, a winner was chosen:
|
||||
|
||||
Levente Polyak (anthraxx) will be taking over the reins of this ship. Congratulations!
|
||||
|
||||
Thanks for everything over all these years,
|
||||
Aaron Griffin (phrakture)
|
||||
[0m
|
||||
[1m[35m2020-03-01[0m[0m [1mfirewalld>=0.8.1-2 update requires manual intervention[0m
|
||||
The firewalld package prior to version 0.8.1-2 was missing the compiled python modules. This has been fixed in 0.8.1-2, so the upgrade will need to overwrite the untracked pyc files created. If you get errors like these
|
||||
|
||||
[36mfirewalld: /usr/lib/python3.8/site-packages/firewall/__pycache__/__init__.cpython-38.pyc exists in filesystem
|
||||
firewalld: /usr/lib/python3.8/site-packages/firewall/__pycache__/client.cpython-38.pyc exists in filesystem
|
||||
firewalld: /usr/lib/python3.8/site-packages/firewall/__pycache__/dbus_utils.cpython-38.pyc exists in filesystem
|
||||
...many more...
|
||||
[0m
|
||||
when updating, use
|
||||
|
||||
[36mpacman -Suy --overwrite /usr/lib/python3.8/site-packages/firewall/\*
|
||||
[0m
|
||||
to perform the upgrade.
|
||||
[0m
|
||||
[1m[35m2020-03-19[0m[0m [1mhplip 3.20.3-2 update requires manual intervention[0m
|
||||
The hplip package prior to version 3.20.3-2 was missing the compiled
|
||||
python modules. This has been fixed in 3.20.3-2, so the upgrade will
|
||||
need to overwrite the untracked pyc files that were created. If you get errors
|
||||
such as these
|
||||
|
||||
[36mhplip: /usr/share/hplip/base/__pycache__/__init__.cpython-38.pyc exists in filesystem
|
||||
hplip: /usr/share/hplip/base/__pycache__/avahi.cpython-38.pyc exists in filesystem
|
||||
hplip: /usr/share/hplip/base/__pycache__/codes.cpython-38.pyc exists in filesystem
|
||||
...many more...
|
||||
[0m
|
||||
when updating, use
|
||||
|
||||
[36mpacman -Suy --overwrite /usr/share/hplip/\*
|
||||
[0m
|
||||
to perform the upgrade.
|
||||
[0m
|
||||
[1m[35m2020-04-13[0m[0m [1mnss>=3.51.1-1 and lib32-nss>=3.51.1-1 updates require manual intervention[0m
|
||||
The nss and lib32-nss packages prior to version 3.51.1-1 were missing a soname link each. This has been fixed in 3.51.1-1, so the upgrade will need to overwrite the untracked files created by ldconfig. If you get any of these errors
|
||||
|
||||
[36mnss: /usr/lib/p11-kit-trust.so exists in filesystem
|
||||
lib32-nss: /usr/lib32/p11-kit-trust.so exists in filesystem
|
||||
[0m
|
||||
when updating, use
|
||||
|
||||
[36mpacman -Syu --overwrite /usr/lib\*/p11-kit-trust.so
|
||||
[0m
|
||||
to perform the upgrade.
|
||||
[0m
|
||||
[1m[35m2020-04-14[0m[0m [1mzn_poly 0.9.2-2 update requires manual intervention[0m
|
||||
The zn_poly package prior to version 0.9.2-2 was missing a soname link.
|
||||
This has been fixed in 0.9.2-2, so the upgrade will need to overwrite the
|
||||
untracked files created by ldconfig. If you get an error
|
||||
|
||||
[36mzn_poly: /usr/lib/libzn_poly-0.9.so exists in filesystem
|
||||
[0m
|
||||
when updating, use
|
||||
|
||||
[36mpacman -Syu --overwrite usr/lib/libzn_poly-0.9.so
|
||||
[0m
|
||||
to perform the upgrade.
|
||||
[0m
|
||||
|
3
pkg/news/.snapshots/TestPrintNewsFeed-latest-quiet
Normal file
3
pkg/news/.snapshots/TestPrintNewsFeed-latest-quiet
Normal file
@ -0,0 +1,3 @@
|
||||
[1m[35m2020-04-13[0m[0m [1mnss>=3.51.1-1 and lib32-nss>=3.51.1-1 updates require manual intervention[0m
|
||||
[1m[35m2020-04-14[0m[0m [1mzn_poly 0.9.2-2 update requires manual intervention[0m
|
||||
|
@ -0,0 +1,3 @@
|
||||
[1m[35m2020-04-14[0m[0m [1mzn_poly 0.9.2-2 update requires manual intervention[0m
|
||||
[1m[35m2020-04-13[0m[0m [1mnss>=3.51.1-1 and lib32-nss>=3.51.1-1 updates require manual intervention[0m
|
||||
|
3
pkg/news/.snapshots/TestPrintNewsFeedSameDay
Normal file
3
pkg/news/.snapshots/TestPrintNewsFeedSameDay
Normal file
@ -0,0 +1,3 @@
|
||||
[1m[35m2020-04-14[0m[0m [1mzn_poly 0.9.2-2 update requires manual intervention[0m
|
||||
The zn_poly package prior to version 0.9.2-2 was missing a soname link.[0m
|
||||
|
174
pkg/news/news.go
Normal file
174
pkg/news/news.go
Normal file
@ -0,0 +1,174 @@
|
||||
package news
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/xml"
|
||||
"html"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// item models a single <item> entry of the Arch Linux news RSS feed.
type item struct {
	Title       string `xml:"title"`
	Link        string `xml:"link"`
	Description string `xml:"description"` // raw HTML body of the post, rendered by parseNews
	PubDate     string `xml:"pubDate"`     // publication date, parsed with time.RFC1123Z in printNews
	// NOTE(review): encoding/xml does not resolve the "dc:" namespace prefix
	// through the struct tag name; verify this field is actually populated
	// when decoding the live feed.
	Creator string `xml:"dc:creator"`
}
|
||||
|
||||
func (item *item) printNews(logger *text.Logger, buildTime time.Time, all, quiet bool) {
|
||||
var fd string
|
||||
|
||||
date, err := time.Parse(time.RFC1123Z, item.PubDate)
|
||||
|
||||
if err != nil {
|
||||
logger.Errorln(err)
|
||||
} else {
|
||||
fd = text.FormatTime(int(date.Unix()))
|
||||
if !all && !buildTime.IsZero() {
|
||||
if buildTime.After(date) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.Println(text.Bold(text.Magenta(fd)), text.Bold(strings.TrimSpace(item.Title)))
|
||||
|
||||
if !quiet {
|
||||
desc := strings.TrimSpace(parseNews(item.Description))
|
||||
logger.Println(desc)
|
||||
}
|
||||
}
|
||||
|
||||
type channel struct {
|
||||
Title string `xml:"title"`
|
||||
Link string `xml:"link"`
|
||||
Description string `xml:"description"`
|
||||
Language string `xml:"language"`
|
||||
Lastbuilddate string `xml:"lastbuilddate"`
|
||||
Items []item `xml:"item"`
|
||||
}
|
||||
|
||||
// rss is the top-level envelope of the news feed document.
type rss struct {
	Channel channel `xml:"channel"`
}
|
||||
|
||||
func PrintNewsFeed(ctx context.Context, client *http.Client, logger *text.Logger,
|
||||
cutOffDate time.Time, bottomUp, all, quiet bool,
|
||||
) error {
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://archlinux.org/feeds/news", http.NoBody)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
rssGot := rss{}
|
||||
|
||||
d := xml.NewDecoder(bytes.NewReader(body))
|
||||
if err := d.Decode(&rssGot); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if bottomUp {
|
||||
for i := len(rssGot.Channel.Items) - 1; i >= 0; i-- {
|
||||
rssGot.Channel.Items[i].printNews(logger, cutOffDate, all, quiet)
|
||||
}
|
||||
} else {
|
||||
for i := 0; i < len(rssGot.Channel.Items); i++ {
|
||||
rssGot.Channel.Items[i].printNews(logger, cutOffDate, all, quiet)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Crude html parsing, good enough for the arch news
// This is only displayed in the terminal so there should be no security
// concerns.
func parseNews(str string) string {
	var (
		buffer       bytes.Buffer // rendered terminal output
		tagBuffer    bytes.Buffer // name of the HTML tag currently being read
		escapeBuffer bytes.Buffer // HTML entity currently being read (e.g. "&amp;")
		inTag        = false      // true while between '<' and '>'
		inEscape     = false      // true while between '&' and ';'
	)

	for _, char := range str {
		if inTag {
			if char == '>' {
				inTag = false

				// Translate the few tags we care about into terminal
				// formatting; every other tag is silently dropped.
				switch tagBuffer.String() {
				case "code":
					buffer.WriteString(text.CyanCode)
				case "/code":
					buffer.WriteString(text.ResetCode)
				case "/p":
					buffer.WriteRune('\n')
				}

				continue
			}

			tagBuffer.WriteRune(char)

			continue
		}

		if inEscape {
			if char == ';' {
				inEscape = false

				// Entity complete: decode it (e.g. "&amp;" -> "&").
				escapeBuffer.WriteRune(char)
				s := html.UnescapeString(escapeBuffer.String())
				buffer.WriteString(s)

				continue
			}

			escapeBuffer.WriteRune(char)

			continue
		}

		if char == '<' {
			inTag = true

			tagBuffer.Reset()

			continue
		}

		if char == '&' {
			inEscape = true

			escapeBuffer.Reset()
			escapeBuffer.WriteRune(char)

			continue
		}

		buffer.WriteRune(char)
	}

	// Always reset terminal attributes, in case the input ended inside a
	// <code> block. NOTE(review): a tag or entity left unterminated at end
	// of input is silently discarded.
	buffer.WriteString(text.ResetCode)

	return buffer.String()
}
|
179
pkg/news/news_test.go
Normal file
179
pkg/news/news_test.go
Normal file
@ -0,0 +1,179 @@
|
||||
//go:build !integration
|
||||
// +build !integration
|
||||
|
||||
package news
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/bradleyjkemp/cupaloy"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"gopkg.in/h2non/gock.v1"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
const lastNews = `
|
||||
<rss xmlns:atom="http://www.w3.org/2005/Atom" version="2.0">
|
||||
<channel>
|
||||
<title>Arch Linux: Recent news updates</title>
|
||||
<link>https://www.archlinux.org/news/</link>
|
||||
<description>The latest and greatest news from the Arch Linux distribution.</description>
|
||||
<atom:link href="https://www.archlinux.org/feeds/news/" rel="self" />
|
||||
<language>en-us</language>
|
||||
<lastBuildDate>Tue, 14 Apr 2020 16:30:32 +0000</lastBuildDate>
|
||||
<item>
|
||||
<title>zn_poly 0.9.2-2 update requires manual intervention</title>
|
||||
<link>https://www.archlinux.org/news/zn_poly-092-2-update-requires-manual-intervention/</link>
|
||||
<description><p>The zn_poly package prior to version 0.9.2-2 was missing a soname link.</description>
|
||||
<dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Antonio Rojas</dc:creator>
|
||||
<pubDate>Tue, 14 Apr 2020 16:30:30 +0000</pubDate>
|
||||
<guid isPermaLink="false">tag:www.archlinux.org,2020-04-14:/news/zn_poly-092-2-update-requires-manual-intervention/</guid>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
`
|
||||
|
||||
const sampleNews = `<?xml version="1.0" encoding="utf-8"?>
|
||||
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom"><channel><title>Arch Linux: Recent news updates</title><link>https://www.archlinux.org/news/</link><description>The latest and greatest news from the Arch Linux distribution.</description><atom:link href="https://www.archlinux.org/feeds/news/" rel="self"></atom:link><language>en-us</language><lastBuildDate>Tue, 14 Apr 2020 16:30:32 +0000</lastBuildDate><item><title>zn_poly 0.9.2-2 update requires manual intervention</title><link>https://www.archlinux.org/news/zn_poly-092-2-update-requires-manual-intervention/</link><description><p>The zn_poly package prior to version 0.9.2-2 was missing a soname link.
|
||||
This has been fixed in 0.9.2-2, so the upgrade will need to overwrite the
|
||||
untracked files created by ldconfig. If you get an error</p>
|
||||
<pre><code>zn_poly: /usr/lib/libzn_poly-0.9.so exists in filesystem
|
||||
</code></pre>
|
||||
<p>when updating, use</p>
|
||||
<pre><code>pacman -Syu --overwrite usr/lib/libzn_poly-0.9.so
|
||||
</code></pre>
|
||||
<p>to perform the upgrade.</p></description><dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Antonio Rojas</dc:creator><pubDate>Tue, 14 Apr 2020 16:30:30 +0000</pubDate><guid isPermaLink="false">tag:www.archlinux.org,2020-04-14:/news/zn_poly-092-2-update-requires-manual-intervention/</guid></item><item><title>nss>=3.51.1-1 and lib32-nss>=3.51.1-1 updates require manual intervention</title><link>https://www.archlinux.org/news/nss3511-1-and-lib32-nss3511-1-updates-require-manual-intervention/</link><description><p>The nss and lib32-nss packages prior to version 3.51.1-1 were missing a soname link each. This has been fixed in 3.51.1-1, so the upgrade will need to overwrite the untracked files created by ldconfig. If you get any of these errors</p>
|
||||
<pre><code>nss: /usr/lib/p11-kit-trust.so exists in filesystem
|
||||
lib32-nss: /usr/lib32/p11-kit-trust.so exists in filesystem
|
||||
</code></pre>
|
||||
<p>when updating, use</p>
|
||||
<pre><code>pacman -Syu --overwrite /usr/lib\*/p11-kit-trust.so
|
||||
</code></pre>
|
||||
<p>to perform the upgrade.</p></description><dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Jan Alexander Steffens</dc:creator><pubDate>Mon, 13 Apr 2020 00:35:58 +0000</pubDate><guid isPermaLink="false">tag:www.archlinux.org,2020-04-13:/news/nss3511-1-and-lib32-nss3511-1-updates-require-manual-intervention/</guid></item><item><title>hplip 3.20.3-2 update requires manual intervention</title><link>https://www.archlinux.org/news/hplip-3203-2-update-requires-manual-intervention/</link><description><p>The hplip package prior to version 3.20.3-2 was missing the compiled
|
||||
python modules. This has been fixed in 3.20.3-2, so the upgrade will
|
||||
need to overwrite the untracked pyc files that were created. If you get errors
|
||||
such as these</p>
|
||||
<pre><code>hplip: /usr/share/hplip/base/__pycache__/__init__.cpython-38.pyc exists in filesystem
|
||||
hplip: /usr/share/hplip/base/__pycache__/avahi.cpython-38.pyc exists in filesystem
|
||||
hplip: /usr/share/hplip/base/__pycache__/codes.cpython-38.pyc exists in filesystem
|
||||
...many more...
|
||||
</code></pre>
|
||||
<p>when updating, use</p>
|
||||
<pre><code>pacman -Suy --overwrite /usr/share/hplip/\*
|
||||
</code></pre>
|
||||
<p>to perform the upgrade.</p></description><dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Andreas Radke</dc:creator><pubDate>Thu, 19 Mar 2020 06:53:30 +0000</pubDate><guid isPermaLink="false">tag:www.archlinux.org,2020-03-19:/news/hplip-3203-2-update-requires-manual-intervention/</guid></item><item><title>firewalld>=0.8.1-2 update requires manual intervention</title><link>https://www.archlinux.org/news/firewalld081-2-update-requires-manual-intervention/</link><description><p>The firewalld package prior to version 0.8.1-2 was missing the compiled python modules. This has been fixed in 0.8.1-2, so the upgrade will need to overwrite the untracked pyc files created. If you get errors like these</p>
|
||||
<pre><code>firewalld: /usr/lib/python3.8/site-packages/firewall/__pycache__/__init__.cpython-38.pyc exists in filesystem
|
||||
firewalld: /usr/lib/python3.8/site-packages/firewall/__pycache__/client.cpython-38.pyc exists in filesystem
|
||||
firewalld: /usr/lib/python3.8/site-packages/firewall/__pycache__/dbus_utils.cpython-38.pyc exists in filesystem
|
||||
...many more...
|
||||
</code></pre>
|
||||
<p>when updating, use</p>
|
||||
<pre><code>pacman -Suy --overwrite /usr/lib/python3.8/site-packages/firewall/\*
|
||||
</code></pre>
|
||||
<p>to perform the upgrade.</p></description><dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Jan Alexander Steffens</dc:creator><pubDate>Sun, 01 Mar 2020 16:36:48 +0000</pubDate><guid isPermaLink="false">tag:www.archlinux.org,2020-03-01:/news/firewalld081-2-update-requires-manual-intervention/</guid></item><item><title>The Future of the Arch Linux Project Leader</title><link>https://www.archlinux.org/news/the-future-of-the-arch-linux-project-leader/</link><description><p>Hello everyone,</p>
|
||||
<p>Some of you may know me from the days when I was much more involved in Arch, but most of you probably just know me as a name on the website. I’ve been with Arch for some time, taking the leadership of this beast over from Judd back in 2007. But, as these things often go, my involvement has slid down to minimal levels over time. It’s high time that changes.</p>
|
||||
<p>Arch Linux needs involved leadership to make hard decisions and direct the project where it needs to go. And I am not in a position to do this.</p>
|
||||
<p>In a team effort, the Arch Linux staff devised a new process for determining future leaders. From now on, leaders will be elected by the staff for a term length of two years. Details of this new process can be found <a href="https://wiki.archlinux.org/index.php/DeveloperWiki:Project_Leader">here</a></p>
|
||||
<p>In the first official vote with Levente Polyak (anthraxx), Gaetan Bisson (vesath), Giancarlo Razzolini (grazzolini), and Sven-Hendrik Haase (svenstaro) as candidates, and through 58 verified votes, a winner was chosen:</p>
|
||||
<p><strong>Levente Polyak (anthraxx) will be taking over the reins of this ship. Congratulations!</strong></p>
|
||||
<p><em>Thanks for everything over all these years,<br />
|
||||
Aaron Griffin (phrakture)</em></p></description><dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Aaron Griffin</dc:creator><pubDate>Mon, 24 Feb 2020 15:56:28 +0000</pubDate><guid isPermaLink="false">tag:www.archlinux.org,2020-02-24:/news/the-future-of-the-arch-linux-project-leader/</guid></item><item><title>Planet Arch Linux migration</title><link>https://www.archlinux.org/news/planet-arch-linux-migration/</link><description><p>The software behind planet.archlinux.org was implemented in Python 2 and is no longer maintained upstream. This functionality has now been implemented in archlinux.org's archweb backend which is actively maintained but offers a slightly different experience.</p>
|
||||
<p>The most notable changes are the offered feeds and the feed location. Archweb only offers an Atom feed which is located at <a href="https://archlinux.org/feeds/planet">here</a>.</p></description><dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Jelle van der Waa</dc:creator><pubDate>Sat, 22 Feb 2020 22:43:00 +0000</pubDate><guid isPermaLink="false">tag:www.archlinux.org,2020-02-22:/news/planet-arch-linux-migration/</guid></item><item><title>sshd needs restarting after upgrading to openssh-8.2p1</title><link>https://www.archlinux.org/news/sshd-needs-restarting-after-upgrading-to-openssh-82p1/</link><description><p>After upgrading to openssh-8.2p1, the existing SSH daemon will be unable to accept new connections. (See <a href="https://bugs.archlinux.org/task/65517">FS#65517</a>.) When upgrading remote hosts, please make sure to restart the SSH daemon using <code>systemctl restart sshd</code> right after running <code>pacman -Syu</code>. If you are upgrading to openssh-8.2p1-3 or higher, this restart will happen automatically.</p></description><dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Gaetan Bisson</dc:creator><pubDate>Mon, 17 Feb 2020 01:35:04 +0000</pubDate><guid isPermaLink="false">tag:www.archlinux.org,2020-02-17:/news/sshd-needs-restarting-after-upgrading-to-openssh-82p1/</guid></item><item><title>rsync compatibility</title><link>https://www.archlinux.org/news/rsync-compatibility/</link><description><p>Our <code>rsync</code> package was shipped with bundled <code>zlib</code> to provide compatibility
|
||||
with the old-style <code>--compress</code> option up to version 3.1.0. Version 3.1.1 was
|
||||
released on 2014-06-22 and is shipped by all major distributions now.</p>
|
||||
<p>So we decided to finally drop the bundled library and ship a package with
|
||||
system <code>zlib</code>. This also fixes security issues, actual ones and in future. Go
|
||||
and blame those running old versions if you encounter errors with <code>rsync
|
||||
3.1.3-3</code>.</p></description><dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Christian Hesse</dc:creator><pubDate>Wed, 15 Jan 2020 20:14:43 +0000</pubDate><guid isPermaLink="false">tag:www.archlinux.org,2020-01-15:/news/rsync-compatibility/</guid></item><item><title>Now using Zstandard instead of xz for package compression</title><link>https://www.archlinux.org/news/now-using-zstandard-instead-of-xz-for-package-compression/</link><description><p>As announced on the <a href="https://lists.archlinux.org/pipermail/arch-dev-public/2019-December/029752.html">mailing list</a>, on Friday, Dec 27 2019, our package compression scheme has changed from xz (.pkg.tar.xz) to <a href="https://lists.archlinux.org/pipermail/arch-dev-public/2019-December/029778.html">zstd (.pkg.tar.zst)</a>.</p>
|
||||
<p>zstd and xz trade blows in their compression ratio. Recompressing all packages to zstd with our options yields a total ~0.8% increase in package size on all of our packages combined, but the decompression time for all packages saw a ~1300% speedup.</p>
|
||||
<p>We already have more than 545 zstd-compressed packages in our repositories, and as packages get updated more will keep rolling in. We have not found any user-facing issues as of yet, so things appear to be working.</p>
|
||||
<p>As a packager, you will automatically start building .pkg.tar.zst packages if you are using the latest version of devtools (&gt;= 20191227).<br />
|
||||
As an end-user no manual intervention is required, assuming that you have read and followed the news post <a href="https://www.archlinux.org/news/required-update-to-recent-libarchive/">from late last year</a>.</p>
|
||||
<p>If you nevertheless haven't updated libarchive since 2018, all hope is not lost! Binary builds of pacman-static are available from Eli Schwartz' <a href="https://wiki.archlinux.org/index.php/Unofficial_user_repositories#eschwartz">personal repository</a> (or direct link to <a href="https://pkgbuild.com/~eschwartz/repo/x86_64-extracted/">binary</a>), signed with their Trusted User keys, with which you can perform the update.</p></description><dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Robin Broda</dc:creator><pubDate>Sat, 04 Jan 2020 20:35:55 +0000</pubDate><guid isPermaLink="false">tag:www.archlinux.org,2020-01-04:/news/now-using-zstandard-instead-of-xz-for-package-compression/</guid></item><item><title>Xorg cleanup requires manual intervention</title><link>https://www.archlinux.org/news/xorg-cleanup-requires-manual-intervention/</link><description><p>In the process of <a href="https://bugs.archlinux.org/task/64892">Xorg cleanup</a> the update requires manual
|
||||
intervention when you hit this message:</p>
|
||||
<pre><code>:: installing xorgproto (2019.2-2) breaks dependency 'inputproto' required by lib32-libxi
|
||||
:: installing xorgproto (2019.2-2) breaks dependency 'dmxproto' required by libdmx
|
||||
:: installing xorgproto (2019.2-2) breaks dependency 'xf86dgaproto' required by libxxf86dga
|
||||
:: installing xorgproto (2019.2-2) breaks dependency 'xf86miscproto' required by libxxf86misc
|
||||
</code></pre>
|
||||
<p>when updating, use: <code>pacman -Rdd libdmx libxxf86dga libxxf86misc &amp;&amp; pacman -Syu</code> to perform the upgrade.</p></description><dc:creator xmlns:dc="http://purl.org/dc/elements/1.1/">Andreas Radke</dc:creator><pubDate>Fri, 20 Dec 2019 13:37:40 +0000</pubDate><guid isPermaLink="false">tag:www.archlinux.org,2019-12-20:/news/xorg-cleanup-requires-manual-intervention/</guid></item></channel></rss>
|
||||
`
|
||||
|
||||
// TestPrintNewsFeed runs PrintNewsFeed against a gock-mocked feed response
// and compares the captured logger output to the stored cupaloy snapshot for
// each flag combination (all/quiet/bottomUp).
func TestPrintNewsFeed(t *testing.T) {
	layout := "2006-01-02"
	str := "2020-04-13"
	lastNewsTime, _ := time.Parse(layout, str)

	type args struct {
		cutOffDate time.Time
		bottomUp   bool
		all        bool
		quiet      bool
	}
	tests := []struct {
		name    string
		args    args
		wantErr bool
	}{
		{name: "all-verbose", args: args{bottomUp: true, cutOffDate: time.Now(), all: true, quiet: false}, wantErr: false},
		{name: "all-quiet", args: args{bottomUp: true, cutOffDate: lastNewsTime, all: true, quiet: true}, wantErr: false},
		{name: "latest-quiet", args: args{bottomUp: true, cutOffDate: lastNewsTime, all: false, quiet: true}, wantErr: false},
		{name: "latest-quiet-topdown", args: args{bottomUp: false, cutOffDate: lastNewsTime, all: false, quiet: true}, wantErr: false},
	}
	// Pin the timezone so date formatting in the snapshots is stable.
	t.Setenv("TZ", "UTC")
	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			gock.New("https://archlinux.org").
				Get("/feeds/news").
				Reply(200).
				BodyString(sampleNews)

			defer gock.Off()

			// Capture the logger output through a pipe for snapshotting.
			r, w, _ := os.Pipe()
			logger := text.NewLogger(w, w, strings.NewReader(""), false, "logger")

			err := PrintNewsFeed(context.Background(), &http.Client{}, logger,
				tt.args.cutOffDate, tt.args.bottomUp, tt.args.all, tt.args.quiet)
			assert.NoError(t, err)

			w.Close()
			out, _ := io.ReadAll(r)
			cupaloy.SnapshotT(t, out)
		})
	}
}
|
||||
|
||||
// GIVEN last build time at 13h00
// WHEN there's a news posted at 18h00
// THEN it should still be printed
func TestPrintNewsFeedSameDay(t *testing.T) {
	str := "2020-04-14T13:04:05Z"
	lastNewsTime, _ := time.Parse(time.RFC3339, str)

	gock.New("https://archlinux.org").
		Get("/feeds/news").
		Reply(200).
		BodyString(lastNews)

	defer gock.Off()

	// Capture the logger output through a pipe for snapshotting.
	r, w, _ := os.Pipe()
	logger := text.NewLogger(w, w, strings.NewReader(""), false, "logger")

	// Cut-off is 13:04 on the 14th; the mocked entry is dated 16:30 the same
	// day, so it must still be printed.
	err := PrintNewsFeed(context.Background(), &http.Client{}, logger,
		lastNewsTime, true, false, false)
	assert.NoError(t, err)

	w.Close()
	out, _ := io.ReadAll(r)
	cupaloy.SnapshotT(t, out)
}
|
110
pkg/query/aur_warnings.go
Normal file
110
pkg/query/aur_warnings.go
Normal file
@ -0,0 +1,110 @@
|
||||
package query
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/aur"
|
||||
"github.com/Jguer/go-alpm/v2"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/db"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
// AURWarnings accumulates per-package warnings discovered while comparing
// locally installed foreign packages against their AUR counterparts.
type AURWarnings struct {
	Orphans    []string // packages with no AUR maintainer
	OutOfDate  []string // packages flagged out of date on the AUR
	Missing    []string // installed packages not found in the AUR at all
	LocalNewer []string // pre-formatted "local is newer than AUR" messages

	log *text.Logger // destination used by Print
}
|
||||
|
||||
func NewWarnings(logger *text.Logger) *AURWarnings {
|
||||
return &AURWarnings{log: logger}
|
||||
}
|
||||
|
||||
func (warnings *AURWarnings) AddToWarnings(remote map[string]alpm.IPackage, aurPkg *aur.Pkg) {
|
||||
name := aurPkg.Name
|
||||
pkg, ok := remote[name]
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
if aurPkg.Maintainer == "" && !pkg.ShouldIgnore() {
|
||||
warnings.Orphans = append(warnings.Orphans, name)
|
||||
}
|
||||
|
||||
if aurPkg.OutOfDate != 0 && !pkg.ShouldIgnore() {
|
||||
warnings.OutOfDate = append(warnings.OutOfDate, name)
|
||||
}
|
||||
|
||||
if !pkg.ShouldIgnore() && !isDevelPackage(pkg) && db.VerCmp(pkg.Version(), aurPkg.Version) > 0 {
|
||||
left, right := GetVersionDiff(pkg.Version(), aurPkg.Version)
|
||||
|
||||
newerMsg := gotext.Get("%s: local (%s) is newer than AUR (%s)",
|
||||
text.Cyan(name),
|
||||
left, right,
|
||||
)
|
||||
|
||||
warnings.LocalNewer = append(warnings.LocalNewer, newerMsg)
|
||||
}
|
||||
}
|
||||
|
||||
// CalculateMissing fills warnings.Missing with the names from remoteNames
// that have no AUR record in aurData.
//
// A "-debug" split package is only reported when its parent package is also
// absent from the AUR; ignored packages (pacman IgnorePkg) are skipped.
// NOTE(review): assumes every name in remoteNames has an entry in remote —
// a missing entry would make the ShouldIgnore call panic on a nil interface;
// confirm callers guarantee this.
func (warnings *AURWarnings) CalculateMissing(remoteNames []string,
	remote map[string]alpm.IPackage, aurData map[string]*aur.Pkg,
) {
	for _, name := range remoteNames {
		if _, ok := aurData[name]; !ok && !remote[name].ShouldIgnore() {
			if _, ok := aurData[strings.TrimSuffix(name, "-debug")]; !ok {
				warnings.Missing = append(warnings.Missing, name)
			}
		}
	}
}
|
||||
|
||||
func (warnings *AURWarnings) Print() {
|
||||
normalMissing, debugMissing := filterDebugPkgs(warnings.Missing)
|
||||
|
||||
if len(normalMissing) > 0 {
|
||||
warnings.log.Warnln(gotext.Get("Packages not in AUR:"), formatNames(normalMissing))
|
||||
}
|
||||
|
||||
if len(debugMissing) > 0 {
|
||||
warnings.log.Warnln(gotext.Get("Missing AUR Debug Packages:"), formatNames(debugMissing))
|
||||
}
|
||||
|
||||
if len(warnings.Orphans) > 0 {
|
||||
warnings.log.Warnln(gotext.Get("Orphan (unmaintained) AUR Packages:"), formatNames(warnings.Orphans))
|
||||
}
|
||||
|
||||
if len(warnings.OutOfDate) > 0 {
|
||||
warnings.log.Warnln(gotext.Get("Flagged Out Of Date AUR Packages:"), formatNames(warnings.OutOfDate))
|
||||
}
|
||||
|
||||
if len(warnings.LocalNewer) > 0 {
|
||||
for _, newer := range warnings.LocalNewer {
|
||||
warnings.log.Warnln(newer)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// filterDebugPkgs splits names into regular packages and "-debug" split
// packages, preserving the original order within each group. Both returned
// slices are non-nil, even for empty input.
func filterDebugPkgs(names []string) (normal, debug []string) {
	normal = make([]string, 0, len(names))
	debug = make([]string, 0, len(names))

	for _, pkgName := range names {
		switch {
		case strings.HasSuffix(pkgName, "-debug"):
			debug = append(debug, pkgName)
		default:
			normal = append(normal, pkgName)
		}
	}

	return normal, debug
}
|
||||
|
||||
func formatNames(names []string) string {
|
||||
return " " + text.Cyan(strings.Join(names, " "))
|
||||
}
|
21
pkg/query/errors.go
Normal file
21
pkg/query/errors.go
Normal file
@ -0,0 +1,21 @@
|
||||
package query
|
||||
|
||||
import (
|
||||
"github.com/leonelquinteros/gotext"
|
||||
)
|
||||
|
||||
// ErrAURSearch means that it was not possible to connect to the AUR.
type ErrAURSearch struct {
	inner error // underlying failure from the AUR request
}

// Error returns the translated message including the underlying cause.
// NOTE(review): the message starts with a capital and carries a trailing
// newline, which breaks Go error-string convention — but it is a gotext
// translation key, so changing it would invalidate existing translations.
func (e ErrAURSearch) Error() string {
	return gotext.Get("Error during AUR search: %s\n", e.inner.Error())
}
|
||||
|
||||
// ErrNoQuery means that query was not executed.
type ErrNoQuery struct{}

// Error returns the translated "no query was executed" message.
func (e ErrNoQuery) Error() string {
	return gotext.Get("no query was executed")
}
|
30
pkg/query/filter.go
Normal file
30
pkg/query/filter.go
Normal file
@ -0,0 +1,30 @@
|
||||
package query
|
||||
|
||||
import (
|
||||
"github.com/leonelquinteros/gotext"
|
||||
|
||||
"github.com/Jguer/yay/v12/pkg/settings/parser"
|
||||
"github.com/Jguer/yay/v12/pkg/text"
|
||||
)
|
||||
|
||||
func RemoveInvalidTargets(logger *text.Logger, targets []string, mode parser.TargetMode) []string {
|
||||
filteredTargets := make([]string, 0)
|
||||
|
||||
for _, target := range targets {
|
||||
dbName, _ := text.SplitDBFromName(target)
|
||||
|
||||
if dbName == "aur" && !mode.AtLeastAUR() {
|
||||
logger.Warnln(gotext.Get("%s: can't use target with option --repo -- skipping", text.Cyan(target)))
|
||||
continue
|
||||
}
|
||||
|
||||
if dbName != "aur" && dbName != "" && !mode.AtLeastRepo() {
|
||||
logger.Warnln(gotext.Get("%s: can't use target with option --aur -- skipping", text.Cyan(target)))
|
||||
continue
|
||||
}
|
||||
|
||||
filteredTargets = append(filteredTargets, target)
|
||||
}
|
||||
|
||||
return filteredTargets
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user