mirror of
https://github.com/zedeus/nitter.git
synced 2025-12-06 03:55:36 -05:00
Compare commits
14 Commits
3f3196d103
...
feature/em
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
45f34c2da1 | ||
|
|
53edbbc4e9 | ||
|
|
5b4a3fe691 | ||
|
|
f8a17fdaa5 | ||
|
|
b0d9c1d51a | ||
|
|
78d788b27f | ||
|
|
824a7e346a | ||
|
|
e8de18317e | ||
|
|
6b655cddd8 | ||
|
|
886f2d2a45 | ||
|
|
bb6eb81a20 | ||
|
|
0bb0b7e78c | ||
|
|
a666c4867c | ||
|
|
778eb35ee3 |
2
.github/workflows/run-tests.yml
vendored
2
.github/workflows/run-tests.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
runs-on: buildjet-2vcpu-ubuntu-2204
|
||||
strategy:
|
||||
matrix:
|
||||
nim: ["1.6.x", "2.0.x", "2.2.x", "devel"]
|
||||
nim: ["2.0.x", "2.2.x", "devel"]
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
@@ -10,7 +10,7 @@ bin = @["nitter"]
|
||||
|
||||
# Dependencies
|
||||
|
||||
requires "nim >= 1.6.10"
|
||||
requires "nim >= 2.0.0"
|
||||
requires "jester#baca3f"
|
||||
requires "karax#5cf360c"
|
||||
requires "sass#7dfdd03"
|
||||
|
||||
44
public/css/fontello.css
vendored
44
public/css/fontello.css
vendored
@@ -1,16 +1,15 @@
|
||||
@font-face {
|
||||
font-family: 'fontello';
|
||||
src: url('/fonts/fontello.eot?21002321');
|
||||
src: url('/fonts/fontello.eot?21002321#iefix') format('embedded-opentype'),
|
||||
url('/fonts/fontello.woff2?21002321') format('woff2'),
|
||||
url('/fonts/fontello.woff?21002321') format('woff'),
|
||||
url('/fonts/fontello.ttf?21002321') format('truetype'),
|
||||
url('/fonts/fontello.svg?21002321#fontello') format('svg');
|
||||
src: url('/fonts/fontello.eot?61663884');
|
||||
src: url('/fonts/fontello.eot?61663884#iefix') format('embedded-opentype'),
|
||||
url('/fonts/fontello.woff2?61663884') format('woff2'),
|
||||
url('/fonts/fontello.woff?61663884') format('woff'),
|
||||
url('/fonts/fontello.ttf?61663884') format('truetype'),
|
||||
url('/fonts/fontello.svg?61663884#fontello') format('svg');
|
||||
font-weight: normal;
|
||||
font-style: normal;
|
||||
}
|
||||
|
||||
[class^="icon-"]:before, [class*=" icon-"]:before {
|
||||
[class^="icon-"]:before, [class*=" icon-"]:before {
|
||||
font-family: "fontello";
|
||||
font-style: normal;
|
||||
font-weight: normal;
|
||||
@@ -32,22 +31,23 @@
|
||||
-webkit-font-smoothing: antialiased;
|
||||
-moz-osx-font-smoothing: grayscale;
|
||||
}
|
||||
|
||||
.icon-heart:before { content: '\2665'; } /* '♥' */
|
||||
.icon-quote:before { content: '\275e'; } /* '❞' */
|
||||
.icon-comment:before { content: '\e802'; } /* '' */
|
||||
.icon-ok:before { content: '\e803'; } /* '' */
|
||||
.icon-play:before { content: '\e804'; } /* '' */
|
||||
.icon-link:before { content: '\e805'; } /* '' */
|
||||
.icon-calendar:before { content: '\e806'; } /* '' */
|
||||
.icon-location:before { content: '\e807'; } /* '' */
|
||||
|
||||
.icon-views:before { content: '\e800'; } /* '' */
|
||||
.icon-heart:before { content: '\e801'; } /* '' */
|
||||
.icon-quote:before { content: '\e802'; } /* '' */
|
||||
.icon-comment:before { content: '\e803'; } /* '' */
|
||||
.icon-ok:before { content: '\e804'; } /* '' */
|
||||
.icon-play:before { content: '\e805'; } /* '' */
|
||||
.icon-link:before { content: '\e806'; } /* '' */
|
||||
.icon-calendar:before { content: '\e807'; } /* '' */
|
||||
.icon-location:before { content: '\e808'; } /* '' */
|
||||
.icon-picture:before { content: '\e809'; } /* '' */
|
||||
.icon-lock:before { content: '\e80a'; } /* '' */
|
||||
.icon-down:before { content: '\e80b'; } /* '' */
|
||||
.icon-retweet:before { content: '\e80d'; } /* '' */
|
||||
.icon-search:before { content: '\e80e'; } /* '' */
|
||||
.icon-pin:before { content: '\e80f'; } /* '' */
|
||||
.icon-cog:before { content: '\e812'; } /* '' */
|
||||
.icon-rss-feed:before { content: '\e813'; } /* '' */
|
||||
.icon-retweet:before { content: '\e80c'; } /* '' */
|
||||
.icon-search:before { content: '\e80d'; } /* '' */
|
||||
.icon-pin:before { content: '\e80e'; } /* '' */
|
||||
.icon-cog:before { content: '\e80f'; } /* '' */
|
||||
.icon-rss:before { content: '\e810'; } /* '' */
|
||||
.icon-info:before { content: '\f128'; } /* '' */
|
||||
.icon-bird:before { content: '\f309'; } /* '' */
|
||||
|
||||
@@ -1,6 +1,15 @@
|
||||
Font license info
|
||||
|
||||
|
||||
## Modern Pictograms
|
||||
|
||||
Copyright (c) 2012 by John Caserta. All rights reserved.
|
||||
|
||||
Author: John Caserta
|
||||
License: SIL (http://scripts.sil.org/OFL)
|
||||
Homepage: http://thedesignoffice.org/project/modern-pictograms/
|
||||
|
||||
|
||||
## Entypo
|
||||
|
||||
Copyright (C) 2012 by Daniel Bruce
|
||||
@@ -37,12 +46,3 @@ Font license info
|
||||
Homepage: http://aristeides.com/
|
||||
|
||||
|
||||
## Modern Pictograms
|
||||
|
||||
Copyright (c) 2012 by John Caserta. All rights reserved.
|
||||
|
||||
Author: John Caserta
|
||||
License: SIL (http://scripts.sil.org/OFL)
|
||||
Homepage: http://thedesignoffice.org/project/modern-pictograms/
|
||||
|
||||
|
||||
|
||||
Binary file not shown.
@@ -1,26 +1,28 @@
|
||||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg xmlns="http://www.w3.org/2000/svg">
|
||||
<metadata>Copyright (C) 2020 by original authors @ fontello.com</metadata>
|
||||
<metadata>Copyright (C) 2025 by original authors @ fontello.com</metadata>
|
||||
<defs>
|
||||
<font id="fontello" horiz-adv-x="1000" >
|
||||
<font-face font-family="fontello" font-weight="400" font-stretch="normal" units-per-em="1000" ascent="850" descent="-150" />
|
||||
<missing-glyph horiz-adv-x="1000" />
|
||||
<glyph glyph-name="heart" unicode="♥" d="M790 644q70-64 70-156t-70-158l-360-330-360 330q-70 66-70 158t70 156q62 58 151 58t153-58l56-52 58 52q62 58 150 58t152-58z" horiz-adv-x="860" />
|
||||
<glyph glyph-name="views" unicode="" d="M180 516l0-538-180 0 0 538 180 0z m250-138l0-400-180 0 0 400 180 0z m250 344l0-744-180 0 0 744 180 0z" horiz-adv-x="680" />
|
||||
|
||||
<glyph glyph-name="quote" unicode="❞" d="M18 685l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z m558 0l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z" horiz-adv-x="928" />
|
||||
<glyph glyph-name="heart" unicode="" d="M790 644q70-64 70-156t-70-158l-360-330-360 330q-70 66-70 158t70 156q62 58 151 58t153-58l56-52 58 52q62 58 150 58t152-58z" horiz-adv-x="860" />
|
||||
|
||||
<glyph glyph-name="comment" unicode="" d="M1000 350q0-97-67-179t-182-130-251-48q-39 0-81 4-110-97-257-135-27-8-63-12-10-1-17 5t-10 16v1q-2 2 0 6t1 6 2 5l4 5t4 5 4 5q4 5 17 19t20 22 17 22 18 28 15 33 15 42q-88 50-138 123t-51 157q0 73 40 139t106 114 160 76 194 28q136 0 251-48t182-130 67-179z" horiz-adv-x="1000" />
|
||||
<glyph glyph-name="quote" unicode="" d="M18 685l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z m558 0l335 0 0-334q0-140-98-238t-237-97l0 111q92 0 158 65t65 159l-223 0 0 334z" horiz-adv-x="928" />
|
||||
|
||||
<glyph glyph-name="ok" unicode="" d="M0 260l162 162 166-164 508 510 164-164-510-510-162-162-162 164z" horiz-adv-x="1000" />
|
||||
<glyph glyph-name="comment" unicode="" d="M1000 350q0-97-67-179t-182-130-251-48q-39 0-81 4-110-97-257-135-27-8-63-12-10-1-17 5t-10 16v1q-2 2 0 6t1 6 2 5l4 5t4 5 4 5q4 5 17 19t20 22 17 22 18 28 15 33 15 42q-88 50-138 123t-51 157q0 73 40 139t106 114 160 76 194 28q136 0 251-48t182-130 67-179z" horiz-adv-x="1000" />
|
||||
|
||||
<glyph glyph-name="play" unicode="" d="M772 333l-741-412q-13-7-22-2t-9 20v822q0 14 9 20t22-2l741-412q13-7 13-17t-13-17z" horiz-adv-x="785.7" />
|
||||
<glyph glyph-name="ok" unicode="" d="M0 260l162 162 166-164 508 510 164-164-510-510-162-162-162 164z" horiz-adv-x="1000" />
|
||||
|
||||
<glyph glyph-name="link" unicode="" d="M294 116q14 14 34 14t36-14q32-34 0-70l-42-40q-56-56-132-56-78 0-134 56t-56 132q0 78 56 134l148 148q70 68 144 77t128-43q16-16 16-36t-16-36q-36-32-70 0-50 48-132-34l-148-146q-26-26-26-64t26-62q26-26 63-26t63 26z m450 574q56-56 56-132 0-78-56-134l-158-158q-74-72-150-72-62 0-112 50-14 14-14 34t14 36q14 14 35 14t35-14q50-48 122 24l158 156q28 28 28 64 0 38-28 62-24 26-56 31t-60-21l-50-50q-16-14-36-14t-34 14q-34 34 0 70l50 50q54 54 127 51t129-61z" horiz-adv-x="800" />
|
||||
<glyph glyph-name="play" unicode="" d="M772 333l-741-412q-13-7-22-2t-9 20v822q0 14 9 20t22-2l741-412q13-7 13-17t-13-17z" horiz-adv-x="785.7" />
|
||||
|
||||
<glyph glyph-name="calendar" unicode="" d="M800 700q42 0 71-29t29-71l0-600q0-40-29-70t-71-30l-700 0q-40 0-70 30t-30 70l0 600q0 42 30 71t70 29l46 0 0-100 160 0 0 100 290 0 0-100 160 0 0 100 44 0z m0-700l0 400-700 0 0-400 700 0z m-540 800l0-170-70 0 0 170 70 0z m450 0l0-170-70 0 0 170 70 0z" horiz-adv-x="900" />
|
||||
<glyph glyph-name="link" unicode="" d="M294 116q14 14 34 14t36-14q32-34 0-70l-42-40q-56-56-132-56-78 0-134 56t-56 132q0 78 56 134l148 148q70 68 144 77t128-43q16-16 16-36t-16-36q-36-32-70 0-50 48-132-34l-148-146q-26-26-26-64t26-62q26-26 63-26t63 26z m450 574q56-56 56-132 0-78-56-134l-158-158q-74-72-150-72-62 0-112 50-14 14-14 34t14 36q14 14 35 14t35-14q50-48 122 24l158 156q28 28 28 64 0 38-28 62-24 26-56 31t-60-21l-50-50q-16-14-36-14t-34 14q-34 34 0 70l50 50q54 54 127 51t129-61z" horiz-adv-x="800" />
|
||||
|
||||
<glyph glyph-name="location" unicode="" d="M250 750q104 0 177-73t73-177q0-106-62-243t-126-223l-62-84q-10 12-27 35t-60 89-76 130-60 147-27 149q0 104 73 177t177 73z m0-388q56 0 96 40t40 96-40 95-96 39-95-39-39-95 39-96 95-40z" horiz-adv-x="500" />
|
||||
<glyph glyph-name="calendar" unicode="" d="M800 700q42 0 71-29t29-71l0-600q0-40-29-70t-71-30l-700 0q-40 0-70 30t-30 70l0 600q0 42 30 71t70 29l46 0 0-100 160 0 0 100 290 0 0-100 160 0 0 100 44 0z m0-700l0 400-700 0 0-400 700 0z m-540 800l0-170-70 0 0 170 70 0z m450 0l0-170-70 0 0 170 70 0z" horiz-adv-x="900" />
|
||||
|
||||
<glyph glyph-name="location" unicode="" d="M250 750q104 0 177-73t73-177q0-106-62-243t-126-223l-62-84q-10 12-27 35t-60 89-76 130-60 147-27 149q0 104 73 177t177 73z m0-388q56 0 96 40t40 96-40 95-96 39-95-39-39-95 39-96 95-40z" horiz-adv-x="500" />
|
||||
|
||||
<glyph glyph-name="picture" unicode="" d="M357 529q0-45-31-76t-76-32-76 32-31 76 31 76 76 31 76-31 31-76z m572-215v-250h-786v107l178 179 90-89 285 285z m53 393h-893q-7 0-12-5t-6-13v-678q0-7 6-13t12-5h893q7 0 13 5t5 13v678q0 8-5 13t-13 5z m89-18v-678q0-37-26-63t-63-27h-893q-36 0-63 27t-26 63v678q0 37 26 63t63 27h893q37 0 63-27t26-63z" horiz-adv-x="1071.4" />
|
||||
|
||||
@@ -28,19 +30,19 @@
|
||||
|
||||
<glyph glyph-name="down" unicode="" d="M939 399l-414-413q-10-11-25-11t-25 11l-414 413q-11 11-11 26t11 25l93 92q10 11 25 11t25-11l296-296 296 296q11 11 25 11t26-11l92-92q11-11 11-25t-11-26z" horiz-adv-x="1000" />
|
||||
|
||||
<glyph glyph-name="retweet" unicode="" d="M714 11q0-7-5-13t-13-5h-535q-5 0-8 1t-5 4-3 4-2 7 0 6v335h-107q-15 0-25 11t-11 25q0 13 8 23l179 214q11 12 27 12t28-12l178-214q9-10 9-23 0-15-11-25t-25-11h-107v-214h321q9 0 14-6l89-108q4-5 4-11z m357 232q0-13-8-23l-178-214q-12-13-28-13t-27 13l-179 214q-8 10-8 23 0 14 11 25t25 11h107v214h-322q-9 0-14 7l-89 107q-4 5-4 11 0 7 5 12t13 6h536q4 0 7-1t5-4 3-5 2-6 1-7v-334h107q14 0 25-11t10-25z" horiz-adv-x="1071.4" />
|
||||
<glyph glyph-name="retweet" unicode="" d="M714 11q0-7-5-13t-13-5h-535q-5 0-8 1t-5 4-3 4-2 7 0 6v335h-107q-15 0-25 11t-11 25q0 13 8 23l179 214q11 12 27 12t28-12l178-214q9-10 9-23 0-15-11-25t-25-11h-107v-214h321q9 0 14-6l89-108q4-5 4-11z m357 232q0-13-8-23l-178-214q-12-13-28-13t-27 13l-179 214q-8 10-8 23 0 14 11 25t25 11h107v214h-322q-9 0-14 7l-89 107q-4 5-4 11 0 7 5 12t13 6h536q4 0 7-1t5-4 3-5 2-6 1-7v-334h107q14 0 25-11t10-25z" horiz-adv-x="1071.4" />
|
||||
|
||||
<glyph glyph-name="search" unicode="" d="M772 78q30-34 6-62l-46-46q-36-32-68 0l-190 190q-74-42-156-42-128 0-223 95t-95 223 90 219 218 91 224-95 96-223q0-88-46-162z m-678 358q0-88 68-156t156-68 151 63 63 153q0 88-68 155t-156 67-151-63-63-151z" horiz-adv-x="789" />
|
||||
<glyph glyph-name="search" unicode="" d="M772 78q30-34 6-62l-46-46q-36-32-68 0l-190 190q-74-42-156-42-128 0-223 95t-95 223 90 219 218 91 224-95 96-223q0-88-46-162z m-678 358q0-88 68-156t156-68 151 63 63 153q0 88-68 155t-156 67-151-63-63-151z" horiz-adv-x="789" />
|
||||
|
||||
<glyph glyph-name="pin" unicode="" d="M268 368v250q0 8-5 13t-13 5-13-5-5-13v-250q0-8 5-13t13-5 13 5 5 13z m375-197q0-14-11-25t-25-10h-239l-29-270q-1-7-6-11t-11-5h-1q-15 0-17 15l-43 271h-225q-15 0-25 10t-11 25q0 69 44 124t99 55v286q-29 0-50 21t-22 50 22 50 50 22h357q29 0 50-22t21-50-21-50-50-21v-286q55 0 99-55t44-124z" horiz-adv-x="642.9" />
|
||||
<glyph glyph-name="pin" unicode="" d="M268 368v250q0 8-5 13t-13 5-13-5-5-13v-250q0-8 5-13t13-5 13 5 5 13z m375-197q0-14-11-25t-25-10h-239l-29-270q-1-7-6-11t-11-5h-1q-15 0-17 15l-43 271h-225q-15 0-25 10t-11 25q0 69 44 124t99 55v286q-29 0-50 21t-22 50 22 50 50 22h357q29 0 50-22t21-50-21-50-50-21v-286q55 0 99-55t44-124z" horiz-adv-x="642.9" />
|
||||
|
||||
<glyph glyph-name="cog" unicode="" d="M911 295l-133-56q-8-22-12-31l55-133-79-79-135 53q-9-4-31-12l-55-134-112 0-56 133q-11 4-33 13l-132-55-78 79 53 134q-1 3-4 9t-6 12-4 11l-131 55 0 112 131 56 14 33-54 132 78 79 133-54q22 9 33 13l55 132 112 0 56-132q14-5 31-13l133 55 80-79-54-135q6-12 12-30l133-56 0-112z m-447-111q69 0 118 48t49 118-49 119-118 50-119-50-49-119 49-118 119-48z" horiz-adv-x="928" />
|
||||
<glyph glyph-name="cog" unicode="" d="M911 295l-133-56q-8-22-12-31l55-133-79-79-135 53q-9-4-31-12l-55-134-112 0-56 133q-11 4-33 13l-132-55-78 79 53 134q-1 3-4 9t-6 12-4 11l-131 55 0 112 131 56 14 33-54 132 78 79 133-54q22 9 33 13l55 132 112 0 56-132q14-5 31-13l133 55 80-79-54-135q6-12 12-30l133-56 0-112z m-447-111q69 0 118 48t49 118-49 119-118 50-119-50-49-119 49-118 119-48z" horiz-adv-x="928" />
|
||||
|
||||
<glyph glyph-name="rss-feed" unicode="" d="M184 93c0-51-43-91-93-91s-91 40-91 91c0 50 41 91 91 91s93-41 93-91z m261-85l-125 0c0 174-140 323-315 323l0 118c231 0 440-163 440-441z m259 0l-136 0c0 300-262 561-563 561l0 129c370 0 699-281 699-690z" horiz-adv-x="704" />
|
||||
<glyph glyph-name="rss" unicode="" d="M184 93c0-51-43-91-93-91s-91 40-91 91c0 50 41 91 91 91s93-41 93-91z m261-85l-125 0c0 174-140 323-315 323l0 118c231 0 440-163 440-441z m259 0l-136 0c0 300-262 561-563 561l0 129c370 0 699-281 699-690z" horiz-adv-x="704" />
|
||||
|
||||
<glyph glyph-name="info" unicode="" d="M393 149v-134q0-9-7-15t-15-7h-134q-9 0-16 7t-7 15v134q0 9 7 16t16 6h134q9 0 15-6t7-16z m176 335q0-30-8-56t-20-43-31-33-32-25-34-19q-23-13-38-37t-15-37q0-10-7-18t-16-9h-134q-8 0-14 11t-6 20v26q0 46 37 87t79 60q33 16 47 32t14 42q0 24-26 41t-60 18q-36 0-60-16-20-14-60-64-7-9-17-9-7 0-14 4l-91 70q-8 6-9 14t3 16q89 148 259 148 45 0 90-17t81-46 59-72 23-88z" horiz-adv-x="571.4" />
|
||||
|
||||
<glyph glyph-name="bird" unicode="" d="M920 636q-36-54-94-98l0-24q0-130-60-250t-186-203-290-83q-160 0-290 84 14-2 46-2 132 0 234 80-62 2-110 38t-66 94q10-4 34-4 26 0 50 6-66 14-108 66t-42 120l0 2q36-20 84-24-84 58-84 158 0 48 26 94 154-188 390-196-6 18-6 42 0 78 55 133t135 55q82 0 136-58 60 12 120 44-20-66-82-104 56 8 108 30z" horiz-adv-x="920" />
|
||||
</font>
|
||||
</defs>
|
||||
</svg>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 5.9 KiB After Width: | Height: | Size: 6.1 KiB |
Binary file not shown.
Binary file not shown.
Binary file not shown.
85
src/api.nim
85
src/api.nim
@@ -1,45 +1,77 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
import asyncdispatch, httpclient, uri, strutils, sequtils, sugar
|
||||
import asyncdispatch, httpclient, uri, strutils, sequtils, sugar, tables
|
||||
import packedjson
|
||||
import types, query, formatters, consts, apiutils, parser
|
||||
import experimental/parser as newParser
|
||||
|
||||
# Helper to generate params object for GraphQL requests
|
||||
proc genParams(variables: string; fieldToggles = ""): seq[(string, string)] =
|
||||
result.add ("variables", variables)
|
||||
result.add ("features", gqlFeatures)
|
||||
if fieldToggles.len > 0:
|
||||
result.add ("fieldToggles", fieldToggles)
|
||||
|
||||
proc mediaUrl(id: string; cursor: string): SessionAwareUrl =
|
||||
let
|
||||
cookieVariables = userMediaVariables % [id, cursor]
|
||||
oauthVariables = userTweetsVariables % [id, cursor]
|
||||
oauthVariables = restIdVariables % [id, cursor]
|
||||
result = SessionAwareUrl(
|
||||
cookieUrl: graphUserMedia ? {"variables": cookieVariables, "features": gqlFeatures},
|
||||
oauthUrl: graphUserMediaV2 ? {"variables": oauthVariables, "features": gqlFeatures}
|
||||
cookieUrl: graphUserMedia ? genParams(cookieVariables),
|
||||
oauthUrl: graphUserMediaV2 ? genParams(oauthVariables)
|
||||
)
|
||||
|
||||
proc userTweetsUrl(id: string; cursor: string): SessionAwareUrl =
|
||||
let
|
||||
cookieVariables = userTweetsVariables % [id, cursor]
|
||||
oauthVariables = restIdVariables % [id, cursor]
|
||||
result = SessionAwareUrl(
|
||||
# cookieUrl: graphUserTweets ? genParams(cookieVariables, fieldToggles),
|
||||
oauthUrl: graphUserTweetsV2 ? genParams(oauthVariables)
|
||||
)
|
||||
# might change this in the future pending testing
|
||||
result.cookieUrl = result.oauthUrl
|
||||
|
||||
proc userTweetsAndRepliesUrl(id: string; cursor: string): SessionAwareUrl =
|
||||
let
|
||||
cookieVariables = userTweetsAndRepliesVariables % [id, cursor]
|
||||
oauthVariables = restIdVariables % [id, cursor]
|
||||
result = SessionAwareUrl(
|
||||
cookieUrl: graphUserTweetsAndReplies ? genParams(cookieVariables, fieldToggles),
|
||||
oauthUrl: graphUserTweetsAndRepliesV2 ? genParams(oauthVariables)
|
||||
)
|
||||
|
||||
proc tweetDetailUrl(id: string; cursor: string): SessionAwareUrl =
|
||||
let
|
||||
cookieVariables = tweetDetailVariables % [id, cursor]
|
||||
oauthVariables = tweetVariables % [id, cursor]
|
||||
result = SessionAwareUrl(
|
||||
cookieUrl: graphTweetDetail ? genParams(cookieVariables, tweetDetailFieldToggles),
|
||||
oauthUrl: graphTweet ? genParams(oauthVariables)
|
||||
)
|
||||
|
||||
proc getGraphUser*(username: string): Future[User] {.async.} =
|
||||
if username.len == 0: return
|
||||
let
|
||||
variables = """{"screen_name": "$1"}""" % username
|
||||
params = {"variables": variables, "features": gqlFeatures}
|
||||
js = await fetchRaw(graphUser ? params, Api.userScreenName)
|
||||
url = graphUser ? genParams("""{"screen_name": "$1"}""" % username)
|
||||
js = await fetchRaw(url, Api.userScreenName)
|
||||
result = parseGraphUser(js)
|
||||
|
||||
proc getGraphUserById*(id: string): Future[User] {.async.} =
|
||||
if id.len == 0 or id.any(c => not c.isDigit): return
|
||||
let
|
||||
variables = """{"rest_id": "$1"}""" % id
|
||||
params = {"variables": variables, "features": gqlFeatures}
|
||||
js = await fetchRaw(graphUserById ? params, Api.userRestId)
|
||||
url = graphUserById ? genParams("""{"rest_id": "$1"}""" % id)
|
||||
js = await fetchRaw(url, Api.userRestId)
|
||||
result = parseGraphUser(js)
|
||||
|
||||
proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profile] {.async.} =
|
||||
if id.len == 0: return
|
||||
let
|
||||
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
|
||||
variables = userTweetsVariables % [id, cursor]
|
||||
params = {"variables": variables, "features": gqlFeatures}
|
||||
js = case kind
|
||||
of TimelineKind.tweets:
|
||||
await fetch(graphUserTweets ? params, Api.userTweets)
|
||||
await fetch(userTweetsUrl(id, cursor), Api.userTweets)
|
||||
of TimelineKind.replies:
|
||||
await fetch(graphUserTweetsAndReplies ? params, Api.userTweetsAndReplies)
|
||||
await fetch(userTweetsAndRepliesUrl(id, cursor), Api.userTweetsAndReplies)
|
||||
of TimelineKind.media:
|
||||
await fetch(mediaUrl(id, cursor), Api.userMedia)
|
||||
result = parseGraphTimeline(js, after)
|
||||
@@ -48,23 +80,18 @@ proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
|
||||
if id.len == 0: return
|
||||
let
|
||||
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
|
||||
variables = listTweetsVariables % [id, cursor]
|
||||
params = {"variables": variables, "features": gqlFeatures}
|
||||
js = await fetch(graphListTweets ? params, Api.listTweets)
|
||||
result = parseGraphTimeline(js, after).tweets
|
||||
url = graphListTweets ? genParams(restIdVariables % [id, cursor])
|
||||
result = parseGraphTimeline(await fetch(url, Api.listTweets), after).tweets
|
||||
|
||||
proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
|
||||
let
|
||||
variables = %*{"screenName": name, "listSlug": list}
|
||||
params = {"variables": $variables, "features": gqlFeatures}
|
||||
url = graphListBySlug ? params
|
||||
url = graphListBySlug ? genParams($variables)
|
||||
result = parseGraphList(await fetch(url, Api.listBySlug))
|
||||
|
||||
proc getGraphList*(id: string): Future[List] {.async.} =
|
||||
let
|
||||
variables = """{"listId": "$1"}""" % id
|
||||
params = {"variables": variables, "features": gqlFeatures}
|
||||
url = graphListById ? params
|
||||
url = graphListById ? genParams("""{"listId": "$1"}""" % id)
|
||||
result = parseGraphList(await fetch(url, Api.list))
|
||||
|
||||
proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.} =
|
||||
@@ -79,7 +106,7 @@ proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.}
|
||||
}
|
||||
if after.len > 0:
|
||||
variables["cursor"] = % after
|
||||
let url = graphListMembers ? {"variables": $variables, "features": gqlFeatures}
|
||||
let url = graphListMembers ? genParams($variables)
|
||||
result = parseGraphListMembers(await fetchRaw(url, Api.listMembers), after)
|
||||
|
||||
proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} =
|
||||
@@ -94,9 +121,7 @@ proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} =
|
||||
if id.len == 0: return
|
||||
let
|
||||
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
|
||||
variables = tweetVariables % [id, cursor]
|
||||
params = {"variables": variables, "features": gqlFeatures}
|
||||
js = await fetch(graphTweet ? params, Api.tweetDetail)
|
||||
js = await fetch(tweetDetailUrl(id, cursor), Api.tweetDetail)
|
||||
result = parseGraphConversation(js, id)
|
||||
|
||||
proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} =
|
||||
@@ -116,6 +141,7 @@ proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
|
||||
var
|
||||
variables = %*{
|
||||
"rawQuery": q,
|
||||
"query_source": "typedQuery",
|
||||
"count": 20,
|
||||
"product": "Latest",
|
||||
"withDownvotePerspective": false,
|
||||
@@ -124,7 +150,7 @@ proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
|
||||
}
|
||||
if after.len > 0:
|
||||
variables["cursor"] = % after
|
||||
let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
|
||||
let url = graphSearchTimeline ? genParams($variables)
|
||||
result = parseGraphSearch[Tweets](await fetch(url, Api.search), after)
|
||||
result.query = query
|
||||
|
||||
@@ -135,6 +161,7 @@ proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.}
|
||||
var
|
||||
variables = %*{
|
||||
"rawQuery": query.text,
|
||||
"query_source": "typedQuery",
|
||||
"count": 20,
|
||||
"product": "People",
|
||||
"withDownvotePerspective": false,
|
||||
@@ -145,7 +172,7 @@ proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.}
|
||||
variables["cursor"] = % after
|
||||
result.beginning = false
|
||||
|
||||
let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
|
||||
let url = graphSearchTimeline ? genParams($variables)
|
||||
result = parseGraphSearch[User](await fetch(url, Api.search), after)
|
||||
result.query = query
|
||||
|
||||
|
||||
@@ -60,11 +60,11 @@ proc getAndValidateSession*(api: Api): Future[Session] {.async.} =
|
||||
case result.kind
|
||||
of SessionKind.oauth:
|
||||
if result.oauthToken.len == 0:
|
||||
echo "[sessions] Empty oauth token, session: ", result.id
|
||||
echo "[sessions] Empty oauth token, session: ", result.pretty
|
||||
raise rateLimitError()
|
||||
of SessionKind.cookie:
|
||||
if result.authToken.len == 0 or result.ct0.len == 0:
|
||||
echo "[sessions] Empty cookie credentials, session: ", result.id
|
||||
echo "[sessions] Empty cookie credentials, session: ", result.pretty
|
||||
raise rateLimitError()
|
||||
|
||||
template fetchImpl(result, fetchBody) {.dirty.} =
|
||||
@@ -107,7 +107,7 @@ template fetchImpl(result, fetchBody) {.dirty.} =
|
||||
setLimited(session, api)
|
||||
raise rateLimitError()
|
||||
elif result.startsWith("429 Too Many Requests"):
|
||||
echo "[sessions] 429 error, API: ", api, ", session: ", session.id
|
||||
echo "[sessions] 429 error, API: ", api, ", session: ", session.pretty
|
||||
session.apis[api].remaining = 0
|
||||
# rate limit hit, resets after the 15 minute window
|
||||
raise rateLimitError()
|
||||
@@ -124,8 +124,8 @@ template fetchImpl(result, fetchBody) {.dirty.} =
|
||||
except OSError as e:
|
||||
raise e
|
||||
except Exception as e:
|
||||
let id = if session.isNil: "null" else: $session.id
|
||||
echo "error: ", e.name, ", msg: ", e.msg, ", sessionId: ", id, ", url: ", url
|
||||
let s = session.pretty
|
||||
echo "error: ", e.name, ", msg: ", e.msg, ", session: ", s, ", url: ", url
|
||||
raise rateLimitError()
|
||||
finally:
|
||||
release(session)
|
||||
|
||||
37
src/auth.nim
37
src/auth.nim
@@ -7,20 +7,6 @@ import experimental/parser/session
|
||||
const
|
||||
maxConcurrentReqs = 2
|
||||
hourInSeconds = 60 * 60
|
||||
apiMaxReqs: Table[Api, int] = {
|
||||
Api.search: 50,
|
||||
Api.tweetDetail: 500,
|
||||
Api.userTweets: 500,
|
||||
Api.userTweetsAndReplies: 500,
|
||||
Api.userMedia: 500,
|
||||
Api.userRestId: 500,
|
||||
Api.userScreenName: 500,
|
||||
Api.tweetResult: 500,
|
||||
Api.list: 500,
|
||||
Api.listTweets: 500,
|
||||
Api.listMembers: 500,
|
||||
Api.listBySlug: 500
|
||||
}.toTable
|
||||
|
||||
var
|
||||
sessionPool: seq[Session]
|
||||
@@ -29,6 +15,20 @@ var
|
||||
template log(str: varargs[string, `$`]) =
|
||||
echo "[sessions] ", str.join("")
|
||||
|
||||
proc pretty*(session: Session): string =
|
||||
if session.isNil:
|
||||
return "<null>"
|
||||
|
||||
if session.id > 0 and session.username.len > 0:
|
||||
result = $session.id & " (" & session.username & ")"
|
||||
elif session.username.len > 0:
|
||||
result = session.username
|
||||
elif session.id > 0:
|
||||
result = $session.id
|
||||
else:
|
||||
result = "<unknown>"
|
||||
result = $session.kind & " " & result
|
||||
|
||||
proc snowflakeToEpoch(flake: int64): int64 =
|
||||
int64(((flake shr 22) + 1288834974657) div 1000)
|
||||
|
||||
@@ -57,8 +57,7 @@ proc getSessionPoolHealth*(): JsonNode =
|
||||
for api in session.apis.keys:
|
||||
let
|
||||
apiStatus = session.apis[api]
|
||||
limit = if apiStatus.limit > 0: apiStatus.limit else: apiMaxReqs.getOrDefault(api, 0)
|
||||
reqs = limit - apiStatus.remaining
|
||||
reqs = apiStatus.limit - apiStatus.remaining
|
||||
|
||||
# no requests made with this session and endpoint since the limit reset
|
||||
if apiStatus.reset < now:
|
||||
@@ -130,7 +129,7 @@ proc isLimited(session: Session; api: Api): bool =
|
||||
if session.limited and api != Api.userTweets:
|
||||
if (epochTime().int - session.limitedAt) > hourInSeconds:
|
||||
session.limited = false
|
||||
log "resetting limit: ", session.id
|
||||
log "resetting limit: ", session.pretty
|
||||
return false
|
||||
else:
|
||||
return true
|
||||
@@ -146,7 +145,7 @@ proc isReady(session: Session; api: Api): bool =
|
||||
|
||||
proc invalidate*(session: var Session) =
|
||||
if session.isNil: return
|
||||
log "invalidating: ", session.id
|
||||
log "invalidating: ", session.pretty
|
||||
|
||||
# TODO: This isn't sufficient, but it works for now
|
||||
let idx = sessionPool.find(session)
|
||||
@@ -171,7 +170,7 @@ proc getSession*(api: Api): Future[Session] {.async.} =
|
||||
proc setLimited*(session: Session; api: Api) =
|
||||
session.limited = true
|
||||
session.limitedAt = epochTime().int
|
||||
log "rate limited by api: ", api, ", reqs left: ", session.apis[api].remaining, ", id: ", session.id
|
||||
log "rate limited by api: ", api, ", reqs left: ", session.apis[api].remaining, ", ", session.pretty
|
||||
|
||||
proc setRateLimit*(session: Session; api: Api; remaining, reset, limit: int) =
|
||||
# avoid undefined behavior in race conditions
|
||||
|
||||
@@ -7,23 +7,29 @@ const
|
||||
|
||||
gql = parseUri("https://api.x.com") / "graphql"
|
||||
|
||||
graphUser* = gql / "u7wQyGi6oExe8_TRWGMq4Q/UserResultByScreenNameQuery"
|
||||
graphUserById* = gql / "oPppcargziU1uDQHAUmH-A/UserResultByIdQuery"
|
||||
graphUserTweets* = gql / "JLApJKFY0MxGTzCoK6ps8Q/UserWithProfileTweetsQueryV2"
|
||||
graphUserTweetsAndReplies* = gql / "Y86LQY7KMvxn5tu3hFTyPg/UserWithProfileTweetsAndRepliesQueryV2"
|
||||
graphUser* = gql / "WEoGnYB0EG1yGwamDCF6zg/UserResultByScreenNameQuery"
|
||||
graphUserById* = gql / "VN33vKXrPT7p35DgNR27aw/UserResultByIdQuery"
|
||||
graphUserTweetsV2* = gql / "6QdSuZ5feXxOadEdXa4XZg/UserWithProfileTweetsQueryV2"
|
||||
graphUserTweetsAndRepliesV2* = gql / "BDX77Xzqypdt11-mDfgdpQ/UserWithProfileTweetsAndRepliesQueryV2"
|
||||
graphUserTweets* = gql / "oRJs8SLCRNRbQzuZG93_oA/UserTweets"
|
||||
graphUserTweetsAndReplies* = gql / "kkaJ0Mf34PZVarrxzLihjg/UserTweetsAndReplies"
|
||||
graphUserMedia* = gql / "36oKqyQ7E_9CmtONGjJRsA/UserMedia"
|
||||
graphUserMediaV2* = gql / "PDfFf8hGeJvUCiTyWtw4wQ/MediaTimelineV2"
|
||||
graphTweet* = gql / "Vorskcd2tZ-tc4Gx3zbk4Q/ConversationTimelineV2"
|
||||
graphTweetResult* = gql / "sITyJdhRPpvpEjg4waUmTA/TweetResultByIdQuery"
|
||||
graphSearchTimeline* = gql / "KI9jCXUx3Ymt-hDKLOZb9Q/SearchTimeline"
|
||||
graphListById* = gql / "oygmAig8kjn0pKsx_bUadQ/ListByRestId"
|
||||
graphListBySlug* = gql / "88GTz-IPPWLn1EiU8XoNVg/ListBySlug"
|
||||
graphListMembers* = gql / "kSmxeqEeelqdHSR7jMnb_w/ListMembers"
|
||||
graphListTweets* = gql / "BbGLL1ZfMibdFNWlk7a0Pw/ListTimeline"
|
||||
graphUserMediaV2* = gql / "bp0e_WdXqgNBIwlLukzyYA/MediaTimelineV2"
|
||||
graphTweet* = gql / "Y4Erk_-0hObvLpz0Iw3bzA/ConversationTimeline"
|
||||
graphTweetDetail* = gql / "YVyS4SfwYW7Uw5qwy0mQCA/TweetDetail"
|
||||
graphTweetResult* = gql / "nzme9KiYhfIOrrLrPP_XeQ/TweetResultByIdQuery"
|
||||
graphSearchTimeline* = gql / "bshMIjqDk8LTXTq4w91WKw/SearchTimeline"
|
||||
graphListById* = gql / "cIUpT1UjuGgl_oWiY7Snhg/ListByRestId"
|
||||
graphListBySlug* = gql / "K6wihoTiTrzNzSF8y1aeKQ/ListBySlug"
|
||||
graphListMembers* = gql / "fuVHh5-gFn8zDBBxb8wOMA/ListMembers"
|
||||
graphListTweets* = gql / "VQf8_XQynI3WzH6xopOMMQ/ListTimeline"
|
||||
|
||||
gqlFeatures* = """{
|
||||
"android_ad_formats_media_component_render_overlay_enabled": false,
|
||||
"android_graphql_skip_api_media_color_palette": false,
|
||||
"android_professional_link_spotlight_display_enabled": false,
|
||||
"blue_business_profile_image_shape_enabled": false,
|
||||
"commerce_android_shop_module_enabled": false,
|
||||
"creator_subscriptions_subscription_count_enabled": false,
|
||||
"creator_subscriptions_tweet_preview_api_enabled": true,
|
||||
"freedom_of_speech_not_reach_fetch_enabled": true,
|
||||
@@ -33,8 +39,9 @@ const
|
||||
"interactive_text_enabled": false,
|
||||
"longform_notetweets_consumption_enabled": true,
|
||||
"longform_notetweets_inline_media_enabled": true,
|
||||
"longform_notetweets_richtext_consumption_enabled": true,
|
||||
"longform_notetweets_rich_text_read_enabled": true,
|
||||
"longform_notetweets_richtext_consumption_enabled": true,
|
||||
"mobile_app_spotlight_module_enabled": false,
|
||||
"responsive_web_edit_tweet_api_enabled": true,
|
||||
"responsive_web_enhance_cards_enabled": false,
|
||||
"responsive_web_graphql_exclude_directive_enabled": true,
|
||||
@@ -43,6 +50,7 @@ const
|
||||
"responsive_web_media_download_video_enabled": false,
|
||||
"responsive_web_text_conversations_enabled": false,
|
||||
"responsive_web_twitter_article_tweet_consumption_enabled": true,
|
||||
"unified_cards_destination_url_params_enabled": false,
|
||||
"responsive_web_twitter_blue_verified_badge_is_enabled": true,
|
||||
"rweb_lists_timeline_redesign_enabled": true,
|
||||
"spaces_2022_h2_clipping": true,
|
||||
@@ -83,11 +91,17 @@ const
|
||||
"payments_enabled": false,
|
||||
"responsive_web_profile_redirect_enabled": false,
|
||||
"responsive_web_grok_show_grok_translated_post": false,
|
||||
"responsive_web_grok_community_note_auto_translation_is_enabled": false
|
||||
"responsive_web_grok_community_note_auto_translation_is_enabled": false,
|
||||
"profile_label_improvements_pcf_label_in_profile_enabled": false,
|
||||
"grok_android_analyze_trend_fetch_enabled": false,
|
||||
"grok_translations_community_note_auto_translation_is_enabled": false,
|
||||
"grok_translations_post_auto_translation_is_enabled": false,
|
||||
"grok_translations_community_note_translation_is_enabled": false,
|
||||
"grok_translations_timeline_user_bio_auto_translation_is_enabled": false
|
||||
}""".replace(" ", "").replace("\n", "")
|
||||
|
||||
tweetVariables* = """{
|
||||
"focalTweetId": "$1",
|
||||
"postId": "$1",
|
||||
$2
|
||||
"includeHasBirdwatchNotes": false,
|
||||
"includePromotedContent": false,
|
||||
@@ -96,24 +110,20 @@ const
|
||||
"withV2Timeline": true
|
||||
}""".replace(" ", "").replace("\n", "")
|
||||
|
||||
# oldUserTweetsVariables* = """{
|
||||
# "userId": "$1", $2
|
||||
# "count": 20,
|
||||
# "includePromotedContent": false,
|
||||
# "withDownvotePerspective": false,
|
||||
# "withReactionsMetadata": false,
|
||||
# "withReactionsPerspective": false,
|
||||
# "withVoice": false,
|
||||
# "withV2Timeline": true
|
||||
# }
|
||||
# """
|
||||
tweetDetailVariables* = """{
|
||||
"focalTweetId": "$1",
|
||||
$2
|
||||
"referrer": "profile",
|
||||
"with_rux_injections": false,
|
||||
"rankingMode": "Relevance",
|
||||
"includePromotedContent": true,
|
||||
"withCommunity": true,
|
||||
"withQuickPromoteEligibilityTweetFields": true,
|
||||
"withBirdwatchNotes": true,
|
||||
"withVoice": true
|
||||
}""".replace(" ", "").replace("\n", "")
|
||||
|
||||
userTweetsVariables* = """{
|
||||
"rest_id": "$1", $2
|
||||
"count": 20
|
||||
}"""
|
||||
|
||||
listTweetsVariables* = """{
|
||||
restIdVariables* = """{
|
||||
"rest_id": "$1", $2
|
||||
"count": 20
|
||||
}"""
|
||||
@@ -126,3 +136,22 @@ const
|
||||
"withBirdwatchNotes": false,
|
||||
"withVoice": true
|
||||
}""".replace(" ", "").replace("\n", "")
|
||||
|
||||
userTweetsVariables* = """{
|
||||
"userId": "$1", $2
|
||||
"count": 20,
|
||||
"includePromotedContent": false,
|
||||
"withQuickPromoteEligibilityTweetFields": true,
|
||||
"withVoice": true
|
||||
}""".replace(" ", "").replace("\n", "")
|
||||
|
||||
userTweetsAndRepliesVariables* = """{
|
||||
"userId": "$1", $2
|
||||
"count": 20,
|
||||
"includePromotedContent": false,
|
||||
"withCommunity": true,
|
||||
"withVoice": true
|
||||
}""".replace(" ", "").replace("\n", "")
|
||||
|
||||
fieldToggles* = """{"withArticlePlainText":false}"""
|
||||
tweetDetailFieldToggles* = """{"withArticleRichContentState":true,"withArticlePlainText":false,"withGrokAnalyze":false,"withDisallowedReplyControls":false}"""
|
||||
|
||||
@@ -1,21 +1,39 @@
|
||||
import options
|
||||
import options, strutils
|
||||
import jsony
|
||||
import user, ../types/[graphuser, graphlistmembers]
|
||||
from ../../types import User, VerifiedType, Result, Query, QueryKind
|
||||
|
||||
proc parseUserResult*(userResult: UserResult): User =
|
||||
result = userResult.legacy
|
||||
|
||||
if result.verifiedType == none and userResult.isBlueVerified:
|
||||
result.verifiedType = blue
|
||||
|
||||
if result.username.len == 0 and userResult.core.screenName.len > 0:
|
||||
result.id = userResult.restId
|
||||
result.username = userResult.core.screenName
|
||||
result.fullname = userResult.core.name
|
||||
result.userPic = userResult.avatar.imageUrl.replace("_normal", "")
|
||||
|
||||
if userResult.verification.isSome:
|
||||
let v = userResult.verification.get
|
||||
if v.verifiedType != VerifiedType.none:
|
||||
result.verifiedType = v.verifiedType
|
||||
|
||||
if userResult.profileBio.isSome:
|
||||
result.bio = userResult.profileBio.get.description
|
||||
|
||||
proc parseGraphUser*(json: string): User =
|
||||
if json.len == 0 or json[0] != '{':
|
||||
return
|
||||
|
||||
let raw = json.fromJson(GraphUser)
|
||||
let userResult = raw.data.userResult.result
|
||||
|
||||
if raw.data.userResult.result.unavailableReason.get("") == "Suspended":
|
||||
if userResult.unavailableReason.get("") == "Suspended":
|
||||
return User(suspended: true)
|
||||
|
||||
result = raw.data.userResult.result.legacy
|
||||
result.id = raw.data.userResult.result.restId
|
||||
if result.verifiedType == VerifiedType.none and raw.data.userResult.result.isBlueVerified:
|
||||
result.verifiedType = blue
|
||||
result = parseUserResult(userResult)
|
||||
|
||||
proc parseGraphListMembers*(json, cursor: string): Result[User] =
|
||||
result = Result[User](
|
||||
@@ -31,7 +49,7 @@ proc parseGraphListMembers*(json, cursor: string): Result[User] =
|
||||
of TimelineTimelineItem:
|
||||
let userResult = entry.content.itemContent.userResults.result
|
||||
if userResult.restId.len > 0:
|
||||
result.content.add userResult.legacy
|
||||
result.content.add parseUserResult(userResult)
|
||||
of TimelineTimelineCursor:
|
||||
if entry.content.cursorType == "Bottom":
|
||||
result.bottom = entry.content.value
|
||||
|
||||
@@ -13,6 +13,7 @@ proc parseSession*(raw: string): Session =
|
||||
result = Session(
|
||||
kind: SessionKind.oauth,
|
||||
id: parseBiggestInt(id),
|
||||
username: session.username,
|
||||
oauthToken: session.oauthToken,
|
||||
oauthSecret: session.oauthTokenSecret
|
||||
)
|
||||
@@ -21,6 +22,7 @@ proc parseSession*(raw: string): Session =
|
||||
result = Session(
|
||||
kind: SessionKind.cookie,
|
||||
id: id,
|
||||
username: session.username,
|
||||
authToken: session.authToken,
|
||||
ct0: session.ct0
|
||||
)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import std/[options, tables, strutils, strformat, sugar]
|
||||
import jsony
|
||||
import user, ../types/unifiedcard
|
||||
import ../../formatters
|
||||
from ../../types import Card, CardKind, Video
|
||||
from ../../utils import twimg, https
|
||||
|
||||
@@ -77,6 +78,18 @@ proc parseMedia(component: Component; card: UnifiedCard; result: var Card) =
|
||||
of model3d:
|
||||
result.title = "Unsupported 3D model ad"
|
||||
|
||||
proc parseGrokShare(data: ComponentData; card: UnifiedCard; result: var Card) =
|
||||
result.kind = summaryLarge
|
||||
|
||||
data.destination.parseDestination(card, result)
|
||||
result.dest = "Answer by Grok"
|
||||
|
||||
for msg in data.conversationPreview:
|
||||
if msg.sender == "USER":
|
||||
result.title = msg.message.shorten(70)
|
||||
elif msg.sender == "AGENT":
|
||||
result.text = msg.message.shorten(500)
|
||||
|
||||
proc parseUnifiedCard*(json: string): Card =
|
||||
let card = json.fromJson(UnifiedCard)
|
||||
|
||||
@@ -92,6 +105,8 @@ proc parseUnifiedCard*(json: string): Card =
|
||||
component.parseMedia(card, result)
|
||||
of buttonGroup:
|
||||
discard
|
||||
of grokShare:
|
||||
component.data.parseGrokShare(card, result)
|
||||
of ComponentType.jobDetails:
|
||||
component.data.parseJobDetails(card, result)
|
||||
of ComponentType.hidden:
|
||||
|
||||
@@ -72,21 +72,3 @@ proc parseHook*(s: string; i: var int; v: var User) =
|
||||
var u: RawUser
|
||||
parseHook(s, i, u)
|
||||
v = toUser u
|
||||
|
||||
proc parseUser*(json: string; username=""): User =
|
||||
handleErrors:
|
||||
case error.code
|
||||
of suspended: return User(username: username, suspended: true)
|
||||
of userNotFound: return
|
||||
else: echo "[error - parseUser]: ", error
|
||||
|
||||
result = json.fromJson(User)
|
||||
|
||||
proc parseUsers*(json: string; after=""): Result[User] =
|
||||
result = Result[User](beginning: after.len == 0)
|
||||
|
||||
# starting with '{' means it's an error
|
||||
if json[0] == '[':
|
||||
let raw = json.fromJson(seq[RawUser])
|
||||
for user in raw:
|
||||
result.content.add user.toUser
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import options
|
||||
from ../../types import User
|
||||
import options, strutils
|
||||
from ../../types import User, VerifiedType
|
||||
|
||||
type
|
||||
GraphUser* = object
|
||||
@@ -8,8 +8,32 @@ type
|
||||
UserData* = object
|
||||
result*: UserResult
|
||||
|
||||
UserResult = object
|
||||
UserCore* = object
|
||||
name*: string
|
||||
screenName*: string
|
||||
createdAt*: string
|
||||
|
||||
UserBio* = object
|
||||
description*: string
|
||||
|
||||
UserAvatar* = object
|
||||
imageUrl*: string
|
||||
|
||||
Verification* = object
|
||||
verifiedType*: VerifiedType
|
||||
|
||||
UserResult* = object
|
||||
legacy*: User
|
||||
restId*: string
|
||||
isBlueVerified*: bool
|
||||
unavailableReason*: Option[string]
|
||||
core*: UserCore
|
||||
avatar*: UserAvatar
|
||||
profileBio*: Option[UserBio]
|
||||
verification*: Option[Verification]
|
||||
|
||||
proc enumHook*(s: string; v: var VerifiedType) =
|
||||
v = try:
|
||||
parseEnum[VerifiedType](s)
|
||||
except:
|
||||
VerifiedType.none
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
type
|
||||
RawSession* = object
|
||||
kind*: string
|
||||
username*: string
|
||||
id*: string
|
||||
username*: string
|
||||
oauthToken*: string
|
||||
oauthTokenSecret*: string
|
||||
authToken*: string
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
import std/tables
|
||||
from ../../types import User
|
||||
|
||||
type
|
||||
Search* = object
|
||||
globalObjects*: GlobalObjects
|
||||
timeline*: Timeline
|
||||
|
||||
GlobalObjects = object
|
||||
users*: Table[string, User]
|
||||
|
||||
Timeline = object
|
||||
instructions*: seq[Instructions]
|
||||
|
||||
Instructions = object
|
||||
addEntries*: tuple[entries: seq[Entry]]
|
||||
|
||||
Entry = object
|
||||
entryId*: string
|
||||
content*: tuple[operation: Operation]
|
||||
|
||||
Operation = object
|
||||
cursor*: tuple[value, cursorType: string]
|
||||
@@ -22,6 +22,7 @@ type
|
||||
communityDetails
|
||||
mediaWithDetailsHorizontal
|
||||
hidden
|
||||
grokShare
|
||||
unknown
|
||||
|
||||
Component* = object
|
||||
@@ -42,6 +43,7 @@ type
|
||||
topicDetail*: tuple[title: Text]
|
||||
profileUser*: User
|
||||
shortDescriptionText*: string
|
||||
conversationPreview*: seq[GrokConversation]
|
||||
|
||||
MediaItem* = object
|
||||
id*: string
|
||||
@@ -76,6 +78,10 @@ type
|
||||
title*: Text
|
||||
category*: Text
|
||||
|
||||
GrokConversation* = object
|
||||
message*: string
|
||||
sender*: string
|
||||
|
||||
TypeField = Component | Destination | MediaEntity | AppStoreData
|
||||
|
||||
converter fromText*(text: Text): string = string(text)
|
||||
@@ -96,6 +102,7 @@ proc enumHook*(s: string; v: var ComponentType) =
|
||||
of "community_details": communityDetails
|
||||
of "media_with_details_horizontal": mediaWithDetailsHorizontal
|
||||
of "commerce_drop_details": hidden
|
||||
of "grok_share": grokShare
|
||||
else: echo "ERROR: Unknown enum value (ComponentType): ", s; unknown
|
||||
|
||||
proc enumHook*(s: string; v: var AppType) =
|
||||
|
||||
@@ -33,10 +33,13 @@ proc getUrlPrefix*(cfg: Config): string =
|
||||
if cfg.useHttps: https & cfg.hostname
|
||||
else: "http://" & cfg.hostname
|
||||
|
||||
proc shortLink*(text: string; length=28): string =
|
||||
result = text.replace(wwwRegex, "")
|
||||
proc shorten*(text: string; length=28): string =
|
||||
result = text
|
||||
if result.len > length:
|
||||
result = result[0 ..< length] & "…"
|
||||
|
||||
proc shortLink*(text: string; length=28): string =
|
||||
result = text.replace(wwwRegex, "").shorten(length)
|
||||
|
||||
proc stripHtml*(text: string; shorten=false): string =
|
||||
var html = parseHtml(text)
|
||||
|
||||
355
src/parser.nim
355
src/parser.nim
@@ -1,10 +1,10 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
import strutils, options, times, math
|
||||
import strutils, options, times, math, tables
|
||||
import packedjson, packedjson/deserialiser
|
||||
import types, parserutils, utils
|
||||
import experimental/parser/unifiedcard
|
||||
|
||||
proc parseGraphTweet(js: JsonNode; isLegacy=false): Tweet
|
||||
proc parseGraphTweet(js: JsonNode): Tweet
|
||||
|
||||
proc parseUser(js: JsonNode; id=""): User =
|
||||
if js.isNull: return
|
||||
@@ -21,11 +21,16 @@ proc parseUser(js: JsonNode; id=""): User =
|
||||
tweets: js{"statuses_count"}.getInt,
|
||||
likes: js{"favourites_count"}.getInt,
|
||||
media: js{"media_count"}.getInt,
|
||||
verifiedType: parseEnum[VerifiedType](js{"verified_type"}.getStr("None")),
|
||||
protected: js{"protected"}.getBool,
|
||||
joinDate: js{"created_at"}.getTime
|
||||
)
|
||||
|
||||
if js{"is_blue_verified"}.getBool(false):
|
||||
result.verifiedType = blue
|
||||
|
||||
with verifiedType, js{"verified_type"}:
|
||||
result.verifiedType = parseEnum[VerifiedType](verifiedType.getStr)
|
||||
|
||||
result.expandUserEntities(js)
|
||||
|
||||
proc parseGraphUser(js: JsonNode): User =
|
||||
@@ -41,17 +46,20 @@ proc parseGraphUser(js: JsonNode): User =
|
||||
|
||||
result = parseUser(user{"legacy"}, user{"rest_id"}.getStr)
|
||||
|
||||
if result.verifiedType == none and user{"is_blue_verified"}.getBool(false):
|
||||
result.verifiedType = blue
|
||||
|
||||
# fallback to support UserMedia/recent GraphQL updates
|
||||
if result.username.len == 0 and user{"core", "screen_name"}.notNull:
|
||||
if result.username.len == 0:
|
||||
result.username = user{"core", "screen_name"}.getStr
|
||||
result.fullname = user{"core", "name"}.getStr
|
||||
result.userPic = user{"avatar", "image_url"}.getImageStr.replace("_normal", "")
|
||||
|
||||
if user{"is_blue_verified"}.getBool(false):
|
||||
result.verifiedType = blue
|
||||
elif user{"verification", "verified_type"}.notNull:
|
||||
let verifiedType = user{"verification", "verified_type"}.getStr("None")
|
||||
result.verifiedType = parseEnum[VerifiedType](verifiedType)
|
||||
|
||||
with verifiedType, user{"verification", "verified_type"}:
|
||||
result.verifiedType = parseEnum[VerifiedType](verifiedType.getStr)
|
||||
|
||||
proc parseGraphList*(js: JsonNode): List =
|
||||
if js.isNull: return
|
||||
@@ -90,16 +98,24 @@ proc parsePoll(js: JsonNode): Poll =
|
||||
result.leader = result.values.find(max(result.values))
|
||||
result.votes = result.values.sum
|
||||
|
||||
proc parseGif(js: JsonNode): Gif =
|
||||
result = Gif(
|
||||
url: js{"video_info", "variants"}[0]{"url"}.getImageStr,
|
||||
thumb: js{"media_url_https"}.getImageStr
|
||||
)
|
||||
proc parseVideoVariants(variants: JsonNode): seq[VideoVariant] =
|
||||
result = @[]
|
||||
for v in variants:
|
||||
let
|
||||
url = v{"url"}.getStr
|
||||
contentType = parseEnum[VideoType](v{"content_type"}.getStr("video/mp4"))
|
||||
bitrate = v{"bit_rate"}.getInt(v{"bitrate"}.getInt(0))
|
||||
|
||||
result.add VideoVariant(
|
||||
contentType: contentType,
|
||||
bitrate: bitrate,
|
||||
url: url,
|
||||
resolution: if contentType == mp4: getMp4Resolution(url) else: 0
|
||||
)
|
||||
|
||||
proc parseVideo(js: JsonNode): Video =
|
||||
result = Video(
|
||||
thumb: js{"media_url_https"}.getImageStr,
|
||||
views: getVideoViewCount(js),
|
||||
available: true,
|
||||
title: js{"ext_alt_text"}.getStr,
|
||||
durationMs: js{"video_info", "duration_millis"}.getInt
|
||||
@@ -116,17 +132,62 @@ proc parseVideo(js: JsonNode): Video =
|
||||
with description, js{"additional_media_info", "description"}:
|
||||
result.description = description.getStr
|
||||
|
||||
for v in js{"video_info", "variants"}:
|
||||
let
|
||||
contentType = parseEnum[VideoType](v{"content_type"}.getStr("summary"))
|
||||
url = v{"url"}.getStr
|
||||
result.variants = parseVideoVariants(js{"video_info", "variants"})
|
||||
|
||||
result.variants.add VideoVariant(
|
||||
contentType: contentType,
|
||||
bitrate: v{"bitrate"}.getInt,
|
||||
url: url,
|
||||
resolution: if contentType == mp4: getMp4Resolution(url) else: 0
|
||||
)
|
||||
proc parseLegacyMediaEntities(js: JsonNode; result: var Tweet) =
|
||||
with jsMedia, js{"extended_entities", "media"}:
|
||||
for m in jsMedia:
|
||||
case m.getTypeName:
|
||||
of "photo":
|
||||
result.photos.add m{"media_url_https"}.getImageStr
|
||||
of "video":
|
||||
result.video = some(parseVideo(m))
|
||||
with user, m{"additional_media_info", "source_user"}:
|
||||
if user{"id"}.getInt > 0:
|
||||
result.attribution = some(parseUser(user))
|
||||
else:
|
||||
result.attribution = some(parseGraphUser(user))
|
||||
of "animated_gif":
|
||||
result.gif = some Gif(
|
||||
url: m{"video_info", "variants"}[0]{"url"}.getImageStr,
|
||||
thumb: m{"media_url_https"}.getImageStr
|
||||
)
|
||||
else: discard
|
||||
|
||||
with url, m{"url"}:
|
||||
if result.text.endsWith(url.getStr):
|
||||
result.text.removeSuffix(url.getStr)
|
||||
result.text = result.text.strip()
|
||||
|
||||
proc parseMediaEntities(js: JsonNode; result: var Tweet) =
|
||||
with mediaEntities, js{"media_entities"}:
|
||||
for mediaEntity in mediaEntities:
|
||||
with mediaInfo, mediaEntity{"media_results", "result", "media_info"}:
|
||||
case mediaInfo.getTypeName
|
||||
of "ApiImage":
|
||||
result.photos.add mediaInfo{"original_img_url"}.getImageStr
|
||||
of "ApiVideo":
|
||||
let status = mediaEntity{"media_results", "result", "media_availability_v2", "status"}
|
||||
result.video = some Video(
|
||||
available: status.getStr == "Available",
|
||||
thumb: mediaInfo{"preview_image", "original_img_url"}.getImageStr,
|
||||
durationMs: mediaInfo{"duration_millis"}.getInt,
|
||||
variants: parseVideoVariants(mediaInfo{"variants"})
|
||||
)
|
||||
of "ApiGif":
|
||||
result.gif = some Gif(
|
||||
url: mediaInfo{"variants"}[0]{"url"}.getImageStr,
|
||||
thumb: mediaInfo{"preview_image", "original_img_url"}.getImageStr
|
||||
)
|
||||
else: discard
|
||||
|
||||
# Remove media URLs from text
|
||||
with mediaList, js{"legacy", "entities", "media"}:
|
||||
for url in mediaList:
|
||||
let expandedUrl = url{"expanded_url"}.getStr
|
||||
if result.text.endsWith(expandedUrl):
|
||||
result.text.removeSuffix(expandedUrl)
|
||||
result.text = result.text.strip()
|
||||
|
||||
proc parsePromoVideo(js: JsonNode): Video =
|
||||
result = Video(
|
||||
@@ -218,12 +279,17 @@ proc parseCard(js: JsonNode; urls: JsonNode): Card =
|
||||
|
||||
proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
|
||||
if js.isNull: return
|
||||
|
||||
let time =
|
||||
if js{"created_at"}.notNull: js{"created_at"}.getTime
|
||||
else: js{"created_at_ms"}.getTimeFromMs
|
||||
|
||||
result = Tweet(
|
||||
id: js{"id_str"}.getId,
|
||||
threadId: js{"conversation_id_str"}.getId,
|
||||
replyId: js{"in_reply_to_status_id_str"}.getId,
|
||||
text: js{"full_text"}.getStr,
|
||||
time: js{"created_at"}.getTime,
|
||||
time: time,
|
||||
hasThread: js{"self_thread"}.notNull,
|
||||
available: true,
|
||||
user: User(id: js{"user_id_str"}.getStr),
|
||||
@@ -231,7 +297,7 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
|
||||
replies: js{"reply_count"}.getInt,
|
||||
retweets: js{"retweet_count"}.getInt,
|
||||
likes: js{"favorite_count"}.getInt,
|
||||
quotes: js{"quote_count"}.getInt
|
||||
views: js{"views_count"}.getInt
|
||||
)
|
||||
)
|
||||
|
||||
@@ -256,6 +322,12 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
|
||||
result.retweet = some parseGraphTweet(rt)
|
||||
return
|
||||
|
||||
with reposts, js{"repostedStatusResults"}:
|
||||
with rt, reposts{"result"}:
|
||||
if "legacy" in rt:
|
||||
result.retweet = some parseGraphTweet(rt)
|
||||
return
|
||||
|
||||
if jsCard.kind != JNull:
|
||||
let name = jsCard{"name"}.getStr
|
||||
if "poll" in name:
|
||||
@@ -269,27 +341,7 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
|
||||
result.card = some parseCard(jsCard, js{"entities", "urls"})
|
||||
|
||||
result.expandTweetEntities(js)
|
||||
|
||||
with jsMedia, js{"extended_entities", "media"}:
|
||||
for m in jsMedia:
|
||||
case m{"type"}.getStr
|
||||
of "photo":
|
||||
result.photos.add m{"media_url_https"}.getImageStr
|
||||
of "video":
|
||||
result.video = some(parseVideo(m))
|
||||
with user, m{"additional_media_info", "source_user"}:
|
||||
if user{"id"}.getInt > 0:
|
||||
result.attribution = some(parseUser(user))
|
||||
else:
|
||||
result.attribution = some(parseGraphUser(user))
|
||||
of "animated_gif":
|
||||
result.gif = some(parseGif(m))
|
||||
else: discard
|
||||
|
||||
with url, m{"url"}:
|
||||
if result.text.endsWith(url.getStr):
|
||||
result.text.removeSuffix(url.getStr)
|
||||
result.text = result.text.strip()
|
||||
parseLegacyMediaEntities(js, result)
|
||||
|
||||
with jsWithheld, js{"withheld_in_countries"}:
|
||||
let withheldInCountries: seq[string] =
|
||||
@@ -305,91 +357,108 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
|
||||
result.text.removeSuffix(" Learn more.")
|
||||
result.available = false
|
||||
|
||||
proc parseGraphTweet(js: JsonNode; isLegacy=false): Tweet =
|
||||
proc parseGraphTweet(js: JsonNode): Tweet =
|
||||
if js.kind == JNull:
|
||||
return Tweet()
|
||||
|
||||
case js{"__typename"}.getStr
|
||||
case js.getTypeName:
|
||||
of "TweetUnavailable":
|
||||
return Tweet()
|
||||
of "TweetTombstone":
|
||||
with text, js{"tombstone", "richText"}:
|
||||
return Tweet(text: text.getTombstone)
|
||||
with text, js{"tombstone", "text"}:
|
||||
with text, select(js{"tombstone", "richText"}, js{"tombstone", "text"}):
|
||||
return Tweet(text: text.getTombstone)
|
||||
return Tweet()
|
||||
of "TweetPreviewDisplay":
|
||||
return Tweet(text: "You're unable to view this Tweet because it's only available to the Subscribers of the account owner.")
|
||||
of "TweetWithVisibilityResults":
|
||||
return parseGraphTweet(js{"tweet"}, isLegacy)
|
||||
return parseGraphTweet(js{"tweet"})
|
||||
else:
|
||||
discard
|
||||
|
||||
if not js.hasKey("legacy"):
|
||||
return Tweet()
|
||||
|
||||
var jsCard = copy(js{if isLegacy: "card" else: "tweet_card", "legacy"})
|
||||
var jsCard = select(js{"card"}, js{"tweet_card"}, js{"legacy", "tweet_card"})
|
||||
if jsCard.kind != JNull:
|
||||
var values = newJObject()
|
||||
for val in jsCard["binding_values"]:
|
||||
values[val["key"].getStr] = val["value"]
|
||||
jsCard["binding_values"] = values
|
||||
let legacyCard = jsCard{"legacy"}
|
||||
if legacyCard.kind != JNull:
|
||||
let bindingArray = legacyCard{"binding_values"}
|
||||
if bindingArray.kind == JArray:
|
||||
var bindingObj: seq[(string, JsonNode)]
|
||||
for item in bindingArray:
|
||||
bindingObj.add((item{"key"}.getStr, item{"value"}))
|
||||
# Create a new card object with flattened structure
|
||||
jsCard = %*{
|
||||
"name": legacyCard{"name"},
|
||||
"url": legacyCard{"url"},
|
||||
"binding_values": %bindingObj
|
||||
}
|
||||
|
||||
result = parseTweet(js{"legacy"}, jsCard)
|
||||
result.id = js{"rest_id"}.getId
|
||||
result.user = parseGraphUser(js{"core"})
|
||||
|
||||
if result.replyId == 0:
|
||||
result.replyId = js{"reply_to_results", "rest_id"}.getId
|
||||
|
||||
with count, js{"views", "count"}:
|
||||
result.stats.views = count.getStr("0").parseInt
|
||||
|
||||
with noteTweet, js{"note_tweet", "note_tweet_results", "result"}:
|
||||
result.expandNoteTweetEntities(noteTweet)
|
||||
|
||||
parseMediaEntities(js, result)
|
||||
|
||||
if result.quote.isSome:
|
||||
result.quote = some(parseGraphTweet(js{"quoted_status_result", "result"}, isLegacy))
|
||||
result.quote = some(parseGraphTweet(js{"quoted_status_result", "result"}))
|
||||
|
||||
with quoted, js{"quotedPostResults", "result"}:
|
||||
result.quote = some(parseGraphTweet(quoted))
|
||||
|
||||
proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] =
|
||||
for t in js{"content", "items"}:
|
||||
let entryId = t{"entryId"}.getStr
|
||||
for t in ? js{"content", "items"}:
|
||||
let entryId = t.getEntryId
|
||||
if "cursor-showmore" in entryId:
|
||||
let cursor = t{"item", "content", "value"}
|
||||
result.thread.cursor = cursor.getStr
|
||||
result.thread.hasMore = true
|
||||
elif "tweet" in entryId and "promoted" notin entryId:
|
||||
let
|
||||
isLegacy = t{"item"}.hasKey("itemContent")
|
||||
(contentKey, resultKey) = if isLegacy: ("itemContent", "tweet_results")
|
||||
else: ("content", "tweetResult")
|
||||
with tweet, t.getTweetResult("item"):
|
||||
result.thread.content.add parseGraphTweet(tweet)
|
||||
|
||||
with content, t{"item", contentKey}:
|
||||
result.thread.content.add parseGraphTweet(content{resultKey, "result"}, isLegacy)
|
||||
|
||||
if content{"tweetDisplayType"}.getStr == "SelfThread":
|
||||
let tweetDisplayType = select(
|
||||
t{"item", "content", "tweet_display_type"},
|
||||
t{"item", "itemContent", "tweetDisplayType"}
|
||||
)
|
||||
if tweetDisplayType.getStr == "SelfThread":
|
||||
result.self = true
|
||||
|
||||
proc parseGraphTweetResult*(js: JsonNode): Tweet =
|
||||
with tweet, js{"data", "tweet_result", "result"}:
|
||||
result = parseGraphTweet(tweet, false)
|
||||
result = parseGraphTweet(tweet)
|
||||
|
||||
proc parseGraphConversation*(js: JsonNode; tweetId: string; v2=true): Conversation =
|
||||
proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
|
||||
result = Conversation(replies: Result[Chain](beginning: true))
|
||||
|
||||
let
|
||||
rootKey = if v2: "timeline_response" else: "threaded_conversation_with_injections_v2"
|
||||
contentKey = if v2: "content" else: "itemContent"
|
||||
resultKey = if v2: "tweetResult" else: "tweet_results"
|
||||
|
||||
let instructions = ? js{"data", rootKey, "instructions"}
|
||||
let instructions = ? select(
|
||||
js{"data", "timelineResponse", "instructions"},
|
||||
js{"data", "timeline_response", "instructions"},
|
||||
js{"data", "threaded_conversation_with_injections_v2", "instructions"}
|
||||
)
|
||||
if instructions.len == 0:
|
||||
return
|
||||
|
||||
for i in instructions:
|
||||
if i{"__typename"}.getStr == "TimelineAddEntries":
|
||||
if i.getTypeName == "TimelineAddEntries":
|
||||
for e in i{"entries"}:
|
||||
let entryId = e{"entryId"}.getStr
|
||||
let entryId = e.getEntryId
|
||||
if entryId.startsWith("tweet"):
|
||||
with tweetResult, e{"content", contentKey, resultKey, "result"}:
|
||||
let tweet = parseGraphTweet(tweetResult, not v2)
|
||||
let tweetResult = getTweetResult(e)
|
||||
if tweetResult.notNull:
|
||||
let tweet = parseGraphTweet(tweetResult)
|
||||
|
||||
if not tweet.available:
|
||||
tweet.id = parseBiggestInt(entryId.getId())
|
||||
tweet.id = entryId.getId
|
||||
|
||||
if $tweet.id == tweetId:
|
||||
result.tweet = tweet
|
||||
@@ -402,66 +471,67 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string; v2=true): Conversati
|
||||
elif thread.content.len > 0:
|
||||
result.replies.content.add thread
|
||||
elif entryId.startsWith("tombstone"):
|
||||
let id = entryId.getId()
|
||||
let tweet = Tweet(
|
||||
id: parseBiggestInt(id),
|
||||
available: false,
|
||||
text: e{"content", contentKey, "tombstoneInfo", "richText"}.getTombstone
|
||||
)
|
||||
let
|
||||
content = select(e{"content", "content"}, e{"content", "itemContent"})
|
||||
tweet = Tweet(
|
||||
id: entryId.getId,
|
||||
available: false,
|
||||
text: content{"tombstoneInfo", "richText"}.getTombstone
|
||||
)
|
||||
|
||||
if id == tweetId:
|
||||
if $tweet.id == tweetId:
|
||||
result.tweet = tweet
|
||||
else:
|
||||
result.before.content.add tweet
|
||||
elif entryId.startsWith("cursor-bottom"):
|
||||
result.replies.bottom = e{"content", contentKey, "value"}.getStr
|
||||
var cursorValue = select(
|
||||
e{"content", "value"},
|
||||
e{"content", "content", "value"},
|
||||
e{"content", "itemContent", "value"}
|
||||
)
|
||||
result.replies.bottom = cursorValue.getStr
|
||||
|
||||
proc extractTweetsFromEntry*(e: JsonNode; entryId: string): seq[Tweet] =
|
||||
if e{"content", "items"}.notNull:
|
||||
for item in e{"content", "items"}:
|
||||
with tweetResult, item{"item", "itemContent", "tweet_results", "result"}:
|
||||
var tweet = parseGraphTweet(tweetResult, false)
|
||||
if not tweet.available:
|
||||
tweet.id = parseBiggestInt(item{"entryId"}.getStr.getId())
|
||||
result.add tweet
|
||||
proc extractTweetsFromEntry*(e: JsonNode): seq[Tweet] =
|
||||
with tweetResult, getTweetResult(e):
|
||||
var tweet = parseGraphTweet(tweetResult)
|
||||
if not tweet.available:
|
||||
tweet.id = e.getEntryId.getId
|
||||
result.add tweet
|
||||
return
|
||||
|
||||
with tweetResult, e{"content", "content", "tweetResult", "result"}:
|
||||
var tweet = parseGraphTweet(tweetResult, false)
|
||||
if not tweet.available:
|
||||
tweet.id = parseBiggestInt(entryId.getId())
|
||||
result.add tweet
|
||||
for item in e{"content", "items"}:
|
||||
with tweetResult, item.getTweetResult("item"):
|
||||
var tweet = parseGraphTweet(tweetResult)
|
||||
if not tweet.available:
|
||||
tweet.id = item.getEntryId.getId
|
||||
result.add tweet
|
||||
|
||||
proc parseGraphTimeline*(js: JsonNode; after=""): Profile =
|
||||
result = Profile(tweets: Timeline(beginning: after.len == 0))
|
||||
|
||||
let instructions =
|
||||
if js{"data", "list"}.notNull:
|
||||
? js{"data", "list", "timeline_response", "timeline", "instructions"}
|
||||
elif js{"data", "user"}.notNull:
|
||||
? js{"data", "user", "result", "timeline", "timeline", "instructions"}
|
||||
else:
|
||||
? js{"data", "user_result", "result", "timeline_response", "timeline", "instructions"}
|
||||
|
||||
let instructions = ? select(
|
||||
js{"data", "list", "timeline_response", "timeline", "instructions"},
|
||||
js{"data", "user", "result", "timeline", "timeline", "instructions"},
|
||||
js{"data", "user_result", "result", "timeline_response", "timeline", "instructions"}
|
||||
)
|
||||
if instructions.len == 0:
|
||||
return
|
||||
|
||||
for i in instructions:
|
||||
# TimelineAddToModule instruction is used by UserMedia
|
||||
if i{"moduleItems"}.notNull:
|
||||
for item in i{"moduleItems"}:
|
||||
with tweetResult, item{"item", "itemContent", "tweet_results", "result"}:
|
||||
let tweet = parseGraphTweet(tweetResult, false)
|
||||
with tweetResult, item.getTweetResult("item"):
|
||||
let tweet = parseGraphTweet(tweetResult)
|
||||
if not tweet.available:
|
||||
tweet.id = parseBiggestInt(item{"entryId"}.getStr.getId())
|
||||
tweet.id = item.getEntryId.getId
|
||||
result.tweets.content.add tweet
|
||||
continue
|
||||
|
||||
if i{"entries"}.notNull:
|
||||
for e in i{"entries"}:
|
||||
let entryId = e{"entryId"}.getStr
|
||||
let entryId = e.getEntryId
|
||||
if entryId.startsWith("tweet") or entryId.startsWith("profile-grid"):
|
||||
for tweet in extractTweetsFromEntry(e, entryId):
|
||||
for tweet in extractTweetsFromEntry(e):
|
||||
result.tweets.content.add tweet
|
||||
elif "-conversation-" in entryId or entryId.startsWith("homeConversation"):
|
||||
let (thread, self) = parseGraphThread(e)
|
||||
@@ -469,36 +539,31 @@ proc parseGraphTimeline*(js: JsonNode; after=""): Profile =
|
||||
elif entryId.startsWith("cursor-bottom"):
|
||||
result.tweets.bottom = e{"content", "value"}.getStr
|
||||
|
||||
if after.len == 0 and i{"__typename"}.getStr == "TimelinePinEntry":
|
||||
with tweetResult, i{"entry", "content", "content", "tweetResult", "result"}:
|
||||
let tweet = parseGraphTweet(tweetResult, false)
|
||||
tweet.pinned = true
|
||||
if not tweet.available and tweet.tombstone.len == 0:
|
||||
let entryId = i{"entry", "entryId"}.getEntryId
|
||||
if entryId.len > 0:
|
||||
tweet.id = parseBiggestInt(entryId)
|
||||
result.pinned = some tweet
|
||||
if after.len == 0:
|
||||
if i.getTypeName == "TimelinePinEntry":
|
||||
let tweets = extractTweetsFromEntry(i{"entry"})
|
||||
if tweets.len > 0:
|
||||
var tweet = tweets[0]
|
||||
tweet.pinned = true
|
||||
result.pinned = some tweet
|
||||
|
||||
proc parseGraphPhotoRail*(js: JsonNode): PhotoRail =
|
||||
result = @[]
|
||||
|
||||
let instructions =
|
||||
if js{"data", "user"}.notNull:
|
||||
? js{"data", "user", "result", "timeline", "timeline", "instructions"}
|
||||
else:
|
||||
? js{"data", "user_result", "result", "timeline_response", "timeline", "instructions"}
|
||||
|
||||
let instructions = select(
|
||||
js{"data", "user", "result", "timeline", "timeline", "instructions"},
|
||||
js{"data", "user_result", "result", "timeline_response", "timeline", "instructions"}
|
||||
)
|
||||
if instructions.len == 0:
|
||||
return
|
||||
|
||||
for i in instructions:
|
||||
# TimelineAddToModule instruction is used by MediaTimelineV2
|
||||
if i{"moduleItems"}.notNull:
|
||||
for item in i{"moduleItems"}:
|
||||
with tweetResult, item{"item", "itemContent", "tweet_results", "result"}:
|
||||
let t = parseGraphTweet(tweetResult, false)
|
||||
with tweetResult, item.getTweetResult("item"):
|
||||
let t = parseGraphTweet(tweetResult)
|
||||
if not t.available:
|
||||
t.id = parseBiggestInt(item{"entryId"}.getStr.getId())
|
||||
t.id = item.getEntryId.getId
|
||||
|
||||
let photo = extractGalleryPhoto(t)
|
||||
if photo.url.len > 0:
|
||||
@@ -508,14 +573,13 @@ proc parseGraphPhotoRail*(js: JsonNode): PhotoRail =
|
||||
return
|
||||
continue
|
||||
|
||||
let instrType = i{"type"}.getStr(i{"__typename"}.getStr)
|
||||
if instrType != "TimelineAddEntries":
|
||||
if i.getTypeName != "TimelineAddEntries":
|
||||
continue
|
||||
|
||||
for e in i{"entries"}:
|
||||
let entryId = e{"entryId"}.getStr
|
||||
let entryId = e.getEntryId
|
||||
if entryId.startsWith("tweet") or entryId.startsWith("profile-grid"):
|
||||
for t in extractTweetsFromEntry(e, entryId):
|
||||
for t in extractTweetsFromEntry(e):
|
||||
let photo = extractGalleryPhoto(t)
|
||||
if photo.url.len > 0:
|
||||
result.add photo
|
||||
@@ -526,21 +590,24 @@ proc parseGraphPhotoRail*(js: JsonNode): PhotoRail =
|
||||
proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] =
|
||||
result = Result[T](beginning: after.len == 0)
|
||||
|
||||
let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
|
||||
let instructions = select(
|
||||
js{"data", "search", "timeline_response", "timeline", "instructions"},
|
||||
js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
|
||||
)
|
||||
if instructions.len == 0:
|
||||
return
|
||||
|
||||
for instruction in instructions:
|
||||
let typ = instruction{"type"}.getStr
|
||||
let typ = getTypeName(instruction)
|
||||
if typ == "TimelineAddEntries":
|
||||
for e in instruction{"entries"}:
|
||||
let entryId = e{"entryId"}.getStr
|
||||
let entryId = e.getEntryId
|
||||
when T is Tweets:
|
||||
if entryId.startsWith("tweet"):
|
||||
with tweetRes, e{"content", "itemContent", "tweet_results", "result"}:
|
||||
with tweetRes, getTweetResult(e):
|
||||
let tweet = parseGraphTweet(tweetRes)
|
||||
if not tweet.available:
|
||||
tweet.id = parseBiggestInt(entryId.getId())
|
||||
tweet.id = entryId.getId
|
||||
result.content.add tweet
|
||||
elif T is User:
|
||||
if entryId.startsWith("user"):
|
||||
|
||||
@@ -36,6 +36,12 @@ template `?`*(js: JsonNode): untyped =
|
||||
if j.isNull: return
|
||||
j
|
||||
|
||||
template select*(a, b: JsonNode): untyped =
|
||||
if a.notNull: a else: b
|
||||
|
||||
template select*(a, b, c: JsonNode): untyped =
|
||||
if a.notNull: a elif b.notNull: b else: c
|
||||
|
||||
template with*(ident, value, body): untyped =
|
||||
if true:
|
||||
let ident {.inject.} = value
|
||||
@@ -44,8 +50,7 @@ template with*(ident, value, body): untyped =
|
||||
template with*(ident; value: JsonNode; body): untyped =
|
||||
if true:
|
||||
let ident {.inject.} = value
|
||||
# value.notNull causes a compilation error for versions < 1.6.14
|
||||
if notNull(value): body
|
||||
if value.notNull: body
|
||||
|
||||
template getCursor*(js: JsonNode): string =
|
||||
js{"content", "operation", "cursor", "value"}.getStr
|
||||
@@ -54,6 +59,20 @@ template getError*(js: JsonNode): Error =
|
||||
if js.kind != JArray or js.len == 0: null
|
||||
else: Error(js[0]{"code"}.getInt)
|
||||
|
||||
proc getTweetResult*(js: JsonNode; root="content"): JsonNode =
|
||||
select(
|
||||
js{root, "content", "tweet_results", "result"},
|
||||
js{root, "itemContent", "tweet_results", "result"},
|
||||
js{root, "content", "tweetResult", "result"}
|
||||
)
|
||||
|
||||
template getTypeName*(js: JsonNode): string =
|
||||
js{"__typename"}.getStr(js{"type"}.getStr)
|
||||
|
||||
template getEntryId*(e: JsonNode): string =
|
||||
e{"entryId"}.getStr(e{"entry_id"}.getStr)
|
||||
|
||||
|
||||
template parseTime(time: string; f: static string; flen: int): DateTime =
|
||||
if time.len != flen: return
|
||||
parse(time, f, utc())
|
||||
@@ -64,29 +83,24 @@ proc getDateTime*(js: JsonNode): DateTime =
|
||||
proc getTime*(js: JsonNode): DateTime =
|
||||
parseTime(js.getStr, "ddd MMM dd hh:mm:ss \'+0000\' yyyy", 30)
|
||||
|
||||
proc getId*(id: string): string {.inline.} =
|
||||
proc getTimeFromMs*(js: JsonNode): DateTime =
|
||||
let ms = js.getInt(0)
|
||||
if ms == 0: return
|
||||
let seconds = ms div 1000
|
||||
return fromUnix(seconds).utc()
|
||||
|
||||
proc getId*(id: string): int64 {.inline.} =
|
||||
let start = id.rfind("-")
|
||||
if start < 0: return id
|
||||
id[start + 1 ..< id.len]
|
||||
if start < 0:
|
||||
return parseBiggestInt(id)
|
||||
return parseBiggestInt(id[start + 1 ..< id.len])
|
||||
|
||||
proc getId*(js: JsonNode): int64 {.inline.} =
|
||||
case js.kind
|
||||
of JString: return parseBiggestInt(js.getStr("0"))
|
||||
of JString: return js.getStr("0").getId
|
||||
of JInt: return js.getBiggestInt()
|
||||
else: return 0
|
||||
|
||||
proc getEntryId*(js: JsonNode): string {.inline.} =
|
||||
let entry = js{"entryId"}.getStr
|
||||
if entry.len == 0: return
|
||||
|
||||
if "tweet" in entry or "sq-I-t" in entry:
|
||||
return entry.getId
|
||||
elif "tombstone" in entry:
|
||||
return js{"content", "item", "content", "tombstone", "tweet", "id"}.getStr
|
||||
else:
|
||||
echo "unknown entry: ", entry
|
||||
return
|
||||
|
||||
template getStrVal*(js: JsonNode; default=""): string =
|
||||
js{"string_value"}.getStr(default)
|
||||
|
||||
@@ -157,12 +171,6 @@ proc getMp4Resolution*(url: string): int =
|
||||
# cannot determine resolution (e.g. m3u8/non-mp4 video)
|
||||
return 0
|
||||
|
||||
proc getVideoViewCount*(js: JsonNode): string =
|
||||
with stats, js{"ext_media_stats"}:
|
||||
return stats{"view_count"}.getStr($stats{"viewCount"}.getInt)
|
||||
|
||||
return $js{"mediaStats", "viewCount"}.getInt(0)
|
||||
|
||||
proc extractSlice(js: JsonNode): Slice[int] =
|
||||
result = js["indices"][0].getInt ..< js["indices"][1].getInt
|
||||
|
||||
|
||||
@@ -31,8 +31,6 @@ proc createStatusRouter*(cfg: Config) =
|
||||
resp $renderReplies(replies, prefs, getPath())
|
||||
|
||||
let conv = await getTweet(id, getCursor())
|
||||
if conv == nil:
|
||||
echo "nil conv"
|
||||
|
||||
if conv == nil or conv.tweet == nil or conv.tweet.id == 0:
|
||||
var error = "Tweet not found"
|
||||
@@ -68,7 +66,7 @@ proc createStatusRouter*(cfg: Config) =
|
||||
|
||||
get "/@name/@s/@id/@m/?@i?":
|
||||
cond @"s" in ["status", "statuses"]
|
||||
cond @"m" in ["video", "photo"]
|
||||
cond @"m" in ["video", "photo", "history"]
|
||||
redirect("/$1/status/$2" % [@"name", @"id"])
|
||||
|
||||
get "/@name/statuses/@id/?":
|
||||
@@ -76,6 +74,6 @@ proc createStatusRouter*(cfg: Config) =
|
||||
|
||||
get "/i/web/status/@id":
|
||||
redirect("/i/status/" & @"id")
|
||||
|
||||
|
||||
get "/@name/thread/@id/?":
|
||||
redirect("/$1/status/$2" % [@"name", @"id"])
|
||||
|
||||
@@ -42,6 +42,7 @@
|
||||
|
||||
.card-description {
|
||||
margin: 0.3em 0;
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
|
||||
.card-destination {
|
||||
|
||||
@@ -38,6 +38,7 @@ type
|
||||
|
||||
Session* = ref object
|
||||
id*: int64
|
||||
username*: string
|
||||
pending*: int
|
||||
limited*: bool
|
||||
limitedAt*: int
|
||||
@@ -120,7 +121,6 @@ type
|
||||
durationMs*: int
|
||||
url*: string
|
||||
thumb*: string
|
||||
views*: string
|
||||
available*: bool
|
||||
reason*: string
|
||||
title*: string
|
||||
@@ -201,7 +201,7 @@ type
|
||||
replies*: int
|
||||
retweets*: int
|
||||
likes*: int
|
||||
quotes*: int
|
||||
views*: int
|
||||
|
||||
Tweet* = ref object
|
||||
id*: int64
|
||||
|
||||
@@ -30,7 +30,7 @@ proc renderNavbar(cfg: Config; req: Request; rss, canonical: string): VNode =
|
||||
tdiv(class="nav-item right"):
|
||||
icon "search", title="Search", href="/search"
|
||||
if cfg.enableRss and rss.len > 0:
|
||||
icon "rss-feed", title="RSS Feed", href=rss
|
||||
icon "rss", title="RSS Feed", href=rss
|
||||
icon "bird", title="Open in Twitter", href=canonical
|
||||
a(href="https://liberapay.com/zedeus"): verbatim lp
|
||||
icon "info", title="About", href="/about"
|
||||
@@ -53,7 +53,7 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
|
||||
|
||||
buildHtml(head):
|
||||
link(rel="stylesheet", type="text/css", href="/css/style.css?v=19")
|
||||
link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=2")
|
||||
link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=3")
|
||||
|
||||
if theme.len > 0:
|
||||
link(rel="stylesheet", type="text/css", href=(&"/css/themes/{theme}.css"))
|
||||
@@ -119,7 +119,7 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
|
||||
# this is last so images are also preloaded
|
||||
# if this is done earlier, Chrome only preloads one image for some reason
|
||||
link(rel="preload", type="font/woff2", `as`="font",
|
||||
href="/fonts/fontello.woff2?21002321", crossorigin="anonymous")
|
||||
href="/fonts/fontello.woff2?61663884", crossorigin="anonymous")
|
||||
|
||||
proc renderMain*(body: VNode; req: Request; cfg: Config; prefs=defaultPrefs;
|
||||
titleText=""; desc=""; ogTitle=""; rss=""; video="";
|
||||
|
||||
@@ -25,7 +25,7 @@
|
||||
#end proc
|
||||
#
|
||||
#proc getDescription(desc: string; cfg: Config): string =
|
||||
Twitter feed for: ${desc}. Generated by ${cfg.hostname}
|
||||
Twitter feed for: ${desc}. Generated by ${getUrlPrefix(cfg)}
|
||||
#end proc
|
||||
#
|
||||
#proc getTweetsWithPinned(profile: Profile): seq[Tweets] =
|
||||
@@ -51,10 +51,6 @@ Twitter feed for: ${desc}. Generated by ${cfg.hostname}
|
||||
#let urlPrefix = getUrlPrefix(cfg)
|
||||
#let text = replaceUrls(tweet.text, defaultPrefs, absolute=urlPrefix)
|
||||
<p>${text.replace("\n", "<br>\n")}</p>
|
||||
#if tweet.quote.isSome and get(tweet.quote).available:
|
||||
# let quoteLink = getLink(get(tweet.quote))
|
||||
<p><a href="${urlPrefix}${quoteLink}">${cfg.hostname}${quoteLink}</a></p>
|
||||
#end if
|
||||
#if tweet.photos.len > 0:
|
||||
# for photo in tweet.photos:
|
||||
<img src="${urlPrefix}${getPicUrl(photo)}" style="max-width:250px;" />
|
||||
@@ -72,6 +68,20 @@ Twitter feed for: ${desc}. Generated by ${cfg.hostname}
|
||||
<img src="${urlPrefix}${getPicUrl(card.image)}" style="max-width:250px;" />
|
||||
# end if
|
||||
#end if
|
||||
#if tweet.quote.isSome and get(tweet.quote).available:
|
||||
# let quoteTweet = get(tweet.quote)
|
||||
# let quoteLink = urlPrefix & getLink(quoteTweet)
|
||||
<hr/>
|
||||
<blockquote>
|
||||
<b>${quoteTweet.user.fullname} (@${quoteTweet.user.username})</b>
|
||||
<p>
|
||||
${renderRssTweet(quoteTweet, cfg)}
|
||||
</p>
|
||||
<footer>
|
||||
— <cite><a href="${quoteLink}">${quoteLink}</a>
|
||||
</footer>
|
||||
</blockquote>
|
||||
#end if
|
||||
#end proc
|
||||
#
|
||||
#proc renderRssTweets(tweets: seq[Tweets]; cfg: Config; userId=""): string =
|
||||
|
||||
@@ -178,14 +178,12 @@ func formatStat(stat: int): string =
|
||||
if stat > 0: insertSep($stat, ',')
|
||||
else: ""
|
||||
|
||||
proc renderStats(stats: TweetStats; views: string): VNode =
|
||||
proc renderStats(stats: TweetStats): VNode =
|
||||
buildHtml(tdiv(class="tweet-stats")):
|
||||
span(class="tweet-stat"): icon "comment", formatStat(stats.replies)
|
||||
span(class="tweet-stat"): icon "retweet", formatStat(stats.retweets)
|
||||
span(class="tweet-stat"): icon "quote", formatStat(stats.quotes)
|
||||
span(class="tweet-stat"): icon "heart", formatStat(stats.likes)
|
||||
if views.len > 0:
|
||||
span(class="tweet-stat"): icon "play", insertSep(views, ',')
|
||||
span(class="tweet-stat"): icon "views", formatStat(stats.views)
|
||||
|
||||
proc renderReply(tweet: Tweet): VNode =
|
||||
buildHtml(tdiv(class="replying-to")):
|
||||
@@ -301,7 +299,6 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
|
||||
a(class="tweet-link", href=getLink(tweet))
|
||||
|
||||
tdiv(class="tweet-body"):
|
||||
var views = ""
|
||||
renderHeader(tweet, retweet, pinned, prefs)
|
||||
|
||||
if not afterTweet and index == 0 and tweet.reply.len > 0 and
|
||||
@@ -325,10 +322,8 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
|
||||
renderAlbum(tweet)
|
||||
elif tweet.video.isSome:
|
||||
renderVideo(tweet.video.get(), prefs, path)
|
||||
views = tweet.video.get().views
|
||||
elif tweet.gif.isSome:
|
||||
renderGif(tweet.gif.get(), prefs)
|
||||
views = "GIF"
|
||||
|
||||
if tweet.poll.isSome:
|
||||
renderPoll(tweet.poll.get())
|
||||
@@ -343,7 +338,7 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
|
||||
renderMediaTags(tweet.mediaTags)
|
||||
|
||||
if not prefs.hideTweetStats:
|
||||
renderStats(tweet.stats, views)
|
||||
renderStats(tweet.stats)
|
||||
|
||||
if showThread:
|
||||
a(class="show-thread", href=("/i/status/" & $tweet.threadId)):
|
||||
|
||||
@@ -1,23 +1,20 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Authenticates with X.com/Twitter and extracts session cookies for use with Nitter.
|
||||
Handles 2FA, extracts user info, and outputs clean JSON for sessions.jsonl.
|
||||
|
||||
Requirements:
|
||||
pip install -r tools/requirements.txt
|
||||
|
||||
Usage:
|
||||
python3 tools/get_web_session.py <username> <password> [totp_seed] [--append sessions.jsonl] [--headless]
|
||||
python3 tools/create_session_browser.py <username> <password> [totp_seed] [--append sessions.jsonl] [--headless]
|
||||
|
||||
Examples:
|
||||
# Output to terminal
|
||||
python3 tools/get_web_session.py myusername mypassword TOTP_BASE32_SECRET
|
||||
python3 tools/create_session_browser.py myusername mypassword TOTP_SECRET
|
||||
|
||||
# Append to sessions.jsonl
|
||||
python3 tools/get_web_session.py myusername mypassword TOTP_SECRET --append sessions.jsonl
|
||||
python3 tools/create_session_browser.py myusername mypassword TOTP_SECRET --append sessions.jsonl
|
||||
|
||||
# Headless mode (may increase detection risk)
|
||||
python3 tools/get_web_session.py myusername mypassword TOTP_SECRET --headless
|
||||
python3 tools/create_session_browser.py myusername mypassword TOTP_SECRET --headless
|
||||
|
||||
Output:
|
||||
{"kind": "cookie", "username": "...", "id": "...", "auth_token": "...", "ct0": "..."}
|
||||
328
tools/create_session_curl.py
Normal file
328
tools/create_session_curl.py
Normal file
@@ -0,0 +1,328 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Requirements:
|
||||
pip install curl_cffi pyotp
|
||||
|
||||
Usage:
|
||||
python3 tools/create_session_curl.py <username> <password> [totp_seed] [--append sessions.jsonl]
|
||||
|
||||
Examples:
|
||||
# Output to terminal
|
||||
python3 tools/create_session_curl.py myusername mypassword TOTP_SECRET
|
||||
|
||||
# Append to sessions.jsonl
|
||||
python3 tools/create_session_curl.py myusername mypassword TOTP_SECRET --append sessions.jsonl
|
||||
|
||||
Output:
|
||||
{"kind": "cookie", "username": "...", "id": "...", "auth_token": "...", "ct0": "..."}
|
||||
"""
|
||||
|
||||
import sys
|
||||
import json
|
||||
import pyotp
|
||||
from curl_cffi import requests
|
||||
|
||||
# NOTE(review): appears to be the fixed bearer token shipped with the official
# web client (not a per-user credential) — confirm it is still current.
BEARER_TOKEN = "AAAAAAAAAAAAAAAAAAAAAFQODgEAAAAAVHTp76lzh3rFzcHbmHVvQxYYpTw%3DckAlMINMjmCwxUcaXbAN4XqJVdgMJaHqNOFgPMK0zN1qLqLQCF"
# Onboarding "task" endpoint that drives every step of the login flow.
BASE_URL = "https://api.x.com/1.1/onboarding/task.json"
# Endpoint that mints a guest token for unauthenticated requests.
GUEST_ACTIVATE_URL = "https://api.x.com/1.1/guest/activate.json"

# Subtask versions required by API
# (sent verbatim as "subtask_versions" in the flow-initialization payload)
SUBTASK_VERSIONS = {
    "action_list": 2, "alert_dialog": 1, "app_download_cta": 1,
    "check_logged_in_account": 2, "choice_selection": 3,
    "contacts_live_sync_permission_prompt": 0, "cta": 7, "email_verification": 2,
    "end_flow": 1, "enter_date": 1, "enter_email": 2, "enter_password": 5,
    "enter_phone": 2, "enter_recaptcha": 1, "enter_text": 5, "generic_urt": 3,
    "in_app_notification": 1, "interest_picker": 3, "js_instrumentation": 1,
    "menu_dialog": 1, "notifications_permission_prompt": 2, "open_account": 2,
    "open_home_timeline": 1, "open_link": 1, "phone_verification": 4,
    "privacy_options": 1, "security_key": 3, "select_avatar": 4,
    "select_banner": 2, "settings_list": 7, "show_code": 1, "sign_up": 2,
    "sign_up_review": 4, "tweet_selection_urt": 1, "update_users": 1,
    "upload_media": 1, "user_recommendations_list": 4,
    "user_recommendations_urt": 1, "wait_spinner": 3, "web_modal": 1
}
|
||||
|
||||
|
||||
def get_base_headers(guest_token=None):
    """Return the common HTTP headers for X.com API requests.

    When *guest_token* is truthy, an ``X-Guest-Token`` header is added
    for unauthenticated (pre-login) calls.
    """
    base = {
        "Authorization": f"Bearer {BEARER_TOKEN}",
        "Content-Type": "application/json",
        "Accept": "*/*",
        "Accept-Language": "en-US",
        "X-Twitter-Client-Language": "en-US",
        "Origin": "https://x.com",
        "Referer": "https://x.com/",
    }
    if not guest_token:
        return base
    return {**base, "X-Guest-Token": guest_token}
|
||||
|
||||
|
||||
def get_cookies_dict(session):
    """Return *session*'s cookies as a plain ``dict``.

    curl_cffi cookie jars expose ``get_dict()``; any other mapping-like
    jar is converted with ``dict()``.
    """
    jar = session.cookies
    if hasattr(jar, 'get_dict'):
        return jar.get_dict()
    return dict(jar)
|
||||
|
||||
|
||||
def make_request(session, headers, flow_token, subtask_data, print_msg):
    """POST one step of the onboarding flow and return ``(flow_token, data)``.

    *subtask_data* may be a single subtask dict or a list of them.
    Raises on HTTP errors, or when the response carries no new flow token.
    """
    print(f"[*] {print_msg}...", file=sys.stderr)

    # The API always expects a list of subtask inputs.
    if isinstance(subtask_data, dict):
        subtasks = [subtask_data]
    else:
        subtasks = subtask_data
    payload = {"flow_token": flow_token, "subtask_inputs": subtasks}

    resp = session.post(BASE_URL, json=payload, headers=headers)
    resp.raise_for_status()
    body = resp.json()

    next_token = body.get('flow_token')
    if not next_token:
        raise Exception(f"Failed to get flow token: {print_msg}")

    return next_token, body
|
||||
|
||||
|
||||
def get_guest_token(session):
    """Activate and return a guest token for unauthenticated requests.

    Raises on HTTP errors or when the response contains no token.
    """
    print("[*] Getting guest token...", file=sys.stderr)

    auth_only = {"Authorization": f"Bearer {BEARER_TOKEN}"}
    resp = session.post(GUEST_ACTIVATE_URL, headers=auth_only)
    resp.raise_for_status()

    token = resp.json().get('guest_token')
    if not token:
        raise Exception("Failed to obtain guest token")

    print(f"[*] Got guest token: {token}", file=sys.stderr)
    return token
|
||||
|
||||
|
||||
def init_flow(session, guest_token):
    """Start the login flow and return ``(flow_token, headers)``.

    The returned headers (base headers plus guest token) are reused by
    the subsequent flow steps.
    """
    print("[*] Initializing login flow...", file=sys.stderr)

    hdrs = get_base_headers(guest_token)
    flow_context = {
        "debug_overrides": {},
        "start_location": {"location": "manual_link"}
    }
    payload = {
        "input_flow_data": {
            "flow_context": flow_context,
            "subtask_versions": SUBTASK_VERSIONS
        }
    }

    resp = session.post(f"{BASE_URL}?flow_name=login", json=payload, headers=hdrs)
    resp.raise_for_status()

    token = resp.json().get('flow_token')
    if not token:
        raise Exception("Failed to get initial flow token")

    print("[*] Got initial flow token", file=sys.stderr)
    return token, hdrs
|
||||
|
||||
|
||||
def submit_username(session, flow_token, headers, guest_token, username):
    """Send the username step (LoginEnterUserIdentifierSSO) of the flow.

    Raises when the server replies with a ``cta`` subtask carrying an
    error message (the login attempt was denied, e.g. as suspicious).
    """
    hdrs = {**headers, "X-Guest-Token": guest_token}

    step = {
        "subtask_id": "LoginEnterUserIdentifierSSO",
        "settings_list": {
            "setting_responses": [{
                "key": "user_identifier",
                "response_data": {"text_data": {"result": username}}
            }],
            "link": "next_link"
        }
    }

    new_token, data = make_request(session, hdrs, flow_token, step, "Submitting username")

    # A "cta" subtask in the reply signals a denial rather than progress.
    subtasks = data.get('subtasks')
    if subtasks and 'cta' in subtasks[0]:
        denial = subtasks[0]['cta'].get('primary_text', {}).get('text')
        if denial:
            raise Exception(f"Login denied: {denial}")

    return new_token
|
||||
|
||||
|
||||
def submit_password(session, flow_token, headers, guest_token, password):
    """Send the password step and report whether 2FA is required.

    Returns ``(flow_token, needs_2fa)``; *needs_2fa* is True when the
    server responds with a LoginTwoFactorAuthChallenge subtask.
    """
    hdrs = {**headers, "X-Guest-Token": guest_token}

    step = {
        "subtask_id": "LoginEnterPassword",
        "enter_password": {"password": password, "link": "next_link"}
    }

    new_token, data = make_request(session, hdrs, flow_token, step, "Submitting password")

    needs_2fa = False
    for sub in data.get('subtasks', []):
        if sub.get('subtask_id') == 'LoginTwoFactorAuthChallenge':
            needs_2fa = True
            break
    if needs_2fa:
        print("[*] 2FA required", file=sys.stderr)

    return new_token, needs_2fa
|
||||
|
||||
|
||||
def submit_2fa(session, flow_token, headers, guest_token, totp_seed):
    """Generate a TOTP code from *totp_seed* and submit the 2FA step.

    Raises when *totp_seed* is missing, since the challenge cannot be
    answered without it.
    """
    if not totp_seed:
        raise Exception("2FA required but no TOTP seed provided")

    otp = pyotp.TOTP(totp_seed).now()
    print("[*] Generating 2FA code...", file=sys.stderr)

    hdrs = {**headers, "X-Guest-Token": guest_token}
    step = {
        "subtask_id": "LoginTwoFactorAuthChallenge",
        "enter_text": {"text": otp, "link": "next_link"}
    }

    new_token, _ = make_request(session, hdrs, flow_token, step, "Submitting 2FA code")
    return new_token
|
||||
|
||||
|
||||
def submit_js_instrumentation(session, flow_token, headers, guest_token):
    """Submit the JS-instrumentation step with a canned response blob.

    The ``response`` value is a hardcoded browser-fingerprint payload.
    NOTE(review): captured from a real browser session — confirm the
    endpoint keeps accepting this fixed value.
    """
    fingerprint = '{"rf":{"a4fc506d24bb4843c48a1966940c2796bf4fb7617a2d515ad3297b7df6b459b6":121,"bff66e16f1d7ea28c04653dc32479cf416a9c8b67c80cb8ad533b2a44fee82a3":-1,"ac4008077a7e6ca03210159dbe2134dea72a616f03832178314bb9931645e4f7":-22,"c3a8a81a9b2706c6fec42c771da65a9597c537b8e4d9b39e8e58de9fe31ff239":-12},"s":"ZHYaDA9iXRxOl2J3AZ9cc23iJx-Fg5E82KIBA_fgeZFugZGYzRtf8Bl3EUeeYgsK30gLFD2jTQx9fAMsnYCw0j8ahEy4Pb5siM5zD6n7YgOeWmFFaXoTwaGY4H0o-jQnZi5yWZRAnFi4lVuCVouNz_xd2BO2sobCO7QuyOsOxQn2CWx7bjD8vPAzT5BS1mICqUWyjZDjLnRZJU6cSQG5YFIHEPBa8Kj-v1JFgkdAfAMIdVvP7C80HWoOqYivQR7IBuOAI4xCeLQEdxlGeT-JYStlP9dcU5St7jI6ExyMeQnRicOcxXLXsan8i5Joautk2M8dAJFByzBaG4wtrPhQ3QAAAZEi-_t7"}'

    hdrs = {**headers, "X-Guest-Token": guest_token}
    step = {
        "subtask_id": "LoginJsInstrumentationSubtask",
        "js_instrumentation": {
            "response": fingerprint,
            "link": "next_link"
        }
    }

    new_token, _ = make_request(session, hdrs, flow_token, step, "Submitting JS instrumentation")
    return new_token
|
||||
|
||||
|
||||
def complete_flow(session, flow_token, headers):
    """Finish the flow by answering the AccountDuplicationCheck subtask.

    From this point the request is authenticated, so the auth-type
    header (plus the csrf token, when the ``ct0`` cookie is already set)
    is attached instead of the guest token.
    """
    jar = get_cookies_dict(session)

    hdrs = headers.copy()
    hdrs["X-Twitter-Auth-Type"] = "OAuth2Session"
    csrf = jar.get('ct0')
    if csrf:
        hdrs["X-Csrf-Token"] = csrf

    step = {
        "subtask_id": "AccountDuplicationCheck",
        "check_logged_in_account": {"link": "AccountDuplicationCheck_false"}
    }

    make_request(session, hdrs, flow_token, step, "Completing login flow")
|
||||
|
||||
|
||||
def extract_user_id(cookies_dict):
    """Return the user ID encoded in the ``twid`` cookie, or ``None``.

    The cookie value looks like ``"u=12345"`` (sometimes URL-encoded as
    ``u%3D12345``); the ID is the piece after the prefix, truncated at
    any ``&`` and stripped of surrounding quotes.
    """
    twid = cookies_dict.get('twid', '').strip('"')

    for marker in ('u=', 'u%3D'):
        if marker not in twid:
            continue
        return twid.split(marker)[1].split('&')[0].strip('"')

    return None
|
||||
|
||||
|
||||
def login_and_get_cookies(username, password, totp_seed=None):
    """Run the full X.com login flow and return the session cookies.

    Returns a cookie dict augmented with ``username`` and — when it can
    be derived from the ``twid`` cookie — the account ``id``.  The HTTP
    session is always closed, even on failure.
    """
    http = requests.Session(impersonate="chrome")

    try:
        guest = get_guest_token(http)
        token, hdrs = init_flow(http, guest)

        # The flow token is threaded through every step in order.
        token = submit_js_instrumentation(http, token, hdrs, guest)
        token = submit_username(http, token, hdrs, guest, username)
        token, needs_2fa = submit_password(http, token, hdrs, guest, password)
        if needs_2fa:
            token = submit_2fa(http, token, hdrs, guest, totp_seed)
        complete_flow(http, token, hdrs)

        result = get_cookies_dict(http)
        result['username'] = username
        uid = extract_user_id(result)
        if uid:
            result['id'] = uid

        print("[*] Successfully authenticated", file=sys.stderr)
        return result

    finally:
        http.close()
|
||||
|
||||
|
||||
def main():
    """CLI entry point: log in and emit a sessions.jsonl-style JSON line.

    Usage: username and password are required positionals; an optional
    TOTP seed may follow, and ``--append FILE`` appends the JSON line to
    FILE instead of printing it to stdout.  Exits 0 on success, 1 on any
    error.
    """
    if len(sys.argv) < 3:
        print('Usage: python3 create_session_curl.py username password [totp_seed] [--append sessions.jsonl]', file=sys.stderr)
        sys.exit(1)

    username, password = sys.argv[1], sys.argv[2]
    totp_seed = None
    append_file = None

    # Walk the remaining arguments; --append consumes the following token.
    extra = sys.argv[3:]
    pos = 0
    while pos < len(extra):
        arg = extra[pos]
        if arg == '--append':
            if pos + 1 >= len(extra):
                print('[!] Error: --append requires a filename', file=sys.stderr)
                sys.exit(1)
            append_file = extra[pos + 1]
            pos += 2
        elif arg.startswith('--'):
            print(f'[!] Warning: Unknown argument: {arg}', file=sys.stderr)
            pos += 1
        else:
            # First bare argument is the TOTP seed; later ones are ignored.
            if totp_seed is None:
                totp_seed = arg
            pos += 1

    try:
        cookies = login_and_get_cookies(username, password, totp_seed)

        session = {
            'kind': 'cookie',
            'username': cookies['username'],
            'id': cookies.get('id'),
            'auth_token': cookies['auth_token'],
            'ct0': cookies['ct0']
        }
        output = json.dumps(session)

        if append_file:
            with open(append_file, 'a') as f:
                f.write(output + '\n')
            print(f'✓ Session appended to {append_file}', file=sys.stderr)
        else:
            print(output)

        sys.exit(0)

    except Exception as error:
        print(f'[!] Error: {error}', file=sys.stderr)
        import traceback
        traceback.print_exc(file=sys.stderr)
        sys.exit(1)
|
||||
|
||||
|
||||
# Script entry point: run only when executed directly, not on import.
if __name__ == '__main__':
    main()
|
||||
@@ -1,2 +1,3 @@
|
||||
nodriver>=0.48.0
|
||||
pyotp
|
||||
curl_cffi
|
||||
|
||||
Reference in New Issue
Block a user